AWS SDK for Python (Boto3)


SDK for Python for S3 API

The following examples show how to use NAVER Cloud Platform's Object Storage service with the SDK for Python provided for the AWS S3 API. They were written based on AWS Python SDK (boto3) version 1.6.19.

Install the SDK

Install the SDK as follows.

pip install boto3==1.6.19

Examples

The examples are as follows.

Note

Enter your registered API authentication key information in the access_key and secret_key values used in the examples.

Create a bucket

The following is an example of creating a bucket.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    s3.create_bucket(Bucket=bucket_name)

Get the bucket list

The following is an example of getting the bucket list.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    response = s3.list_buckets()
    
    for bucket in response.get('Buckets', []):
        print(bucket.get('Name'))

Delete a bucket

The following is an example of deleting a bucket.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    s3.delete_bucket(Bucket=bucket_name)

Upload a file

The following is an example of uploading a file.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    # create folder
    object_name = 'sample-folder/'

    s3.put_object(Bucket=bucket_name, Key=object_name)

    # upload file
    object_name = 'sample-object'
    local_file_path = '/tmp/test.txt'

    s3.upload_file(local_file_path, bucket_name, object_name)
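
In-memory data can also be uploaded without writing a local file first by passing the bytes to put_object. The following is a minimal sketch that reuses the client and bucket from the example above; the object key and payload are illustrative.

    # upload in-memory data directly, without a temporary file (illustrative sketch)
    s3.put_object(Bucket=bucket_name, Key='sample-inline-object', Body=b'hello object storage')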

Get the file list

The following is an example of getting the file list.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    # list all in the bucket
    max_keys = 300
    response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys)

    print('list all in the bucket')

    while True:
        print('IsTruncated=%r' % response.get('IsTruncated'))
        print('Marker=%s' % response.get('Marker'))
        print('NextMarker=%s' % response.get('NextMarker'))

        print('Object List')
        for content in response.get('Contents', []):
            print(' Name=%s, Size=%d, Owner=%s' %
                  (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))

        if response.get('IsTruncated'):
            # NextMarker is returned only when a Delimiter is used; otherwise
            # continue from the Key of the last object in the current page.
            next_marker = response.get('NextMarker') or response['Contents'][-1]['Key']
            response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys,
                                       Marker=next_marker)
        else:
            break

    # top level folders and files in the bucket
    delimiter = '/'
    max_keys = 300

    response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys)

    print('top level folders and files in the bucket')

    while True:
        print('IsTruncated=%r' % response.get('IsTruncated'))
        print('Marker=%s' % response.get('Marker'))
        print('NextMarker=%s' % response.get('NextMarker'))

        print('Folder List')
        for folder in response.get('CommonPrefixes', []):
            print(' Name=%s' % folder.get('Prefix'))

        print('File List')
        for content in response.get('Contents', []):
            print(' Name=%s, Size=%d, Owner=%s' %
                  (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))

        if response.get('IsTruncated'):
            response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys,
                                       Marker=response.get('NextMarker'))
        else:
            break
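
Boto3 also provides paginators that take care of the Marker handling shown above. The following is a minimal sketch that produces the same folder-and-file listing with the list_objects paginator, reusing the bucket, delimiter, and page size from the example.

    # equivalent listing using a paginator (illustrative sketch)
    paginator = s3.get_paginator('list_objects')
    for page in paginator.paginate(Bucket=bucket_name, Delimiter=delimiter,
                                   PaginationConfig={'PageSize': max_keys}):
        for folder in page.get('CommonPrefixes', []):
            print(' Folder=%s' % folder.get('Prefix'))
        for content in page.get('Contents', []):
            print(' Name=%s, Size=%d' % (content.get('Key'), content.get('Size')))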

Download a file

The following is an example of downloading a file.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'

    object_name = 'sample-object'
    local_file_path = '/tmp/test.txt'

    s3.download_file(bucket_name, object_name, local_file_path)

Delete a file

The following is an example of deleting a file.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'
    object_name = 'sample-object'

    s3.delete_object(Bucket=bucket_name, Key=object_name)
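
Multiple objects can also be removed with a single request using delete_objects, assuming the endpoint supports the S3 multi-object delete API. The following is a minimal sketch; the object keys are illustrative.

    # delete several objects in one request (illustrative sketch)
    s3.delete_objects(Bucket=bucket_name,
                      Delete={'Objects': [{'Key': 'sample-object'},
                                          {'Key': 'sample-folder/'}],
                              'Quiet': True})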

Set ACLs

The following is an example of setting ACLs.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'

    # set bucket ACL
    # add read permission to anonymous
    s3.put_bucket_acl(Bucket=bucket_name, ACL='public-read')

    response = s3.get_bucket_acl(Bucket=bucket_name)

    # set object ACL
    # add read permission to user by ID
    object_name = 'sample-object'
    owner_id = 'test-owner-id'
    target_id = 'test-user-id'

    s3.put_object_acl(Bucket=bucket_name, Key=object_name,
                      AccessControlPolicy={
                          'Grants': [
                              {
                                  'Grantee': {
                                      'ID': owner_id,
                                      'Type': 'CanonicalUser'
                                  },
                                  'Permission': 'FULL_CONTROL'
                              },
                              {
                                  'Grantee': {
                                      'ID': target_id,
                                      'Type': 'CanonicalUser'
                                  },
                                  'Permission': 'READ'
                              }
                          ],
                          'Owner': {
                              'ID': owner_id
                          }
                      })

    response = s3.get_object_acl(Bucket=bucket_name, Key=object_name)
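
The owner ID does not need to be hard-coded; it is returned in the ACL responses. The following is a minimal sketch that reads the bucket owner's ID with get_bucket_acl before building the AccessControlPolicy shown above.

    # look up the actual owner ID instead of using a placeholder (illustrative sketch)
    owner_id = s3.get_bucket_acl(Bucket=bucket_name)['Owner']['ID']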

Multipart upload

The following is an example of a multipart upload.

Caution

If a multipart upload is not completed, the remaining parts stay in the bucket; they are included in the bucket's capacity and billed accordingly. Delete incomplete multipart objects so that you are not charged unnecessarily; a cleanup sketch follows the example below.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY_ID'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'
    object_name = 'sample-large-object'
    
    local_file = '/tmp/sample.file'

    # initialize and get upload ID
    create_multipart_upload_response = s3.create_multipart_upload(Bucket=bucket_name, Key=object_name)
    upload_id = create_multipart_upload_response['UploadId']

    part_size = 10 * 1024 * 1024
    parts = []

    # upload parts
    with open(local_file, 'rb') as f:
        part_number = 1
        while True:
            data = f.read(part_size)
            if not len(data):
                break
            upload_part_response = s3.upload_part(Bucket=bucket_name, Key=object_name, PartNumber=part_number, UploadId=upload_id, Body=data)
            parts.append({
                'PartNumber': part_number,
                'ETag': upload_part_response['ETag']
            })
            part_number += 1

    multipart_upload = {'Parts': parts}

    # abort multipart upload
    # s3.abort_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id)

    # complete multipart upload
    s3.complete_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id, MultipartUpload=multipart_upload)
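
As described in the caution above, parts from multipart uploads that are never completed remain in the bucket and continue to be billed. The following is a minimal sketch that lists in-progress multipart uploads and aborts them, reusing the client and bucket from the example; run it only against uploads you intend to discard.

    # clean up incomplete multipart uploads (illustrative sketch)
    response = s3.list_multipart_uploads(Bucket=bucket_name)
    for upload in response.get('Uploads', []):
        s3.abort_multipart_upload(Bucket=bucket_name, Key=upload['Key'],
                                  UploadId=upload['UploadId'])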

Request encryption with a customer-provided key (SSE-C)

The following is an example of a request using encryption with a customer-provided key (SSE-C).

Note

When objects are encrypted with SSE-C, some requests cannot be used in the console.

import boto3
import secrets

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY_ID'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    # S3 client
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    # create encryption key
    sse_key = secrets.token_bytes(32)
    sse_conf = { "SSECustomerKey": sse_key, "SSECustomerAlgorithm": "AES256"}

    bucket_name = 'sample-bucket'
    object_name = 'sample-object'

    # upload object
    local_file_path = '/tmp/sample.txt'
    s3.upload_file(local_file_path, bucket_name, object_name, ExtraArgs=sse_conf)

    # download object
    download_file_path = '/tmp/sample-download.txt'
    s3.download_file(bucket_name, object_name, download_file_path, ExtraArgs=sse_conf)

Set CORS (Cross-Origin Resource Sharing)

The following is an example of setting CORS.

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.fin-ncloudstorage.com'
region_name = 'fin-standard'
access_key = 'ACCESS_KEY_ID'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, region_name=region_name,
                      aws_access_key_id=access_key, aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    # Define the CORS configuration rules
    cors_configuration = {
        'CORSRules': [{
            'AllowedHeaders': ['*'],
            'AllowedMethods': ['GET', 'PUT'],
            'AllowedOrigins': ['*'],
            'MaxAgeSeconds': 3000
        }]
    }

    # Set CORS configuration
    s3.put_bucket_cors(Bucket=bucket_name,
                    CORSConfiguration=cors_configuration)

    # Get CORS configuration
    response = s3.get_bucket_cors(Bucket=bucket_name)
    print(response['CORSRules'])
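
If the rules are no longer needed, the CORS configuration can be removed from the bucket. The following is a minimal sketch.

    # Remove the CORS configuration (illustrative sketch)
    s3.delete_bucket_cors(Bucket=bucket_name)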