import boto3
from botocore.exceptions import ClientError
import os

# Arguments
folder_to_sync = './test/data'
bucket_target = 'my-bucket'

# Create connection
s3 = boto3.client('s3', use_ssl=False, endpoint_url="http://172.17.0.2:9000",
                  aws_access_key_id="minio", aws_secret_access_key="miniokey")

# Check that the bucket exists, create it otherwise
try:
    s3.head_bucket(Bucket=bucket_target)
except ClientError:
    print('Create bucket', bucket_target)
    s3.create_bucket(Bucket=bucket_target)

# Get file lists
# Source: https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
local_file_list = os.listdir(folder_to_sync)
try:
    bucket_file_list = [obj['Key'] for obj in s3.list_objects(Bucket=bucket_target)['Contents']]
except KeyError:
    bucket_file_list = []  # No 'Contents' key when the bucket is empty

# Upload new files
print("Uploading new files ...")
for file_name in local_file_list:
    print(file_name, end=' ')
    file_path = folder_to_sync + '/' + file_name
    if file_name in bucket_file_list:
        # TODO: compare modification dates (see the sketch after this script)
        print('(Not updated)')
    elif os.path.isfile(file_path):
        s3.upload_file(file_path, bucket_target, file_name)
        print('(New file)')
    else:
        print('(Skipped)')  # Subdirectories are not synced
print("Done")

# Delete files that no longer exist locally
print("Deleting removed files ...")
for file_name in bucket_file_list:
    print(file_name, end=' ')
    if file_name in local_file_list:
        print('(Still present)')
    else:
        s3.delete_object(Bucket=bucket_target, Key=file_name)
        print('(Deleted)')
print("Done")