```python
import os
import boto3
from botocore.exceptions import ClientError

# Arguments
folder_to_sync = './test/data'
bucket_target = 'my-bucket'

# Create connection
s3 = boto3.client('s3', use_ssl=False, endpoint_url="http://172.17.0.2:9000",
                  aws_access_key_id="minio", aws_secret_access_key="miniokey")

# Check bucket, create it if it does not exist yet
try:
    s3.head_bucket(Bucket=bucket_target)
except ClientError:
    s3.create_bucket(Bucket=bucket_target)

# Get file lists (local and remote)
# Source : https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
local_file_list = os.listdir(folder_to_sync)
# 'Contents' is missing from the response when the bucket is empty
bucket_file_list = [obj['Key'] for obj in s3.list_objects(Bucket=bucket_target).get('Contents', [])]

# Upload new files
print("Uploading new files ...")
for file_name in local_file_list:
    print(file_name, end=' ')
    file_path = folder_to_sync + '/' + file_name
    if file_name in bucket_file_list:
        # Todo : compare modification dates
        print('(Not updated)')
    elif os.path.isfile(file_path):
        s3.upload_file(file_path, bucket_target, file_name)
        print('(New file)')
    else:
        print('(Skipped)')
print("Done")

# Delete files that were removed locally
print("Deleting removed files ...")
for file_name in bucket_file_list:
    print(file_name, end=' ')
    if file_name in local_file_list:
        print('(Still present)')
    else:
        s3.delete_object(Bucket=bucket_target, Key=file_name)
        print('(Deleted)')
print("Done")
```
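The `# Todo : compare modification dates` left in the upload loop can be filled in by comparing the object's `LastModified` timestamp with the local file's mtime. Below is a minimal sketch under that assumption; the helper name `is_remote_outdated` is invented for illustration and would be called from the `(Not updated)` branch with the same `s3` client, bucket and paths as above.

```python
import os
from datetime import datetime, timezone

def is_remote_outdated(s3_client, bucket, key, local_path):
    """Return True when the local file is newer than the object stored in the bucket."""
    # head_object returns a timezone-aware UTC datetime under 'LastModified'
    remote_mtime = s3_client.head_object(Bucket=bucket, Key=key)['LastModified']
    # os.path.getmtime gives epoch seconds; convert to an aware datetime for comparison
    local_mtime = datetime.fromtimestamp(os.path.getmtime(local_path), tz=timezone.utc)
    return local_mtime > remote_mtime
```

If the helper returns True, the loop could call `s3.upload_file(file_path, bucket_target, file_name)` again instead of printing `(Not updated)`; keep in mind that `LastModified` is reset to the upload time on every re-upload.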