10 examples of 'boto3 upload directory to s3' in Python

Every line of these 'boto3 upload directory to s3' code snippets has been scanned for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.

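Most of the snippets below upload a single file or object. For the directory case in the query itself, a minimal sketch (assuming AWS credentials are already configured; upload_directory is a hypothetical helper, not part of boto3) walks the tree and uploads each file under its relative path:

import os
import boto3

def upload_directory(local_dir, bucket, prefix=''):
    # Hypothetical helper: walk the local directory and upload every file,
    # using the path relative to local_dir as the S3 key (under an optional prefix).
    s3_client = boto3.client('s3')
    for root, _dirs, files in os.walk(local_dir):
        for name in files:
            local_path = os.path.join(root, name)
            relative_path = os.path.relpath(local_path, local_dir).replace(os.sep, '/')
            s3_key = '/'.join(part for part in (prefix, relative_path) if part)
            s3_client.upload_file(local_path, bucket, s3_key)
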
def upload(bucket, key, filename, session=None):
    """Upload file to S3 bucket."""
    s3_client = _get_client(session)
    LOGGER.info('Uploading %s to %s/%s', filename, bucket, key)
    s3_client.upload_file(filename, bucket, key)

def upload(source_file, bucket_name, object_key):
    s3 = boto3.resource('s3')

    # Uploads the source file to the specified s3 bucket by using a
    # managed uploader. The uploader automatically splits large
    # files and uploads parts in parallel for faster uploads.
    try:
        s3.Bucket(bucket_name).upload_file(source_file, object_key)
    except Exception as e:
        print(e)

def upload():
    try:
        cos = ibm_boto3.resource('s3',
                                 ibm_api_key_id='apikey',
                                 ibm_service_instance_id='resource_instance_id',
                                 ibm_auth_endpoint='https://iam.bluemix.net/oidc/token',
                                 config=Config(signature_version='oauth'),
                                 endpoint_url='https://s3-api.us-geo.objectstorage.softlayer.net')

        zipFileName = 'cozmo-photos'
        shutil.make_archive(zipFileName, 'zip', '../1-take-pictures/pictures')
        print("Done: Zipping Pictures")

        container = 'tensorflow'
        cos.create_bucket(Bucket=container)

        with open('./' + zipFileName + '.zip', 'rb') as local:
            cos.Object(container, zipFileName + '.zip').upload_file(zipFileName + '.zip')
        print("Done: Uploading Pictures")

    except Exception as e:
        print("Error: Uploading Pictures")
        print(e)

    return

def main():
    """
    Starting point of the program.
    """
    s3hook = create_s3_client()

    buckets_available(s3hook)

    # create_bucket(s3hook, 'bdd100k')
    # create_bucket(s3hook, 'cityscapes50cities')

    url = "http://dl.yf.io/bdd-data/v1/videos/samples-1k.zip"
    # url = "http://dl.yf.io/bdd-data/v1/videos/test.zip"
    # url = "http://dl.yf.io/bdd-data/v1/videos/train.zip"
    # url = "http://dl.yf.io/bdd-data/v1/videos/val.zip"

    upload_to_S3_bucket(s3hook, bucket_name='bdd100k', url=url, key='samples-1k')

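upload_to_S3_bucket and create_s3_client are helpers defined elsewhere in that project. Assuming create_s3_client returns an ordinary boto3 client, one possible sketch of the upload helper streams the remote archive straight into the bucket with upload_fileobj (requests is an extra dependency here):

import requests

def upload_to_S3_bucket(s3_client, bucket_name, url, key):
    # Hypothetical implementation: stream the remote file and hand the raw
    # response to boto3's managed uploader, so the archive is never held
    # fully in memory or written to disk first.
    with requests.get(url, stream=True) as response:
        response.raise_for_status()
        s3_client.upload_fileobj(response.raw, bucket_name, key)
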
def upload_to_amazon(bucket_name, file_path):

    # Use environment variables to authenticate to S3
    c = boto.connect_s3()
    b = c.get_bucket(bucket_name)

    file_name = os.path.basename(file_path)

    source_path = file_path
    source_size = os.stat(source_path).st_size

    # Create a multipart upload request
    mp = b.initiate_multipart_upload(file_name)

    # Use a chunk size of 50 MiB (feel free to change this)
    chunk_size = 52428800
    chunk_count = int(math.ceil(source_size / float(chunk_size)))

    # Send the file parts, using FileChunkIO to create a file-like object
    # that points to a certain byte range within the original file. We
    # set bytes to never exceed the original file size.
    for i in range(chunk_count):
        print('Uploading chunk %s of %s.' % (i + 1, chunk_count))
        offset = chunk_size * i
        bytes = min(chunk_size, source_size - offset)
        with FileChunkIO(source_path, 'r', offset=offset, bytes=bytes) as fp:
            mp.upload_part_from_file(fp, part_num=i + 1)

    # Finish the upload
    mp.complete_upload()

    b.set_acl('public-read', file_name)

    url = get_s3_url(bucket_name, file_name)
    return url

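This example uses the legacy boto package with FileChunkIO. In boto3 the managed transfer layer performs the multipart split and parallel part uploads itself; a sketch mirroring the 50 MiB chunk size above might look like:

import os
import boto3
from boto3.s3.transfer import TransferConfig

def upload_large_file(bucket_name, file_path):
    # Files larger than the threshold are uploaded as 50 MiB multipart chunks.
    config = TransferConfig(multipart_threshold=50 * 1024 * 1024,
                            multipart_chunksize=50 * 1024 * 1024)
    s3_client = boto3.client('s3')
    s3_client.upload_file(file_path, bucket_name, os.path.basename(file_path),
                          Config=config,
                          ExtraArgs={'ACL': 'public-read'})
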
def upload_file(src_path, dst_url):
    """Upload a local file to S3.

    If the file already exists it is overwritten.

    :param src_path: Source local filesystem path
    :param dst_url: Destination S3 URL
    """
    parsed_url = urlparse(dst_url)
    dst_bucket = parsed_url.netloc
    dst_key = parsed_url.path[1:]

    client = boto3.client('s3')
    client.upload_file(src_path, dst_bucket, dst_key)

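For example, with a hypothetical bucket and path, upload_file('./report.csv', 's3://my-bucket/reports/report.csv') resolves dst_bucket to 'my-bucket' and dst_key to 'reports/report.csv'.
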
def upload_files(self):
    logger = self.get_logger('upload_files')
    bucket = self.bucket
    static_path = self.static_path
    with self.branch.fetch(self.commit.ref) as path:
        for filename in self.files:
            key = Key(bucket)
            key.key = '{0}/{1}'.format(self.key_prefix, filename)
            fullname = os.path.join(static_path, filename)
            logger.debug('uploading %r -> %r...', key.key, fullname)
            key.set_contents_from_filename(
                filename=os.path.join(path, fullname),
                replace=True,
                policy='public-read',
                reduced_redundancy=True,
                headers={'Cache-Control': 'max-age=31556926,public'}
            )

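This snippet also relies on the legacy boto Key API. A rough boto3 equivalent (names here are illustrative) passes the ACL, storage class, and Cache-Control header through ExtraArgs:

import boto3

def upload_static_file(bucket_name, key, local_path):
    # Illustrative sketch: ExtraArgs carries the object ACL, storage class,
    # and Cache-Control header that the boto call above sets explicitly.
    s3_client = boto3.client('s3')
    s3_client.upload_file(
        local_path, bucket_name, key,
        ExtraArgs={
            'ACL': 'public-read',
            'StorageClass': 'REDUCED_REDUNDANCY',
            'CacheControl': 'max-age=31556926,public',
        })
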
def download_from_s3(bucket_name, key_name, local_out_dir='/tmp'):
    cfg = Config()
    # connect to the bucket
    conn = boto.connect_s3(cfg.get("aws", "access_key_id"),
                           cfg.get("aws", "secret_access_key"))

    ret_val = (False, None)

    try:
        print("# S3: Fetching Bucket: {0} / Key: {1}".format(bucket_name, key_name))
        bucket = conn.get_bucket(bucket_name)
        key = bucket.get_key(key_name)
        if key:
            local_file = os.path.join(local_out_dir, os.path.basename(key_name))
            print("# S3: Saving contents to Local File - {0}".format(local_file))
            key.get_contents_to_filename(local_file, response_headers={
                'response-content-type': 'video/avi'
            })
            ret_val = (True, os.path.abspath(local_file))
    except boto.exception.S3ResponseError as err:
        print(err)

    return ret_val

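This one downloads rather than uploads, but it is a common companion operation; a boto3 sketch of the same fetch, assuming credentials come from the default resolution chain rather than an explicit Config object:

import os
import boto3

def download_from_s3_boto3(bucket_name, key_name, local_out_dir='/tmp'):
    # Download the object to local_out_dir and return the absolute path.
    local_file = os.path.join(local_out_dir, os.path.basename(key_name))
    s3_client = boto3.client('s3')
    s3_client.download_file(bucket_name, key_name, local_file)
    return os.path.abspath(local_file)
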
def _upload(bucket_name, key_name, data):
    # Cache to avoid download to same instance
    download_as_string.key(bucket_name, key_name).set(data)
    # Upload
    bucket = _get_bucket(bucket_name)
    key = bucket.new_key(key_name)
    key.set_contents_from_string(data)

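set_contents_from_string is part of the legacy boto API; the usual boto3 counterpart is put_object, e.g. (illustrative sketch):

import boto3

def _upload_with_boto3(bucket_name, key_name, data):
    # Write a string or bytes payload directly to the key, no temp file needed.
    boto3.client('s3').put_object(Bucket=bucket_name, Key=key_name, Body=data)
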
def upload_file(bucket, key, local_file, s3_client):
    """
    Uploads a given file to the s3 key in the bucket
    """
    import boto3
    s3_client.upload_file(local_file, bucket, key)

    return
