Every 'boto3 upload_file' code snippet below is scanned line by line for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.
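For orientation before the snippets: the core call is upload_file(Filename, Bucket, Key) on an S3 client. A minimal sketch, where 'my-bucket' and both paths are placeholders:

import boto3

# Managed transfer: boto3 handles multipart splitting and retries internally.
s3 = boto3.client('s3')
s3.upload_file('report.csv', 'my-bucket', 'reports/report.csv')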
def upload(bucket, key, filename, session=None):
    """Upload file to S3 bucket."""
    s3_client = _get_client(session)
    LOGGER.info('Uploading %s to %s/%s', filename, bucket, key)
    s3_client.upload_file(filename, bucket, key)
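This snippet leans on a _get_client helper and a module-level LOGGER that are not shown. A minimal sketch of what such a helper might look like, assuming it falls back to the default credential chain when no session is passed:

import logging

import boto3

LOGGER = logging.getLogger(__name__)

def _get_client(session=None):
    # Hypothetical helper: prefer an explicit boto3 Session if provided,
    # otherwise build a client from the default credential chain.
    if session is not None:
        return session.client('s3')
    return boto3.client('s3')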
def upload_file(bucket, key, local_file, s3_client):
    """
    Uploads the given local file to the s3 key in the bucket
    """
    s3_client.upload_file(local_file, bucket, key)
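Since the client is injected rather than constructed inside the function, the caller owns its configuration; note the argument order. A hypothetical call:

import boto3

# Hypothetical usage: the caller constructs and owns the client.
client = boto3.client('s3')
upload_file('my-bucket', 'data/input.csv', '/tmp/input.csv', client)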
def s3_upload(self, _file):
    # Upload the file
    sml = self.boto.new_key(_file.filepath)
    sml.set_contents_from_string(_file.source_file.read())
import boto3

def upload(source_file, bucket_name, object_key):
    s3 = boto3.resource('s3')

    # Upload the source file to the specified S3 bucket using a
    # managed uploader. The uploader automatically splits large
    # files and uploads parts in parallel for faster uploads.
    try:
        s3.Bucket(bucket_name).upload_file(source_file, object_key)
    except Exception as e:
        print(e)
def upload_to_amazon(bucket_name, file_path):

    # Use environment variables to authenticate to S3
    c = boto.connect_s3()
    b = c.get_bucket(bucket_name)

    file_name = os.path.basename(file_path)

    source_path = file_path
    source_size = os.stat(source_path).st_size

    # Create a multipart upload request
    mp = b.initiate_multipart_upload(file_name)

    # Use a chunk size of 50 MiB (feel free to change this)
    chunk_size = 52428800
    chunk_count = int(math.ceil(source_size / float(chunk_size)))

    # Send the file parts, using FileChunkIO to create a file-like object
    # that points to a certain byte range within the original file. We
    # set bytes to never exceed the original file size.
    for i in range(chunk_count):
        print('Uploading chunk %s of %s.' % (i + 1, chunk_count))
        offset = chunk_size * i
        bytes = min(chunk_size, source_size - offset)
        with FileChunkIO(source_path, 'r', offset=offset, bytes=bytes) as fp:
            mp.upload_part_from_file(fp, part_num=i + 1)

    # Finish the upload
    mp.complete_upload()

    b.set_acl('public-read', file_name)

    url = get_s3_url(bucket_name, file_name)
    return url
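This manual chunking predates boto3; with boto3 the managed transfer performs the same multipart split automatically, and the thresholds are tunable. A minimal sketch with placeholder bucket and file names, matching the snippet's 50 MiB parts:

import boto3
from boto3.s3.transfer import TransferConfig

# Parts above the threshold are split and uploaded in parallel.
config = TransferConfig(multipart_threshold=50 * 1024 * 1024,
                        multipart_chunksize=50 * 1024 * 1024)
s3 = boto3.client('s3')
s3.upload_file('large.bin', 'my-bucket', 'large.bin', Config=config)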
def upload_file(src_path, dst_url):
    """Upload a local file to S3.

    If the file already exists it is overwritten.

    :param src_path: Source local filesystem path
    :param dst_url: Destination S3 URL
    """
    parsed_url = urlparse(dst_url)
    dst_bucket = parsed_url.netloc
    dst_key = parsed_url.path[1:]

    client = boto3.client('s3')
    client.upload_file(src_path, dst_bucket, dst_key)
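A hypothetical call, assuming the usual s3://bucket/key URL shape: the bucket comes from the URL's netloc and the key from its path with the leading slash stripped.

# Hypothetical usage; 'my-bucket' and both paths are placeholders.
upload_file('/tmp/report.csv', 's3://my-bucket/reports/report.csv')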
@LPic.mute_log
def upload(self, file, prefix=''):
    ret = self.client.put_object_from_local_file(
        Bucket=self.cloud['Bucket'],
        LocalFilePath=file,
        Key=prefix + os.path.basename(file)
    )
    return bool(ret.get('ETag'))
def _upload(bucket_name, key_name, data):
    # Cache the payload so a later download on this instance is a no-op
    download_as_string.key(bucket_name, key_name).set(data)
    # Upload
    bucket = _get_bucket(bucket_name)
    key = bucket.new_key(key_name)
    key.set_contents_from_string(data)
def upload():
    try:
        cos = ibm_boto3.resource('s3',
                                 ibm_api_key_id='apikey',
                                 ibm_service_instance_id='resource_instance_id',
                                 ibm_auth_endpoint='https://iam.bluemix.net/oidc/token',
                                 config=Config(signature_version='oauth'),
                                 endpoint_url='https://s3-api.us-geo.objectstorage.softlayer.net')

        zipFileName = 'cozmo-photos'
        shutil.make_archive(zipFileName, 'zip', '../1-take-pictures/pictures')
        print("Done: Zipping Pictures")

        container = 'tensorflow'
        cos.create_bucket(Bucket=container)

        # upload_file takes a filesystem path, not an open file handle
        cos.Object(container, zipFileName + '.zip').upload_file(zipFileName + '.zip')
        print("Done: Uploading Pictures")

    except Exception as e:
        print("Error: Uploading Pictures")
        print(e)
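If you do have an open file handle rather than a path, the file-object variant is upload_fileobj; ibm_boto3 follows boto3's transfer API here.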
def upload(self, key, text):
    k = Key(self.bucket)
    k.key = key
    return k.set_contents_from_string(text)
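Several of these snippets use the legacy boto 2 Key API (new_key, set_contents_from_string). In boto3 the string-upload equivalent is put_object with a Body; a minimal sketch with placeholder bucket and key:

import boto3

# put_object accepts bytes or a file-like object as Body.
s3 = boto3.client('s3')
s3.put_object(Bucket='my-bucket', Key='notes/hello.txt', Body=b'hello world')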