10 examples of 'python read file from s3' in Python

Every line of the 'python read file from s3' code snippets below is scanned for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.

All examples are scanned by Snyk Code

import boto3

def read_from_s3(bucket, key):
    # Fetch the object and return its raw bytes
    client = boto3.client('s3')
    obj = client.get_object(Bucket=bucket, Key=key)
    return obj['Body'].read()

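As a rough usage sketch (the bucket and key names below are placeholders), the helper returns raw bytes, so text content still needs decoding:

# Hypothetical usage: bucket and key are placeholders
data = read_from_s3('my-bucket', 'path/to/file.txt')
text = data.decode('utf-8')
print(text)
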
from typing import Optional

import boto3

def s3_read(source: str, profile_name: Optional[str] = None) -> bytes:
    """
    Read a file from an S3 source.

    Parameters
    ----------
    source : str
        Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
    profile_name : str, optional
        AWS profile

    Returns
    -------
    content : bytes

    Raises
    ------
    botocore.exceptions.NoCredentialsError
        Botocore is not able to find your credentials. Either specify
        profile_name or add the environment variables AWS_ACCESS_KEY_ID,
        AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
        See https://boto3.readthedocs.io/en/latest/guide/configuration.html
    """
    session = boto3.session.Session(profile_name=profile_name)
    s3 = session.client("s3")
    bucket_name, key = _s3_path_split(source)
    s3_object = s3.get_object(Bucket=bucket_name, Key=key)
    body = s3_object["Body"]
    return body.read()

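The example above depends on a `_s3_path_split` helper that is not included in the snippet; a minimal sketch of what such a helper might look like, assuming it simply splits an `s3://bucket/key` path into bucket and key, is:

from typing import Tuple

def _s3_path_split(source: str) -> Tuple[str, str]:
    # Hypothetical helper, inferred from the call site above:
    # 's3://bucket-name/key/foo.bar' -> ('bucket-name', 'key/foo.bar')
    path = source[len("s3://"):] if source.startswith("s3://") else source
    bucket, _, key = path.partition("/")
    return bucket, key
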
import gzip
import logging

import boto3

# `settings` (providing S3_DEFAULT_REGION) and the `_` translation helper
# come from the surrounding project and are not shown here.
logger = logging.getLogger(__name__)

def get_object_content_from_s3(bucket, key):
    """
    Get the file contents from an S3 object.

    Args:
        bucket (str): The S3 bucket the object is stored in.
        key (str): The S3 object key identifier.

    Returns:
        str: The string contents of the file object.

    """
    region = settings.S3_DEFAULT_REGION
    s3_object = boto3.resource("s3", region_name=region).Object(bucket, key)
    s3_object = s3_object.get()

    object_bytes = s3_object["Body"].read()

    gzipped = (
        key.endswith(".gz")
        or s3_object.get("ContentType", None) == "application/x-gzip"
    )

    try:
        if gzipped:
            content = gzip.decompress(object_bytes).decode("utf-8")
        else:
            content = object_bytes.decode("utf-8")
    except UnicodeDecodeError as ex:
        logger.exception(
            _("Failed to decode content of %(key)s: %(error)s"),
            {"key": key, "error": ex},
        )
        raise

    return content

import os

import boto
import boto.exception

# `Config` is the project's configuration reader (not shown here).

def download_from_s3(bucket_name, key_name, local_out_dir='/tmp'):
    cfg = Config()
    # connect to the bucket
    conn = boto.connect_s3(cfg.get("aws", "access_key_id"),
                           cfg.get("aws", "secret_access_key"))

    ret_val = (False, None)

    try:
        print("# S3: Fetching Bucket: {0} / Key: {1}".format(bucket_name, key_name))
        bucket = conn.get_bucket(bucket_name)
        key = bucket.get_key(key_name)
        if key:
            local_file = os.path.join(local_out_dir, os.path.basename(key_name))
            print("# S3: Saving contents to Local File - {0}".format(local_file))
            key.get_contents_to_filename(local_file, response_headers={
                'response-content-type': 'video/avi'
            })
            ret_val = (True, os.path.abspath(local_file))
    except boto.exception.S3ResponseError as err:
        print(err)

    return ret_val

def upload_file(bucket, key, local_file, s3_client):
    """
    Uploads a given file to the S3 key in the bucket using the provided client.
    """
    s3_client.upload_file(local_file, bucket, key)

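One possible way to call this helper (the bucket name, key, and local path below are placeholders):

import boto3

# Hypothetical usage: bucket, key, and local path are placeholders
s3 = boto3.client('s3')
upload_file('my-bucket', 'remote/path/report.csv', '/tmp/report.csv', s3)
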
import math
import os

import boto
from filechunkio import FileChunkIO

# `get_s3_url` is a project helper that builds the object's public URL (not shown here).

def upload_to_amazon(bucket_name, file_path):
    # Use environment variables to authenticate with S3
    c = boto.connect_s3()
    b = c.get_bucket(bucket_name)

    file_name = os.path.basename(file_path)

    source_path = file_path
    source_size = os.stat(source_path).st_size

    # Create a multipart upload request
    mp = b.initiate_multipart_upload(file_name)

    # Use a chunk size of 50 MiB (feel free to change this)
    chunk_size = 52428800
    chunk_count = int(math.ceil(source_size / float(chunk_size)))

    # Send the file parts, using FileChunkIO to create a file-like object
    # that points to a certain byte range within the original file. We
    # set bytes to never exceed the original file size.
    for i in range(chunk_count):
        print('Uploading chunk %s of %s.' % (i + 1, chunk_count))
        offset = chunk_size * i
        bytes = min(chunk_size, source_size - offset)
        with FileChunkIO(source_path, 'r', offset=offset, bytes=bytes) as fp:
            mp.upload_part_from_file(fp, part_num=i + 1)

    # Finish the upload
    mp.complete_upload()

    b.set_acl('public-read', file_name)

    url = get_s3_url(bucket_name, file_name)
    return url

from os.path import join
from subprocess import call

# `s3_bucket` and `results_path` are module-level settings (not shown here).

def s3_download(run_name, file_name):
    s3_run_path = 's3://{}/results/{}'.format(s3_bucket, run_name)
    s3_file_path = join(s3_run_path, file_name)

    run_path = join(results_path, run_name)
    # Shell out to the AWS CLI to copy the file into the local run directory
    call(['aws', 's3', 'cp', s3_file_path, run_path + '/'])

def fetch_file(file, bucket):
    # Relies on a module-level `s3_client` and `save_to` destination path
    s3_client.download_file(bucket, file, save_to)

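This snippet assumes a module-level `s3_client` and a `save_to` destination path defined elsewhere; a minimal sketch of that setup, with placeholder bucket, key, and path names, might be:

import boto3

# Hypothetical module-level setup assumed by fetch_file above
s3_client = boto3.client('s3')
save_to = '/tmp/downloaded-file'

fetch_file('path/to/object.txt', 'my-bucket')
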
def s3_upload(self, _file):
    # Upload the file: create a new key at the file's path and write its contents
    sml = self.boto.new_key(_file.filepath)
    sml.set_contents_from_string(_file.source_file.read())

from boto.s3.key import Key

# `get_bucket` and `if_file_exist` are project helpers (not shown here).

def delete_file(bucketname, filename, aws_access_key='', aws_secret_key=''):
    if not if_file_exist(bucketname, filename, aws_access_key, aws_secret_key):
        return
    bucket = get_bucket(bucketname, aws_access_key, aws_secret_key)
    k = Key(bucket)
    k.key = filename
    bucket.delete_key(k)

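The `get_bucket` and `if_file_exist` helpers referenced here are not part of the snippet; a possible sketch using the classic boto API (names and behaviour are assumptions based on the call sites) could be:

import boto

def get_bucket(bucketname, aws_access_key='', aws_secret_key=''):
    # Hypothetical helper: open a connection and return the bucket;
    # empty credentials fall back to boto's environment/config lookup
    conn = boto.connect_s3(aws_access_key or None, aws_secret_key or None)
    return conn.get_bucket(bucketname)

def if_file_exist(bucketname, filename, aws_access_key='', aws_secret_key=''):
    # Hypothetical helper: True if the key exists in the bucket
    bucket = get_bucket(bucketname, aws_access_key, aws_secret_key)
    return bucket.get_key(filename) is not None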
