Every line of the 'read file from s3 python' code snippets below is scanned for vulnerabilities by our powerful machine learning engine, which combs millions of open source libraries to help ensure your Python code is secure.
import boto3

def read_from_s3(bucket, key):
    """Return the raw bytes of an S3 object."""
    client = boto3.client('s3')
    obj = client.get_object(Bucket=bucket, Key=key)
    return obj['Body'].read()
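A quick usage sketch for the function above (the bucket and key names are placeholders): get_object returns raw bytes, so text content usually needs an explicit decode.

    # Hypothetical bucket/key, shown for illustration only
    data = read_from_s3("my-bucket", "path/to/config.json")
    text = data.decode("utf-8")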
from typing import Optional

import boto3

def s3_read(source: str, profile_name: Optional[str] = None) -> bytes:
    """
    Read a file from an S3 source.

    Parameters
    ----------
    source : str
        Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
    profile_name : str, optional
        AWS profile

    Returns
    -------
    content : bytes

    Raises
    ------
    botocore.exceptions.NoCredentialsError
        Botocore is not able to find your credentials. Either specify
        profile_name or add the environment variables AWS_ACCESS_KEY_ID,
        AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
        See https://boto3.readthedocs.io/en/latest/guide/configuration.html
    """
    session = boto3.session.Session(profile_name=profile_name)
    s3 = session.client("s3")
    bucket_name, key = _s3_path_split(source)
    s3_object = s3.get_object(Bucket=bucket_name, Key=key)
    body = s3_object["Body"]
    return body.read()
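The snippet relies on an `_s3_path_split` helper that is not shown; the original implementation is not included here, but a minimal sketch of such a helper (an assumption, for illustration only) could split an s3:// URI like this:

    def _s3_path_split(source: str):
        # Hypothetical helper: "s3://bucket-name/key/foo.bar" -> ("bucket-name", "key/foo.bar")
        path = source[len("s3://"):] if source.startswith("s3://") else source
        bucket, _, key = path.partition("/")
        return bucket, key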
import gzip

import boto3

def get_object_content_from_s3(bucket, key):
    """
    Get the file contents from an S3 object.

    Args:
        bucket (str): The S3 bucket the object is stored in.
        key (str): The S3 object key identifier.

    Returns:
        str: The string contents of the file object.

    """
    # `settings`, `logger`, and `_` are provided by the surrounding project
    region = settings.S3_DEFAULT_REGION
    s3_object = boto3.resource("s3", region_name=region).Object(bucket, key)
    s3_object = s3_object.get()

    object_bytes = s3_object["Body"].read()

    # Treat the object as gzipped if either the key suffix or the content type says so
    gzipped = (
        key.endswith(".gz")
        or s3_object.get("ContentType", None) == "application/x-gzip"
    )

    try:
        if gzipped:
            content = gzip.decompress(object_bytes).decode("utf-8")
        else:
            content = object_bytes.decode("utf-8")
    except UnicodeDecodeError as ex:
        logger.exception(
            _("Failed to decode content of %(key)s: %(error)s"),
            {"key": key, "error": ex},
        )
        raise

    return content
from boto.s3.key import Key

def delete_file(bucketname, filename, aws_access_key='', aws_secret_key=''):
    # `if_file_exist` and `get_bucket` are project helpers built on the legacy boto library
    if not if_file_exist(bucketname, filename, aws_access_key, aws_secret_key):
        return
    bucket = get_bucket(bucketname, aws_access_key, aws_secret_key)
    k = Key(bucket)
    k.key = filename
    bucket.delete_key(k)
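The snippet above uses the legacy boto library. With boto3 the same delete is a single delete_object call; a minimal sketch, assuming credentials are resolved from the environment or an AWS profile (function name is illustrative):

    import boto3

    def delete_file_boto3(bucket_name, key):
        # delete_object succeeds even if the key does not exist,
        # so no separate existence check is needed
        s3 = boto3.client("s3")
        s3.delete_object(Bucket=bucket_name, Key=key)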
def set_contents_from_file(self, file_obj):
    # Stream the file-like object to S3 via the wrapped boto3 client
    self.remote.s3.upload_fileobj(file_obj, self.remote.bucket_name, self.path)
def s3_upload(self, _file):
    # Upload the file's contents using the legacy boto key API
    sml = self.boto.new_key(_file.filepath)
    sml.set_contents_from_string(_file.source_file.read())
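The legacy set_contents_from_string call maps to put_object in boto3; a hedged sketch under the same idea (names are illustrative, not from the original project):

    import boto3

    def s3_upload_string(bucket_name, key, content):
        # put_object accepts bytes or a file-like object as Body
        s3 = boto3.client("s3")
        s3.put_object(Bucket=bucket_name, Key=key, Body=content.encode("utf-8"))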
def upload_file(bucket, key, local_file, s3_client):
    """
    Uploads a given file to the S3 key in the bucket.
    """
    s3_client.upload_file(local_file, bucket, key)
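Because upload_file expects a pre-built client, a typical call looks like this (bucket, key, and file path are placeholders):

    import boto3

    s3_client = boto3.client("s3")
    upload_file("my-bucket", "backups/data.csv", "/tmp/data.csv", s3_client)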
import os

import boto

def download_from_s3(bucket_name, key_name, local_out_dir='/tmp'):
    # `Config` is a project helper that reads AWS credentials from a config file
    cfg = Config()
    # connect to the bucket
    conn = boto.connect_s3(cfg.get("aws", "access_key_id"),
                           cfg.get("aws", "secret_access_key"))

    ret_val = (False, None)

    try:
        print("# S3: Fetching Bucket: {0} / Key: {1}".format(bucket_name, key_name))
        bucket = conn.get_bucket(bucket_name)
        key = bucket.get_key(key_name)
        if key:
            local_file = os.path.join(local_out_dir, os.path.basename(key_name))
            print("# S3: Saving contents to Local File - {0}".format(local_file))
            key.get_contents_to_filename(local_file, response_headers={
                'response-content-type': 'video/avi'
            })
            ret_val = (True, os.path.abspath(local_file))
    except boto.exception.S3ResponseError as err:
        print(err)

    return ret_val
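For comparison, the same download in boto3 is a single download_file call; a minimal sketch assuming default credential resolution (function name is illustrative):

    import os

    import boto3

    def download_from_s3_boto3(bucket_name, key_name, local_out_dir="/tmp"):
        # Save the object to local_out_dir under the key's basename
        local_file = os.path.join(local_out_dir, os.path.basename(key_name))
        boto3.client("s3").download_file(bucket_name, key_name, local_file)
        return os.path.abspath(local_file)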