Install the AWS SDK dependency first: `pip install boto3`
CRUD helpers for S3. Supply the access key and secret key via environment variables — leaking them is a serious security risk. `region` is the name of the AWS region you are using (e.g. Ohio: `us-east-2`).
def create_img_s3(path, img):
    """Resize an image, encode it as PNG, and upload it to S3.

    Args:
        path: Object key under the bucket (e.g. "images/foo.png").
        img: A PIL.Image.Image instance; it is resized in place.

    Returns:
        The public HTTPS URL of the uploaded object.
    """
    # Relies on module-level accesskey / secretkey / region / bucket_name.
    s3 = boto3.client('s3', aws_access_key_id=accesskey,
                      aws_secret_access_key=secretkey, region_name=region)
    # Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the same filter.
    # thumbnail() shrinks in place, preserving aspect ratio within 900x1200.
    img.thumbnail((900, 1200), Image.LANCZOS)
    out = BytesIO()
    img.save(out, "PNG")
    s3.put_object(Bucket=bucket_name, Key=path, Body=out.getvalue())
    # NOTE(review): "s3-<region>" is the legacy endpoint form; newer regions
    # use "<bucket>.s3.<region>.amazonaws.com" — confirm against your region.
    url = "https://" + bucket_name + ".s3-" + region + ".amazonaws.com/" + path
    return url
def create_csv_s3(path, dataframe):
    """Serialize a DataFrame to CSV (UTF-8 with BOM) and upload it to S3.

    Args:
        path: Object key under the bucket (e.g. "data/report.csv").
        dataframe: The pandas DataFrame to export.

    Returns:
        The public HTTPS URL of the uploaded object.
    """
    out2 = StringIO()
    # No encoding= here: to_csv ignores it when writing to a text buffer;
    # the actual byte encoding happens in .encode() below.
    dataframe.to_csv(out2)
    # Relies on module-level accesskey / secretkey / region / bucket_name.
    s3 = boto3.client('s3', aws_access_key_id=accesskey,
                      aws_secret_access_key=secretkey, region_name=region)
    # utf-8-sig prepends a BOM so Excel opens the CSV with correct characters.
    s3.put_object(Bucket=bucket_name, Key=path,
                  Body=out2.getvalue().encode("utf-8-sig"))
    url = "https://" + bucket_name + ".s3-" + region + ".amazonaws.com/" + path
    return url
def readImg_s3(imgpath):
    """Download an object from S3 and return its raw bytes.

    Args:
        imgpath: Object key under the bucket.

    Returns:
        The object's content as bytes (previously returned None).
    """
    # Relies on module-level accesskey / secretkey / region / bucket_name.
    s3 = boto3.client('s3', aws_access_key_id=accesskey,
                      aws_secret_access_key=secretkey, region_name=region)
    # Bug fix: the client API has no .Object() (that is the boto3 *resource*
    # API); get_object is the correct client call.
    obj = s3.get_object(Bucket=bucket_name, Key=imgpath)
    print(obj)
    print(obj["Body"])
    return obj["Body"].read()
def readDir_s3(dirpath):
    """List the objects directly under a folder-like S3 prefix as public URLs.

    Args:
        dirpath: Prefix ending with '/' (the folder itself is excluded).

    Returns:
        A list of HTTPS URLs, one per object under the prefix. Empty if the
        prefix matches nothing.
    """
    # Relies on module-level accesskey / secretkey / region / bucket_name.
    s3 = boto3.client('s3', aws_access_key_id=accesskey,
                      aws_secret_access_key=secretkey, region_name=region)
    res = s3.list_objects_v2(Bucket=bucket_name, Prefix=dirpath, Delimiter='/')
    # list_objects_v2 omits "Contents" entirely when nothing matches, so
    # indexing it directly would raise KeyError; .get() handles that case.
    urls = []
    for data in res.get("Contents", []):
        key = data["Key"]
        if key != dirpath:  # skip the folder placeholder object itself
            urls.append("https://" + bucket_name + ".s3-" + region
                        + ".amazonaws.com/" + key)
    return urls
Recommended Posts