When trying to upload a .json file to AWS S3 using the .generate_presigned_post method from the boto3 client, the request always returns a 400 response, even with the correct access key ID and secret access key.
Since the bucket will be initialized multiple times, I've created a class that essentially replicates the content of this simple tutorial explaining how to use the generate_presigned_post method in Python: https://www.youtube.com/watch?v=1D9ggTJ9Ejc. I've also tried changing the bucket access to public, but I still get the same response.
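For reference, the bare-bones flow I'm trying to reproduce looks roughly like this (a minimal sketch; the bucket and file names are just placeholders):

import boto3
import requests

s3 = boto3.client("s3")

# Ask S3 for a presigned POST: a URL plus the form fields the upload must carry.
presigned = s3.generate_presigned_post(
    Bucket="my-bucket",   # placeholder bucket name
    Key="test.json",      # placeholder object key
    ExpiresIn=60,
)

# POST the file to the returned URL, sending the returned fields as form data.
with open("test.json", "rb") as f:
    resp = requests.post(
        presigned["url"],
        data=presigned["fields"],
        files={"file": f},
    )
print(resp.status_code)  # S3 returns 204 on a successful presigned POST by default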
Here's a snippet of the code I'm using:
import os
import logging
from typing import Union

import boto3
import requests
from pydantic import BaseModel
...
class S3Bucket(BaseModel):
    access_key: str
    secret_key: str
    bucket_id: str
    file_name: str
    expires_in: int = 60
    response_data: Union[dict, None] = None

    def set_response_data(self, response_data: dict):
        self.response_data = response_data

    def get_access_data(self) -> None:
        # Request a presigned POST (URL + form fields) from S3.
        try:
            s3_client = boto3.client(
                "s3",
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
            )
            if not s3_client:
                raise ValueError("S3 client not found")
            aws_response = s3_client.generate_presigned_post(
                Bucket=self.bucket_id,
                Key=self.file_name,
                ExpiresIn=self.expires_in,
                Fields={"Content-Type": "application/json"},
            )
            if not aws_response:
                raise ValueError("AWS response not found")
            self.set_response_data(aws_response)
        except ValueError as e:
            logger.error(e)

    def upload_file(self) -> None:
        # POST the file to the presigned URL with the presigned fields.
        try:
            if not self.response_data:
                self.get_access_data()
            files = {"file": open(self.file_name, "rb")}
            logger.error(self.response_data)
            response = requests.post(
                self.response_data["url"],
                data=self.response_data["fields"],
                files=files,
            )
            if not response:
                raise ValueError("Response not found")
            if response.status_code == 200:
                logger.debug("Successfully uploaded to S3")
            else:
                logger.error("Failed to upload to S3")
        except ValueError as e:
            logger.error(e)
Then, the function which calls the class:
def migrate(db) -> None:
    db_data = get_json_data(db)
    if not db_data:
        logger.error("Failed to get data from db")
        return
    # Dump the data to a temporary local file, upload it, then clean up.
    with open("./data/db_data.json", "w+") as f:
        f.write(db_data)
        logger.debug("Successfully wrote to file")
    s3 = S3Bucket(
        access_key=os.environ["AWS_ACCESS_KEY"],
        secret_key=os.environ["AWS_SECRET_KEY"],
        bucket_id=os.environ["AWS_BUCKET_ID"],
        file_name="./data/db_data.json",
    )
    try:
        s3.upload_file()
    except Exception as e:
        logger.error(e)
    finally:
        os.remove("./data/db_data.json")
        logger.debug("Done")