Add the AWS access key and secret key when configuring the S3 client.

This commit is contained in:
ashutosh.nehete 2025-06-09 09:42:08 +00:00
parent c77eccf8c6
commit 6d848568ab

View File

@@ -20,6 +20,8 @@ DB_NAME_STAGE = os.getenv('DB_NAME_STAGE')
 DB_NAME_GITA = os.getenv('DB_NAME_GITA')
 DB_NAME_MEDIAWIKI = os.getenv('DB_NAME_MEDIAWIKI')
 DB_NAME_REDMINE = os.getenv('DB_NAME_REDMINE')
+ACCESS_KEY = os.getenv('ACCESS_KEY')
+SECRET_KEY = os.getenv('SECRET_KEY')
 # --- AWS S3 Configuration ---
 S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
@@ -30,7 +32,7 @@ S3_REGION = os.getenv('S3_REGION')
 if sys.platform.startswith('win'):
     # Paths for Windows
     # You can also load these from .env if you prefer fine-grained control
-    BACKUP_DIR = os.getenv('WIN_BACKUP_DIR', "C:/gita/database/backup") # Default if not in .env
+    BACKUP_DIR = os.getenv('WIN_BACKUP_DIR', "C:/gita/database1/backup") # Default if not in .env
     MYSQLDUMP_PATH = os.getenv('WIN_MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
     LOG_FILE = os.getenv('WIN_LOG_FILE', r'C:\gita\backup_log.txt')
     GIT_EXECUTABLE = "git" # Assuming git is in PATH on Windows
@@ -138,7 +140,10 @@ def upload_to_s3(file_paths):
     try:
         logging.info(f"Attempting to connect to AWS S3 bucket: {S3_BUCKET_NAME} in region: {S3_REGION}")
-        s3_client = boto3.client('s3', region_name=S3_REGION)
+        s3_client = boto3.client('s3',
+                                 region_name=S3_REGION,
+                                 aws_access_key_id=ACCESS_KEY,
+                                 aws_secret_access_key=SECRET_KEY)
         for file_path in file_paths:
             if not os.path.exists(file_path):