- Handle GZIP in database backup process, so that all backups are zipped before being uploaded to S3

- Add flag to check whether emails should be sent on process completion, like was done for GIT and S3
This commit is contained in:
Vikas Nale 2025-07-05 11:23:27 +05:30
parent 10001f00c2
commit 0bd733ecb8
5 changed files with 83 additions and 16 deletions

View File

@ -5,8 +5,30 @@ DB_PASSWORD=xxxxxxx
DB_LIST_TO_BACKUP=MarcoBMSProd,MarcoBMSStage,gitea,mediawiki,redmine
DATABASE_CONFIGS = [
{
'DB_HOST': '147.93.98.152',
'DB_USER': 'devuser',
'DB_PASSWORD': 'AppUser@123$',
'DB_NAME': 'gitea,mediawiki,redmine',
'DB_TYPE': 'mysql' # Add database type if you have mixed databases (mysql, postgres, etc.)
},
{
'DB_HOST': '147.93.98.152',
'DB_USER': 'devuser',
'DB_PASSWORD': 'AppUser@123$',
'DB_NAME': 'MarcoBMSProd',
'DB_TYPE': 'mysql'
},
]
## Actionable Blocks
UPLOAD_TO_S3=true
UPLOAD_TO_GIT=false
SEND_EMAIL=false
## AWS S3 Configuration
ACCESS_KEY=xxxxxxx

View File

@ -7,6 +7,7 @@ import boto3
from botocore.exceptions import ClientError
from dotenv import load_dotenv # Import load_dotenv
from email_utils import send_email
from gzip_util import gzip_file
# Load environment variables from .env file
load_dotenv()
@ -88,8 +89,13 @@ def start_backup(database):
with open(backup_file_path, "w", encoding="utf-8") as out_file:
subprocess.run(command, stdout=out_file, check=True, text=True)
logging.info(f"Successfully backed up {database}.")
# Compress and delete original
gzipped_path = gzip_file(backup_file_path, delete_original=True)
email_body_parts.append(f"Successfully backed up {database}.")
return backup_file_path
#return backup_file_path
return gzipped_path
except subprocess.CalledProcessError as e:
logging.error(f"MySQL dump failed for {database}: {e}")
logging.error(f"Command: {' '.join(e.cmd)}")
@ -271,22 +277,31 @@ if __name__ == "__main__":
# Send HTML email to multiple recipients
# Final stage: send the mail
email_body_parts.append(f"<br><br><br>Starting sending mail")
email_body ="<br>".join(email_body_parts) # for plain text
EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
if EMAIL_RECEIVERS:
# Split the string by the comma delimiter
email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
send_email(
subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
body=email_body,
to_emails=email_receivers_array,
html=True
)
logging.info(f"Send Mail to: {email_receivers_array}")
if os.getenv('SEND_EMAIL', 'false').lower() == 'true':
email_body_parts.append(f"<br><br>Backup completed for folloing files:")
email_body_parts.append(f"-------------------------------------------------------------")
for file_name in backup_files_created:
email_body_parts.append(f"{file_name}")
email_body_parts.append(f"<br><br><br>Starting sending mail")
email_body ="<br>".join(email_body_parts) # for plain text
EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
if EMAIL_RECEIVERS:
# Split the string by the comma delimiter
email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
send_email(
subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
body=email_body,
to_emails=email_receivers_array,
html=True
)
logging.info(f"Send Mail to: {email_receivers_array}")
else:
logging.info(f"database list array (DB_LIST_TO_BACKUP) not found or is empty.")
else:
logging.info(f"database list array (DB_LIST_TO_BACKUP) not found or is empty.")
logging.info("Disabled EMAIL send process...")
email_body_parts.append(f"Disabled EMAIL send process...")
logging.info("--- Database Backup Process Completed Successfully ---")

View File

@ -0,0 +1,30 @@
import gzip
import shutil
import os
def gzip_file(source_file, delete_original=False):
    """
    Compress a file with gzip, producing ``<source_file>.gz`` next to it.

    Args:
        source_file (str): Full path of the file to compress.
        delete_original (bool): When True, remove the source file after the
            compressed copy has been written successfully.

    Returns:
        str: Path of the newly created gzipped file.

    Raises:
        FileNotFoundError: If ``source_file`` does not exist.
    """
    if not os.path.isfile(source_file):
        raise FileNotFoundError(f"File not found: {source_file}")

    gzipped_file = source_file + '.gz'

    # Stream the bytes through gzip so large dumps are never loaded
    # into memory all at once.
    with open(source_file, 'rb') as src, gzip.open(gzipped_file, 'wb') as dst:
        shutil.copyfileobj(src, dst)

    if delete_original:
        os.remove(source_file)
        print(f"Original file deleted: {source_file}")

    print(f"Gzipped file created: {gzipped_file}")
    return gzipped_file