- Add detailed logging
- Make the DB list to back up dynamic, read from the env file
- Send mail on completion
parent c039efa4bf
commit 4ff44fac77
backup/.env (50 lines changed)
@@ -1,25 +1,35 @@
# Database Configuration
DB_HOST=1xxxx
## Database Configuration
DB_HOST=147.93.98.152
DB_USER=devuser
DB_PASSWORD=xxxxx
DB_NAME_PROD=MarcoBMSProd
DB_NAME_STAGE=MarcoBMSStage
DB_NAME_GITA=gitea
DB_NAME_MEDIAWIKI=mediawiki
DB_NAME_REDMINE=redmine
DB_PASSWORD=xxxxxxx

# AWS S3 Configuration
ACCESS_KEY=xxxxxxxx
SECRET_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
S3_BUCKET_NAME=your-s3-bucket-name
DB_LIST_TO_BACKUP=MarcoBMSProd,MarcoBMSStage,gitea,mediawiki,redmine

UPLOAD_TO_S3=true
UPLOAD_TO_GIT=false

## AWS S3 Configuration
ACCESS_KEY=xxxxxxx
SECRET_KEY=xxxxxx
S3_BUCKET_NAME=xxxxxxxx
S3_REGION=us-east-1

# Windows Specific Paths (if applicable, uncomment and adjust)
# WIN_BACKUP_DIR=C:/gita/database/backup
# WIN_MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
# WIN_LOG_FILE=C:/gita/backup_log.txt
## send mail once process is complete
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
EMAIL_USER=marcoioitsoft@gmail.com
EMAIL_PASS= ""
EMAIL_SUBJECT="Database backup process"
EMAIL_RECEIVERS=vikas@marcoaiot.com,hr@marcoaiot.com

# Linux Specific Paths (if applicable, uncomment and adjust)
# LINUX_BACKUP_DIR=/var/lib/mysql-backups
# LINUX_MYSQLDUMP_PATH=/usr/bin/mysqldump
# LINUX_LOG_FILE=/var/log/mysql_backup.log
## Windows Specific Paths (if applicable, uncomment and adjust)
BACKUP_DIR=E:\Office\Marco\gitea\database\test
MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
LOG_DIR=E:\Office\Marco\gitea\database\test\
LOG_FILE=E:\Office\Marco\gitea\database\test\backup_log.txt

## Linux Specific Paths (if applicable, uncomment and adjust)
# BACKUP_DIR=/var/lib/mysql-backups
# MYSQLDUMP_PATH=/usr/bin/mysqldump
# LOG_FILE=/var/www/apps/db-backup-script/logs/mysql_backup.log
# LOG_DIR=/var/www/apps/db-backup-script/logs
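The two comma-separated variables above, DB_LIST_TO_BACKUP and EMAIL_RECEIVERS, are what make the database list and the mail recipients configurable. A minimal sketch of how the script below consumes them (same keys, simplified parsing):

import os
from dotenv import load_dotenv

load_dotenv()

# Comma-separated env values become plain Python lists; strip() tolerates stray spaces.
databases = [db.strip() for db in os.getenv("DB_LIST_TO_BACKUP", "").split(",") if db.strip()]
receivers = [addr.strip() for addr in os.getenv("EMAIL_RECEIVERS", "").split(",") if addr.strip()]

print(databases)   # e.g. ['MarcoBMSProd', 'MarcoBMSStage', 'gitea', 'mediawiki', 'redmine']
print(receivers)   # e.g. ['vikas@marcoaiot.com', 'hr@marcoaiot.com']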
@@ -6,6 +6,7 @@ import sys
import boto3
from botocore.exceptions import ClientError
from dotenv import load_dotenv  # Import load_dotenv
from email_utils import send_email

# Load environment variables from .env file
load_dotenv()
@@ -15,11 +16,7 @@ load_dotenv()
DB_HOST = os.getenv('DB_HOST')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_NAME_PROD = os.getenv('DB_NAME_PROD')
DB_NAME_STAGE = os.getenv('DB_NAME_STAGE')
DB_NAME_GITA = os.getenv('DB_NAME_GITA')
DB_NAME_MEDIAWIKI = os.getenv('DB_NAME_MEDIAWIKI')
DB_NAME_REDMINE = os.getenv('DB_NAME_REDMINE')

ACCESS_KEY = os.getenv('ACCESS_KEY')
SECRET_KEY = os.getenv('SECRET_KEY')

@@ -27,20 +24,25 @@ SECRET_KEY = os.getenv('SECRET_KEY')
S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
S3_REGION = os.getenv('S3_REGION')

# Initialize a message list
email_body_parts = []

# --- Platform-Specific Paths ---
# Determine OS and set paths accordingly
if sys.platform.startswith('win'):
    # Paths for Windows
    # You can also load these from .env if you prefer fine-grained control
    BACKUP_DIR = os.getenv('WIN_BACKUP_DIR', "C:/gita/database/backup")  # Default if not in .env
    MYSQLDUMP_PATH = os.getenv('WIN_MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
    LOG_FILE = os.getenv('WIN_LOG_FILE', r'C:\gita\backup_log.txt')
    BACKUP_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup")  # Default if not in .env
    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
    LOG_DIR = os.getenv('LOG_DIR', "C:/gita/database/backup")  # Default if not in .env
    LOG_FILE = os.getenv('LOG_FILE', r'C:\gita\backup_log.txt')
    GIT_EXECUTABLE = "git"  # Assuming git is in PATH on Windows
else:
    # Paths for Ubuntu/Linux
    BACKUP_DIR = os.getenv('LINUX_BACKUP_DIR', "/var/lib/mysql-backups")  # Default if not in .env
    MYSQLDUMP_PATH = os.getenv('LINUX_MYSQLDUMP_PATH', "/usr/bin/mysqldump")
    LOG_FILE = os.getenv('LINUX_LOG_FILE', "/var/log/mysql_backup.log")
    BACKUP_DIR = os.getenv('BACKUP_DIR', "/var/lib/mysql-backups")  # Default if not in .env
    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', "/usr/bin/mysqldump")
    LOG_FILE = os.getenv('LOG_FILE', "/var/logs/mysql-backup/mysql_backup.log")
    LOG_DIR = os.getenv('LOG_DIR', "/var/logs/mysql-backup")
    GIT_EXECUTABLE = "git"  # Assuming git is in PATH on Linux

# --- Logging Setup ---
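The logging configuration itself sits between these hunks and is not shown; the "detailed logging" mentioned in the commit message refers to the logging.info/logging.error calls visible below. A hedged, self-contained sketch of a file-based setup consistent with the LOG_DIR/LOG_FILE variables above (an assumption, not the committed code):

import logging
import os

LOG_DIR = os.getenv("LOG_DIR", "/var/logs/mysql-backup")
LOG_FILE = os.getenv("LOG_FILE", os.path.join(LOG_DIR, "mysql_backup.log"))

os.makedirs(LOG_DIR, exist_ok=True)  # make sure the log directory exists
logging.basicConfig(
    filename=LOG_FILE,
    level=logging.INFO,  # INFO and above end up in the log file
    format="%(asctime)s [%(levelname)s] %(message)s",
)
logging.info("Logging initialized.")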
@@ -71,6 +73,12 @@ def build_mysqldump_command(database_name):
        database_name
    ]
    return command
def clean_backup_folder():
    folder_path = BACKUP_DIR
    for filename in os.listdir(folder_path):
        file_path = os.path.join(folder_path, filename)
        if os.path.isfile(file_path):
            os.remove(file_path)

def start_backup(database):
    backup_file_path = build_path(database)
@@ -80,10 +88,12 @@ def start_backup(database):
        with open(backup_file_path, "w", encoding="utf-8") as out_file:
            subprocess.run(command, stdout=out_file, check=True, text=True)
        logging.info(f"Successfully backed up {database}.")
        email_body_parts.append(f"Successfully backed up {database}.")
        return backup_file_path
    except subprocess.CalledProcessError as e:
        logging.error(f"MySQL dump failed for {database}: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        email_body_parts.append(f"MySQL dump failed for {database}: {e}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        return None
@@ -128,14 +138,33 @@ def upload_to_git():
    finally:
        os.chdir(original_cwd)

def remove_before_first_dash(input_string):
    """
    Removes the part of a string before the first dash.

    Args:
        input_string: The string to process.

    Returns:
        The part of the string after the first dash, or the original string
        if no dash is found.
    """
    parts = input_string.split('-', 1)  # Split only at the first dash
    if len(parts) > 1:
        return parts[1]
    else:
        return input_string

def upload_to_s3(file_paths):
    if not file_paths:
        logging.info("No backup files to upload to S3.")
        return

    # Basic validation for S3 configuration
    if not S3_BUCKET_NAME or not S3_REGION:
        logging.error("S3_BUCKET_NAME or S3_REGION is not set. Cannot upload to S3. Please check your .env file.")
    if not all([S3_BUCKET_NAME, S3_REGION, ACCESS_KEY, SECRET_KEY]):
        logging.error("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
        email_body_parts.append(f"S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")

        return

    try:
@@ -151,23 +180,35 @@ def upload_to_s3(file_paths):
                continue

            s3_object_key = os.path.basename(file_path)
            s3_object_key = remove_before_first_dash(s3_object_key)
            try:
                logging.info(f"Uploading {s3_object_key} to s3://{S3_BUCKET_NAME}/{s3_object_key}")
                s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
                logging.info(f"Successfully uploaded {s3_object_key} to S3.")
                email_body_parts.append(f"Successfully uploaded {s3_object_key} to S3.")

            except ClientError as ce:
                logging.error(f"Failed to upload {s3_object_key} to S3: {ce}")
                email_body_parts.append(f"Failed to upload {s3_object_key} to S3: {ce}")

                if ce.response['Error']['Code'] == 'AccessDenied':
                    logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                    email_body_parts.append("S3 upload access denied. Check your AWS credentials and bucket policy.")
                elif ce.response['Error']['Code'] == 'NoSuchBucket':
                    logging.error(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
                    email_body_parts.append(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")

                else:
                    logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
                    email_body_parts.append(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")

            except Exception as e:
                logging.error(f"An unexpected error occurred during S3 upload of {s3_object_key}: {e}")
        logging.info("All S3 uploads attempted.")
    except Exception as e:
        logging.critical(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")
        email_body_parts.append(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")



# --- Main Execution (unchanged from previous version) ---
@@ -176,34 +217,80 @@ if __name__ == "__main__":
    backup_files_created = []

    # Basic validation that essential DB connection variables are loaded
    if not all([DB_HOST, DB_USER, DB_PASSWORD, DB_NAME_PROD]):
    if not all([DB_HOST, DB_USER, DB_PASSWORD]):
        logging.critical("Missing essential database connection variables. Please check your .env file.")
        exit(1)

    try:
        os.makedirs(BACKUP_DIR, exist_ok=True)
        logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
        os.makedirs(LOG_DIR, exist_ok=True)
        logging.info(f"Ensured log directory exists: {LOG_DIR}")

        databases_to_backup = [
            DB_NAME_PROD,
            DB_NAME_STAGE,
            DB_NAME_GITA,
            DB_NAME_REDMINE,
            DB_NAME_MEDIAWIKI
        ]
        # Get the database list array from the environment variable
        DB_LIST_TO_BACKUP = os.getenv('DB_LIST_TO_BACKUP')

        for db_name in databases_to_backup:
        # Check if the variable exists and is not empty before splitting
        if DB_LIST_TO_BACKUP:
            # Split the string by the comma delimiter
            databases_array_to_backup = [item.strip() for item in DB_LIST_TO_BACKUP.split(',')]
            logging.info(f"Backup databases for: {databases_array_to_backup}")
        else:
            logging.error(f"database list array (DB_LIST_TO_BACKUP) not found or is empty.")

        clean_backup_folder()
        email_body_parts.append(f"Starting backup for database: {databases_array_to_backup}")
        email_body_parts.append(f"-------------------------------------------------------------")
        for db_name in databases_array_to_backup:
            file_path = start_backup(db_name)
            if file_path:
                backup_files_created.append(file_path)
        email_body_parts.append(f"")

        logging.info("Starting Git upload process...")
        upload_to_git()
        email_body_parts.append(f"Starting Git upload process...")
        email_body_parts.append(f"-------------------------------------------------------------")

        if os.getenv('UPLOAD_TO_GIT', 'false').lower() == 'true':
            logging.info("Starting Git upload process...")
            upload_to_git()
        else:
            logging.info("Disabled Git upload process...")
            email_body_parts.append(f"Disabled Git upload process...")

        email_body_parts.append(f"<br>Starting S3 upload process... ")
        email_body_parts.append(f"-------------------------------------------------------------")


        if os.getenv('UPLOAD_TO_S3', 'false').lower() == 'true':
            logging.info("Starting S3 upload process...")
            upload_to_s3(backup_files_created)
        else:
            logging.info("Disabled S3 upload process...")
            email_body_parts.append(f"Disabled S3 upload process...")


        # Send HTML email to multiple recipients
        # Final stage: send the mail
        email_body_parts.append(f"<br><br><br>Starting sending mail")
        email_body = "<br>".join(email_body_parts)  # join parts with <br> for the HTML body
        EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
        if EMAIL_RECEIVERS:
            # Split the string by the comma delimiter
            email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
            send_email(
                subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
                body=email_body,
                to_emails=email_receivers_array,
                html=True
            )
            logging.info(f"Send Mail to: {email_receivers_array}")
        else:
            logging.info("Email receivers list (EMAIL_RECEIVERS) not found or is empty.")

        logging.info("Starting S3 upload process...")
        upload_to_s3(backup_files_created)

        logging.info("--- Database Backup Process Completed Successfully ---")


        exit(0)

    except Exception as ex:
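For context on the S3 key handling above: remove_before_first_dash strips everything up to the first dash from a backup file name before it is used as the S3 object key. A quick illustration with a hypothetical, date-prefixed file name (the real naming comes from build_path, which is outside these hunks):

def remove_before_first_dash(input_string):
    parts = input_string.split('-', 1)  # split only at the first dash
    return parts[1] if len(parts) > 1 else input_string

# Hypothetical backup file name with a date prefix:
print(remove_before_first_dash("20250101-MarcoBMSProd.sql"))  # -> MarcoBMSProd.sql
print(remove_before_first_dash("MarcoBMSProd.sql"))           # no dash, returned unchanged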
backup/email_utils.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import os
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from dotenv import load_dotenv

load_dotenv()

EMAIL_HOST = os.getenv("EMAIL_HOST", "smtp.gmail.com")
EMAIL_PORT = int(os.getenv("EMAIL_PORT", 587))
EMAIL_USER = os.getenv("EMAIL_USER")
EMAIL_PASS = os.getenv("EMAIL_PASS")

def send_email(subject, body, to_emails, html=False):
    if isinstance(to_emails, str):
        to_emails = [to_emails]

    msg = MIMEMultipart()
    msg["From"] = EMAIL_USER
    msg["To"] = ", ".join(to_emails)
    msg["Subject"] = subject

    if html:
        msg.attach(MIMEText(body, "html"))
    else:
        msg.attach(MIMEText(body, "plain"))

    try:
        with smtplib.SMTP(EMAIL_HOST, EMAIL_PORT) as server:
            server.starttls()
            server.login(EMAIL_USER, EMAIL_PASS)
            server.sendmail(EMAIL_USER, to_emails, msg.as_string())
        return True
    except Exception as e:
        print(f"Error sending email: {e}")
        return False
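For reference, a minimal way to exercise the new helper on its own (this assumes the EMAIL_* variables above are set in backup/.env; with Gmail, EMAIL_PASS normally has to be an app password rather than the account password):

from email_utils import send_email

ok = send_email(
    subject="Database backup process (test)",
    body="Backup completed.<br>2 files uploaded to S3.",  # hypothetical body text
    to_emails=["vikas@marcoaiot.com", "hr@marcoaiot.com"],
    html=True,
)
print("Mail sent" if ok else "Mail failed")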