import datetime
import json  # for reading the JSON config
import logging
import os
import subprocess
import sys

import boto3
from botocore.exceptions import ClientError

from email_utils import send_email
from gzip_util import gzip_file

# Collected status messages that make up the notification email body.
email_body_parts = []

# Default paths; overridden per-platform from config.json in set_platform_specific_paths().
BACKUP_DIR = "/var/www/apps/db-backup-script/backups"
MYSQLDUMP_PATH = "/usr/bin/mysqldump"
LOG_DIR = "/var/www/apps/db-backup-script/logs"
LOG_FILE = "/var/www/apps/db-backup-script/logs/mysql_backup.log"

# The JSON config file is expected in the same directory as this script.
CONFIG_FILE_PATH = 'config.json'

def load_config(file_path):
    """Load the JSON configuration file and return it as a dict."""
    with open(file_path, 'r') as f:
        return json.load(f)

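# A sketch of the config.json layout this script reads. The key names are
# taken from the lookups below; the example values are assumptions:
#
# {
#   "WINDOWS_PATHS":  {"BACKUP_DIR": "...", "MYSQLDUMP_PATH": "...", "LOG_DIR": "...", "LOG_FILE": "..."},
#   "LINUX_PATHS":    {"BACKUP_DIR": "...", "MYSQLDUMP_PATH": "...", "LOG_DIR": "...", "LOG_FILE": "..."},
#   "DATABASE_CONFIGS": [{"DB_HOST": "localhost", "DB_USER": "backup",
#                         "DB_PASSWORD": "secret", "DB_NAME": "shop"}],
#   "ACTIONS": {"DO_UPLOAD_TO_GIT": true, "DO_UPLOAD_TO_S3": true, "DO_SEND_EMAIL": true},
#   "AWS_S3_CONFIGURATION": {"S3_BUCKET_NAME": "...", "S3_REGION": "...",
#                            "ACCESS_KEY": "...", "SECRET_KEY": "..."},
#   "EMAIL_CONFIGURATION": {"EMAIL_HOST": "...", "EMAIL_PORT": 587,
#                           "EMAIL_USER": "...", "EMAIL_PASS": "...",
#                           "EMAIL_RECEIVERS": "a@example.com,b@example.com",
#                           "EMAIL_SUBJECT": "Database backup process"}
# }
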
def set_platform_specific_paths(config):
    """Override the module-level path defaults with the OS-specific paths from config."""
    # Without the global statement these assignments would only create locals.
    global BACKUP_DIR, MYSQLDUMP_PATH, LOG_DIR, LOG_FILE
    if sys.platform.startswith('win'):
        # Paths for Windows.
        BACKUP_DIR = config['WINDOWS_PATHS']['BACKUP_DIR']
        MYSQLDUMP_PATH = config['WINDOWS_PATHS']['MYSQLDUMP_PATH']
        LOG_DIR = config['WINDOWS_PATHS']['LOG_DIR']
        LOG_FILE = config['WINDOWS_PATHS']['LOG_FILE']
    elif 'LINUX_PATHS' in config:
        # Paths for Ubuntu/Linux.
        BACKUP_DIR = config['LINUX_PATHS']['BACKUP_DIR']
        MYSQLDUMP_PATH = config['LINUX_PATHS']['MYSQLDUMP_PATH']
        LOG_DIR = config['LINUX_PATHS']['LOG_DIR']
        LOG_FILE = config['LINUX_PATHS']['LOG_FILE']
    else:
        # Linux paths not configured in JSON: keep the module defaults above.
        logging.error("Linux paths not configured in JSON; falling back to the default Linux paths.")

# Create the backup and log directories up front. Calling logging.* here
# would implicitly attach a default handler and turn the basicConfig() call
# below into a no-op, so failures are reported on stderr instead. (These run
# with the module defaults; config overrides are applied later in main.)
try:
    os.makedirs(BACKUP_DIR, exist_ok=True)
    os.makedirs(LOG_DIR, exist_ok=True)
except Exception as ex:
    print(f"--- Unable to create required folders. Database Backup Process Failed: {ex} ---", file=sys.stderr)
    sys.exit(1)

# --- Logging Setup ---
# Write log messages both to the log file and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(LOG_FILE),
        logging.StreamHandler()
    ]
)
logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
logging.info(f"Ensured log directory exists: {LOG_DIR}")

# --- Helper Functions ---

def build_path(database_name):
    """Return a timestamped backup file path inside BACKUP_DIR."""
    timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    backup_filename = f"{timestamp}-{database_name}-database-backup.sql"
    return os.path.join(BACKUP_DIR, backup_filename)

def build_mysqldump_command(db_conf):
    # Single quotes inside the f-string expressions: nesting the same quote
    # is a SyntaxError before Python 3.12. Note also that passing the
    # password with -p on the command line exposes it to other local users
    # (e.g. via `ps`); a --defaults-extra-file is safer.
    command = [
        MYSQLDUMP_PATH,
        f"-h{db_conf['DB_HOST']}",
        f"-u{db_conf['DB_USER']}",
        f"-p{db_conf['DB_PASSWORD']}",
        db_conf["DB_NAME"],
    ]
    return command

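# For example, with db_conf = {"DB_HOST": "localhost", "DB_USER": "backup",
# "DB_PASSWORD": "secret", "DB_NAME": "shop"} (hypothetical values), this
# returns ['/usr/bin/mysqldump', '-hlocalhost', '-ubackup', '-psecret', 'shop'].
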
def clean_backup_folder():
    """Delete every file in BACKUP_DIR so each run starts from an empty folder.

    Subdirectories (such as the .git repository used by upload_to_git) are
    left intact; the later `git add .` stages the deletions as well.
    """
    for filename in os.listdir(BACKUP_DIR):
        file_path = os.path.join(BACKUP_DIR, filename)
        if os.path.isfile(file_path):
            os.remove(file_path)

def start_backup(db_conf):
    backup_file_path = build_path(db_conf["DB_NAME"])
    command = build_mysqldump_command(db_conf)
    try:
        logging.info(f"Starting backup for database: {db_conf['DB_NAME']} to {backup_file_path}")
        with open(backup_file_path, "w", encoding="utf-8") as out_file:
            # Pipe stderr so that e.stderr is populated on failure below.
            subprocess.run(command, stdout=out_file, stderr=subprocess.PIPE, check=True, text=True)
        logging.info(f"Successfully backed up {db_conf['DB_NAME']}.")

        # Compress the dump and delete the original .sql file.
        gzipped_path = gzip_file(backup_file_path, delete_original=True)

        email_body_parts.append(f"Successfully backed up {db_conf['DB_NAME']}.")
        return gzipped_path
    except subprocess.CalledProcessError as e:
        logging.error(f"MySQL dump failed for {db_conf['DB_NAME']}: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        email_body_parts.append(f"MySQL dump failed for {db_conf['DB_NAME']}: {e}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        return None
    except FileNotFoundError:
        logging.error(f"Error: mysqldump not found at '{MYSQLDUMP_PATH}'. Please verify the path.")
        if sys.platform.startswith('win'):
            logging.error(r"On Windows, ensure MySQL is installed and the 'mysqldump.exe' path is correct (e.g., C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe)")
        else:
            logging.error("On Ubuntu/Linux, you may need to install the 'mysql-client' package: sudo apt install mysql-client")
        return None
    except Exception as e:
        logging.error(f"An unexpected error occurred during backup of {db_conf['DB_NAME']}: {e}")
        return None

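# Example of the file flow for a database named "shop" (hypothetical name),
# assuming gzip_file appends a ".gz" suffix to the path it is given:
#   backups/20250101020000-shop-database-backup.sql     <- mysqldump output
#   backups/20250101020000-shop-database-backup.sql.gz  <- returned path; .sql removed
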
def upload_to_git():
    """Commit and push the contents of BACKUP_DIR to its Git remote."""
    original_cwd = os.getcwd()
    GIT_EXECUTABLE = "git"
    try:
        logging.info(f"Changing directory to {BACKUP_DIR} for Git operations.")
        os.chdir(BACKUP_DIR)

        logging.info("Staging all changes in Git repository...")
        subprocess.run([GIT_EXECUTABLE, "add", "."], check=True, text=True, stderr=subprocess.PIPE)

        # Note: `git commit` exits non-zero when there is nothing to commit,
        # which raises CalledProcessError below.
        commit_message = f"Automated database backup commit on {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
        logging.info(f"Committing changes with message: '{commit_message}'")
        subprocess.run([GIT_EXECUTABLE, "commit", "-m", commit_message], check=True, text=True, stderr=subprocess.PIPE)

        logging.info("Pushing changes to remote Git repository...")
        subprocess.run([GIT_EXECUTABLE, "push"], check=True, text=True, stderr=subprocess.PIPE)

        logging.info("Backup files successfully pushed to Git repository.")
    except subprocess.CalledProcessError as e:
        logging.error(f"Git upload failed: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        raise
    except FileNotFoundError:
        logging.error(f"Error: Git executable ('{GIT_EXECUTABLE}') not found. Ensure Git is installed and in your system's PATH.")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred during Git upload: {e}")
        raise
    finally:
        os.chdir(original_cwd)

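# One-time setup assumed for BACKUP_DIR before upload_to_git can work
# (a sketch; the remote URL below is a hypothetical placeholder):
#   cd /var/www/apps/db-backup-script/backups
#   git init
#   git remote add origin git@example.com:backups/db-backups.git
#   git push -u origin main
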
def remove_before_first_dash(input_string):
    """
    Removes the part of a string before the first dash.

    Args:
        input_string: The string to process.

    Returns:
        The part of the string after the first dash, or the original string
        if no dash is found.
    """
    parts = input_string.split('-', 1)  # Split only at the first dash
    if len(parts) > 1:
        return parts[1]
    return input_string

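# Example: remove_before_first_dash("20250101020000-shop-database-backup.sql.gz")
# -> "shop-database-backup.sql.gz". Stripping the timestamp keeps the S3 key
# stable across runs, so each upload overwrites the previous object for the
# same database.
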
def upload_to_s3(aws_s3_configuration, file_paths):
    if not file_paths:
        logging.info("No backup files to upload to S3.")
        return

    # Basic validation of the S3 configuration.
    if not all([aws_s3_configuration["S3_BUCKET_NAME"], aws_s3_configuration["S3_REGION"],
                aws_s3_configuration["ACCESS_KEY"], aws_s3_configuration["SECRET_KEY"]]):
        logging.error("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your config.json file.")
        email_body_parts.append("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your config.json file.")
        return

    try:
        logging.info(f"Attempting to connect to AWS S3 bucket: {aws_s3_configuration['S3_BUCKET_NAME']} in region: {aws_s3_configuration['S3_REGION']}")
        s3_client = boto3.client('s3',
                                 region_name=aws_s3_configuration["S3_REGION"],
                                 aws_access_key_id=aws_s3_configuration["ACCESS_KEY"],
                                 aws_secret_access_key=aws_s3_configuration["SECRET_KEY"])

        for file_path in file_paths:
            if not os.path.exists(file_path):
                logging.warning(f"File not found, skipping S3 upload: {file_path}")
                continue

            # Use the file name without its timestamp prefix as the object key.
            s3_object_key = remove_before_first_dash(os.path.basename(file_path))
            try:
                logging.info(f"Uploading {s3_object_key} to s3://{aws_s3_configuration['S3_BUCKET_NAME']}/{s3_object_key}")
                s3_client.upload_file(
                    file_path,
                    aws_s3_configuration["S3_BUCKET_NAME"],
                    s3_object_key,
                    ExtraArgs={
                        # Glacier Instant Retrieval: low-cost storage with
                        # millisecond access for rarely read objects.
                        'StorageClass': 'GLACIER_IR',
                        'ServerSideEncryption': 'AES256'
                    })
                logging.info(f"Uploaded to {aws_s3_configuration['S3_BUCKET_NAME']}/{s3_object_key} with Glacier Instant Retrieval")
                email_body_parts.append(f"Successfully uploaded {s3_object_key} to S3.")

            except ClientError as ce:
                logging.error(f"Failed to upload {s3_object_key} to S3: {ce}")
                email_body_parts.append(f"Failed to upload {s3_object_key} to S3: {ce}")

                if ce.response['Error']['Code'] == 'AccessDenied':
                    logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                    email_body_parts.append("S3 upload access denied. Check your AWS credentials and bucket policy.")
                elif ce.response['Error']['Code'] == 'NoSuchBucket':
                    logging.error(f"S3 bucket '{aws_s3_configuration['S3_BUCKET_NAME']}' does not exist or you don't have access.")
                    email_body_parts.append(f"S3 bucket '{aws_s3_configuration['S3_BUCKET_NAME']}' does not exist or you don't have access.")
                else:
                    logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
                    email_body_parts.append(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")

            except Exception as e:
                logging.error(f"An unexpected error occurred during S3 upload of {s3_object_key}: {e}")
        logging.info("All S3 uploads attempted.")
    except Exception as e:
        logging.critical(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")
        email_body_parts.append(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")

# --- Main Execution ---
if __name__ == "__main__":
    logging.info("--- Database Backup Process Started ---")
    backup_files_created = []

    config = load_config(CONFIG_FILE_PATH)
    set_platform_specific_paths(config)

    clean_backup_folder()

    # Loop through the configured databases and back up each one.
    db_configs = config['DATABASE_CONFIGS']
    email_body_parts.append("Starting backup for databases")
    email_body_parts.append("-------------------------------------------------------------")

    for db_conf in db_configs:
        # Don't print db_conf itself: it contains the database password.
        logging.info(f"Processing database: {db_conf['DB_NAME']}")
        # Basic validation that the essential connection values are present.
        if not all([db_conf["DB_HOST"], db_conf["DB_USER"], db_conf["DB_PASSWORD"]]):
            logging.critical("Missing essential database connection variables. Please check your config.json file.")
            continue

        email_body_parts.append(f"Starting backup for database: {db_conf['DB_NAME']}")
        file_path = start_backup(db_conf)
        if file_path:
            backup_files_created.append(file_path)

    email_body_parts.append("")

    try:
        email_body_parts.append("Starting Git upload process...")
        email_body_parts.append("-------------------------------------------------------------")

        do_upload_to_git = config['ACTIONS']['DO_UPLOAD_TO_GIT']
        if do_upload_to_git:
            logging.info("Starting Git upload process...")
            upload_to_git()
        else:
            logging.info("Git upload process disabled.")
            email_body_parts.append("Git upload process disabled.")

        email_body_parts.append("<br>Starting S3 upload process...")
        email_body_parts.append("-------------------------------------------------------------")

        do_upload_to_s3 = config['ACTIONS']['DO_UPLOAD_TO_S3']
        if do_upload_to_s3:
            logging.info("Starting S3 upload process...")
            aws_s3_configuration = config['AWS_S3_CONFIGURATION']
            upload_to_s3(aws_s3_configuration, backup_files_created)
        else:
            logging.info("S3 upload process disabled.")
            email_body_parts.append("S3 upload process disabled.")

        # Final stage: send the HTML status email to the configured recipients.
        do_send_email = config['ACTIONS']['DO_SEND_EMAIL']
        if do_send_email:
            email_body_parts.append("<br><br>Backup completed for the following files:")
            email_body_parts.append("-------------------------------------------------------------")

            for file_name in backup_files_created:
                email_body_parts.append(f"{file_name}")

            email_body_parts.append("<br><br><br>Sending mail")
            email_body = "<br>".join(email_body_parts)  # joined with <br> since the mail is sent as HTML

            email_config = config['EMAIL_CONFIGURATION']
            if email_config["EMAIL_RECEIVERS"]:
                # EMAIL_RECEIVERS is a comma-separated string of addresses.
                email_receivers_array = [item.strip() for item in email_config["EMAIL_RECEIVERS"].split(',')]

                if email_config["EMAIL_SUBJECT"] and email_config["EMAIL_SUBJECT"].strip():  # not None and not just whitespace
                    email_subject = email_config["EMAIL_SUBJECT"].strip()
                else:
                    email_subject = "Database backup process"

                send_email(
                    email_config["EMAIL_HOST"], email_config["EMAIL_PORT"], email_config["EMAIL_USER"], email_config["EMAIL_PASS"],
                    subject=email_subject,
                    body=email_body,
                    to_emails=email_receivers_array,
                    html=True
                )
                logging.info(f"Sent mail to: {email_receivers_array}")
            else:
                logging.info("Email receivers list (EMAIL_RECEIVERS) not found or is empty.")
        else:
            logging.info("Email send process disabled.")
            email_body_parts.append("Email send process disabled.")

        logging.info("--- Database Backup Process Completed Successfully ---")

        sys.exit(0)

    except Exception as ex:
        logging.critical(f"--- Database Backup Process Failed: {ex} ---")
        sys.exit(1)
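
# Typical usage (an assumption -- adjust the script path and schedule):
#   python3 db_backup.py                 # one-off manual run
#   0 2 * * * /usr/bin/python3 /var/www/apps/db-backup-script/db_backup.py
# The script name "db_backup.py" is a hypothetical placeholder.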