diff --git a/backup/.env b/backup/.env new file mode 100644 index 0000000..b887b45 --- /dev/null +++ b/backup/.env @@ -0,0 +1,23 @@ +# Database Configuration +DB_HOST=1xxxx +DB_USER=devuser +DB_PASSWORD=xxxxx +DB_NAME_PROD=MarcoBMSProd +DB_NAME_STAGE=MarcoBMSStage +DB_NAME_GITA=gitea +DB_NAME_MEDIAWIKI=mediawiki +DB_NAME_REDMINE=redmine + +# AWS S3 Configuration +S3_BUCKET_NAME=your-s3-bucket-name +S3_REGION=us-east-1 + +# Windows Specific Paths (if applicable, uncomment and adjust) +# WIN_BACKUP_DIR=C:/gita/database/backup +# WIN_MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe +# WIN_LOG_FILE=C:/gita/backup_log.txt + +# Linux Specific Paths (if applicable, uncomment and adjust) +# LINUX_BACKUP_DIR=/var/lib/mysql-backups +# LINUX_MYSQLDUMP_PATH=/usr/bin/mysqldump +# LINUX_LOG_FILE=/var/log/mysql_backup.log \ No newline at end of file diff --git a/backup/Technical documentation for system admins.docx b/backup/Technical documentation for system admins.docx new file mode 100644 index 0000000..2701ed5 Binary files /dev/null and b/backup/Technical documentation for system admins.docx differ diff --git a/backup/database_backup-script.py b/backup/database_backup-script.py index daa057f..c884fb2 100644 --- a/backup/database_backup-script.py +++ b/backup/database_backup-script.py @@ -1,83 +1,206 @@ import os import datetime import subprocess -import zipfile +import logging +import sys +import boto3 +from botocore.exceptions import ClientError +from dotenv import load_dotenv # Import load_dotenv -# Configuration -DB_HOST = '147.93.98.152' -DB_USER = 'devuser' -DB_PASSWORD = 'AppUser@123$' -DB_NAME_PROD = 'MarcoBMSProd' -DB_NAME_STAGE = 'MarcoBMSStage' -DB_NAME_GITA = 'gitea' -DB_NAME_MEDIAWIKI = 'mediawiki' -DB_NAME_REDMINE = 'redmine' -BACKUP_DIR = "C:/gita/database/backup" -MYSQLDUMP_PATH = r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe' -LOG_FILE = r'C:\gita\backup_log.txt' -print(BACKUP_DIR) -# Generate backup filename with timestamp 
# Load environment variables from a .env file in the current working directory.
load_dotenv()

# --- Database connection configuration (all values come from .env) ---
DB_HOST = os.getenv('DB_HOST')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_NAME_PROD = os.getenv('DB_NAME_PROD')
DB_NAME_STAGE = os.getenv('DB_NAME_STAGE')
DB_NAME_GITA = os.getenv('DB_NAME_GITA')
DB_NAME_MEDIAWIKI = os.getenv('DB_NAME_MEDIAWIKI')
DB_NAME_REDMINE = os.getenv('DB_NAME_REDMINE')

# --- AWS S3 configuration ---
S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
S3_REGION = os.getenv('S3_REGION')

# --- Platform-specific paths ---
# .env overrides win; the literals below are fallback defaults only.
if sys.platform.startswith('win'):
    BACKUP_DIR = os.getenv('WIN_BACKUP_DIR', "C:/gita/database/backup")
    MYSQLDUMP_PATH = os.getenv('WIN_MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
    LOG_FILE = os.getenv('WIN_LOG_FILE', r'C:\gita\backup_log.txt')
else:
    BACKUP_DIR = os.getenv('LINUX_BACKUP_DIR', "/var/lib/mysql-backups")
    MYSQLDUMP_PATH = os.getenv('LINUX_MYSQLDUMP_PATH', "/usr/bin/mysqldump")
    LOG_FILE = os.getenv('LINUX_LOG_FILE', "/var/log/mysql_backup.log")
GIT_EXECUTABLE = "git"  # git is assumed to be on PATH on both platforms

# --- Logging setup ---
# Create the log directory up front so FileHandler does not raise on a
# fresh machine where e.g. C:\gita or /var/log subdirs are missing.
os.makedirs(os.path.dirname(LOG_FILE) or ".", exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(LOG_FILE),
        logging.StreamHandler()
    ]
)


def build_path(database_name):
    """Return the timestamped backup file path for *database_name*."""
    timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    backup_filename = f"{timestamp}-{database_name}-database-backup.sql"
    return os.path.join(BACKUP_DIR, backup_filename)


def build_mysqldump_command(database_name):
    """Build the mysqldump argv for *database_name*.

    The password is intentionally NOT included here: passing -p<password>
    on the command line exposes it to every local user via the process
    list. start_backup() supplies it through the MYSQL_PWD environment
    variable instead.
    """
    return [
        MYSQLDUMP_PATH,
        f"-h{DB_HOST}",
        f"-u{DB_USER}",
        database_name,
    ]


def start_backup(database):
    """Dump *database* with mysqldump into a timestamped file.

    Returns the backup file path on success, or None on failure. A
    partially written dump file is removed on failure so it is never
    committed or uploaded.
    """
    backup_file_path = build_path(database)
    command = build_mysqldump_command(database)
    # Password goes via MYSQL_PWD so it never appears in `ps` output.
    dump_env = dict(os.environ, MYSQL_PWD=DB_PASSWORD or "")
    try:
        logging.info("Starting backup for database: %s to %s", database, backup_file_path)
        with open(backup_file_path, "w", encoding="utf-8") as out_file:
            # stderr must be captured, otherwise CalledProcessError.stderr
            # (logged below) is always None.
            subprocess.run(command, stdout=out_file, stderr=subprocess.PIPE,
                           check=True, text=True, env=dump_env)
        logging.info("Successfully backed up %s.", database)
        return backup_file_path
    except subprocess.CalledProcessError as e:
        logging.error("MySQL dump failed for %s: %s", database, e)
        logging.error("Command: %s", ' '.join(e.cmd))
        if e.stderr:
            logging.error("Stderr: %s", e.stderr)
    except FileNotFoundError:
        logging.error("Error: mysqldump not found at '%s'. Please verify the path.", MYSQLDUMP_PATH)
        if sys.platform.startswith('win'):
            logging.error(r"On Windows, ensure MySQL is installed and 'mysqldump.exe' path is correct (e.g., C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe)")
        else:
            logging.error("On Ubuntu/Linux, you might need to install 'mysql-client' package: sudo apt install mysql-client")
    except Exception as e:
        logging.error("An unexpected error occurred during backup of %s: %s", database, e)
    # Failure path: remove any truncated/empty dump so `git add .` and the
    # S3 upload never pick up a broken backup.
    try:
        if os.path.exists(backup_file_path):
            os.remove(backup_file_path)
    except OSError:
        pass
    return None


def upload_to_git():
    """Commit and push everything in BACKUP_DIR to its Git remote.

    Raises on any Git failure so the caller can abort the run; always
    restores the original working directory.
    """
    original_cwd = os.getcwd()
    try:
        logging.info("Changing directory to %s for Git operations.", BACKUP_DIR)
        os.chdir(BACKUP_DIR)

        logging.info("Staging all changes in Git repository...")
        # stderr is captured so CalledProcessError.stderr is usable below.
        subprocess.run([GIT_EXECUTABLE, "add", "."], check=True, text=True,
                       stderr=subprocess.PIPE)
        commit_message = f"Automated database backup commit on {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
        logging.info("Committing changes with message: '%s'", commit_message)
        subprocess.run([GIT_EXECUTABLE, "commit", "-m", commit_message],
                       check=True, text=True, stderr=subprocess.PIPE)
        logging.info("Pushing changes to remote Git repository...")
        subprocess.run([GIT_EXECUTABLE, "push"], check=True, text=True,
                       stderr=subprocess.PIPE)

        logging.info("Backup files successfully pushed to Git repository.")
    except subprocess.CalledProcessError as e:
        logging.error("Git upload failed: %s", e)
        logging.error("Command: %s", ' '.join(e.cmd))
        if e.stderr:
            logging.error("Stderr: %s", e.stderr)
        raise
    except FileNotFoundError:
        logging.error("Error: Git executable ('%s') not found. Ensure Git is installed and in your system's PATH.", GIT_EXECUTABLE)
        raise
    except Exception as e:
        logging.error("An unexpected error occurred during Git upload: %s", e)
        raise
    finally:
        os.chdir(original_cwd)


def upload_to_s3(file_paths):
    """Upload each existing file in *file_paths* to the configured S3 bucket.

    Best-effort: individual upload failures are logged and do not stop the
    remaining uploads; missing S3 configuration aborts with an error log.
    """
    if not file_paths:
        logging.info("No backup files to upload to S3.")
        return

    if not S3_BUCKET_NAME or not S3_REGION:
        logging.error("S3_BUCKET_NAME or S3_REGION is not set. Cannot upload to S3. Please check your .env file.")
        return

    try:
        logging.info("Attempting to connect to AWS S3 bucket: %s in region: %s", S3_BUCKET_NAME, S3_REGION)
        s3_client = boto3.client('s3', region_name=S3_REGION)

        for file_path in file_paths:
            if not os.path.exists(file_path):
                logging.warning("File not found, skipping S3 upload: %s", file_path)
                continue

            s3_object_key = os.path.basename(file_path)
            try:
                logging.info("Uploading %s to s3://%s/%s", s3_object_key, S3_BUCKET_NAME, s3_object_key)
                s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
                logging.info("Successfully uploaded %s to S3.", s3_object_key)
            except ClientError as ce:
                logging.error("Failed to upload %s to S3: %s", s3_object_key, ce)
                error_code = ce.response['Error']['Code']
                if error_code == 'AccessDenied':
                    logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                elif error_code == 'NoSuchBucket':
                    logging.error("S3 bucket '%s' does not exist or you don't have access.", S3_BUCKET_NAME)
                else:
                    logging.error("S3 ClientError details: %s - %s", error_code, ce.response['Error']['Message'])
            except Exception as e:
                logging.error("An unexpected error occurred during S3 upload of %s: %s", s3_object_key, e)
        logging.info("All S3 uploads attempted.")
    except Exception as e:
        logging.critical("Failed to initialize S3 client or a critical S3 error occurred: %s", e)


# --- Main execution ---
if __name__ == "__main__":
    logging.info("--- Database Backup Process Started ---")
    backup_files_created = []

    # Fail fast if the mandatory connection settings are absent.
    if not all([DB_HOST, DB_USER, DB_PASSWORD, DB_NAME_PROD]):
        logging.critical("Missing essential database connection variables. Please check your .env file.")
        sys.exit(1)

    try:
        os.makedirs(BACKUP_DIR, exist_ok=True)
        logging.info("Ensured backup directory exists: %s", BACKUP_DIR)

        databases_to_backup = [
            DB_NAME_PROD,
            DB_NAME_STAGE,
            DB_NAME_GITA,
            DB_NAME_REDMINE,
            DB_NAME_MEDIAWIKI,
        ]

        for db_name in databases_to_backup:
            if not db_name:
                # Optional databases may be unset in .env; skip them instead
                # of handing None to mysqldump (which would raise TypeError).
                logging.warning("Skipping a database whose name is not configured; check your .env file.")
                continue
            file_path = start_backup(db_name)
            if file_path:
                backup_files_created.append(file_path)

        logging.info("Starting Git upload process...")
        upload_to_git()

        logging.info("Starting S3 upload process...")
        upload_to_s3(backup_files_created)

        logging.info("--- Database Backup Process Completed Successfully ---")
        sys.exit(0)

    except Exception as ex:
        logging.critical("--- Database Backup Process Failed: %s ---", ex)
        sys.exit(1)