Modify script to support both windows and ubuntu system
Modify script to upload backups to S3; add documentation
This commit is contained in:
parent
d6ae3e4cf2
commit
c77eccf8c6
23
backup/.env
Normal file
23
backup/.env
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Database Configuration
|
||||||
|
DB_HOST=1xxxx
|
||||||
|
DB_USER=devuser
|
||||||
|
DB_PASSWORD=xxxxx
|
||||||
|
DB_NAME_PROD=MarcoBMSProd
|
||||||
|
DB_NAME_STAGE=MarcoBMSStage
|
||||||
|
DB_NAME_GITA=gitea
|
||||||
|
DB_NAME_MEDIAWIKI=mediawiki
|
||||||
|
DB_NAME_REDMINE=redmine
|
||||||
|
|
||||||
|
# AWS S3 Configuration
|
||||||
|
S3_BUCKET_NAME=your-s3-bucket-name
|
||||||
|
S3_REGION=us-east-1
|
||||||
|
|
||||||
|
# Windows Specific Paths (if applicable, uncomment and adjust)
|
||||||
|
# WIN_BACKUP_DIR=C:/gita/database/backup
|
||||||
|
# WIN_MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
|
||||||
|
# WIN_LOG_FILE=C:/gita/backup_log.txt
|
||||||
|
|
||||||
|
# Linux Specific Paths (if applicable, uncomment and adjust)
|
||||||
|
# LINUX_BACKUP_DIR=/var/lib/mysql-backups
|
||||||
|
# LINUX_MYSQLDUMP_PATH=/usr/bin/mysqldump
|
||||||
|
# LINUX_LOG_FILE=/var/log/mysql_backup.log
|
BIN
backup/Technical documentation for system admins.docx
Normal file
BIN
backup/Technical documentation for system admins.docx
Normal file
Binary file not shown.
@ -1,83 +1,206 @@
|
|||||||
import os
|
import os
|
||||||
import datetime
|
import datetime
|
||||||
import subprocess
|
import subprocess
|
||||||
import zipfile
|
import logging
|
||||||
|
import sys
|
||||||
|
import boto3
|
||||||
|
from botocore.exceptions import ClientError
|
||||||
|
from dotenv import load_dotenv # Import load_dotenv
|
||||||
|
|
||||||
# Load environment variables from a local .env file so that no secrets
# (host, user, password) are hard-coded in this script.
load_dotenv()

# --- Configuration ---
# All database connection settings come from the environment (.env file).
DB_HOST = os.getenv('DB_HOST')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_NAME_PROD = os.getenv('DB_NAME_PROD')
DB_NAME_STAGE = os.getenv('DB_NAME_STAGE')
DB_NAME_GITA = os.getenv('DB_NAME_GITA')
DB_NAME_MEDIAWIKI = os.getenv('DB_NAME_MEDIAWIKI')
DB_NAME_REDMINE = os.getenv('DB_NAME_REDMINE')

# --- AWS S3 Configuration ---
S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
S3_REGION = os.getenv('S3_REGION')

# --- Platform-Specific Paths ---
# Paths may be overridden via the .env file; the second argument to
# os.getenv() is the default used when the variable is not set.
if sys.platform.startswith('win'):
    # Windows
    BACKUP_DIR = os.getenv('WIN_BACKUP_DIR', "C:/gita/database/backup")
    MYSQLDUMP_PATH = os.getenv('WIN_MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
    LOG_FILE = os.getenv('WIN_LOG_FILE', r'C:\gita\backup_log.txt')
    GIT_EXECUTABLE = "git"  # assumes git is on PATH
else:
    # Ubuntu / Linux
    BACKUP_DIR = os.getenv('LINUX_BACKUP_DIR', "/var/lib/mysql-backups")
    MYSQLDUMP_PATH = os.getenv('LINUX_MYSQLDUMP_PATH', "/usr/bin/mysqldump")
    LOG_FILE = os.getenv('LINUX_LOG_FILE', "/var/log/mysql_backup.log")
    GIT_EXECUTABLE = "git"  # assumes git is on PATH

# --- Logging Setup ---
# Every message goes both to LOG_FILE and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(LOG_FILE),
        logging.StreamHandler()
    ]
)
|
||||||
|
|
||||||
def build_path(database_name):
    """Return the full path of a fresh, timestamped .sql backup file.

    The name is second-resolution so two backups taken within the same
    minute do not collide.
    """
    stamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    return os.path.join(BACKUP_DIR, f"{stamp}-{database_name}-database-backup.sql")
||||||
|
|
||||||
# Perform backup using mysqldump
|
def build_mysqldump_command(database_name):
    """Assemble the mysqldump argv list for *database_name*.

    NOTE(review): passing -p<password> on the command line exposes the
    password in the process list; consider the MYSQL_PWD environment
    variable or --defaults-extra-file instead.
    """
    return [
        MYSQLDUMP_PATH,
        f"-h{DB_HOST}",
        f"-u{DB_USER}",
        f"-p{DB_PASSWORD}",
        database_name,
    ]
|
||||||
|
|
||||||
def start_backup(database):
    """Dump *database* with mysqldump into a timestamped .sql file.

    Returns the backup file path on success, or None on failure.
    Errors are logged rather than raised so that one failing database
    does not abort the remaining backups.
    """
    backup_file_path = build_path(database)
    command = build_mysqldump_command(database)
    try:
        logging.info(f"Starting backup for database: {database} to {backup_file_path}")
        with open(backup_file_path, "w", encoding="utf-8") as out_file:
            # Capture stderr so the CalledProcessError handler below can
            # actually log it — without PIPE, e.stderr is always None.
            subprocess.run(command, stdout=out_file, stderr=subprocess.PIPE,
                           check=True, text=True)
        logging.info(f"Successfully backed up {database}.")
        return backup_file_path
    except subprocess.CalledProcessError as e:
        logging.error(f"MySQL dump failed for {database}: {e}")
        # Redact the -p<password> argument so credentials never reach the log.
        safe_cmd = ['-p***' if arg.startswith('-p') else arg for arg in e.cmd]
        logging.error(f"Command: {' '.join(safe_cmd)}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        return None
    except FileNotFoundError:
        logging.error(f"Error: mysqldump not found at '{MYSQLDUMP_PATH}'. Please verify the path.")
        if sys.platform.startswith('win'):
            logging.error(r"On Windows, ensure MySQL is installed and 'mysqldump.exe' path is correct (e.g., C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe)")
        else:
            logging.error("On Ubuntu/Linux, you might need to install 'mysql-client' package: sudo apt install mysql-client")
        return None
    except Exception as e:
        logging.error(f"An unexpected error occurred during backup of {database}: {e}")
        return None
|
||||||
|
|
||||||
def upload_to_git():
    """Stage, commit, and push the backup directory to its Git remote.

    Raises on any Git failure so the caller can decide how to react.
    The working directory is always restored, even on error.
    """
    original_cwd = os.getcwd()
    try:
        logging.info(f"Changing directory to {BACKUP_DIR} for Git operations.")
        os.chdir(BACKUP_DIR)

        logging.info("Staging all changes in Git repository...")
        # stderr=PIPE so the CalledProcessError handler below can log it —
        # without PIPE, e.stderr is always None.
        subprocess.run([GIT_EXECUTABLE, "add", "."], check=True, text=True,
                       stderr=subprocess.PIPE)

        commit_message = f"Automated database backup commit on {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
        logging.info(f"Committing changes with message: '{commit_message}'")
        subprocess.run([GIT_EXECUTABLE, "commit", "-m", commit_message],
                       check=True, text=True, stderr=subprocess.PIPE)

        logging.info("Pushing changes to remote Git repository...")
        subprocess.run([GIT_EXECUTABLE, "push"], check=True, text=True,
                       stderr=subprocess.PIPE)

        logging.info("Backup files successfully pushed to Git repository.")
    except subprocess.CalledProcessError as e:
        logging.error(f"Git upload failed: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        raise
    except FileNotFoundError:
        logging.error(f"Error: Git executable ('{GIT_EXECUTABLE}') not found. Ensure Git is installed and in your system's PATH.")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred during Git upload: {e}")
        raise
    finally:
        # Restore the caller's working directory no matter what happened.
        os.chdir(original_cwd)
|
||||||
|
|
||||||
# Run backup and log
|
def upload_to_s3(file_paths):
    """Upload each existing file in *file_paths* to the configured S3 bucket.

    Per-file failures are logged and skipped; a failure to create the S3
    client itself is logged as critical. Nothing is raised.
    """
    if not file_paths:
        logging.info("No backup files to upload to S3.")
        return

    # Basic validation for S3 configuration
    if not S3_BUCKET_NAME or not S3_REGION:
        logging.error("S3_BUCKET_NAME or S3_REGION is not set. Cannot upload to S3. Please check your .env file.")
        return

    try:
        logging.info(f"Attempting to connect to AWS S3 bucket: {S3_BUCKET_NAME} in region: {S3_REGION}")
        s3_client = boto3.client('s3', region_name=S3_REGION)

        for file_path in file_paths:
            if not os.path.exists(file_path):
                logging.warning(f"File not found, skipping S3 upload: {file_path}")
                continue

            # The object key is just the file name (files are already timestamped).
            s3_object_key = os.path.basename(file_path)
            try:
                logging.info(f"Uploading {s3_object_key} to s3://{S3_BUCKET_NAME}/{s3_object_key}")
                s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
                logging.info(f"Successfully uploaded {s3_object_key} to S3.")
            except ClientError as ce:
                logging.error(f"Failed to upload {s3_object_key} to S3: {ce}")
                error_code = ce.response['Error']['Code']
                if error_code == 'AccessDenied':
                    logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                elif error_code == 'NoSuchBucket':
                    logging.error(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
                else:
                    logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
            except Exception as e:
                logging.error(f"An unexpected error occurred during S3 upload of {s3_object_key}: {e}")

        logging.info("All S3 uploads attempted.")
    except Exception as e:
        logging.critical(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# --- Main Execution (unchanged from previous version) ---
|
||||||
|
if __name__ == "__main__":
    logging.info("--- Database Backup Process Started ---")
    backup_files_created = []

    # Basic validation that essential DB connection variables are loaded.
    if not all([DB_HOST, DB_USER, DB_PASSWORD, DB_NAME_PROD]):
        logging.critical("Missing essential database connection variables. Please check your .env file.")
        # sys.exit instead of the site-injected exit() builtin, which is
        # not guaranteed to exist (e.g. under python -S or when frozen).
        sys.exit(1)

    try:
        os.makedirs(BACKUP_DIR, exist_ok=True)
        logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")

        databases_to_backup = [
            DB_NAME_PROD,
            DB_NAME_STAGE,
            DB_NAME_GITA,
            DB_NAME_REDMINE,
            DB_NAME_MEDIAWIKI
        ]

        # start_backup returns None on failure; only successful dumps are
        # collected for the S3 upload step.
        for db_name in databases_to_backup:
            file_path = start_backup(db_name)
            if file_path:
                backup_files_created.append(file_path)

        logging.info("Starting Git upload process...")
        upload_to_git()

        logging.info("Starting S3 upload process...")
        upload_to_s3(backup_files_created)

        logging.info("--- Database Backup Process Completed Successfully ---")
        sys.exit(0)
    except Exception as ex:
        logging.critical(f"--- Database Backup Process Failed: {ex} ---")
        sys.exit(1)
|
||||||
|
Loading…
x
Reference in New Issue
Block a user