Compare commits

...

7 Commits

Author SHA1 Message Date
10001f00c2 Merge branch 'main' of https://git.marcoaiot.com/admin/release-scripts 2025-06-28 16:47:58 +05:30
4ff44fac77 - Add detailed logging
- make the DB list to back up dynamic, received from the env file
- send mail on complete
2025-06-28 16:03:35 +05:30
c039efa4bf - Add Logger to the script
- Use full path for config file so that it is available when run from a cron job
2025-06-17 15:36:55 +05:30
8804a87b9d Corrected the backup dir path 2025-06-09 09:44:29 +00:00
9016f5bd00 Added the secret key and access key 2025-06-09 09:43:19 +00:00
6d848568ab Added the secret key and access key while configuring S3. 2025-06-09 09:42:08 +00:00
c77eccf8c6 Modify script to support both windows and ubuntu system
Modify script to upload backups on s3
Add documentation
2025-06-05 15:55:06 +05:30
5 changed files with 361 additions and 55 deletions

35
backup/.env Normal file
View File

@ -0,0 +1,35 @@
## Database Configuration
DB_HOST=147.93.98.152
DB_USER=devuser
DB_PASSWORD=xxxxxxx
# Comma-separated list of databases the backup script dumps, in order.
DB_LIST_TO_BACKUP=MarcoBMSProd,MarcoBMSStage,gitea,mediawiki,redmine
# Feature toggles ("true"/"false"): upload finished dumps to S3 and/or push them to Git.
UPLOAD_TO_S3=true
UPLOAD_TO_GIT=false
## AWS S3 Configuration
ACCESS_KEY=xxxxxxx
SECRET_KEY=xxxxxx
S3_BUCKET_NAME=xxxxxxxx
S3_REGION=us-east-1
## send mail once process is complete
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
EMAIL_USER=marcoioitsoft@gmail.com
EMAIL_PASS= ""
EMAIL_SUBJECT="Database backup process"
# Comma-separated recipient list for the completion email.
EMAIL_RECEIVERS=vikas@marcoaiot.com,hr@marcoaiot.com
## Windows Specific Paths (if applicable, uncomment and adjust)
BACKUP_DIR=E:\Office\Marco\gitea\database\test
MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
LOG_DIR=E:\Office\Marco\gitea\database\test\
LOG_FILE=E:\Office\Marco\gitea\database\test\backup_log.txt
## Linux Specific Paths (if applicable, uncomment and adjust)
# BACKUP_DIR=/var/lib/mysql-backups
# MYSQLDUMP_PATH=/usr/bin/mysqldump
# LOG_FILE=/var/www/apps/db-backup-script/logs/mysql_backup.log
# LOG_DIR=/var/www/apps/db-backup-script/logs

Binary file not shown.

View File

@ -1,83 +1,298 @@
import os
import datetime
import subprocess
import zipfile
import logging
import sys
import boto3
from botocore.exceptions import ClientError
from dotenv import load_dotenv # Import load_dotenv
from email_utils import send_email
# --- Configuration ---
# All settings come from the .env file; nothing is hard-coded anymore
# (the previous revision embedded the DB password in source — removed).
load_dotenv()

DB_HOST = os.getenv('DB_HOST')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
ACCESS_KEY = os.getenv('ACCESS_KEY')
SECRET_KEY = os.getenv('SECRET_KEY')

# --- AWS S3 Configuration ---
S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
S3_REGION = os.getenv('S3_REGION')

# Accumulates per-step status lines that are joined into the completion email.
email_body_parts = []

# --- Platform-Specific Paths ---
# Determine OS and set paths accordingly; .env values override the defaults.
if sys.platform.startswith('win'):
    # Paths for Windows
    BACKUP_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup")
    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
    # FIX: previously read the 'BACKUP_DIR' env key for LOG_DIR by mistake,
    # so the LOG_DIR setting in .env was silently ignored on Windows.
    LOG_DIR = os.getenv('LOG_DIR', "C:/gita/database/backup")
    LOG_FILE = os.getenv('LOG_FILE', r'C:\gita\backup_log.txt')
    GIT_EXECUTABLE = "git"  # Assuming git is in PATH on Windows
else:
    # Paths for Ubuntu/Linux
    BACKUP_DIR = os.getenv('BACKUP_DIR', "/var/lib/mysql-backups")
    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', "/usr/bin/mysqldump")
    LOG_FILE = os.getenv('LOG_FILE', "/var/logs/mysql-backup/mysql_backup.log")
    LOG_DIR = os.getenv('LOG_DIR', "/var/logs/mysql-backup")
    GIT_EXECUTABLE = "git"  # Assuming git is in PATH on Linux

# --- Logging Setup ---
# Write log messages both to LOG_FILE and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(LOG_FILE),
        logging.StreamHandler()
    ]
)
# --- Helper Functions (unchanged from previous version) ---
def build_path(database_name):
    """Return the full path for a new backup file of *database_name*.

    The filename embeds a second-resolution timestamp so repeated runs
    within the same minute cannot collide, e.g.
    ``20250628160335-gitea-database-backup.sql``.
    """
    timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    backup_filename = f"{timestamp}-{database_name}-database-backup.sql"
    return os.path.join(BACKUP_DIR, backup_filename)
# Perform backup using mysqldump
def build_mysqldump_command(database_name):
    """Build the mysqldump argument list for *database_name*.

    NOTE(review): passing the password via ``-p`` exposes it in the process
    list; consider the MYSQL_PWD environment variable or a defaults file.
    """
    command = [
        MYSQLDUMP_PATH,
        f"-h{DB_HOST}",
        f"-u{DB_USER}",
        f"-p{DB_PASSWORD}",
        database_name
    ]
    return command
def clean_backup_folder():
    """Delete every regular file in BACKUP_DIR; subdirectories are left alone."""
    for entry in os.listdir(BACKUP_DIR):
        entry_path = os.path.join(BACKUP_DIR, entry)
        if os.path.isfile(entry_path):
            os.remove(entry_path)
def start_backup(database):
    """Dump one database with mysqldump and record the outcome.

    Args:
        database: Name of the database to back up.

    Returns:
        The path of the written backup file on success, or ``None`` on any
        failure (dump error, mysqldump missing, or unexpected exception).
    """
    backup_file_path = build_path(database)
    command = build_mysqldump_command(database)
    try:
        logging.info(f"Starting backup for database: {database} to {backup_file_path}")
        with open(backup_file_path, "w", encoding="utf-8") as out_file:
            subprocess.run(command, stdout=out_file, check=True, text=True)
        logging.info(f"Successfully backed up {database}.")
        email_body_parts.append(f"Successfully backed up {database}.")
        return backup_file_path
    except subprocess.CalledProcessError as e:
        logging.error(f"MySQL dump failed for {database}: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        email_body_parts.append(f"MySQL dump failed for {database}: {e}")
        # stderr is only populated when the run captures it; kept for safety.
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        return None
    except FileNotFoundError:
        logging.error(f"Error: mysqldump not found at '{MYSQLDUMP_PATH}'. Please verify the path.")
        if sys.platform.startswith('win'):
            logging.error(r"On Windows, ensure MySQL is installed and 'mysqldump.exe' path is correct (e.g., C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe)")
        else:
            logging.error("On Ubuntu/Linux, you might need to install 'mysql-client' package: sudo apt install mysql-client")
        return None
    except Exception as e:
        logging.error(f"An unexpected error occurred during backup of {database}: {e}")
        return None
def upload_to_git():
    """Commit and push the contents of BACKUP_DIR to its Git remote.

    Raises:
        subprocess.CalledProcessError: if any git command fails.
        FileNotFoundError: if the git executable cannot be found.

    The original working directory is always restored, even on failure.
    """
    original_cwd = os.getcwd()
    try:
        logging.info(f"Changing directory to {BACKUP_DIR} for Git operations.")
        os.chdir(BACKUP_DIR)

        logging.info("Staging all changes in Git repository...")
        subprocess.run([GIT_EXECUTABLE, "add", "."], check=True, text=True)

        commit_message = f"Automated database backup commit on {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
        logging.info(f"Committing changes with message: '{commit_message}'")
        subprocess.run([GIT_EXECUTABLE, "commit", "-m", commit_message], check=True, text=True)

        logging.info("Pushing changes to remote Git repository...")
        subprocess.run([GIT_EXECUTABLE, "push"], check=True, text=True)
        logging.info("Backup files successfully pushed to Git repository.")
    except subprocess.CalledProcessError as e:
        logging.error(f"Git upload failed: {e}")
        logging.error(f"Command: {' '.join(e.cmd)}")
        if e.stderr:
            logging.error(f"Stderr: {e.stderr}")
        raise
    except FileNotFoundError:
        logging.error(f"Error: Git executable ('{GIT_EXECUTABLE}') not found. Ensure Git is installed and in your system's PATH.")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred during Git upload: {e}")
        raise
    finally:
        # Restore the caller's working directory no matter what happened.
        os.chdir(original_cwd)
def remove_before_first_dash(input_string):
    """Remove the part of a string before the first dash.

    Used to strip the leading timestamp from backup filenames so the S3
    object key stays stable per database.

    Args:
        input_string: The string to process.

    Returns:
        The part of the string after the first dash, or the original string
        unchanged if no dash is found.
    """
    parts = input_string.split('-', 1)  # Split only at the first dash
    if len(parts) > 1:
        return parts[1]
    return input_string
def upload_to_s3(file_paths):
    """Upload the given backup files to the configured S3 bucket.

    Args:
        file_paths: List of local backup file paths. Missing files are
            skipped with a warning; per-file failures do not abort the rest.
    """
    if not file_paths:
        logging.info("No backup files to upload to S3.")
        return

    # Basic validation for S3 configuration.
    if not all([S3_BUCKET_NAME, S3_REGION, ACCESS_KEY, SECRET_KEY]):
        logging.error("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
        email_body_parts.append("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
        return

    try:
        logging.info(f"Attempting to connect to AWS S3 bucket: {S3_BUCKET_NAME} in region: {S3_REGION}")
        s3_client = boto3.client('s3',
                                 region_name=S3_REGION,
                                 aws_access_key_id=ACCESS_KEY,
                                 aws_secret_access_key=SECRET_KEY)
        for file_path in file_paths:
            if not os.path.exists(file_path):
                logging.warning(f"File not found, skipping S3 upload: {file_path}")
                continue
            # Strip the timestamp prefix so each database overwrites its own key.
            s3_object_key = remove_before_first_dash(os.path.basename(file_path))
            try:
                logging.info(f"Uploading {s3_object_key} to s3://{S3_BUCKET_NAME}/{s3_object_key}")
                s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
                logging.info(f"Successfully uploaded {s3_object_key} to S3.")
                email_body_parts.append(f"Successfully uploaded {s3_object_key} to S3.")
            except ClientError as ce:
                logging.error(f"Failed to upload {s3_object_key} to S3: {ce}")
                # FIX: this append was missing the f-prefix, so the literal
                # "{s3_object_key}" placeholders were emailed verbatim.
                email_body_parts.append(f"Failed to upload {s3_object_key} to S3: {ce}")
                if ce.response['Error']['Code'] == 'AccessDenied':
                    logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                    email_body_parts.append("S3 upload access denied. Check your AWS credentials and bucket policy.")
                elif ce.response['Error']['Code'] == 'NoSuchBucket':
                    logging.error(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
                    email_body_parts.append(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
                else:
                    logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
                    email_body_parts.append(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
            except Exception as e:
                logging.error(f"An unexpected error occurred during S3 upload of {s3_object_key}: {e}")
        logging.info("All S3 uploads attempted.")
    except Exception as e:
        logging.critical(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")
        email_body_parts.append(f"Failed to initialize S3 client or a critical S3 error occurred: {e}")
# --- Main Execution ---
if __name__ == "__main__":
    logging.info("--- Database Backup Process Started ---")
    backup_files_created = []

    # Abort early if the essential DB connection variables are not loaded.
    if not all([DB_HOST, DB_USER, DB_PASSWORD]):
        logging.critical("Missing essential database connection variables. Please check your .env file.")
        exit(1)

    try:
        os.makedirs(BACKUP_DIR, exist_ok=True)
        logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
        os.makedirs(LOG_DIR, exist_ok=True)
        logging.info(f"Ensured log directory exists: {LOG_DIR}")

        # Comma-separated database list from the environment.
        DB_LIST_TO_BACKUP = os.getenv('DB_LIST_TO_BACKUP')
        if DB_LIST_TO_BACKUP:
            databases_array_to_backup = [item.strip() for item in DB_LIST_TO_BACKUP.split(',')]
            logging.info(f"Backup databases for: {databases_array_to_backup}")
        else:
            # FIX: previously fell through with databases_array_to_backup
            # undefined, raising NameError a few lines later.
            logging.error("database list array (DB_LIST_TO_BACKUP) not found or is empty.")
            databases_array_to_backup = []

        clean_backup_folder()

        email_body_parts.append(f"Starting backup for database: {databases_array_to_backup}")
        email_body_parts.append("-------------------------------------------------------------")
        for db_name in databases_array_to_backup:
            file_path = start_backup(db_name)
            if file_path:
                backup_files_created.append(file_path)

        email_body_parts.append("")
        email_body_parts.append("Starting Git upload process...")
        email_body_parts.append("-------------------------------------------------------------")
        if os.getenv('UPLOAD_TO_GIT', 'false').lower() == 'true':
            logging.info("Starting Git upload process...")
            upload_to_git()
        else:
            logging.info("Disabled Git upload process...")
            email_body_parts.append("Disabled Git upload process...")

        email_body_parts.append("<br>Starting S3 upload process... ")
        email_body_parts.append("-------------------------------------------------------------")
        if os.getenv('UPLOAD_TO_S3', 'false').lower() == 'true':
            logging.info("Starting S3 upload process...")
            upload_to_s3(backup_files_created)
        else:
            logging.info("Disabled S3 upload process...")
            email_body_parts.append("Disabled S3 upload process...")

        # Final stage: send the HTML summary mail to all configured receivers.
        email_body_parts.append("<br><br><br>Starting sending mail")
        email_body = "<br>".join(email_body_parts)
        EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
        if EMAIL_RECEIVERS:
            email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
            send_email(
                subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
                body=email_body,
                to_emails=email_receivers_array,
                html=True
            )
            logging.info(f"Send Mail to: {email_receivers_array}")
        else:
            # FIX: message previously referred to DB_LIST_TO_BACKUP (copy-paste).
            logging.info("Email receivers list (EMAIL_RECEIVERS) not found or is empty.")

        logging.info("--- Database Backup Process Completed Successfully ---")
        exit(0)
    except Exception as ex:
        logging.critical(f"--- Database Backup Process Failed: {ex} ---")
        exit(1)

36
backup/email_utils.py Normal file
View File

@ -0,0 +1,36 @@
import os
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from dotenv import load_dotenv
# Load SMTP settings from the .env file at import time.
load_dotenv()

# SMTP connection settings; EMAIL_USER / EMAIL_PASS must be set in .env
# (no defaults) or send_email's login step will fail.
EMAIL_HOST = os.getenv("EMAIL_HOST", "smtp.gmail.com")
EMAIL_PORT = int(os.getenv("EMAIL_PORT", 587))
EMAIL_USER = os.getenv("EMAIL_USER")
EMAIL_PASS = os.getenv("EMAIL_PASS")
def send_email(subject, body, to_emails, html=False):
    """Send an email through the configured SMTP server using STARTTLS.

    Args:
        subject: Subject line of the message.
        body: Message body, plain text or HTML markup.
        to_emails: A single address or a list of addresses.
        html: When True, attach the body as HTML instead of plain text.

    Returns:
        True if the message was handed off to the SMTP server, False on
        any error (which is printed, not raised).
    """
    recipients = [to_emails] if isinstance(to_emails, str) else to_emails

    message = MIMEMultipart()
    message["From"] = EMAIL_USER
    message["To"] = ", ".join(recipients)
    message["Subject"] = subject
    message.attach(MIMEText(body, "html" if html else "plain"))

    try:
        with smtplib.SMTP(EMAIL_HOST, EMAIL_PORT) as server:
            server.starttls()
            server.login(EMAIL_USER, EMAIL_PASS)
            server.sendmail(EMAIL_USER, recipients, message.as_string())
        return True
    except Exception as e:
        print(f"Error sending email: {e}")
        return False

View File

@ -1,10 +1,24 @@
import requests
from dotenv import load_dotenv
import os
import logging, traceback

# Log to both a file and the console.
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s: %(message)s',
    handlers=[
        logging.FileHandler("app.log"),
        logging.StreamHandler()  # logs to console
    ]
)

# Resolve config.env relative to this script so it is found when run from cron.
# (The old relative-path load_dotenv call from the previous revision is removed.)
script_dir = os.path.dirname(os.path.abspath(__file__))
load_dotenv(dotenv_path=f"{script_dir}/config.env", override=True)

base_url = os.getenv("API_BASE_URL")
print(f"base_url: {base_url}")
def login():
payload = {
"username": os.getenv("USERNAME"),
@ -30,13 +44,19 @@ def project_proccess(jwt):
return response
try:
    logging.info("Script started")
    jwt = login()
    logging.info("Login Success")
    response = project_proccess(jwt)
    if response.status_code == 200:
        # FIX: the success branch logged "Script started" by copy-paste
        # mistake; it should report the actual outcome.
        logging.info("Email sent")
    else:
        logging.info(f"Failed with response: {response}")
except Exception:
    # Log the full traceback rather than just the exception message.
    msg = traceback.format_exc()
    logging.error("An error occurred:\n%s", msg)
    print(f"An error occurred: {msg}")