diff --git a/database-backup/.env b/database-backup/.env
index 6865c23..91cea23 100644
--- a/database-backup/.env
+++ b/database-backup/.env
@@ -1,57 +1,63 @@
-## Database Configuration
-DB_HOST=147.93.98.152
-DB_USER=devuser
-DB_PASSWORD=xxxxxxx
-
-DB_LIST_TO_BACKUP=MarcoBMSProd,MarcoBMSStage,gitea,mediawiki,redmine
-
-DATABASE_CONFIGS = [
+{
+ "DATABASE_CONFIGS": [
{
- 'DB_HOST': '147.93.98.152',
- 'DB_USER': 'devuser',
- 'DB_PASSWORD': 'AppUser@123$',
- 'DB_NAME': 'gitea,mediawiki,redmine',
- 'DB_TYPE': 'mysql' # Add database type if you have mixed databases (mysql, postgres, etc.)
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "gitea",
+ "DB_TYPE": "mysql"
},
{
- 'DB_HOST': '147.93.98.152',
- 'DB_USER': 'devuser',
- 'DB_PASSWORD': 'AppUser@123$',
- 'DB_NAME': 'MarcoBMSProd',
- 'DB_TYPE': 'mysql'
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "mediawiki",
+ "DB_TYPE": "mysql"
},
-]
-
-
-
-## Actionalble Blocks
-UPLOAD_TO_S3=true
-UPLOAD_TO_GIT=false
-SEND_EMAIL=false
-
-
-## AWS S3 Configuration
-ACCESS_KEY=xxxxxxx
-SECRET_KEY=xxxxxx
-S3_BUCKET_NAME=xxxxxxxx
-S3_REGION=us-east-1
-
-## send mail once process is complete
-EMAIL_HOST=smtp.gmail.com
-EMAIL_PORT=587
-EMAIL_USER=marcoioitsoft@gmail.com
-EMAIL_PASS= ""
-EMAIL_SUBJECT="Database backup process"
-EMAIL_RECEIVERS=vikas@marcoaiot.com,hr@marcoaiot.com
-
-## Windows Specific Paths (if applicable, uncomment and adjust)
-BACKUP_DIR=E:\Office\Marco\gitea\database\test
-MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
-LOG_DIR=E:\Office\Marco\gitea\database\test\
-LOG_FILE=E:\Office\Marco\gitea\database\test\backup_log.txt
-
-## Linux Specific Paths (if applicable, uncomment and adjust)
-# BACKUP_DIR=/var/lib/mysql-backups
-# MYSQLDUMP_PATH=/usr/bin/mysqldump
-# LOG_FILE=/var/www/apps/db-backup-script/logs/mysql_backup.log
-# LOG_DIR=/var/www/apps/db-backup-script/logs
\ No newline at end of file
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "redmine",
+ "DB_TYPE": "mysql"
+ },
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "MarcoBMSProd",
+ "DB_TYPE": "mysql"
+ }
+ ],
+ "ACTIONS": {
+ "UPLOAD_TO_S3": true,
+ "UPLOAD_TO_GIT": false,
+ "SEND_EMAIL": false
+ },
+ "AWS_S3_CONFIGURATION": {
+ "ACCESS_KEY": "xxxxxxx",
+ "SECRET_KEY": "xxxxxx",
+ "S3_BUCKET_NAME": "xxxxxxxx",
+ "S3_REGION": "us-east-1"
+ },
+ "EMAIL_CONFIGURATION": {
+ "EMAIL_HOST": "smtp.gmail.com",
+ "EMAIL_PORT": 587,
+ "EMAIL_USER": "marcoioitsoft@gmail.com",
+ "EMAIL_PASS": "",
+ "EMAIL_SUBJECT": "Database backup process",
+ "EMAIL_RECEIVERS": "vikas@marcoaiot.com,hr@marcoaiot.com"
+ },
+ "WINDOWS_PATHS": {
+ "BACKUP_DIR": "E:/Office/Marco/gitea/database/test",
+ "MYSQLDUMP_PATH": "C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe",
+ "LOG_DIR": "E:/Office/Marco/gitea/database/test/",
+ "LOG_FILE": "E:/Office/Marco/gitea/database/test/backup_log.txt"
+ },
+ "LINUX_PATHS": {
+ "BACKUP_DIR": "/var/lib/mysql-backups",
+ "MYSQLDUMP_PATH": "/usr/bin/mysqldump",
+ "LOG_FILE": "/var/www/apps/db-backup-script/logs/mysql_backup.log",
+ "LOG_DIR": "/var/www/apps/db-backup-script/logs"
+ }
+}
\ No newline at end of file
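
A side effect of the dotenv-to-JSON move worth noting: the old script parsed flags with `os.getenv('UPLOAD_TO_S3', 'false').lower() == 'true'`, whereas `json.load` yields real Python booleans that can be used directly. Also note the key names differ between the two copies: this `.env` uses `UPLOAD_TO_S3`/`UPLOAD_TO_GIT`/`SEND_EMAIL` under `ACTIONS`, while `config.json` below (which is what the script actually reads) uses the `DO_`-prefixed variants. A minimal sketch of the difference, assuming the `config.json` key names:

```python
import json
import os

# Old dotenv style: every value is a string, so a comparison is required.
upload_to_s3_env = os.getenv("UPLOAD_TO_S3", "false").lower() == "true"

# New JSON style: json.load maps JSON true/false to Python bool directly.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)
upload_to_s3_json = config["ACTIONS"]["DO_UPLOAD_TO_S3"]  # already a bool

print(upload_to_s3_env, upload_to_s3_json)
```
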
diff --git a/database-backup/config.json b/database-backup/config.json
new file mode 100644
index 0000000..a37b89c
--- /dev/null
+++ b/database-backup/config.json
@@ -0,0 +1,63 @@
+{
+ "DATABASE_CONFIGS": [
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "gitea",
+ "DB_TYPE": "mysql"
+ },
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "mediawiki",
+ "DB_TYPE": "mysql"
+ },
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "redmine",
+ "DB_TYPE": "mysql"
+ },
+ {
+ "DB_HOST": "147.93.98.152",
+ "DB_USER": "devuser",
+ "DB_PASSWORD": "xxx",
+ "DB_NAME": "MarcoBMSProd",
+ "DB_TYPE": "mysql"
+ }
+ ],
+ "ACTIONS": {
+ "DO_UPLOAD_TO_S3": true,
+ "DO_UPLOAD_TO_GIT": false,
+ "DO_SEND_EMAIL": false
+ },
+ "AWS_S3_CONFIGURATION": {
+ "ACCESS_KEY": "xxxxxxx",
+ "SECRET_KEY": "xxxxxx",
+ "S3_BUCKET_NAME": "xxxxxxxx",
+ "S3_REGION": "us-east-1"
+ },
+ "EMAIL_CONFIGURATION": {
+ "EMAIL_HOST": "smtp.gmail.com",
+ "EMAIL_PORT": 587,
+ "EMAIL_USER": "marcoioitsoft@gmail.com",
+ "EMAIL_PASS": "xxxxx",
+ "EMAIL_SUBJECT": "Database backup process",
+ "EMAIL_RECEIVERS": "vikas@marcoaiot.com,hr@marcoaiot.com"
+ },
+ "WINDOWS_PATHS": {
+ "BACKUP_DIR": "E:/Office/Marco/gitea/database/test",
+ "MYSQLDUMP_PATH": "C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe",
+ "LOG_DIR": "E:/Office/Marco/gitea/database/test/",
+ "LOG_FILE": "E:/Office/Marco/gitea/database/test/backup_log.txt"
+ },
+ "LINUX_PATHS": {
+ "MYSQLDUMP_PATH": "/usr/bin/mysqldump",
+ "BACKUP_DIR": "/var/www/apps/db-backup-script",
+ "LOG_FILE": "/var/www/apps/db-backup-script/logs/mysql_backup.log",
+ "LOG_DIR": "/var/www/apps/db-backup-script/logs"
+ }
+}
\ No newline at end of file
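
Since `load_config` (below) is a plain `json.load`, a missing section currently surfaces as a bare `KeyError` deep inside the run. A small validation pass right after loading would fail fast with a clearer message. A minimal sketch under the schema shown above; `validate_config` is a hypothetical helper, not part of this patch:

```python
import json
import sys

REQUIRED_SECTIONS = ["DATABASE_CONFIGS", "ACTIONS", "AWS_S3_CONFIGURATION", "EMAIL_CONFIGURATION"]
REQUIRED_DB_KEYS = ["DB_HOST", "DB_USER", "DB_PASSWORD", "DB_NAME", "DB_TYPE"]

def validate_config(config):
    # Check top-level sections first, then the per-database entries.
    missing = [s for s in REQUIRED_SECTIONS if s not in config]
    if missing:
        sys.exit(f"config.json is missing sections: {missing}")
    for i, db in enumerate(config["DATABASE_CONFIGS"]):
        absent = [k for k in REQUIRED_DB_KEYS if not db.get(k)]
        if absent:
            sys.exit(f"DATABASE_CONFIGS[{i}] is missing keys: {absent}")

with open("config.json", "r", encoding="utf-8") as f:
    validate_config(json.load(f))
```
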
diff --git a/database-backup/database_backup-script.py b/database-backup/database_backup-script.py
index 2ae8ad5..d0fa2f8 100644
--- a/database-backup/database_backup-script.py
+++ b/database-backup/database_backup-script.py
@@ -5,46 +5,58 @@ import logging
import sys
import boto3
from botocore.exceptions import ClientError
-from dotenv import load_dotenv # Import load_dotenv
from email_utils import send_email
from gzip_util import gzip_file
+import json # For reading JSON config
-# Load environment variables from .env file
-load_dotenv()
-
-# --- Configuration ---
-# Variables are now loaded from the .env file using os.getenv()
-DB_HOST = os.getenv('DB_HOST')
-DB_USER = os.getenv('DB_USER')
-DB_PASSWORD = os.getenv('DB_PASSWORD')
-
-ACCESS_KEY = os.getenv('ACCESS_KEY')
-SECRET_KEY = os.getenv('SECRET_KEY')
-
-# --- AWS S3 Configuration ---
-S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
-S3_REGION = os.getenv('S3_REGION')
# Initialize a message list
email_body_parts = []
+# Fallback defaults; overridden by set_platform_specific_paths() once config.json is loaded.
+BACKUP_DIR = "/var/www/apps/db-backup-script-test/backups"
+MYSQLDUMP_PATH = "/usr/bin/mysqldump"
+LOG_DIR = "/var/www/apps/db-backup-script-test/logs"
+LOG_FILE = "/var/www/apps/db-backup-script-test/logs/mysql_backup.log"
-# --- Platform-Specific Paths ---
-# Determine OS and set paths accordingly
-if sys.platform.startswith('win'):
- # Paths for Windows
- # You can also load these from .env if you prefer fine-grained control
- BACKUP_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup") # Default if not in .env
- MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
- LOG_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup") # Default if not in .env
- LOG_FILE = os.getenv('LOG_FILE', r'C:\gita\backup_log.txt')
- GIT_EXECUTABLE = "git" # Assuming git is in PATH on Windows
-else:
- # Paths for Ubuntu/Linux
- BACKUP_DIR = os.getenv('BACKUP_DIR', "/var/lib/mysql-backups") # Default if not in .env
- MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', "/usr/bin/mysqldump")
- LOG_FILE = os.getenv('LOG_FILE', "/var/logs/mysql-backup/mysql_backup.log")
- LOG_DIR = os.getenv('LOG_DIR', "/var/logs/mysql-backup")
- GIT_EXECUTABLE = "git" # Assuming git is in PATH on Linux
+
+# The JSON config is expected to be named 'config.json' and to sit next to this
+# script; resolving against __file__ keeps that true when the script is run
+# from cron or any other working directory.
+CONFIG_FILE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.json')
+
+def load_config(file_path):
+    with open(file_path, 'r', encoding='utf-8') as f:
+        return json.load(f)
+
+def set_platform_specific_paths(config):
+    # These names are module-level globals used throughout the script; without
+    # this declaration the assignments below would only create locals.
+    global BACKUP_DIR, MYSQLDUMP_PATH, LOG_DIR, LOG_FILE
+    # Determine OS and set paths accordingly
+    if sys.platform.startswith('win'):
+        # Paths for Windows, taken from the WINDOWS_PATHS section of config.json
+        BACKUP_DIR = config['WINDOWS_PATHS']['BACKUP_DIR']
+        MYSQLDUMP_PATH = config['WINDOWS_PATHS']['MYSQLDUMP_PATH']
+ LOG_DIR = config['WINDOWS_PATHS']['LOG_DIR']
+ LOG_FILE = config['WINDOWS_PATHS']['LOG_FILE']
+ else:
+ if 'LINUX_PATHS' in config:
+ # Paths for Ubuntu/Linux
+ BACKUP_DIR = config['LINUX_PATHS']['BACKUP_DIR']
+ MYSQLDUMP_PATH = config['LINUX_PATHS']['MYSQLDUMP_PATH']
+ LOG_DIR = config['LINUX_PATHS']['LOG_DIR']
+ LOG_FILE = config['LINUX_PATHS']['LOG_FILE']
+ else:
+            # LINUX_PATHS section is missing; log it and fall back to built-in defaults.
+            logging.error("Linux paths not configured in config.json; using built-in defaults.")
+            BACKUP_DIR = "/var/www/apps/db-backup-script-test/backups"
+            MYSQLDUMP_PATH = "/usr/bin/mysqldump"
+            LOG_DIR = "/var/www/apps/db-backup-script-test/logs"
+            LOG_FILE = "/var/www/apps/db-backup-script-test/logs/mysql_backup.log"
+
+ try:
+ os.makedirs(BACKUP_DIR, exist_ok=True)
+ logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
+ os.makedirs(LOG_DIR, exist_ok=True)
+ logging.info(f"Ensured log directory exists: {LOG_DIR}")
+ except Exception as ex:
+        logging.critical(f"--- Unable to create required folders. Database Backup Process Failed: {ex} ---")
+        sys.exit(1)
# --- Logging Setup ---
# Configure logging to write messages to a file and to the console
@@ -65,15 +77,17 @@ def build_path(database_name):
backup_path = os.path.join(BACKUP_DIR, backup_filename)
return backup_path
-def build_mysqldump_command(database_name):
+def build_mysqldump_command(db_conf):
command = [
MYSQLDUMP_PATH,
- f"-h{DB_HOST}",
- f"-u{DB_USER}",
- f"-p{DB_PASSWORD}",
- database_name
+        f"-h{db_conf['DB_HOST']}",
+        f"-u{db_conf['DB_USER']}",
+        f"-p{db_conf['DB_PASSWORD']}",
+        db_conf["DB_NAME"]
]
return command
+
+
def clean_backup_folder():
folder_path = BACKUP_DIR
for filename in os.listdir(folder_path):
@@ -81,25 +95,25 @@ def clean_backup_folder():
if os.path.isfile(file_path):
os.remove(file_path)
-def start_backup(database):
- backup_file_path = build_path(database)
- command = build_mysqldump_command(database)
+def start_backup(db_conf):
+ backup_file_path = build_path(db_conf["DB_NAME"])
+ command = build_mysqldump_command(db_conf)
try:
- logging.info(f"Starting backup for database: {database} to {backup_file_path}")
+        logging.info(f"Starting backup for database: {db_conf['DB_NAME']} to {backup_file_path}")
with open(backup_file_path, "w", encoding="utf-8") as out_file:
subprocess.run(command, stdout=out_file, check=True, text=True)
- logging.info(f"Successfully backed up {database}.")
+        logging.info(f"Successfully backed up {db_conf['DB_NAME']}.")
# Compress and delete original
gzipped_path = gzip_file(backup_file_path, delete_original=True)
- email_body_parts.append(f"Successfully backed up {database}.")
+        email_body_parts.append(f"Successfully backed up {db_conf['DB_NAME']}.")
#return backup_file_path
return gzipped_path
except subprocess.CalledProcessError as e:
- logging.error(f"MySQL dump failed for {database}: {e}")
+        logging.error(f"MySQL dump failed for {db_conf['DB_NAME']}: {e}")
logging.error(f"Command: {' '.join(e.cmd)}")
- email_body_parts.append(f"MySQL dump failed for {database}: {e}")
+        email_body_parts.append(f"MySQL dump failed for {db_conf['DB_NAME']}: {e}")
if e.stderr:
logging.error(f"Stderr: {e.stderr}")
return None
@@ -111,11 +125,12 @@ def start_backup(database):
logging.error("On Ubuntu/Linux, you might need to install 'mysql-client' package: sudo apt install mysql-client")
return None
except Exception as e:
- logging.error(f"An unexpected error occurred during backup of {database}: {e}")
+        logging.error(f"An unexpected error occurred during backup of {db_conf['DB_NAME']}: {e}")
return None
def upload_to_git():
original_cwd = os.getcwd()
+ GIT_EXECUTABLE = "git"
try:
logging.info(f"Changing directory to {BACKUP_DIR} for Git operations.")
os.chdir(BACKUP_DIR)
@@ -161,24 +176,24 @@ def remove_before_first_dash(input_string):
else:
return input_string
-def upload_to_s3(file_paths):
+def upload_to_s3(aws_s3_configuration, file_paths):
if not file_paths:
logging.info("No backup files to upload to S3.")
return
# Basic validation for S3 configuration
- if not all([S3_BUCKET_NAME, S3_REGION, ACCESS_KEY, SECRET_KEY]):
+ if not all([aws_s3_configuration["S3_BUCKET_NAME"], aws_s3_configuration["S3_REGION"], aws_s3_configuration["ACCESS_KEY"], aws_s3_configuration["SECRET_KEY"]]):
logging.error("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
email_body_parts.append(f"S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
return
try:
- logging.info(f"Attempting to connect to AWS S3 bucket: {S3_BUCKET_NAME} in region: {S3_REGION}")
+        logging.info(f"Attempting to connect to AWS S3 bucket: {aws_s3_configuration['S3_BUCKET_NAME']} in region: {aws_s3_configuration['S3_REGION']}")
s3_client = boto3.client('s3',
- region_name=S3_REGION,
- aws_access_key_id=ACCESS_KEY,
- aws_secret_access_key=SECRET_KEY)
+ region_name=aws_s3_configuration["S3_REGION"],
+ aws_access_key_id=aws_s3_configuration["ACCESS_KEY"],
+ aws_secret_access_key=aws_s3_configuration["SECRET_KEY"])
for file_path in file_paths:
if not os.path.exists(file_path):
@@ -188,8 +203,8 @@ def upload_to_s3(file_paths):
s3_object_key = os.path.basename(file_path)
s3_object_key = remove_before_first_dash(s3_object_key)
try:
- logging.info(f"Uploading {s3_object_key} to s3://{S3_BUCKET_NAME}/{s3_object_key}")
- s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
+                logging.info(f"Uploading {s3_object_key} to s3://{aws_s3_configuration['S3_BUCKET_NAME']}/{s3_object_key}")
+ s3_client.upload_file(file_path, aws_s3_configuration["S3_BUCKET_NAME"], s3_object_key)
logging.info(f"Successfully uploaded {s3_object_key} to S3.")
email_body_parts.append(f"Successfully uploaded {s3_object_key} to S3.")
@@ -201,8 +216,8 @@ def upload_to_s3(file_paths):
logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
email_body_parts.append("S3 upload access denied. Check your AWS credentials and bucket policy.")
elif ce.response['Error']['Code'] == 'NoSuchBucket':
- logging.error(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
- email_body_parts.append(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
+            logging.error(f"S3 bucket '{aws_s3_configuration['S3_BUCKET_NAME']}' does not exist or you don't have access.")
+            email_body_parts.append(f"S3 bucket '{aws_s3_configuration['S3_BUCKET_NAME']}' does not exist or you don't have access.")
else:
logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
@@ -222,41 +237,38 @@ if __name__ == "__main__":
logging.info("--- Database Backup Process Started ---")
backup_files_created = []
- # Basic validation that essential DB connection variables are loaded
- if not all([DB_HOST, DB_USER, DB_PASSWORD]):
- logging.critical("Missing essential database connection variables. Please check your .env file.")
- exit(1)
+ config = load_config(CONFIG_FILE_PATH)
+ set_platform_specific_paths(config)
+
+ clean_backup_folder()
+
+    # Loop through each configured database and run its backup.
+ db_configs = config['DATABASE_CONFIGS']
+    email_body_parts.append("Starting database backup process")
+    email_body_parts.append("-------------------------------------------------------------")
+
+ for db_conf in db_configs:
+        # Basic validation that essential DB connection variables are loaded.
+        # (Avoid printing db_conf itself: it contains the database password.)
+        if not all([db_conf["DB_HOST"], db_conf["DB_USER"], db_conf["DB_PASSWORD"]]):
+            logging.critical("Missing essential database connection variables. Please check your config.json file.")
+ continue
+
+        email_body_parts.append(f"Starting backup for database: {db_conf['DB_NAME']}")
+ file_path = start_backup(db_conf)
+ if file_path:
+ backup_files_created.append(file_path)
+
+ email_body_parts.append(f"")
+
try:
- os.makedirs(BACKUP_DIR, exist_ok=True)
- logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
- os.makedirs(LOG_DIR, exist_ok=True)
- logging.info(f"Ensured log directory exists: {LOG_DIR}")
-
- # Get the database list array from the environment variable
- DB_LIST_TO_BACKUP = os.getenv('DB_LIST_TO_BACKUP')
-
- # Check if the variable exists and is not empty before splitting
- if DB_LIST_TO_BACKUP:
- # Split the string by the comma delimiter
- databases_array_to_backup = [item.strip() for item in DB_LIST_TO_BACKUP.split(',')]
- logging.info(f"Backup databases for: {databases_array_to_backup}")
- else:
- logging.error(f"database list array (DB_LIST_TO_BACKUP) not found or is empty.")
-
- clean_backup_folder()
- email_body_parts.append(f"Starting backup for database: {databases_array_to_backup}")
- email_body_parts.append(f"-------------------------------------------------------------")
- for db_name in databases_array_to_backup:
- file_path = start_backup(db_name)
- if file_path:
- backup_files_created.append(file_path)
- email_body_parts.append(f"")
-
+
email_body_parts.append(f"Starting Git upload process...")
email_body_parts.append(f"-------------------------------------------------------------")
- if os.getenv('UPLOAD_TO_GIT', 'false').lower() == 'true':
+ do_upload_to_git = config['ACTIONS']['DO_UPLOAD_TO_GIT']
+ if do_upload_to_git:
logging.info("Starting Git upload process...")
upload_to_git()
else:
@@ -267,9 +279,11 @@ if __name__ == "__main__":
email_body_parts.append(f"-------------------------------------------------------------")
- if os.getenv('UPLOAD_TO_S3', 'false').lower() == 'true':
+ do_upload_to_s3 = config['ACTIONS']['DO_UPLOAD_TO_S3']
+ if do_upload_to_s3:
logging.info("Starting S3 upload process...")
- upload_to_s3(backup_files_created)
+ aws_s3_configuration = config['AWS_S3_CONFIGURATION']
+ upload_to_s3(aws_s3_configuration, backup_files_created)
else:
logging.info("Disabled S3 upload process...")
email_body_parts.append(f"Disabled S3 upload process...")
@@ -277,7 +291,8 @@ if __name__ == "__main__":
# Send HTML email to multiple recipients
# Final stage: send the mail
- if os.getenv('SEND_EMAIL', 'false').lower() == 'true':
+ do_send_email = config['ACTIONS']['DO_SEND_EMAIL']
+    if do_send_email:
        email_body_parts.append("\nBackup completed for following files:")
email_body_parts.append(f"-------------------------------------------------------------")
@@ -286,12 +301,18 @@ if __name__ == "__main__":
email_body_parts.append(f"
Starting sending mail")
email_body ="
".join(email_body_parts) # for plain text
- EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
- if EMAIL_RECEIVERS:
+ email_config = config['EMAIL_CONFIGURATION']
+ if email_config["EMAIL_RECEIVERS"]:
# Split the string by the comma delimiter
- email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
+ email_receivers_array = [item.strip() for item in email_config["EMAIL_RECEIVERS"].split(',')]
+ if email_config["EMAIL_SUBJECT"] and email_config["EMAIL_SUBJECT"].strip(): # Check if not None and not just whitespace
+ email_subject = email_config["EMAIL_SUBJECT"].strip()
+ else:
+ email_subject = "Database backup process"
+
send_email(
- subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
+ email_config["EMAIL_HOST"], email_config["EMAIL_PORT"], email_config["EMAIL_USER"], email_config["EMAIL_PASS"],
+ subject=email_subject,
body=email_body,
to_emails=email_receivers_array,
html=True
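
One property of the refactored `build_mysqldump_command` worth keeping in mind: passing the password as `-p<password>` makes it visible in the process list while the dump runs; `mysqldump` also supports reading credentials from an option file via `--defaults-extra-file`, which avoids that. Below is a hypothetical dry-run snippet (not part of the patch) to sanity-check the config without touching any database, with the password masked:

```python
import json

# Hypothetical dry-run: print the mysqldump invocation the script would build
# for each configured database, masking the password.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

for db_conf in config["DATABASE_CONFIGS"]:
    command = [
        "/usr/bin/mysqldump",          # assumed Linux path from LINUX_PATHS
        f"-h{db_conf['DB_HOST']}",
        f"-u{db_conf['DB_USER']}",
        "-p********",                  # the real command embeds DB_PASSWORD here
        db_conf["DB_NAME"],
    ]
    print(" ".join(command))
```
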
diff --git a/database-backup/email_utils.py b/database-backup/email_utils.py
index f2da80d..c3dd4c2 100644
--- a/database-backup/email_utils.py
+++ b/database-backup/email_utils.py
@@ -2,16 +2,9 @@ import os
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
-from dotenv import load_dotenv
-load_dotenv()
-EMAIL_HOST = os.getenv("EMAIL_HOST", "smtp.gmail.com")
-EMAIL_PORT = int(os.getenv("EMAIL_PORT", 587))
-EMAIL_USER = os.getenv("EMAIL_USER")
-EMAIL_PASS = os.getenv("EMAIL_PASS")
-
-def send_email(subject, body, to_emails, html=False):
+def send_email(EMAIL_HOST, EMAIL_PORT, EMAIL_USER, EMAIL_PASS, subject, body, to_emails, html=False):
if isinstance(to_emails, str):
to_emails = [to_emails]
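
With this refactor `email_utils` no longer reads SMTP settings itself; callers must supply them explicitly. A minimal usage sketch, assuming the signature above and the `EMAIL_CONFIGURATION` section of `config.json`:

```python
import json
from email_utils import send_email

with open("config.json", "r", encoding="utf-8") as f:
    email_config = json.load(f)["EMAIL_CONFIGURATION"]

# SMTP settings are now passed explicitly instead of being read via dotenv.
send_email(
    email_config["EMAIL_HOST"],
    email_config["EMAIL_PORT"],
    email_config["EMAIL_USER"],
    email_config["EMAIL_PASS"],
    subject=email_config["EMAIL_SUBJECT"],
    body="Database backup process finished.",
    to_emails=[addr.strip() for addr in email_config["EMAIL_RECEIVERS"].split(",")],
    html=False,
)
```
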