Compare commits: main...image_thum (1 commit)

Commit a654136ae9
@@ -1,63 +1,57 @@
-{
-    "DATABASE_CONFIGS": [
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "gitea",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "mediawiki",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "redmine",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "MarcoBMSProd",
-            "DB_TYPE": "mysql"
-        }
-    ],
-    "ACTIONS": {
-        "UPLOAD_TO_S3": true,
-        "UPLOAD_TO_GIT": false,
-        "SEND_EMAIL": false
-    },
-    "AWS_S3_CONFIGURATION": {
-        "ACCESS_KEY": "xxxxxxx",
-        "SECRET_KEY": "xxxxxx",
-        "S3_BUCKET_NAME": "xxxxxxxx",
-        "S3_REGION": "us-east-1"
-    },
-    "EMAIL_CONFIGURATION": {
-        "EMAIL_HOST": "smtp.gmail.com",
-        "EMAIL_PORT": 587,
-        "EMAIL_USER": "marcoioitsoft@gmail.com",
-        "EMAIL_PASS": "",
-        "EMAIL_SUBJECT": "Database backup process",
-        "EMAIL_RECEIVERS": "vikas@marcoaiot.com,hr@marcoaiot.com"
-    },
-    "WINDOWS_PATHS": {
-        "BACKUP_DIR": "E:/Office/Marco/gitea/database/test",
-        "MYSQLDUMP_PATH": "C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe",
-        "LOG_DIR": "E:/Office/Marco/gitea/database/test/",
-        "LOG_FILE": "E:/Office/Marco/gitea/database/test/backup_log.txt"
-    },
-    "LINUX_PATHS": {
-        "BACKUP_DIR": "/var/lib/mysql-backups",
-        "MYSQLDUMP_PATH": "/usr/bin/mysqldump",
-        "LOG_FILE": "/var/www/apps/db-backup-script/logs/mysql_backup.log",
-        "LOG_DIR": "/var/www/apps/db-backup-script/logs"
-    }
-}
+## Database Configuration
+DB_HOST=147.93.98.152
+DB_USER=devuser
+DB_PASSWORD=xxxxxxx
+
+DB_LIST_TO_BACKUP=MarcoBMSProd,MarcoBMSStage,gitea,mediawiki,redmine
+
+DATABASE_CONFIGS = [
+    {
+        'DB_HOST': '147.93.98.152',
+        'DB_USER': 'devuser',
+        'DB_PASSWORD': 'AppUser@123$',
+        'DB_NAME': 'gitea,mediawiki,redmine',
+        'DB_TYPE': 'mysql' # Add database type if you have mixed databases (mysql, postgres, etc.)
+    },
+    {
+        'DB_HOST': '147.93.98.152',
+        'DB_USER': 'devuser',
+        'DB_PASSWORD': 'AppUser@123$',
+        'DB_NAME': 'MarcoBMSProd',
+        'DB_TYPE': 'mysql'
+    },
+]
+
+## Actionalble Blocks
+UPLOAD_TO_S3=true
+UPLOAD_TO_GIT=false
+SEND_EMAIL=false
+
+## AWS S3 Configuration
+ACCESS_KEY=xxxxxxx
+SECRET_KEY=xxxxxx
+S3_BUCKET_NAME=xxxxxxxx
+S3_REGION=us-east-1
+
+## send mail once process is complete
+EMAIL_HOST=smtp.gmail.com
+EMAIL_PORT=587
+EMAIL_USER=marcoioitsoft@gmail.com
+EMAIL_PASS= ""
+EMAIL_SUBJECT="Database backup process"
+EMAIL_RECEIVERS=vikas@marcoaiot.com,hr@marcoaiot.com
+
+## Windows Specific Paths (if applicable, uncomment and adjust)
+BACKUP_DIR=E:\Office\Marco\gitea\database\test
+MYSQLDUMP_PATH=C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe
+LOG_DIR=E:\Office\Marco\gitea\database\test\
+LOG_FILE=E:\Office\Marco\gitea\database\test\backup_log.txt
+
+## Linux Specific Paths (if applicable, uncomment and adjust)
+# BACKUP_DIR=/var/lib/mysql-backups
+# MYSQLDUMP_PATH=/usr/bin/mysqldump
+# LOG_FILE=/var/www/apps/db-backup-script/logs/mysql_backup.log
+# LOG_DIR=/var/www/apps/db-backup-script/logs
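The new .env keeps every value as a flat string, so list-like settings travel as comma-separated text. A minimal sketch (not part of the commit) of how these values are read back with python-dotenv, using the variable names from the file above:

    import os
    from dotenv import load_dotenv

    load_dotenv()  # reads the .env next to the script

    DB_HOST = os.getenv('DB_HOST')
    # DB_LIST_TO_BACKUP is one comma-separated string; split it into names.
    databases = [n.strip() for n in os.getenv('DB_LIST_TO_BACKUP', '').split(',') if n.strip()]
    print(databases)  # ['MarcoBMSProd', 'MarcoBMSStage', 'gitea', 'mediawiki', 'redmine']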
@@ -1,63 +0,0 @@
-{
-    "DATABASE_CONFIGS": [
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "gitea",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "mediawiki",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "redmine",
-            "DB_TYPE": "mysql"
-        },
-        {
-            "DB_HOST": "147.93.98.152",
-            "DB_USER": "devuser",
-            "DB_PASSWORD": "xxx",
-            "DB_NAME": "MarcoBMSProd",
-            "DB_TYPE": "mysql"
-        }
-    ],
-    "ACTIONS": {
-        "DO_UPLOAD_TO_S3": true,
-        "DO_UPLOAD_TO_GIT": false,
-        "DO_SEND_EMAIL": false
-    },
-    "AWS_S3_CONFIGURATION": {
-        "ACCESS_KEY": "xxxxxxx",
-        "SECRET_KEY": "xxxxxx",
-        "S3_BUCKET_NAME": "xxxxxxxx",
-        "S3_REGION": "us-east-1"
-    },
-    "EMAIL_CONFIGURATION": {
-        "EMAIL_HOST": "mail.marcoaiot.com",
-        "EMAIL_PORT": 587,
-        "EMAIL_USER": "admin@marcoaiot.com",
-        "EMAIL_PASS": "xxx",
-        "EMAIL_SUBJECT": "Database backup process",
-        "EMAIL_RECEIVERS": "vikas@marcoaiot.com,umesh@marcoaiot.com"
-    },
-    "WINDOWS_PATHS": {
-        "BACKUP_DIR": "E:/Office/Marco/gitea/database/test",
-        "MYSQLDUMP_PATH": "C:/Program Files/MySQL/MySQL Server 8.0/bin/mysqldump.exe",
-        "LOG_DIR": "E:/Office/Marco/gitea/database/test/",
-        "LOG_FILE": "E:/Office/Marco/gitea/database/test/backup_log.txt"
-    },
-    "LINUX_PATHS": {
-        "MYSQLDUMP_PATH": "/usr/bin/mysqldump",
-        "BACKUP_DIR": "/var/www/apps/db-backup-script",
-        "LOG_FILE": "/var/www/apps/db-backup-script/logs/mysql_backup.log",
-        "LOG_DIR": "/var/www/apps/db-backup-script/logs"
-    }
-}
@@ -5,58 +5,46 @@ import logging
 import sys
 import boto3
 from botocore.exceptions import ClientError
+from dotenv import load_dotenv # Import load_dotenv
 from email_utils import send_email
 from gzip_util import gzip_file
-import json # For reading JSON config
 
+# Load environment variables from .env file
+load_dotenv()
+
+# --- Configuration ---
+# Variables are now loaded from the .env file using os.getenv()
+DB_HOST = os.getenv('DB_HOST')
+DB_USER = os.getenv('DB_USER')
+DB_PASSWORD = os.getenv('DB_PASSWORD')
+
+ACCESS_KEY = os.getenv('ACCESS_KEY')
+SECRET_KEY = os.getenv('SECRET_KEY')
+
+# --- AWS S3 Configuration ---
+S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')
+S3_REGION = os.getenv('S3_REGION')
+
 # Initialize a message list
 email_body_parts = []
-BACKUP_DIR = "/var/www/apps/db-backup-script/backups" # Example default
-MYSQLDUMP_PATH = "/usr/bin/mysqldump" # Example default
-LOG_DIR = "/var/www/apps/db-backup-script/logs"
-LOG_FILE = "/var/www/apps/db-backup-script/logs/mysql_backup.log"
 
-# Assuming your JSON file is named 'config.json' and is in the same directory
-CONFIG_FILE_PATH = 'config.json'
-
-def load_config(file_path):
-    with open(file_path, 'r') as f:
-        return json.load(f)
-
-def set_platform_specific_paths(config):
-    # Determine OS and set paths accordingly
-    if sys.platform.startswith('win'):
-        # Paths for Windows
-        # You can also load these from .env if you prefer fine-grained control
-        BACKUP_DIR = config['WINDOWS_PATHS']['BACKUP_DIR'] # Default if not in .env
-        MYSQLDUMP_PATH = config['WINDOWS_PATHS']['MYSQLDUMP_PATH']
-        LOG_DIR = config['WINDOWS_PATHS']['LOG_DIR']
-        LOG_FILE = config['WINDOWS_PATHS']['LOG_FILE']
-    else:
-        if 'LINUX_PATHS' in config:
-            # Paths for Ubuntu/Linux
-            BACKUP_DIR = config['LINUX_PATHS']['BACKUP_DIR']
-            MYSQLDUMP_PATH = config['LINUX_PATHS']['MYSQLDUMP_PATH']
-            LOG_DIR = config['LINUX_PATHS']['LOG_DIR']
-            LOG_FILE = config['LINUX_PATHS']['LOG_FILE']
-        else:
-            print("Error: Linux paths not configured in JSON.")
-            # Handle this error appropriately, e.g., exit or use default Linux paths.
-            BACKUP_DIR = "/var/www/apps/db-backup-script/backups" # Example default
-            MYSQLDUMP_PATH = "/usr/bin/mysqldump" # Example default
-            LOG_DIR = "/var/www/apps/db-backup-script/logs"
-            LOG_FILE = "/var/www/apps/db-backup-script/logs/mysql_backup.log"
-
-try:
-    os.makedirs(BACKUP_DIR, exist_ok=True)
-    logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
-    os.makedirs(LOG_DIR, exist_ok=True)
-    logging.info(f"Ensured log directory exists: {LOG_DIR}")
-except Exception as ex:
-    logging.critical(f"---Unable to create required folders. Database Backup Process Failed: {ex} ---")
-    exit(1)
+# --- Platform-Specific Paths ---
+# Determine OS and set paths accordingly
+if sys.platform.startswith('win'):
+    # Paths for Windows
+    # You can also load these from .env if you prefer fine-grained control
+    BACKUP_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup") # Default if not in .env
+    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', r'C:\Program Files\MySQL\MySQL Server 8.0\bin\mysqldump.exe')
+    LOG_DIR = os.getenv('BACKUP_DIR', "C:/gita/database/backup") # Default if not in .env
+    LOG_FILE = os.getenv('LOG_FILE', r'C:\gita\backup_log.txt')
+    GIT_EXECUTABLE = "git" # Assuming git is in PATH on Windows
+else:
+    # Paths for Ubuntu/Linux
+    BACKUP_DIR = os.getenv('BACKUP_DIR', "/var/lib/mysql-backups") # Default if not in .env
+    MYSQLDUMP_PATH = os.getenv('MYSQLDUMP_PATH', "/usr/bin/mysqldump")
+    LOG_FILE = os.getenv('LOG_FILE', "/var/logs/mysql-backup/mysql_backup.log")
+    LOG_DIR = os.getenv('LOG_DIR', "/var/logs/mysql-backup")
+    GIT_EXECUTABLE = "git" # Assuming git is in PATH on Linux
 
 # --- Logging Setup ---
 # Configure logging to write messages to a file and to the console
@@ -77,17 +65,15 @@ def build_path(database_name):
     backup_path = os.path.join(BACKUP_DIR, backup_filename)
     return backup_path
 
-def build_mysqldump_command(db_conf):
+def build_mysqldump_command(database_name):
     command = [
         MYSQLDUMP_PATH,
-        f"-h{db_conf["DB_HOST"]}",
-        f"-u{db_conf["DB_USER"]}",
-        f"-p{db_conf["DB_PASSWORD"]}",
-        db_conf["DB_NAME"]
+        f"-h{DB_HOST}",
+        f"-u{DB_USER}",
+        f"-p{DB_PASSWORD}",
+        database_name
     ]
     return command
 
 
 def clean_backup_folder():
     folder_path = BACKUP_DIR
     for filename in os.listdir(folder_path):
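With the per-database dicts gone, only the database name varies per call; host and credentials come from the module-level values loaded from the .env. For the name 'gitea' and the placeholder credentials above, the list returned by build_mysqldump_command would be (illustrative, assuming MYSQLDUMP_PATH resolves to /usr/bin/mysqldump):

    ['/usr/bin/mysqldump', '-h147.93.98.152', '-udevuser', '-pxxxxxxx', 'gitea']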
@@ -95,25 +81,25 @@ def clean_backup_folder():
         if os.path.isfile(file_path):
             os.remove(file_path)
 
-def start_backup(db_conf):
-    backup_file_path = build_path(db_conf["DB_NAME"])
-    command = build_mysqldump_command(db_conf)
+def start_backup(database):
+    backup_file_path = build_path(database)
+    command = build_mysqldump_command(database)
     try:
-        logging.info(f"Starting backup for database: {db_conf["DB_NAME"]} to {backup_file_path}")
+        logging.info(f"Starting backup for database: {database} to {backup_file_path}")
         with open(backup_file_path, "w", encoding="utf-8") as out_file:
             subprocess.run(command, stdout=out_file, check=True, text=True)
-        logging.info(f"Successfully backed up {db_conf["DB_NAME"]}.")
+        logging.info(f"Successfully backed up {database}.")
 
         # Compress and delete original
         gzipped_path = gzip_file(backup_file_path, delete_original=True)
 
-        email_body_parts.append(f"Successfully backed up {db_conf["DB_NAME"]}.")
+        email_body_parts.append(f"Successfully backed up {database}.")
         #return backup_file_path
         return gzipped_path
     except subprocess.CalledProcessError as e:
-        logging.error(f"MySQL dump failed for {db_conf["DB_NAME"]}: {e}")
+        logging.error(f"MySQL dump failed for {database}: {e}")
         logging.error(f"Command: {' '.join(e.cmd)}")
-        email_body_parts.append(f"MySQL dump failed for {db_conf["DB_NAME"]}: {e}")
+        email_body_parts.append(f"MySQL dump failed for {database}: {e}")
         if e.stderr:
             logging.error(f"Stderr: {e.stderr}")
         return None
@@ -125,12 +111,11 @@ def start_backup(db_conf):
         logging.error("On Ubuntu/Linux, you might need to install 'mysql-client' package: sudo apt install mysql-client")
         return None
     except Exception as e:
-        logging.error(f"An unexpected error occurred during backup of {db_conf["DB_NAME"]}: {e}")
+        logging.error(f"An unexpected error occurred during backup of {database}: {e}")
         return None
 
 def upload_to_git():
     original_cwd = os.getcwd()
-    GIT_EXECUTABLE = "git"
     try:
         logging.info(f"Changing directory to {BACKUP_DIR} for Git operations.")
         os.chdir(BACKUP_DIR)
@@ -176,24 +161,24 @@ def remove_before_first_dash(input_string):
     else:
         return input_string
 
-def upload_to_s3(aws_s3_configuration, file_paths):
+def upload_to_s3(file_paths):
     if not file_paths:
         logging.info("No backup files to upload to S3.")
         return
 
     # Basic validation for S3 configuration
-    if not all([aws_s3_configuration["S3_BUCKET_NAME"], aws_s3_configuration["S3_REGION"], aws_s3_configuration["ACCESS_KEY"], aws_s3_configuration["SECRET_KEY"]]):
+    if not all([S3_BUCKET_NAME, S3_REGION, ACCESS_KEY, SECRET_KEY]):
         logging.error("S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
         email_body_parts.append(f"S3_BUCKET_NAME, S3_REGION, ACCESS_KEY or SECRET_KEY is not set. Cannot upload to S3. Please check your .env file.")
 
         return
 
     try:
-        logging.info(f"Attempting to connect to AWS S3 bucket: {aws_s3_configuration["S3_BUCKET_NAME"]} in region: {aws_s3_configuration["S3_REGION"]}")
+        logging.info(f"Attempting to connect to AWS S3 bucket: {S3_BUCKET_NAME} in region: {S3_REGION}")
         s3_client = boto3.client('s3',
-                                 region_name=aws_s3_configuration["S3_REGION"],
-                                 aws_access_key_id=aws_s3_configuration["ACCESS_KEY"],
-                                 aws_secret_access_key=aws_s3_configuration["SECRET_KEY"])
+                                 region_name=S3_REGION,
+                                 aws_access_key_id=ACCESS_KEY,
+                                 aws_secret_access_key=SECRET_KEY)
 
         for file_path in file_paths:
             if not os.path.exists(file_path):
@@ -203,16 +188,9 @@ def upload_to_s3(aws_s3_configuration, file_paths):
             s3_object_key = os.path.basename(file_path)
             s3_object_key = remove_before_first_dash(s3_object_key)
             try:
-                logging.info(f"Uploading {s3_object_key} to s3://{aws_s3_configuration["S3_BUCKET_NAME"]}/{s3_object_key}")
-                s3_client.upload_file(
-                    file_path,
-                    aws_s3_configuration["S3_BUCKET_NAME"],
-                    s3_object_key,
-                    ExtraArgs={
-                        'StorageClass': 'GLACIER_IR', # Glacier Instant Retrieval
-                        'ServerSideEncryption': 'AES256'
-                    })
-                logging.info(f"Uploaded to {aws_s3_configuration["S3_BUCKET_NAME"]}/{s3_object_key} with Glacier Instant Retrieval")
+                logging.info(f"Uploading {s3_object_key} to s3://{S3_BUCKET_NAME}/{s3_object_key}")
+                s3_client.upload_file(file_path, S3_BUCKET_NAME, s3_object_key)
+                logging.info(f"Successfully uploaded {s3_object_key} to S3.")
                 email_body_parts.append(f"Successfully uploaded {s3_object_key} to S3.")
 
             except ClientError as ce:
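Note the removed branch uploaded with Glacier Instant Retrieval and server-side encryption, while the new call drops ExtraArgs entirely. A sketch (not part of the commit) of how those options could be reinstated on the simplified call, since boto3's upload_file accepts an ExtraArgs dict:

    s3_client.upload_file(
        file_path,
        S3_BUCKET_NAME,
        s3_object_key,
        ExtraArgs={
            'StorageClass': 'GLACIER_IR',      # Glacier Instant Retrieval
            'ServerSideEncryption': 'AES256'   # SSE-S3 at-rest encryption
        }
    )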
@@ -223,8 +201,8 @@ def upload_to_s3(aws_s3_configuration, file_paths):
                     logging.error("S3 upload access denied. Check your AWS credentials and bucket policy.")
                     email_body_parts.append("S3 upload access denied. Check your AWS credentials and bucket policy.")
                 elif ce.response['Error']['Code'] == 'NoSuchBucket':
-                    logging.error(f"S3 bucket '{aws_s3_configuration["S3_BUCKET_NAME"]}' does not exist or you don't have access.")
-                    email_body_parts.append(f"S3 bucket '{aws_s3_configuration["S3_BUCKET_NAME"]}' does not exist or you don't have access.")
+                    logging.error(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
+                    email_body_parts.append(f"S3 bucket '{S3_BUCKET_NAME}' does not exist or you don't have access.")
 
                 else:
                     logging.error(f"S3 ClientError details: {ce.response['Error']['Code']} - {ce.response['Error']['Message']}")
@@ -244,38 +222,41 @@ if __name__ == "__main__":
     logging.info("--- Database Backup Process Started ---")
     backup_files_created = []
 
-    config = load_config(CONFIG_FILE_PATH)
-    set_platform_specific_paths(config)
-
-    clean_backup_folder()
+    # Basic validation that essential DB connection variables are loaded
+    if not all([DB_HOST, DB_USER, DB_PASSWORD]):
+        logging.critical("Missing essential database connection variables. Please check your .env file.")
+        exit(1)
 
-    # You would then loop through db_configs and perform backups as in the previous example
-    db_configs = config['DATABASE_CONFIGS']
-    email_body_parts.append(f"Starting backup for database")
-    email_body_parts.append(f"-------------------------------------------------------------")
-
-    for db_conf in db_configs:
-        print( db_conf)
-        # Basic validation that essential DB connection variables are loaded
-        if not all([db_conf["DB_HOST"], db_conf["DB_USER"], db_conf["DB_PASSWORD"]]):
-            logging.critical("Missing essential database connection variables. Please check your .env file.")
-            continue
-
-        email_body_parts.append(f"Starting backup for database: {db_conf["DB_NAME"]}")
-        file_path = start_backup(db_conf)
-        if file_path:
-            backup_files_created.append(file_path)
-
-        email_body_parts.append(f"")
 
     try:
+        os.makedirs(BACKUP_DIR, exist_ok=True)
+        logging.info(f"Ensured backup directory exists: {BACKUP_DIR}")
+        os.makedirs(LOG_DIR, exist_ok=True)
+        logging.info(f"Ensured log directory exists: {LOG_DIR}")
+
+    # Get the database list array from the environment variable
+    DB_LIST_TO_BACKUP = os.getenv('DB_LIST_TO_BACKUP')
+
+    # Check if the variable exists and is not empty before splitting
+    if DB_LIST_TO_BACKUP:
+        # Split the string by the comma delimiter
+        databases_array_to_backup = [item.strip() for item in DB_LIST_TO_BACKUP.split(',')]
+        logging.info(f"Backup databases for: {databases_array_to_backup}")
+    else:
+        logging.error(f"database list array (DB_LIST_TO_BACKUP) not found or is empty.")
+
+    clean_backup_folder()
+    email_body_parts.append(f"Starting backup for database: {databases_array_to_backup}")
+    email_body_parts.append(f"-------------------------------------------------------------")
+    for db_name in databases_array_to_backup:
+        file_path = start_backup(db_name)
+        if file_path:
+            backup_files_created.append(file_path)
+        email_body_parts.append(f"")
 
     email_body_parts.append(f"Starting Git upload process...")
     email_body_parts.append(f"-------------------------------------------------------------")
 
-    do_upload_to_git = config['ACTIONS']['DO_UPLOAD_TO_GIT']
-    if do_upload_to_git:
+    if os.getenv('UPLOAD_TO_GIT', 'false').lower() == 'true':
         logging.info("Starting Git upload process...")
         upload_to_git()
     else:
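The action toggles are now plain strings in the environment, and each one (UPLOAD_TO_GIT here, UPLOAD_TO_S3 and SEND_EMAIL in the hunks below) repeats the same os.getenv(...).lower() == 'true' pattern. A small helper (hypothetical, not in this commit) that centralizes the parsing:

    def env_flag(name, default='false'):
        # Treat only the literal string 'true' (any case) as enabled.
        return os.getenv(name, default).lower() == 'true'

    if env_flag('UPLOAD_TO_GIT'):
        upload_to_git()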
@@ -286,11 +267,9 @@ if __name__ == "__main__":
         email_body_parts.append(f"-------------------------------------------------------------")
 
 
-    do_upload_to_s3 = config['ACTIONS']['DO_UPLOAD_TO_S3']
-    if do_upload_to_s3:
+    if os.getenv('UPLOAD_TO_S3', 'false').lower() == 'true':
         logging.info("Starting S3 upload process...")
-        aws_s3_configuration = config['AWS_S3_CONFIGURATION']
-        upload_to_s3(aws_s3_configuration, backup_files_created)
+        upload_to_s3(backup_files_created)
     else:
         logging.info("Disabled S3 upload process...")
         email_body_parts.append(f"Disabled S3 upload process...")
@@ -298,8 +277,7 @@ if __name__ == "__main__":
 
     # Send HTML email to multiple recipients
     # Final stage: send the mail
-    do_send_email = config['ACTIONS']['DO_SEND_EMAIL']
-    if do_send_email :
+    if os.getenv('SEND_EMAIL', 'false').lower() == 'true':
         email_body_parts.append(f"<br><br>Backup completed for folloing files:")
         email_body_parts.append(f"-------------------------------------------------------------")
 
@@ -308,18 +286,12 @@ if __name__ == "__main__":
 
         email_body_parts.append(f"<br><br><br>Starting sending mail")
         email_body ="<br>".join(email_body_parts) # for plain text
-        email_config = config['EMAIL_CONFIGURATION']
-        if email_config["EMAIL_RECEIVERS"]:
+        EMAIL_RECEIVERS = os.getenv('EMAIL_RECEIVERS')
+        if EMAIL_RECEIVERS:
             # Split the string by the comma delimiter
-            email_receivers_array = [item.strip() for item in email_config["EMAIL_RECEIVERS"].split(',')]
-            if email_config["EMAIL_SUBJECT"] and email_config["EMAIL_SUBJECT"].strip(): # Check if not None and not just whitespace
-                email_subject = email_config["EMAIL_SUBJECT"].strip()
-            else:
-                email_subject = "Database backup process"
+            email_receivers_array = [item.strip() for item in EMAIL_RECEIVERS.split(',')]
 
             send_email(
-                email_config["EMAIL_HOST"], email_config["EMAIL_PORT"], email_config["EMAIL_USER"], email_config["EMAIL_PASS"],
-                subject=email_subject,
+                subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
                 body=email_body,
                 to_emails=email_receivers_array,
                 html=True
@@ -2,9 +2,16 @@ import os
 import smtplib
 from email.mime.text import MIMEText
 from email.mime.multipart import MIMEMultipart
+from dotenv import load_dotenv
+
+load_dotenv()
+
+EMAIL_HOST = os.getenv("EMAIL_HOST", "smtp.gmail.com")
+EMAIL_PORT = int(os.getenv("EMAIL_PORT", 587))
+EMAIL_USER = os.getenv("EMAIL_USER")
+EMAIL_PASS = os.getenv("EMAIL_PASS")
 
-def send_email(EMAIL_HOST,EMAIL_PORT, EMAIL_USER,EMAIL_PASS, subject, body, to_emails, html=False):
+def send_email(subject, body, to_emails, html=False):
     if isinstance(to_emails, str):
         to_emails = [to_emails]
 
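After this change the SMTP settings live inside the email module itself, so callers pass only the message. A usage sketch (not part of the commit) matching the new signature, with the recipients from the .env above:

    send_email(
        subject=os.getenv("EMAIL_SUBJECT", "Database backup process"),
        body="<br>".join(email_body_parts),
        to_emails=["vikas@marcoaiot.com", "hr@marcoaiot.com"],
        html=True
    )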
image-thumbnail/.env (new file, 10 lines)
@@ -0,0 +1,10 @@
+## Database Configuration
+DB_HOST=147.93.98.152
+DB_USER=devuser
+DB_PASSWORD=xxxxxxx
+
+## AWS S3 Configuration
+ACCESS_KEY=xxxxxxx
+SECRET_KEY=xxxxxx
+S3_BUCKET_NAME=xxxxxxxx
+S3_REGION=us-east-1
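image-processor.py below ships with placeholder DB_CONFIG and AWS_CONFIG literals rather than reading this .env. A sketch (hypothetical wiring, not in the commit) of how the file's values could feed those dicts via python-dotenv:

    import os
    from dotenv import load_dotenv

    load_dotenv()

    DB_CONFIG = {
        'host': os.getenv('DB_HOST'),
        'user': os.getenv('DB_USER'),
        'password': os.getenv('DB_PASSWORD'),
        'database': 'your_mysql_database'  # still a placeholder; this .env has no DB name
    }

    AWS_CONFIG = {
        'aws_access_key_id': os.getenv('ACCESS_KEY'),
        'aws_secret_access_key': os.getenv('SECRET_KEY'),
        'region_name': os.getenv('S3_REGION')
    }

    S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')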
image-thumbnail/image-processor.py (new file, 236 lines)
@@ -0,0 +1,236 @@
+import mysql.connector
+import os
+import base64
+import logging # Import logging
+from datetime import datetime
+from dotenv import load_dotenv # Import load_dotenv
+
+# Import the utility functions
+from s3_utils import get_s3_client, download_image_from_s3, upload_object_to_s3, generate_thumbnail_s3_key
+from image_processing_utils import create_thumbnail_from_bytes, create_thumbnail_from_base64
+
+# --- Logging Configuration ---
+# Create a logger object
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO) # Set the default logging level
+
+# Create handlers
+# Console handler
+console_handler = logging.StreamHandler()
+console_handler.setLevel(logging.INFO) # Console shows INFO and above
+
+# File handler
+log_file_name = f"image_processor_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
+file_handler = logging.FileHandler(log_file_name)
+file_handler.setLevel(logging.DEBUG) # File captures all DEBUG messages and above
+
+# Create formatters and add them to handlers
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+console_handler.setFormatter(formatter)
+file_handler.setFormatter(formatter)
+
+# Add handlers to the logger
+logger.addHandler(console_handler)
+logger.addHandler(file_handler)
+
+logger.info(f"Logging initialized. Full logs available in '{log_file_name}'")
+
+
+# --- Configuration ---
+DB_CONFIG = {
+    'host': 'your_mysql_host',
+    'user': 'your_mysql_user',
+    'password': 'your_mysql_password',
+    'database': 'your_mysql_database'
+}
+
+AWS_CONFIG = {
+    'aws_access_key_id': 'YOUR_AWS_ACCESS_KEY_ID',
+    'aws_secret_access_key': 'YOUR_AWS_SECRET_ACCESS_KEY',
+    'region_name': 'your_aws_region' # e.g., 'us-east-1'
+}
+
+S3_BUCKET_NAME = 'your-s3-bucket-name'
+THUMBNAIL_PREFIX = 'thumbnails/' # Folder where thumbnails will be stored in S3
+THUMBNAIL_SIZE = (128, 128) # Width, Height for thumbnails
+THUMBNAIL_OUTPUT_FORMAT = 'JPEG' # Output format for thumbnails
+
+# --- Helper to connect to DB and S3 ---
+def get_db_connection():
+    try:
+        mydb = mysql.connector.connect(**DB_CONFIG)
+        logger.info("Successfully connected to MySQL database.")
+        return mydb
+    except mysql.connector.Error as err:
+        logger.error(f"Error connecting to MySQL: {err}")
+        return None
+
+def get_s3_connection():
+    s3_client = get_s3_client(**AWS_CONFIG)
+    if s3_client:
+        logger.info("Successfully obtained S3 client.")
+    else:
+        logger.error("Failed to obtain S3 client.")
+    return s3_client
+
+# --- Core Processing Logic ---
+def process_single_image(mycursor, s3_client, image_id, original_image_source_key, base64_image_data=None):
+    """
+    Handles the creation and upload of a thumbnail for a single image,
+    and updates the database.
+    """
+    logger.info(f"Processing image ID: {image_id}")
+    image_data_bytes = None
+    source_type = "unknown"
+
+    if original_image_source_key:
+        source_type = "S3"
+        logger.debug(f"Attempting to download image from S3: '{original_image_source_key}'")
+        image_data_bytes = download_image_from_s3(s3_client, S3_BUCKET_NAME, original_image_source_key)
+    elif base64_image_data:
+        source_type = "Base64"
+        logger.debug(f"Attempting to decode base64 image data for image ID: {image_id}")
+        image_data_bytes = base64.b64decode(base64_image_data) # We decode here, then pass bytes to create_thumbnail_from_bytes
+    else:
+        logger.warning(f"No valid image source (S3 link or base64) for image ID {image_id}. Skipping.")
+        return
+
+    if image_data_bytes:
+        logger.debug(f"Image data ({source_type}) obtained for ID: {image_id}. Creating thumbnail...")
+        thumbnail_bytes = create_thumbnail_from_bytes(
+            image_data_bytes,
+            size=THUMBNAIL_SIZE,
+            output_format=THUMBNAIL_OUTPUT_FORMAT
+        )
+
+        if thumbnail_bytes:
+            # Determine the key for the thumbnail
+            if original_image_source_key:
+                thumbnail_s3_key = generate_thumbnail_s3_key(original_image_source_key, THUMBNAIL_PREFIX)
+            else:
+                # If only base64 data, we need a way to derive a unique key.
+                # A simple approach: use the ID and a timestamp or hash.
+                # In a real scenario, you might want to store the original filename
+                # or a hash of the base64 data to avoid collisions.
+                thumbnail_s3_key = f"{THUMBNAIL_PREFIX}base64_image_{image_id}.{THUMBNAIL_OUTPUT_FORMAT.lower()}"
+                logger.debug(f"Generated base64 thumbnail S3 key: '{thumbnail_s3_key}' for ID: {image_id}")
+
+            content_type = f'image/{THUMBNAIL_OUTPUT_FORMAT.lower()}' # e.g., image/jpeg
+            if upload_object_to_s3(s3_client, S3_BUCKET_NAME, thumbnail_s3_key, thumbnail_bytes, content_type=content_type):
+                # Update database with new thumbnailLink
+                update_sql = "UPDATE images SET thumbnailLink = %s WHERE id = %s"
+                try:
+                    mycursor.execute(update_sql, (thumbnail_s3_key, image_id))
+                    mydb.commit()
+                    logger.info(f"Successfully updated database for image ID {image_id} with thumbnail: '{thumbnail_s3_key}'")
+                except mysql.connector.Error as err:
+                    logger.error(f"Failed to update database for image ID {image_id}: {err}")
+                    mydb.rollback() # Rollback if update fails
+            else:
+                logger.error(f"Failed to upload thumbnail for image ID {image_id} to S3.")
+        else:
+            logger.error(f"Failed to create thumbnail bytes for image ID {image_id}.")
+    else:
+        logger.error(f"No image data retrieved from {source_type} for image ID {image_id}. Skipping thumbnail creation.")
+
+def process_images_from_s3_link():
+    """
+    Processes images where `imageLink` is present and `thumbnailLink` is NULL.
+    """
+    logger.info("Starting processing of images from S3 links...")
+    mydb = get_db_connection()
+    if not mydb:
+        logger.critical("Could not establish database connection. Aborting S3 link processing.")
+        return
+
+    s3 = get_s3_connection()
+    if not s3:
+        logger.critical("Could not establish S3 connection. Aborting S3 link processing.")
+        mydb.close()
+        return
+
+    mycursor = mydb.cursor()
+
+    try:
+        sql = "SELECT id, imageLink FROM images WHERE imageLink IS NOT NULL AND thumbnailLink IS NULL"
+        mycursor.execute(sql)
+        records = mycursor.fetchall()
+
+        if not records:
+            logger.info("No images found with S3 link and null thumbnailLink. Skipping this step.")
+            return
+
+        logger.info(f"Found {len(records)} images with S3 link and null thumbnailLink. Beginning processing...")
+
+        for image_id, image_link in records:
+            logger.info(f"Initiating processing for image ID: {image_id}, S3 Link: '{image_link}'")
+            process_single_image(mycursor, s3, image_id, image_link)
+
+    except mysql.connector.Error as err:
+        logger.error(f"MySQL Error during S3 link processing: {err}")
+    except Exception as e:
+        logger.error(f"An unexpected error occurred during S3 link processing: {e}", exc_info=True) # exc_info=True to log traceback
+    finally:
+        if 'mycursor' in locals() and mycursor:
+            mycursor.close()
+        if mydb.is_connected():
+            mydb.close()
+        logger.info("MySQL connection closed after S3 link processing.")
+
+def process_images_from_base64_string():
+    """
+    Processes images where `imageDataBase64` is present and `thumbnailLink` is NULL.
+    """
+    logger.info("Starting processing of images from Base64 strings...")
+    mydb = get_db_connection()
+    if not mydb:
+        logger.critical("Could not establish database connection. Aborting Base64 processing.")
+        return
+
+    s3 = get_s3_connection()
+    if not s3:
+        logger.critical("Could not establish S3 connection. Aborting Base64 processing.")
+        mydb.close()
+        return
+
+    mycursor = mydb.cursor()
+
+    try:
+        sql = "SELECT id, imageLink, imageDataBase64 FROM images WHERE imageDataBase64 IS NOT NULL AND thumbnailLink IS NULL"
+        mycursor.execute(sql)
+        records = mycursor.fetchall()
+
+        if not records:
+            logger.info("No images found with base64 data and null thumbnailLink. Skipping this step.")
+            return
+
+        logger.info(f"Found {len(records)} images with base64 data and null thumbnailLink. Beginning processing...")
+
+        for image_id, image_link, base64_data in records:
+            logger.info(f"Initiating processing for image ID: {image_id} from Base64 data.")
+            if base64_data:
+                process_single_image(mycursor, s3, image_id, None, base64_data)
+            else:
+                logger.warning(f"Base64 data for image ID {image_id} is unexpectedly NULL after query. Skipping.")
+
+    except mysql.connector.Error as err:
+        logger.error(f"MySQL Error during Base64 processing: {err}")
+    except Exception as e:
+        logger.error(f"An unexpected error occurred during Base64 processing: {e}", exc_info=True) # exc_info=True to log traceback
+    finally:
+        if 'mycursor' in locals() and mycursor:
+            mycursor.close()
+        if mydb.is_connected():
+            mydb.close()
+        logger.info("MySQL connection closed after Base64 processing.")
+
+if __name__ == "__main__":
+    logger.info("Application started.")
+    # Process images from S3 links first
+    process_images_from_s3_link()
+    logger.info("--- Finished processing images from S3 links ---")
+
+    # Then process images from base64 strings
+    process_images_from_base64_string()
+    logger.info("--- Finished processing images from base64 strings ---")
+    logger.info("All image thumbnail processing complete. Application exiting.")
image-thumbnail/image_processing_utils.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+from PIL import Image
+import io
+import base64
+import logging # Import logging
+
+# Get a logger instance for this module
+logger = logging.getLogger(__name__)
+
+def create_thumbnail_from_bytes(image_data_bytes, size=(128, 128), output_format='JPEG'):
+    """
+    Creates a thumbnail from image data (bytes).
+    Returns the thumbnail as bytes.
+    """
+    try:
+        img = Image.open(io.BytesIO(image_data_bytes))
+        img.thumbnail(size)
+
+        thumbnail_buffer = io.BytesIO()
+        # Ensure the output format is supported by Pillow and the original image
+        if img.mode == 'RGBA' and output_format == 'JPEG':
+            # Convert RGBA to RGB for JPEG output to avoid errors with alpha channel
+            img = img.convert('RGB')
+            logger.debug("Converted RGBA image to RGB for JPEG output.")
+        img.save(thumbnail_buffer, format=output_format)
+        thumbnail_buffer.seek(0)
+        logger.info(f"Thumbnail created from bytes. Size: {size}, Format: {output_format}")
+        return thumbnail_buffer.getvalue()
+    except Exception as e:
+        logger.error(f"Error creating thumbnail from bytes: {e}")
+        return None
+
+def create_thumbnail_from_base64(base64_string, size=(128, 128), output_format='JPEG'):
+    """
+    Decodes a base64 image string, creates a thumbnail, and returns the thumbnail as bytes.
+    """
+    try:
+        decoded_image_data = base64.b64decode(base64_string)
+        logger.debug("Base64 string decoded successfully.")
+        return create_thumbnail_from_bytes(decoded_image_data, size, output_format)
+    except base64.binascii.Error as e:
+        logger.error(f"Invalid base64 string provided: {e}")
+        return None
+    except Exception as e:
+        logger.error(f"Error creating thumbnail from base64 string: {e}")
+        return None
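A usage sketch (not part of the commit) for the byte-level helper above, thumbnailing a local file; 'photo.png' is a hypothetical input:

    with open('photo.png', 'rb') as f:  # hypothetical source image
        thumb = create_thumbnail_from_bytes(f.read(), size=(128, 128), output_format='JPEG')
    if thumb:
        with open('photo_thumb.jpg', 'wb') as out:
            out.write(thumb)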
image-thumbnail/s3_utils.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+import boto3
+import os
+import logging # Import logging
+
+# Get a logger instance for this module
+logger = logging.getLogger(__name__)
+
+def get_s3_client(aws_access_key_id, aws_secret_access_key, region_name):
+    """
+    Initializes and returns an S3 client.
+    """
+    try:
+        s3_client = boto3.client(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            region_name=region_name
+        )
+        logger.debug("S3 client initialized successfully.")
+        return s3_client
+    except Exception as e:
+        logger.error(f"Failed to initialize S3 client: {e}")
+        return None
+
+def download_image_from_s3(s3_client, bucket_name, object_key):
+    """
+    Downloads an image from S3 and returns its binary data.
+    """
+    try:
+        response = s3_client.get_object(Bucket=bucket_name, Key=object_key)
+        image_data = response['Body'].read()
+        logger.info(f"Downloaded '{object_key}' from S3 bucket '{bucket_name}'.")
+        return image_data
+    except s3_client.exceptions.NoSuchKey:
+        logger.warning(f"S3 object '{object_key}' not found in bucket '{bucket_name}'.")
+        return None
+    except Exception as e:
+        logger.error(f"Error downloading '{object_key}' from S3 bucket '{bucket_name}': {e}")
+        return None
+
+def upload_object_to_s3(s3_client, bucket_name, object_key, data_bytes, content_type='application/octet-stream'):
+    """
+    Uploads binary data to S3.
+    """
+    try:
+        s3_client.put_object(
+            Bucket=bucket_name,
+            Key=object_key,
+            Body=data_bytes,
+            ContentType=content_type
+        )
+        logger.info(f"Uploaded object to s3://{bucket_name}/{object_key} with content type '{content_type}'.")
+        return True
+    except Exception as e:
+        logger.error(f"Error uploading '{object_key}' to S3 bucket '{bucket_name}': {e}")
+        return False
+
+def generate_thumbnail_s3_key(original_s3_key, thumbnail_prefix):
+    """
+    Generates a suitable S3 key for a thumbnail based on the original key.
+    E.g., original/path/image.jpg -> thumbnails/original/path/image.jpg
+    """
+    thumbnail_name = os.path.basename(original_s3_key)
+    thumbnail_directory = os.path.dirname(original_s3_key)
+
+    if thumbnail_directory:
+        thumbnail_key = f"{thumbnail_prefix}{thumbnail_directory}/{thumbnail_name}"
+    else:
+        thumbnail_key = f"{thumbnail_prefix}{thumbnail_name}"
+    logger.debug(f"Generated thumbnail S3 key: '{thumbnail_key}' from original: '{original_s3_key}'")
+    return thumbnail_key
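A quick illustration (not part of the commit) of what generate_thumbnail_s3_key produces for keys with and without a directory component:

    generate_thumbnail_s3_key('uploads/2024/photo.jpg', 'thumbnails/')
    # -> 'thumbnails/uploads/2024/photo.jpg'
    generate_thumbnail_s3_key('photo.jpg', 'thumbnails/')
    # -> 'thumbnails/photo.jpg'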
@@ -1,26 +0,0 @@
-{
-    "SMPT": {
-        "SMPTSERVER": "smtp.gmail.com",
-        "PORT": 587,
-        "SENDER_EMAIL": "marcoioitsoft@gmail.com",
-        "SENDER_PASSWORD": "qrtq wfuj hwpp fhqr",
-        "RECIPIENT_EMAILS": "ashutosh.nehete@marcoaiot.com,vikas@marcoaiot.com,amol@marcosolutions.co.in,vinod@marcofire.in,umesh@marcoaiot.com"
-    },
-    "API": {
-        "BASE_URL": "https://stageapi.marcoaiot.com/api",
-        "USERNAME": "admin@marcoaiot.com",
-        "PASSWORD": "User@123",
-        "TENANTID": "b3466e83-7e11-464c-b93a-daf047838b26"
-    },
-    "WEB":{
-        "BASE_URL": "https://stageapp.marcoaiot.com"
-    },
-    "MONGODB":{
-        "MONGO_CONNECTION_STRING": "mongodb://localhost:27017",
-        "DATABASE_NAME": "MarcoBMS_Caches",
-        "COLLECTION_NAME": "ProjectReportMail"
-    },
-    "UNIQUE_IDENTIFIER":{
-        "PROJECT_IDS":"2618eb89-2823-11f0-9d9e-bc241163f504,08dda31f-25c6-4ad7-8252-14a64ba96fce,08dda508-e7df-4e90-86a5-4c2d30e32069,08dda7db-1f08-4db4-863a-c494201a1156,08dda8cd-0522-47d8-82ef-407276490b68,08dda8cd-8dc2-4e0a-8638-f16f63e16afe,2618f2ef-2823-11f0-9d9e-bc241163f504"
-    }
-}
@@ -1,619 +0,0 @@
-<!DOCTYPE html>
-<html>
-
-<head>
-    <meta charset="UTF-8">
-    <title>Daily Progress Report</title>
-    <style>
-        body {
-            margin: 0;
-            padding: 0;
-            font-family: Arial, sans-serif;
-            background: #f5f5f5;
-            color: #333;
-        }
-
-        .container {
-            max-width: 1100px;
-            margin: 20px auto;
-            background: #fff;
-            border-radius: 8px;
-            overflow: hidden;
-            box-shadow: 0 2px 6px rgba(0, 0, 0, 0.1);
-        }
-
-        .header {
-            background: #49bf3c;
-            color: #fff;
-            padding: 20px;
-            display: flex;
-            justify-content: space-between;
-            align-items: center;
-            flex-wrap: wrap;
-        }
-
-        .header h1 {
-            font-size: 22px;
-            margin: 0;
-        }
-
-        .header .project-info {
-            font-size: 14px;
-            text-align: right;
-        }
-
-        .status-note {
-            font-size: 12px;
-            color: #555;
-            padding: 15px 20px 0 20px;
-        }
-
-        .status-cards {
-            display: flex;
-            justify-content: space-between;
-            gap: 15px;
-            padding: 20px;
-            flex-wrap: wrap;
-        }
-
-        .card {
-            display: flex;
-            flex-direction: column;
-            flex: 1;
-            min-width: 200px;
-            border: 1px solid #ddd;
-            border-radius: 8px;
-            padding: 15px;
-            text-align: center;
-            background: #fff;
-            box-shadow: 0 2px 6px rgba(0, 0, 0, 0.1);
-            /* <-- added shadow */
-            transition: transform 0.2s ease, box-shadow 0.2s ease;
-            border-top: 1px solid #49bf3c;
-
-        }
-
-        .card-link {
-            position: absolute;
-            top: 8px;
-            right: 8px;
-            width: 28px;
-            height: 28px;
-            display: none;
-            align-items: center;
-            justify-content: center;
-            border-radius: 6px;
-            text-decoration: none;
-            /* arrow color */
-            background: #f5f5f5;
-            /* badge background */
-            box-shadow: 0 1px 3px rgba(0, 0, 0, .15);
-        }
-
-        .card-link-success {
-            color: #49bf3c;
-        }
-
-        .card-link-warning {
-            color: #ffc107;
-        }
-
-        .card-link-primary {
-            color: #007bff;
-        }
-
-        .card-link:hover {
-            background: #ececec;
-        }
-
-        .card-link:focus {
-            outline: 2px solid #9ca3af;
-            outline-offset: 2px;
-        }
-
-        .card-link .arrow {
-            font-size: 16px;
-            line-height: 1;
-        }
-
-        .card:hover .card-link {
-            display: inline-flex;
-            /* <— show only on this card */
-        }
-
-        .card:hover {
-            transform: translateY(-3px);
-            box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
-        }
-
-        .card h3 {
-            font-size: 14px;
-            margin: 0 0 10px 0;
-        }
-
-        .card p {
-            margin: 5px 0;
-        }
-
-        .card .value {
-            font-size: 22px;
-            font-weight: bold;
-        }
-
-        .card-title {
-            font-size: 0.9rem;
-            text-transform: uppercase;
-            font-weight: 600;
-            color: #6c757d;
-        }
-
-        .attendance {
-            color: #49bf3c;
-        }
-
-        .tasks {
-            color: #007bff;
-        }
-
-        .completion {
-            color: #28a745;
-        }
-
-        .activities {
-            padding: 20px;
-        }
-
-        .activities h2 {
-            font-size: 18px;
-            margin-bottom: 10px;
-        }
-
-        .table {
-            width: 100%;
-            border-collapse: collapse;
-            font-size: 14px;
-        }
-
-        .table th,
-        .table td {
-            border: 1px solid #ddd;
-            padding: 8px;
-            text-align: center;
-        }
-
-        .table th {
-            background: #f0f0f0;
-        }
-
-        .footer {
-            /* background: #b10000; */
-            background: #49bf3c;
-            color: #fff;
-            text-align: center;
-            padding: 15px;
-            font-size: 12px;
-        }
-
-        .footer a {
-            color: #fff;
-            margin: 0 8px;
-            text-decoration: none;
-        }
-
-        /* Responsive */
-        @media (max-width: 600px) {
-            .header {
-                flex-direction: column;
-                text-align: center;
-            }
-
-            .header .project-info {
-                text-align: center;
-                margin-top: 10px;
-            }
-
-            .status-cards {
-                flex-direction: column;
-            }
-        }
-
-        .legend {
-            margin-top: 10px;
-            display: flex;
-            justify-content: flex-start;
-            flex-wrap: wrap;
-            gap: 8px;
-            font-size: 12px;
-            color: #555;
-        }
-
-        .legend-item {
-            display: flex;
-            align-items: center;
-            gap: 4px;
-        }
-
-        .legend-color {
-            width: 10px;
-            height: 10px;
-            border-radius: 2px;
-            display: inline-block;
-        }
-
-        .legend-red {
-            background: #e63946;
-        }
-
-        .legend-blue {
-            background: #007bff;
-        }
-
-        .legend-green {
-            background: #28a745;
-        }
-
-        .legend-gray {
-            background: #ccc;
-        }
-
-        .legend-yellow {
-            background: #ffc107;
-        }
-
-
-
-        .donut {
-            --percentage: 0;
-            /* Change this per chart */
-            --danger: #e63946;
-            --primary: #007bff;
-            --warning: #ffc107;
-            --success: #28a745;
-            /* Fill color */
-            --track: #e9ecef;
-            /* Background track */
-            --size: 200px;
-            /* Default size */
-            --thickness: 20px;
-            /* Default thickness */
-
-            width: var(--size);
-            height: var(--size);
-            border-radius: 50%;
-            /* background: conic-gradient(var(--danger) calc(var(--percentage) * 1%),
-            var(--track) 0); */
-            background: conic-gradient(var(--track) 100%);
-            position: relative;
-            display: flex;
-            align-items: center;
-            justify-content: center;
-            font-family: Arial, sans-serif;
-            font-weight: bold;
-            color: #333;
-        }
-
-        .donut::before {
-            content: "";
-            position: absolute;
-            width: calc(var(--size) - var(--thickness));
-            height: calc(var(--size) - var(--thickness));
-            border-radius: 50%;
-            background: #fff;
-            /* Inner cut-out */
-        }
-
-        .donut span {
-            position: absolute;
-            font-size: calc(var(--size) / 6);
-            width:80%;
-            box-sizing:border-box;
-            display:flex;
-            gap: 4px;
-            flex-wrap: wrap;
-            justify-content:center;
-            align-items:center;
-        }
-
-        /* Variants */
-        .donut.thin {
-            --size: 80px;
-            --thickness: 12px;
-        }
-
-        .donut.medium {
-            --size: 120px;
-            --thickness: 15px;
-        }
-
-        .donut.large {
-            --size: 180px;
-            --thickness: 35px;
-        }
-
-        /* Color variants */
-        .donut-success {
-            background: conic-gradient(var(--success) calc(var(--percentage) * 1%),
-            var(--track) 0);
-        }
-
-        .donut-warning {
-            background: conic-gradient(var(--warning) calc(var(--percentage) * 1%),
-            var(--track) 0);
-        }
-
-        .donut-danger {
-            background: conic-gradient(var(--danger) calc(var(--percentage) * 1%),
-            var(--track) 0);
-        }
-
-        .donut-primary {
-            background: conic-gradient(var(--primary) calc(var(--percentage) * 1%),
-            var(--track) 0);
-        }
-
-        .values {
-            display: flex;
-            flex-wrap: nowrap;
-            gap: 8px;
-        }
-
-        .values p {
-            margin: 0;
-            white-space: nowrap;
-            overflow: hidden;
-            text-overflow: ellipsis;
-            max-width: 200px;
-        }
-    </style>
-
-</head>
-
-<body>
-    <div class="container">
-        <!-- Header -->
-        <div class="header">
-            <h1>Daily Progress Report</h1>
-            <div class="project-info">
-                <strong>Project:</strong> {{projectName}}<br>
-                <strong>Date:</strong> {{date}}
-            </div>
-        </div>
-
-        <!-- Status Note -->
-        <div class="status-note">
-            * Project Status Reported - Generated at {{timeStamp}} UTC
-        </div>
-
-        <!-- Status Cards -->
-        <div class="status-cards">
-            <div class="card">
-                <a class="card-link card-link-warning" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
-                    rel="noopener">
-                    <span class="arrow">↗</span>
-                </a>
-                <h4 class="card-title">TODAY'S ATTENDANCE</h4>
-                <div style="display:flex; flex-wrap:wrap;">
-
-                    <!-- Left Column -->
-                    <div style="width:50%; box-sizing:border-box;display:flex; justify-content:center; align-items:center;">
-                        <!-- Medium -->
-                        <div class="donut thin donut-warning" style="--percentage: {{attendancePercentage}};">
-                            <span>
-                                <p style="color:#ffc107;">{{todaysAttendances}}</p><p>/</p><p style="color:#ccc;">{{totalEmployees}}</p>
-                            </span>
-                        </div>
-                    </div>
-
-                    <!-- Right Column -->
-                    <div class="legend" style="width:50%; padding:15px; box-sizing:border-box;">
-                        <div class="legend-item"
-                            style="margin-bottom:10px;text-align: left; display:left; justify-content:left; align-items:left!important;; ">
-                            <span class="legend-color legend-yellow"></span> Today's Attendance
-                        </div>
-                        <div class="legend-item"
-                            style="margin-bottom:10px; text-align: left; display:left; justify-content:left; align-items:left!important;;">
-                            <span class="legend-color legend-gray"></span> Total Employees
-                        </div>
-
-                    </div>
-
-                </div>
-            </div>
-
-            <div class="card">
-                <a class="card-link card-link-primary" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
-                    rel="noopener">
-                    <span class="arrow">↗</span>
-                </a>
-                <h4 class="card-title">DAILY TASKS COMPLETED</h4>
-                <div style="display:flex; flex-wrap:wrap;">
-
-                    <!-- Left Column -->
-                    <div style="width:50%; box-sizing:border-box;display:flex; justify-content:center; align-items:center;">
-                        <!-- Medium -->
-                        <div class="donut thin donut-primary" style="--percentage: {{taskPercentage}};">
-                            <span >
-                                <p style="color:#007bff;">{{totalCompletedTask}}</p><p>/</p><p style="color:#ccc;">{{totalPlannedTask}}</p>
-                            </span>
-                        </div>
-
-                    </div>
-
-                    <!-- Right Column -->
-                    <div class="legend" style="width:50%; padding:15px; box-sizing:border-box;">
-                        <div class="legend-item"
-                            style="margin-bottom:10px;text-align: left; display:left; justify-content:left; align-items:left!important;; ">
-                            <span class="legend-color legend-blue"></span> Completed Work
-                        </div>
-                        <div class="legend-item"
-                            style="margin-bottom:10px; text-align: left; display:left; justify-content:left; align-items:left!important;;">
-                            <span class="legend-color legend-gray"></span> Planned Work
-                        </div>
-                    </div>
-                </div>
-                <div style="text-align: end!important;">
-                    <p style="font-size: xx-small;color: #ccc;">*Today's Total Work</p>
-                </div>
-            </div>
-
-            <div class="card">
-                <a class="card-link card-link-success" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
-                    rel="noopener">
-                    <span class="arrow">↗</span>
-                </a>
-                <h4 class="card-title">PROJECT COMPLETION STATUS</h4>
-                <div style="display:flex; flex-wrap:wrap;">
-
-                    <!-- Left Column -->
-                    <div style="width:50%; box-sizing:border-box;display:flex; justify-content:center; align-items:center;">
-                        <!-- Medium -->
-                        <div class="donut thin donut-success" style="--percentage: {{completionStatus}};">
-                            <span>
-                                <p style="color:#28a745;">{{totalCompletedWork}}</p> <p>/</p> <p style="color:#ccc;">{{totalPlannedWork}}</p>
-                            </span>
-                        </div>
-
-                    </div>
-
-                    <!-- Right Column -->
-                    <div class="legend" style="width:50%; padding:15px; box-sizing:border-box;">
-                        <div class="legend-item"
-                            style="margin-bottom:10px;text-align: left; display:left; justify-content:left; align-items:left!important;; ">
-                            <span class="legend-color legend-green"></span> Completed Work
-                        </div>
-                        <div class="legend-item"
-                            style="margin-bottom:10px; text-align: left; display:left; justify-content:left; align-items:left!important;;">
-                            <span class="legend-color legend-gray"></span> Planned Work
-                        </div>
-                    </div>
-                </div>
-                <div style="text-align: end!important;">
-                    <p style="font-size: xx-small;color: #ccc;">*Project's Total Work</p>
-                </div>
-            </div>
-
-            <div class="card">
-                <a class="card-link" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
-                    rel="noopener">
-                    <span class="arrow">↗</span>
-                </a>
-                <div>
-                    <h4 class="card-title">Pending Attendance</h4>
-                </div>
|
||||||
<table style="width: 100%;">
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">Regularization Pending</td>
|
|
||||||
<td style="text-align: right;">{{regularizationPending}}</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">Checkout Pending</td>
|
|
||||||
<td style="text-align: right;">{{checkoutPending}}</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="card">
|
|
||||||
<a class="card-link" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
|
|
||||||
rel="noopener">
|
|
||||||
<span class="arrow">↗</span>
|
|
||||||
</a>
|
|
||||||
<div>
|
|
||||||
<h4 class="card-title">Activity Report Pending</h4>
|
|
||||||
</div>
|
|
||||||
<table style="width: 100%;">
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">Total Pending Tasks</td>
|
|
||||||
<td style="text-align: right;">{{reportPending}}</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">Today's Assigned Tasks</td>
|
|
||||||
<td style="text-align: right;">{{todaysAssignTasks}}</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">Today's Completed Tasks</td>
|
|
||||||
<td style="text-align: right;">{{todaysCompletedTasks}}</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{% if teamOnSite and teamOnSite|length > 0 %}
|
|
||||||
<div class="card">
|
|
||||||
<a class="card-link" href={{webUrl}} aria-label="Open original website" title="Open website" target="_blank"
|
|
||||||
rel="noopener">
|
|
||||||
<span class="arrow">↗</span>
|
|
||||||
</a>
|
|
||||||
<!-- Row 1: Header -->
|
|
||||||
<div>
|
|
||||||
<h4 class="card-title">Team Strength on Site</h4>
|
|
||||||
</div>
|
|
||||||
<table style="width: 100%;">
|
|
||||||
{% for a in teamOnSite %}
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: left;">{{a.roleName}}</td>
|
|
||||||
<td style="text-align: right;">{{a.numberofEmployees}}</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<!-- Activities
|
|
||||||
{% if performedTasks and performedTasks|length > 0 %}
|
|
||||||
<div class="activities">
|
|
||||||
<h2>Activities (Tasks) Performed {{date}}</h2>
|
|
||||||
<table class="table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Activity/Location</th>
|
|
||||||
<th>Assigned Today/Pending</th>
|
|
||||||
<th>Completed Today</th>
|
|
||||||
<th>Date</th>
|
|
||||||
<th>Team Members</th>
|
|
||||||
<th>Comment</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{% for a in performedTasks %}
|
|
||||||
<tr>
|
|
||||||
<td>{{a.activity}} {{a.location}}</td>
|
|
||||||
<td>{{a.assignedToday}} / {{a.pending}}</td>
|
|
||||||
<td>{{a.completedToday}}</td>
|
|
||||||
<td>{{date}}</td>
|
|
||||||
<td></td>
|
|
||||||
<td>{{a.comment}}</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
{% endif %} -->
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- Footer -->
|
|
||||||
<div class="footer" style="display:flex; flex-wrap:wrap;">
|
|
||||||
<div style="width: 50%;text-align: left;">
|
|
||||||
Contact Us: contact[at]marcoaiot.com<br>
|
|
||||||
Marco AIoT technologies Pvt. Ltd. ©2025 All Rights Reserved
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div style="width: 50%; text-align: right;">
|
|
||||||
<!-- <a href="#">Instagram</a> | -->
|
|
||||||
<a href="https://www.linkedin.com/company/marco-aiot">Linkedin</a> |
|
|
||||||
|
|
||||||
<a href="https://x.com/marcoaiot"><img src="https://cdn.marcoaiot.com/icons/brands/twitter.png" style="height: 15px;" /></a> |
|
|
||||||
|
|
||||||
<a href="#"><img src="https://cdn.marcoaiot.com/icons/brands/facebook.png" style="height: 15px;" /></a> |
|
|
||||||
<a href="#"><img src="https://cdn.marcoaiot.com/icons/brands/instagram.png" style="height: 15px;" /></a>
|
|
||||||
<!-- <a href="#"><img src="https://cdn.marcoaiot.com/icons/brands/youtube.png" style="height: 15px;" /></a> | <a
|
|
||||||
href="#">LinkedIn</a> |
|
|
||||||
<a href="#">YouTube</a> -->
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<div style="text-align: center;width: 100%;background-color: #fff;margin:10px;margin-bottom: 30px!important;font-size: small;color: #6c757d ;">
|
|
||||||
You have received this email because it contains important information about your {{websiteName}} Account account.
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
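Note: the {{...}} placeholders and {% if %}/{% for %} blocks above are Jinja2 syntax; the DPR Python script later in this diff renders this template with a context dict. A minimal sketch of that rendering step (all context values here are made-up placeholders, not real report data):

    from pathlib import Path
    from jinja2 import Environment, FileSystemLoader, select_autoescape

    # Load dpr.html from the script's directory, mirroring render_template_from_file below
    env = Environment(
        loader=FileSystemLoader(str(Path(__file__).parent)),
        autoescape=select_autoescape(["html", "xml"]),
    )

    # Illustrative context only -- the real values come from the project report API
    context = {
        "projectName": "Demo Project",
        "date": "01-Aug-2025",
        "timeStamp": "06:00",
        "webUrl": "https://example.com/auth/login",
        "attendancePercentage": 75,
        "todaysAttendances": 9,
        "totalEmployees": 12,
        "taskPercentage": 50,
        "totalCompletedTask": "5",
        "totalPlannedTask": "10",
        "completionStatus": 42.5,
        "totalCompletedWork": "4.2K",
        "totalPlannedWork": "10K",
        "regularizationPending": 1,
        "checkoutPending": 2,
        "reportPending": 3,
        "todaysAssignTasks": 6,
        "todaysCompletedTasks": 5,
        "teamOnSite": [{"roleName": "Electrician", "numberofEmployees": 4}],
        "performedTasks": [],
        "performedAttendance": [],
        "websiteName": "OnFieldWork.com",
    }

    html = env.get_template("dpr.html").render(**context)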
@@ -1,324 +0,0 @@

import json
import smtplib
import os
import datetime
import requests
import pandas as pd
from email.message import EmailMessage
from openpyxl import load_workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import Font, Alignment, PatternFill, Border, Side


# --- Config and color map ---
color_map = {
    "all_ok": "4CAF50",        # Green: present
    "act4_true": "009688",     # Teal: regularization accepted
    "act1_nullOut": "FFC107",  # Amber: check-out pending
    "act2": "2196F3",          # Blue: regularization pending
    "act5": "B71C1C",          # Dark red: regularization rejected
    "all_null": "F6635C",      # Red: absent
    "sunday": "FF0000"
}

# Add Legend sheet to the workbook
legend_data = {
    "Color Description": [
        "Present (All Ok)",
        "Regularization Accepted",
        "Check-out Pending",
        "Regularization Pending",
        "Regularization Rejected",
        "Absent",
        "Sundays"
    ],
    "Hex Color": [
        color_map["all_ok"],
        color_map["act4_true"],
        color_map["act1_nullOut"],
        color_map["act2"],
        color_map["act5"],
        color_map["all_null"],
        color_map["sunday"]
    ]
}


def login_api():
    payload = {"username": API_USERNAME, "password": API_PASSWORD}
    headers = {"Content-Type": "application/json"}
    try:
        response = requests.post(f"{BASE_URL}/auth/login", json=payload, headers=headers)
        response.raise_for_status()
        data = response.json()["data"]
        jwt = data["token"]
        print("API login successful.")
        return jwt
    except Exception as e:
        print(f"Login API error: {e}")
        return None


def select_tenant(jwt):
    headers = {"Authorization": f"Bearer {jwt}", "Content-Type": "application/json"}
    try:
        response = requests.post(f"{BASE_URL}/auth/select-tenant/{API_TENANT}", headers=headers)
        response.raise_for_status()
        data = response.json()["data"]
        jwt = data["token"]
        print("Tenant selected successfully.")
        return jwt
    except Exception as e:
        print(f"Select tenant error: {e}")
        return None


def attendance_report(jwt, is_current_month):
    headers = {"Authorization": f"Bearer {jwt}", "Content-Type": "application/json"}
    response = requests.get(f"{BASE_URL}/report/report-attendance?isCurrentMonth={is_current_month}", headers=headers)
    response.raise_for_status()
    data = response.json()
    projects = data.get('data', [])

    # Get current date and time
    today = datetime.datetime.now()

    if not is_current_month:
        first_day_current_month = today.replace(day=1)
        last_month_last_day = first_day_current_month - datetime.timedelta(days=1)
        month_name = last_month_last_day.strftime("%B")
        year = last_month_last_day.year
    else:
        month_name = today.strftime("%B")
        year = today.year

    excel_file = f"{month_name}-{year}_attendance_report.xlsx"
    writer = pd.ExcelWriter(excel_file, engine="openpyxl")

    # Border style
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)

    # Write the Legend sheet once, before the per-project sheets
    # (writing it inside the project loop would try to recreate the same sheet)
    legend_df = pd.DataFrame(legend_data)
    legend_df.to_excel(writer, sheet_name="Legend", index=False)

    # Access the Legend sheet to color the sample cells
    ws_legend = writer.book["Legend"]

    # Data starts from row 2 (header in row 1)
    for row_idx in range(2, 2 + len(legend_data["Color Description"])):
        # Color sample cell: column B (2nd column)
        cell = ws_legend.cell(row=row_idx, column=2)
        hex_color = cell.value
        fill = PatternFill(start_color=hex_color, fill_type="solid")
        cell.fill = fill

        # Optional: bold and left-align the description column
        desc_cell = ws_legend.cell(row=row_idx, column=1)
        desc_cell.font = Font(bold=True)
        desc_cell.alignment = Alignment(horizontal="left", vertical="center")

    # Adjust headers and column widths for visual clarity
    for col in [1, 2]:
        ws_legend.column_dimensions[get_column_letter(col)].width = 30

    ws_legend["A1"].font = Font(bold=True)
    ws_legend["B1"].font = Font(bold=True)
    ws_legend["A1"].alignment = ws_legend["B1"].alignment = Alignment(horizontal="center")

    for proj_idx, project in enumerate(projects):
        project_name = project.get('projectName', 'UnknownProject')[:31]  # Sheet name max 31 chars
        attendances = project.get('projectAttendance', [])

        # Collect all unique dates in this project
        all_dates = set()
        for user in attendances:
            for att in user.get('attendances', []):
                all_dates.add(att.get('attendanceDate', '').split('T')[0])
        all_dates = sorted(all_dates)

        # Prepare rows
        rows = []
        for user in attendances:
            row = {'Name': f"{user.get('firstName', '')} {user.get('lastName', '')}".strip()}
            date_map = {a.get('attendanceDate', '').split('T')[0]: a for a in user.get('attendances', [])}
            for date in all_dates:
                att = date_map.get(date, {})
                # Also store the fields needed for coloring below
                row[f"{date}_checkin"] = ''
                row[f"{date}_checkout"] = ''

                if att.get('checkIn'):
                    # Extract time part only, e.g. "09:30:00"
                    row[f"{date}_checkin"] = att.get('checkIn').split('T')[1]

                if att.get('checkOut'):
                    # Extract time part only
                    row[f"{date}_checkout"] = att.get('checkOut').split('T')[1]
                row[f"{date}_activity"] = att.get('activity', None)
                row[f"{date}_isapproved"] = att.get('isApproved', None)
            row['CheckInCheckOutDone'] = user.get('checkInCheckOutDone', 0)
            row['CheckOutPending'] = user.get('checkOutPending', 0)
            row['CheckInDone'] = user.get('checkInDone', 0)
            row['AbsentAttendance'] = user.get('absentAttendance', 0)
            row['RejectedRegularize'] = user.get('rejectedRegularize', 0)
            rows.append(row)

        # Columns: Name, check-in/check-out pairs per date, then summary columns
        columns = ['Name']
        for date in all_dates:
            columns.extend([f"{date}_checkin", f"{date}_checkout"])
        columns.extend(['CheckInCheckOutDone', 'CheckInDone', 'CheckOutPending', 'AbsentAttendance', 'RejectedRegularize'])

        df = pd.DataFrame(rows, columns=columns)

        # Write to sheet
        df.to_excel(writer, sheet_name=project_name, index=False, startrow=2)

        # Post-process sheet using openpyxl
        wb = writer.book
        ws = wb[project_name]

        # Merged header with date and subheaders
        ws["A3"].value = "Name"
        ws["A3"].font = Font(bold=True)
        ws["A3"].alignment = Alignment(horizontal="center", vertical="center")

        col = 2
        for date in all_dates:
            col_letter_1 = get_column_letter(col)
            col_letter_2 = get_column_letter(col + 1)
            ws.merge_cells(f"{col_letter_1}2:{col_letter_2}2")
            ws[f"{col_letter_1}2"].value = pd.to_datetime(date).strftime("%d-%m-%Y")
            ws[f"{col_letter_1}2"].font = Font(bold=True)
            ws[f"{col_letter_1}2"].alignment = Alignment(horizontal="center", vertical="center")
            ws[f"{col_letter_1}3"].value = "Check-in"
            ws[f"{col_letter_2}3"].value = "Check-out"
            ws[f"{col_letter_1}3"].alignment = ws[f"{col_letter_2}3"].alignment = Alignment(horizontal="center")
            col += 2

        # Add headers for summary columns
        summary_headers = ['CheckIn-CheckOut Done', 'CheckIn Done', 'CheckOut Pending', 'Absent Attendance', 'Rejected Regularize']
        summary_start_col = col
        for i, header in enumerate(summary_headers):
            col_letter = get_column_letter(summary_start_col + i)
            ws.merge_cells(f"{col_letter}2:{col_letter}3")
            ws[f"{col_letter}2"].value = header
            ws[f"{col_letter}2"].font = Font(bold=True)
            ws[f"{col_letter}2"].alignment = Alignment(horizontal="center", vertical="center")

        # Apply borders
        max_row = ws.max_row
        max_col = ws.max_column

        for row_idx in range(3, max_row + 1):  # Start from 3 to include header rows
            for col_idx in range(1, max_col + 1):
                cell = ws.cell(row=row_idx, column=col_idx)
                cell.border = border

        # Coloring according to the rules for check-in/out pairs
        for row_idx, user_row in enumerate(rows, start=4):
            for date_idx, date in enumerate(all_dates):
                checkin_col = 2 + date_idx * 2
                checkout_col = checkin_col + 1

                activity = user_row.get(f"{date}_activity")
                isapproved = user_row.get(f"{date}_isapproved")
                c_in = user_row.get(f"{date}_checkin")
                c_out = user_row.get(f"{date}_checkout")

                # --- Sunday RED coloring ---
                dt = datetime.datetime.strptime(date, "%Y-%m-%d")
                if dt.weekday() == 6:  # Sunday
                    fill = PatternFill(start_color="FF0000", fill_type="solid")
                    ws.cell(row=row_idx, column=checkin_col).fill = fill
                    ws.cell(row=row_idx, column=checkout_col).fill = fill
                    continue  # Skip further coloring for this date

                fill_color = None
                if not c_in and not c_out:
                    fill_color = color_map["all_null"]
                elif c_in and c_out and activity == 4 and not bool(isapproved):
                    fill_color = color_map["all_ok"]
                elif activity == 4 and bool(isapproved):
                    fill_color = color_map["act4_true"]
                elif activity == 1 and not c_out:
                    fill_color = color_map["act1_nullOut"]
                elif activity == 2:
                    fill_color = color_map["act2"]
                elif activity == 5:
                    fill_color = color_map["act5"]

                if fill_color:
                    fill = PatternFill(start_color=fill_color, fill_type="solid")
                    ws.cell(row=row_idx, column=checkin_col).fill = fill
                    ws.cell(row=row_idx, column=checkout_col).fill = fill

    writer.close()
    print(f"Excel '{excel_file}' generated with sheets per project, summary columns and conditional coloring.")

    # --- 2. Compose the email ---
    msg = EmailMessage()
    msg["Subject"] = f"Attendance Report for {month_name}-{year}"
    msg["From"] = SENDER_EMAIL
    msg["To"] = RECIPIENT_EMAILS
    msg.set_content("Please find the attached Excel file.")

    # Add the Excel attachment
    with open(excel_file, "rb") as f:
        file_data = f.read()
        file_name = f.name
    msg.add_attachment(file_data, maintype="application", subtype="vnd.openxmlformats-officedocument.spreadsheetml.sheet", filename=file_name)

    # --- 3. Send the email ---
    with smtplib.SMTP(SMPTSERVER, PORT) as smtp:
        smtp.ehlo()
        smtp.starttls()
        smtp.ehlo()
        smtp.login(SENDER_EMAIL, SENDER_PASSWORD)
        smtp.send_message(msg)

    # After all operations (e.g., emailing) are done, delete the file
    if os.path.exists(excel_file):
        os.remove(excel_file)
        print(f"Deleted file: {excel_file}")
    else:
        print("File not found, cannot delete.")


# --- Main execution logic ---
if __name__ == "__main__":
    import sys

    # Load the real config before calling any API
    GLOBAL_CONFIG_PATH = "config.json"
    try:
        with open(GLOBAL_CONFIG_PATH, "r", encoding="utf-8") as f:
            config = json.load(f)
    except Exception as e:
        print(f"Failed to load config: {e}")
        sys.exit(1)

    API_CONFIG = config.get("API", {})
    BASE_URL = API_CONFIG.get("BASE_URL")
    API_USERNAME = API_CONFIG.get("USERNAME")
    API_PASSWORD = API_CONFIG.get("PASSWORD")
    API_TENANT = API_CONFIG.get("TENANTID")

    SMPT_CONFIG = config.get("SMPT", {})
    SMPTSERVER = SMPT_CONFIG.get("SMPTSERVER")
    PORT = SMPT_CONFIG.get("PORT")
    SENDER_EMAIL = SMPT_CONFIG.get("SENDER_EMAIL")
    SENDER_PASSWORD = SMPT_CONFIG.get("SENDER_PASSWORD")
    RECIPIENT_EMAILS = SMPT_CONFIG.get("RECIPIENT_EMAILS")

    token = login_api()
    if not token:
        print("Login failed, aborting.")
        sys.exit(1)

    jwt_token = select_tenant(token)
    if not jwt_token:
        print("Tenant selection failed, aborting.")
        sys.exit(1)

    attendance_report(jwt_token, False)
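For reference, the script above only reads the API and SMPT sections of config.json; a minimal file satisfying those reads might look like this (all values are placeholders, and the "SMPT" spelling deliberately matches the keys the code actually uses):

    {
      "API": {
        "BASE_URL": "https://api.example.com",
        "USERNAME": "user",
        "PASSWORD": "secret",
        "TENANTID": "1"
      },
      "SMPT": {
        "SMPTSERVER": "smtp.gmail.com",
        "PORT": 587,
        "SENDER_EMAIL": "sender@example.com",
        "SENDER_PASSWORD": "app-password",
        "RECIPIENT_EMAILS": "a@example.com, b@example.com"
      }
    }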
@@ -1,148 +0,0 @@

import sys
import json
import smtplib
import datetime
import requests
from pathlib import Path
from numbers import Real
from email.message import EmailMessage
from jinja2 import Environment, FileSystemLoader, select_autoescape

# Load the real config before calling any API
GLOBAL_CONFIG_PATH = "config.json"
try:
    with open(GLOBAL_CONFIG_PATH, "r", encoding="utf-8") as f:
        config = json.load(f)
except Exception as e:
    print(f"Failed to load config: {e}")
    sys.exit(1)

WEB_CONFIG = config.get("WEB", {})
WEB_BASE_URL = WEB_CONFIG.get("BASE_URL")

API_CONFIG = config.get("API", {})
BASE_URL = API_CONFIG.get("BASE_URL")

SMPT_CONFIG = config.get("SMPT", {})
SMPTSERVER = SMPT_CONFIG.get("SMPTSERVER")
PORT = SMPT_CONFIG.get("PORT")
SENDER_EMAIL = SMPT_CONFIG.get("SENDER_EMAIL")
SENDER_PASSWORD = SMPT_CONFIG.get("SENDER_PASSWORD")
RECIPIENT_EMAILS = SMPT_CONFIG.get("RECIPIENT_EMAILS")

UNIQUE_IDENTIFIER_CONFIG = config.get("UNIQUE_IDENTIFIER", {})
PROJECT_IDS = UNIQUE_IDENTIFIER_CONFIG.get("PROJECT_IDS")


def render_template_from_file(template_name, context):
    base_dir = Path(__file__).parent
    env = Environment(
        loader=FileSystemLoader(searchpath=str(base_dir)),
        autoescape=select_autoescape(["html", "xml"])
    )
    tmpl = env.get_template(template_name)
    return tmpl.render(**context)


def fetch_project_report(project_id):
    headers = {"Content-Type": "application/json"}
    try:
        response = requests.get(f"{BASE_URL}/market/get/project/report/{project_id}", headers=headers)
        response.raise_for_status()
        data = response.json()["data"]
        print(f"Project report for project \"{data['projectName']}\" fetched successfully.")
        return data
    except Exception as e:
        print(f"Fetch project report error: {e}")
        return None


def get_percentage(part, whole, decimals: int = 2):
    if not isinstance(part, Real) or not isinstance(whole, Real):
        raise TypeError("part and whole must be numbers")
    if whole == 0:
        return 0.0
    return round((part / whole) * 100.0, decimals)


def value_minization(minimize_value):
    # Shorten large values for display, e.g. 12500 -> "12.5K"
    if minimize_value >= 1000:
        minimized_value = round(minimize_value / 1000, 2)
        result = f"{minimized_value}K"
    else:
        result = f"{minimize_value}"
    return result


if __name__ == "__main__":
    template_name = "dpr.html"
    project_ids = [p.strip() for p in PROJECT_IDS.split(",") if p.strip()]

    for project_id in project_ids:

        data = fetch_project_report(project_id)
        if not data:
            continue  # Skip this project if the report could not be fetched

        if "attendancePercentage" in data:
            attendance_percentage = data["attendancePercentage"]
        else:
            attendance_percentage = get_percentage(data["todaysAttendances"], data["totalEmployees"], 2)

        if "taskPercentage" in data:
            task_percentage = data["taskPercentage"]
        else:
            task_percentage = get_percentage(data["totalCompletedTask"], data["totalPlannedTask"], 2)

        if "todaysCompletedTasks" in data:
            todays_completed_tasks = data["todaysCompletedTasks"]
        else:
            todays_completed_tasks = 0

        web_url = f"{WEB_BASE_URL}/auth/login"

        dt = datetime.datetime.strptime(data["date"], "%Y-%m-%dT%H:%M:%SZ")
        api_formatted_date = dt.strftime("%d-%b-%Y")

        total_planned_work = value_minization(data["totalPlannedWork"])
        total_completed_work = value_minization(data["totalCompletedWork"])
        total_planned_task = value_minization(data["totalPlannedTask"])
        total_completed_task = value_minization(data["totalCompletedTask"])

        context = {
            "webUrl": web_url,
            "date": api_formatted_date,
            "projectName": data["projectName"],
            "timeStamp": data["timeStamp"],
            "todaysAttendances": data["todaysAttendances"],
            "totalEmployees": data["totalEmployees"],
            "attendancePercentage": attendance_percentage,
            "taskPercentage": task_percentage,
            "regularizationPending": data["regularizationPending"],
            "checkoutPending": data["checkoutPending"],
            "totalPlannedWork": total_planned_work,
            "totalCompletedWork": total_completed_work,
            "totalPlannedTask": total_planned_task,
            "totalCompletedTask": total_completed_task,
            "completionStatus": round(data["completionStatus"], 2),
            "reportPending": data["reportPending"],
            "todaysAssignTasks": data["todaysAssignTasks"],
            "todaysCompletedTasks": todays_completed_tasks,
            "websiteName": "OnFieldWork.com",
            "teamOnSite": data["teamOnSite"],
            "performedTasks": data["performedTasks"],
            "performedAttendance": data["performedAttendance"]
        }

        # print(context)
        project_name = data["projectName"]
        html = render_template_from_file(template_name, context)

        # print(html)

        msg = EmailMessage()
        msg["Subject"] = f"DPR - {api_formatted_date} - {project_name}"
        msg["From"] = SENDER_EMAIL
        msg["To"] = RECIPIENT_EMAILS
        msg.set_content("HTML version attached as alternative.")
        msg.add_alternative(html, subtype="html")

        with smtplib.SMTP(SMPTSERVER, PORT) as smtp:
            smtp.ehlo()
            smtp.starttls()
            smtp.ehlo()
            smtp.login(SENDER_EMAIL, SENDER_PASSWORD)
            smtp.send_message(msg)
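The two helpers above drive the donut numbers in the template: get_percentage guards against division by zero, and value_minization compresses large counts for display. For instance:

    get_percentage(45, 60)      # 75.0
    get_percentage(5, 0)        # 0.0 -- avoids ZeroDivisionError
    value_minization(12500)     # "12.5K"
    value_minization(640)       # "640"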
@@ -1,319 +0,0 @@

import smtplib
import requests
import ssl
import json
from email.message import EmailMessage
import pymongo
from pymongo.errors import ConnectionFailure, OperationFailure, PyMongoError
from bson.objectid import ObjectId  # Import ObjectId for querying by _id


# --- Configuration Loading ---
def load_config_from_json(file_path="config.json"):
    """Loads configuration from a JSON file."""
    try:
        with open(file_path, "r", encoding="utf-8") as f:
            config = json.load(f)
        print(f"Configuration loaded from {file_path}")
        return config
    except FileNotFoundError:
        print(f"Error: Configuration file '{file_path}' not found. Please create it.")
        exit(1)
    except json.JSONDecodeError:
        print(f"Error: Could not decode JSON from '{file_path}'. Check file format for errors.")
        exit(1)
    except Exception as e:
        print(f"An unexpected error occurred while loading config: {e}")
        exit(1)


# Load configuration at the start
CONFIG = load_config_from_json()

# Access variables from the nested configuration
SMPT_CONFIG = CONFIG.get("SMPT", {})
SMTP_SERVER = SMPT_CONFIG.get("SMPTSERVER")
SMTP_PORT = int(SMPT_CONFIG.get("PORT")) if SMPT_CONFIG.get("PORT") is not None else None
SENDER_EMAIL = SMPT_CONFIG.get("SENDER_EMAIL")
SENDER_PASSWORD = SMPT_CONFIG.get("SENDER_PASSWORD")

API_CONFIG = CONFIG.get("API", {})
BASE_URL = API_CONFIG.get("BASE_URL")
API_USERNAME = API_CONFIG.get("USERNAME")
API_PASSWORD = API_CONFIG.get("PASSWORD")

MONGODB_CONFIG = CONFIG.get("MONGODB", {})
MONGO_CONNECTION_STRING = MONGODB_CONFIG.get("MONGO_CONNECTION_STRING")
DATABASE_NAME = MONGODB_CONFIG.get("DATABASE_NAME")
COLLECTION_NAME = MONGODB_CONFIG.get("COLLECTION_NAME")

# Validate essential configuration (more robust check)
if not (SMTP_SERVER and SMTP_PORT and SENDER_EMAIL and SENDER_PASSWORD and
        BASE_URL and API_USERNAME and API_PASSWORD and
        MONGO_CONNECTION_STRING and DATABASE_NAME and COLLECTION_NAME):
    print("Error: One or more essential configuration variables are missing or invalid in config.json.")
    # Print which specific parts are missing for easier debugging
    missing_configs = []
    if not SMTP_SERVER: missing_configs.append("SMPT.SMPTSERVER")
    if not SMTP_PORT: missing_configs.append("SMPT.PORT")
    if not SENDER_EMAIL: missing_configs.append("SMPT.SENDER_EMAIL")
    if not SENDER_PASSWORD: missing_configs.append("SMPT.SENDER_PASSWORD")
    if not BASE_URL: missing_configs.append("API.BASE_URL")
    if not API_USERNAME: missing_configs.append("API.USERNAME")
    if not API_PASSWORD: missing_configs.append("API.PASSWORD")
    if not MONGO_CONNECTION_STRING: missing_configs.append("MONGODB.MONGO_CONNECTION_STRING")
    if not DATABASE_NAME: missing_configs.append("MONGODB.DATABASE_NAME")
    if not COLLECTION_NAME: missing_configs.append("MONGODB.COLLECTION_NAME")
    print(f"Missing/Invalid: {', '.join(missing_configs)}")
    exit(1)

# Create a default SSL context once for secure connections
SSL_CONTEXT = ssl.create_default_context()


# --- API Functions ---
def login_api():
    """Logs into the API and returns the JWT token."""
    payload = {"username": API_USERNAME, "password": API_PASSWORD}
    headers = {"Content-Type": "application/json"}
    try:
        response = requests.post(f"{BASE_URL}/auth/login", json=payload, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json().get("data")  # Use .get() for safer access
        jwt = data.get("token") if data else None  # Safely get the token
        if jwt:
            print("API login successful.")
            return jwt
        else:
            print("API response missing 'data' or 'token' key.")
            return None
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error during API login: {e} - Response: {getattr(e.response, 'text', 'N/A')}")
        return None
    except requests.exceptions.Timeout:
        print("Timeout Error during API login: The server did not respond in time.")
        return None
    except requests.exceptions.ConnectionError:
        print("Connection Error during API login: Could not connect to the API server. Check URL and network.")
        return None
    except requests.exceptions.RequestException as e:
        print(f"An unexpected error occurred during API login: {e}")
        return None
    except json.JSONDecodeError:
        print(f"API response is not valid JSON for login: {response.text}")
        return None


def get_email_bodies_from_api(jwt):
    """Retrieves email body data from the API."""
    if not jwt:
        print("No JWT token available to get email bodies.")
        return []

    headers = {"Authorization": f"Bearer {jwt}", "Content-Type": "application/json"}
    try:
        response = requests.get(f"{BASE_URL}/report/report-mail", headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json().get("data")  # Safely get data
        if isinstance(data, list):
            print(f"Successfully retrieved {len(data)} email bodies from API.")
            return data
        else:
            print("API response for report-mail is missing 'data' key or it's not a list.")
            return []
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error when getting email bodies: {e} - Response: {getattr(e.response, 'text', 'N/A')}")
        return []
    except requests.exceptions.Timeout:
        print("Timeout Error when getting email bodies: The server did not respond in time.")
        return []
    except requests.exceptions.ConnectionError:
        print("Connection Error when getting email bodies: Could not connect to the API server. Check URL and network.")
        return []
    except requests.exceptions.RequestException as e:
        print(f"An error occurred when getting email bodies: {e}")
        return []
    except json.JSONDecodeError:
        print(f"API response for getting email bodies is not valid JSON: {response.text}")
        return []


# --- SMTP Functions ---
def login_smtp():
    """Logs into the SMTP server and returns the server object."""
    server = None
    try:
        server = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
        server.starttls(context=SSL_CONTEXT)
        print("TLS connection established.")

        server.login(SENDER_EMAIL, SENDER_PASSWORD)
        print("Logged in to SMTP server successfully.")
        return server
    except smtplib.SMTPAuthenticationError as e:
        print(f"SMTP Authentication Error: {e}")
        print("Please check your email address and password/app password.")
        print("For Gmail, ensure 'Less secure app access' is OFF and you're using an App Password.")
        print("For Outlook, you might need to generate an App Password.")
        if server: server.quit()
        return None
    except smtplib.SMTPServerDisconnected as e:
        print(f"SMTP Server Disconnected Error: {e}")
        print("The SMTP server unexpectedly disconnected. Check network or server status.")
        if server: server.quit()
        return None
    except smtplib.SMTPException as e:
        print(f"SMTP Error: {e}")
        print("An error occurred during the SMTP transaction.")
        if server: server.quit()
        return None
    except Exception as e:
        print(f"An unexpected error occurred during SMTP login: {e}")
        if server: server.quit()
        return None


def send_email(smtp_server, receiver_email, subject, body):
    """Sends an HTML email using a pre-logged-in SMTP server."""
    if not smtp_server:
        print("SMTP server not initialized. Cannot send email.")
        return False

    msg = EmailMessage()
    msg["From"] = SENDER_EMAIL
    msg["Subject"] = subject

    msg.add_alternative(body, subtype="html")

    if isinstance(receiver_email, str):
        receiver_email = [receiver_email]

    msg["To"] = ", ".join(receiver_email)
    recipients_for_log = ", ".join(receiver_email)

    print(f"Attempting to send email from {SENDER_EMAIL} to {recipients_for_log}...")

    try:
        smtp_server.send_message(msg, from_addr=SENDER_EMAIL, to_addrs=receiver_email)
        print(f"Email sent successfully to {recipients_for_log}!")
        return True
    except smtplib.SMTPRecipientsRefused as e:
        print(f"Recipient Refused Error: {e.recipients}")
        print(f"Email not sent to some recipients: {recipients_for_log}. Check recipient addresses.")
        return False
    except smtplib.SMTPException as e:
        print(f"SMTP Error during sending: {e}")
        print(f"Could not send email to {recipients_for_log}.")
        return False
    except Exception as e:
        print(f"An unexpected error occurred while sending email: {e}")
        print(f"Could not send email to {recipients_for_log}.")
        return False


# --- MongoDB Functions ---
def update_mongodb_document(query, new_values, upsert=False, multi=False):
    """
    Connects to MongoDB and updates documents in the configured collection.
    """
    client = None  # Initialize client to None for the finally block
    try:
        # Establish a connection to MongoDB.
        # serverSelectionTimeoutMS helps prevent long waits if the server is unreachable.
        client = pymongo.MongoClient(
            MONGO_CONNECTION_STRING, serverSelectionTimeoutMS=5000
        )

        # The ping command is cheap and does not require auth.
        # It raises an exception if the connection fails.
        client.admin.command("ping")
        print("MongoDB connection successful!")

        # Access the specified database and collection
        db = client[DATABASE_NAME]
        collection = db[COLLECTION_NAME]

        print(f"Attempting to update document(s) in '{DATABASE_NAME}.{COLLECTION_NAME}'...")
        print(f"Query: {query}")
        print(f"New Values: {new_values}")

        if multi:
            # Update multiple documents
            result = collection.update_many(query, new_values, upsert=upsert)
        else:
            # Update a single document
            result = collection.update_one(query, new_values, upsert=upsert)

        print(f"Matched {result.matched_count} document(s) and modified {result.modified_count} document(s).")
        if result.upserted_id:
            print(f"Upserted a new document with _id: {result.upserted_id}")

        return result

    except ConnectionFailure as e:
        print(f"MongoDB Connection Error: Could not connect to server. Please check connection string and server status. Error: {e}")
    except OperationFailure as e:
        print(f"MongoDB Operation Error: An error occurred during a database operation. Error: {e}")
    except PyMongoError as e:
        print(f"PyMongo Error: An unexpected PyMongo error occurred: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
    finally:
        # Ensure the client connection is closed
        if client:
            client.close()
            print("MongoDB connection closed.")

    return None  # Reached only when an exception occurred above


# --- Main execution logic ---
if __name__ == "__main__":
    jwt_token = login_api()

    if jwt_token:
        # Call add_email_body_to_api only if necessary, e.g., if you're populating the DB.
        # For a regular report-sending script, you might only need get_email_bodies_from_api.
        email_objects = get_email_bodies_from_api(jwt_token)

        if email_objects:
            smtp_connection = login_smtp()
            if smtp_connection:
                for item in email_objects:
                    receivers = item.get("receivers")
                    subject = item.get("subject")
                    body = item.get("body")

                    if receivers and subject and body:
                        send_success = send_email(
                            smtp_connection, receivers, subject, body
                        )
                        if send_success:
                            try:
                                # Mark the mail document as sent; requires a valid _id from the database
                                document_id_string = item.get("id")
                                query_by_id = {"_id": ObjectId(document_id_string)}
                                update_by_id_values = {"$set": {"IsSent": True}}
                                update_mongodb_document(
                                    query_by_id, update_by_id_values
                                )
                            except Exception as e:
                                print(f"Error updating sent flag: make sure the _id string is valid and exists. {e}")
                    else:
                        print(f"Skipping email due to missing data in API response: {item}")
                smtp_connection.quit()  # Close the SMTP connection after all emails are sent
                print("SMTP connection closed.")
            else:
                print("Failed to establish SMTP connection. Cannot send emails.")
        else:
            print("No email bodies retrieved from API to send.")
    else:
        print("Failed to obtain JWT token. Cannot proceed with email operations.")
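The update helper above is generic, but in the main loop it is only ever used to flag a mail document as sent. A minimal standalone call for that case (the ObjectId string is a placeholder, not a real document id):

    from bson.objectid import ObjectId

    result = update_mongodb_document(
        {"_id": ObjectId("64b64c1f2a5e9a0001234567")},  # placeholder _id
        {"$set": {"IsSent": True}},
    )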
@@ -1,106 +0,0 @@

import json
import requests


# --- Configuration ---
def load_config_from_json(file_path="config.json"):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            config = json.load(f)
        print(f"Configuration loaded from {file_path}")
        return config
    except FileNotFoundError:
        print(f"Error: Configuration file '{file_path}' not found.")
        exit(1)
    except json.JSONDecodeError:
        print(f"Error: Could not decode JSON from '{file_path}'. Check file format.")
        exit(1)
    except Exception as e:
        print(f"An unexpected error occurred while loading config: {e}")
        exit(1)


# Load configuration at the start
CONFIG = load_config_from_json()

# Access variables from the nested configuration
# Use .get() with a default empty dict to prevent KeyError if 'API' is missing
API_CONFIG = CONFIG.get('API', {})
BASE_URL = API_CONFIG.get('BASE_URL')
API_USERNAME = API_CONFIG.get('USERNAME')
API_PASSWORD = API_CONFIG.get('PASSWORD')


def login_api():
    """
    Logs into the API and returns the JWT token.
    Handles potential request errors.
    """
    payload = {
        "username": API_USERNAME,
        "password": API_PASSWORD
    }

    headers = {
        "Content-Type": "application/json"
    }

    try:
        response = requests.post(f"{BASE_URL}/auth/login", json=payload, headers=headers)
        response.raise_for_status()  # Raise an exception for HTTP errors (4xx or 5xx)
        data = response.json()['data']
        jwt = data["token"]
        print("API login successful.")
        return jwt
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error during API login: {e}")
        print(f"Response: {response.text}")
        return None
    except requests.exceptions.ConnectionError as e:
        print(f"Connection Error during API login: {e}")
        return None
    except requests.exceptions.Timeout as e:
        print(f"Timeout Error during API login: {e}")
        return None
    except requests.exceptions.RequestException as e:
        print(f"An unexpected error occurred during API login: {e}")
        return None
    except KeyError:
        print("API response missing 'data' or 'token' key.")
        return None


def add_email_body_to_api(jwt):
    """
    Adds email body data to the API.
    """
    if not jwt:
        print("No JWT token available to add email body.")
        return None

    headers = {
        "Authorization": f"Bearer {jwt}",
        "Content-Type": "application/json"
    }
    try:
        # Assuming this POST request adds new mail records to the database.
        # If it's idempotent (can be called multiple times without issues), it's fine.
        # Otherwise, consider if this should only be called once or based on specific logic.
        response = requests.post(f"{BASE_URL}/report/add-report-mail", headers=headers)
        response.raise_for_status()
        print("Email body successfully added to API.")
        return response
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error when adding email body: {e}")
        print(f"Response: {response.text}")
        return None
    except requests.exceptions.RequestException as e:
        print(f"An error occurred when adding email body: {e}")
        return None


# --- Main execution logic ---
if __name__ == "__main__":
    jwt_token = login_api()

    if jwt_token:
        # Call add_email_body_to_api only if necessary, e.g., if you're populating the DB.
        # For a regular report-sending script, you might only need get_email_bodies_from_api.
        add_response = add_email_body_to_api(jwt_token)
    else:
        print("Failed to obtain JWT token. Cannot proceed with email operations.")
@@ -1,17 +1,9 @@
 # config.env
 ENVIRONMENT=production
-WEB_BRANCH_NAME=react-query-v2
+WEB_BRANCH_NAME=Issue_May_2W
-WEB_IMAGE_NAME=marco.web.prod:v3.13
+WEB_IMAGE_NAME=marco.web.prod:v1.1
 WEB_CONTAINER_NAME=marco.web.prod
-WEB_PORT=4175
-WEB_REPO_DIR=marco.pms.web
-WEB_REPO=https://git.marcoaiot.com/admin/marco.pms.web.git
-API_BRANCH_NAME=Ashutosh_Refactor
-API_IMAGE_NAME=marco.api.prod:v3.13.1
+API_BRANCH_NAME=Issues_May_2W
+API_IMAGE_NAME=marco.api.prod:v1.1
 API_CONTAINER_NAME=marco.api.prod
-API_PORT1=8085
-API_PORT2=8086
-API_REPO_DIR=marco.pms.api
-API_REPO=https://git.marcoaiot.com/admin/marco.pms.api.git
 API_URL=https://api.marcoaiot.com
@@ -1 +0,0 @@
-docker run -d --name marco.web.prod -p 4175:4173 -e VITE_API_URL=https://api.marcoaiot.com marco.web.prod:v3.10
@@ -6,7 +6,7 @@ source ./config.env
 # Step 1: set branch Name
 IMAGE_NAME=$API_IMAGE_NAME
 CONTAINER_NAME=$API_CONTAINER_NAME
-REPO_DIR=$API_REPO_DIR
+REPO_DIR="marco.pms.api"
 
 # Navigate into the project directory
 
@@ -46,7 +46,7 @@ fi
 # Step 5: Run the Docker container
 echo "Running Docker container..."
 
-docker run -d --name $CONTAINER_NAME --network mynetwork -p $API_PORT1:8080 -p $API_PORT2:8081 $IMAGE_NAME
+docker run -d --name $CONTAINER_NAME --network mynetwork -p 8085:8080 -p 8086:8081 $IMAGE_NAME
 
 # Step 6: Final Message
 echo "Successfully Running $CONTAINER_NAME"
@@ -7,7 +7,7 @@ source ./config.env
 # Set the name for the Docker image and container
 IMAGE_NAME=$WEB_IMAGE_NAME
 CONTAINER_NAME=$WEB_CONTAINER_NAME
-REPO_DIR=$WEB_REPO_DIR
+REPO_DIR="marco.pms.web"
 VITE_API_URL=$API_URL
 
 
@@ -68,7 +68,7 @@ cd ../
 
 # Step 7: Run the Docker container
 echo "------------ Running Docker container..."
-docker run -d --name "$CONTAINER_NAME" -p $WEB_PORT:4173 \
+docker run -d --name "$CONTAINER_NAME" -p 4175:4173 \
   -e VITE_API_URL="$VITE_API_URL" "$IMAGE_NAME"
 
 
@@ -3,13 +3,12 @@
 # Load config file
 source ./config.env
 
 
-# Repository URL
-REPO_URL=$API_REPO
-REPO_DIR=$API_REPO_DIR
 #set branch Name
 BRANCH_NAME=$API_BRANCH_NAME
 
+# Repository URL
+REPO_URL="https://git.marcoaiot.com/admin/marco.pms.api.git"
+REPO_DIR="marco.pms.api"
 
 # Step 1: Clone the GitHub repository (use the branch defined)
 if [ -d "$REPO_DIR" ]; then
@@ -28,17 +27,4 @@ else
 fi
 
 echo "Successfully pull $BRANCH_NAME"
-echo "---------------------------------------------"
-echo "Copying appsetting"
-
-
-cd ..
-SOURCE_FILE="./appsettings.Production.json"
-TARGET_DIR="./marco.pms.api/Marco.Pms.Services/"
-cp "$SOURCE_FILE" "$TARGET_DIR"
-#cp "./appsettings.Production.json" "./marco.pms.api/Marco.Pms.Services/"
-
-echo "Successfully copied appsetting to $TARGET_DIR"
-
@@ -8,8 +8,8 @@ source ./config.env
 BRANCH_NAME=$WEB_BRANCH_NAME
 
 # Repository URL
-REPO_URL=$WEB_REPO
-REPO_DIR=$WEB_REPO_DIR
+REPO_URL="https://git.marcoaiot.com/admin/marco.pms.web.git"
+REPO_DIR="marco.pms.web"
 
 # Step 1: Clone the GitHub repository (use the branch defined)
 if [ -d "$REPO_DIR" ]; then