Document_Manager #129

Merged
ashutosh.nehete merged 83 commits from Document_Manager into main 2025-09-11 04:12:01 +00:00
Showing only changes of commit 2e55d1e3c4

@ -519,6 +519,48 @@ namespace Marco.Pms.Services.Controllers
    // Return the pre-signed URL with a success response
}
[HttpGet("get/tags")]
public async Task<IActionResult> GetAllDocumentTagsAsync()
{
    // Log: API endpoint execution started
    _logger.LogInfo("Executing GetAllDocumentTagsAsync to retrieve document tags for tenant.");

    // Create the DbContext asynchronously via the factory; await using ensures async disposal
    await using var context = await _dbContextFactory.CreateDbContextAsync();

    // Create a DI scope for scoped (user/context-specific) services; not used further in this method
    using var scope = _serviceScope.CreateScope();
    try
    {
        // Fetch the currently logged-in employee (auth/user context)
        var loggedInEmployee = await _userHelper.GetCurrentEmployeeAsync();
        if (loggedInEmployee == null)
        {
            _logger.LogWarning("Current employee could not be identified.");
            return Unauthorized(ApiResponse<object>.ErrorResponse("Unauthorized", 401));
        }

        // Resolve the tenant from the logged-in employee; the original code read an
        // undeclared `tenantId`, so this assumes the employee record carries TenantId
        var tenantId = loggedInEmployee.TenantId;

        // Retrieve the tags that belong to the tenant and project only the names (performance: projection)
        var tags = await context.DocumentTagMasters
            .Where(dt => dt.TenantId == tenantId)
            .Select(dt => dt.Name)
            .ToListAsync();

        _logger.LogInfo("Successfully retrieved {TagCount} document tags for tenant {TenantId}.", tags.Count, tenantId);

        // Return tags wrapped in the ApiResponse envelope
        return Ok(ApiResponse<object>.SuccessResponse(tags, "Tags fetched successfully", 200));
    }
    catch (Exception ex)
    {
        // Log and surface unexpected errors as a 500 with a generic message
        // (tenantId is declared inside the try block, so it is not referenced here)
        _logger.LogError(ex, "Error occurred while retrieving document tags for tenant.");
        return StatusCode(500, ApiResponse<object>.ErrorResponse("An error occurred while fetching tags.", 500));
    }
}
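
For context: `ApiResponse<T>` is not part of this commit, so the envelope's exact shape is not visible here. A minimal sketch consistent with the `SuccessResponse`/`ErrorResponse` call sites above might look like this (the property names are assumptions, not the actual Marco.Pms definition):

```csharp
// Hypothetical sketch of the ApiResponse<T> envelope used above.
// The real class is defined elsewhere in Marco.Pms; the property
// names below are assumptions inferred from the two call sites.
public class ApiResponse<T>
{
    public bool Success { get; set; }
    public string Message { get; set; } = string.Empty;
    public int StatusCode { get; set; }
    public T? Data { get; set; }

    public static ApiResponse<T> SuccessResponse(T data, string message, int statusCode) =>
        new() { Success = true, Data = data, Message = message, StatusCode = statusCode };

    public static ApiResponse<T> ErrorResponse(string message, int statusCode) =>
        new() { Success = false, Message = message, StatusCode = statusCode };
}
```

Under that assumption, a successful call to the `get/tags` endpoint would serialize to something like `{ "success": true, "message": "Tags fetched successfully", "statusCode": 200, "data": ["tag1", "tag2"] }`.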
/// <summary>
/// Uploads a document attachment for an Employee or Project.
/// Validates permissions, document type, entity existence, tags, and uploads to S3.