Merge pull request 'dev' (#78) from dev into master
Reviewed-on: freeleaps/freeleaps-service-hub#78
Commit: 6d9b35bb73
@@ -1,4 +1,4 @@
-FROM python:3.10-slim-bullseye
+FROM python:3.12-slim

 # docker settings
 ARG CONTAINER_APP_ROOT="/app"
@@ -1,4 +1,4 @@
-FROM python:3.10-slim-buster
+FROM python:3.12-slim

 # docker settings
 ARG CONTAINER_APP_ROOT="/app"
@@ -1,6 +1,7 @@
 from backend.business.document_manager import (
     DocumentManager,
 )
+from common.log.module_logger import ModuleLogger


 class DocumentHub:
@@ -8,15 +9,22 @@ class DocumentHub:
         self,
     ):
         self.document_manager = DocumentManager()
+        self.module_logger = ModuleLogger(sender_id="DocumentHub")
         return

     async def retrieve_document_info(self, document_id: str):
-        return await self.document_manager.retrieve_document_info(document_id)
+        await self.module_logger.log_info(f"Retrieving document info for ID: {document_id}")
+        result = await self.document_manager.retrieve_document_info(document_id)
+        await self.module_logger.log_info(f"Document info retrieved successfully for ID: {document_id}")
+        return result

     async def read_document_file_as_http_media_data(self, document_id: str):
-        return await self.document_manager.read_document_file_as_http_media_data(
+        await self.module_logger.log_info(f"Reading document as HTTP media for ID: {document_id}")
+        result = await self.document_manager.read_document_file_as_http_media_data(
             document_id
         )
+        await self.module_logger.log_info(f"Document media data read successfully for ID: {document_id}")
+        return result

     async def upload_document(
         self, associated_with: str, file_name: str, file_data: bytes
@@ -26,11 +34,16 @@ class DocumentHub:
            file_name: the name of the file
            file (bytes): the file to be uploaded
        """
-        return await self.document_manager.upload_file(
+        await self.module_logger.log_info(f"Uploading document {file_name} for user {associated_with}")
+        result = await self.document_manager.upload_file(
             associated_with, file_name, file_data
         )
+        await self.module_logger.log_info(f"Document upload completed for {file_name}, result: {result}")
+        return result

     async def delete_documents(self, document_ids: list):
+        await self.module_logger.log_info(f"Deleting documents: {document_ids}")
         for document_id in document_ids:
             await self.document_manager.delete_document(document_id)
+        await self.module_logger.log_info(f"Documents deleted successfully: {document_ids}")
         return
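Every `DocumentHub` method now follows the same shape: log before the delegated call, capture the result, log after, return it. Purely as an illustration of that shape (not part of this PR), here is one way the repeated before/after logging could be factored into a decorator; `DocumentHubSketch`, `logged`, and the constructor arguments are hypothetical names, and `ModuleLogger.log_info` is assumed to be awaitable as used in the diff:

```python
import functools

def logged(action: str):
    """Hypothetical decorator factoring the log-call-log pattern used above."""
    def wrap(fn):
        @functools.wraps(fn)
        async def inner(self, *args, **kwargs):
            await self.module_logger.log_info(f"{action} started: args={args}")
            result = await fn(self, *args, **kwargs)
            await self.module_logger.log_info(f"{action} completed")
            return result
        return inner
    return wrap

class DocumentHubSketch:
    def __init__(self, manager, logger):
        self.document_manager = manager
        self.module_logger = logger

    @logged("Retrieve document info")
    async def retrieve_document_info(self, document_id: str):
        return await self.document_manager.retrieve_document_info(document_id)
```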
@@ -1,10 +1,12 @@
 from backend.services.document_service import DocumentService
 from backend.models.models import MediaType, DataFormat
+from common.log.module_logger import ModuleLogger


 class DocumentManager:
     def __init__(self) -> None:
         self.document_service = DocumentService()
+        self.module_logger = ModuleLogger(sender_id="DocumentManager")

     async def retrieve_document_info(self, document_id: str):
         await self.document_service.load_document(document_id=document_id)
@@ -12,6 +12,7 @@ from webapi.providers import probes
 from webapi.providers import metrics
 from .freeleaps_app import FreeleapsApp
 from common.config.app_settings import app_settings
+from common.log.module_logger import ModuleLogger


 def create_app() -> FastAPI:
@@ -20,6 +21,10 @@ def create_app() -> FastAPI:
     app = FreeleapsApp()

     register_logger()
+
+    # Create application logger for startup logging
+    app_logger = ModuleLogger(sender_id="ApplicationBootstrap")
+
     register(app, exception_handler)
     register(app, database)
     register(app, router)
@@ -37,6 +42,14 @@ def create_app() -> FastAPI:
     if app_settings.METRICS_ENABLED:
         register(app, metrics)

+    # Log application startup completion
+    import asyncio
+    async def log_startup():
+        await app_logger.log_info("Central Storage application initialized successfully")
+
+    # Run the async logging
+    asyncio.create_task(log_startup())
+
     return app

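One thing to keep in mind with this hunk: `asyncio.create_task()` only works while an event loop is already running, so if `create_app()` is called outside a live loop (for example at import time) this line raises `RuntimeError`. A hedged sketch of the same startup log deferred to a FastAPI startup hook, mirroring what the DevOps service does later in this PR; `create_app_sketch` is a hypothetical name and the `ModuleLogger` import path is taken from the diff:

```python
from fastapi import FastAPI
from common.log.module_logger import ModuleLogger  # import path as shown in the diff

def create_app_sketch() -> FastAPI:
    app = FastAPI()
    app_logger = ModuleLogger(sender_id="ApplicationBootstrap")

    @app.on_event("startup")
    async def log_startup():
        # Runs once the event loop exists, so no RuntimeError at import time.
        await app_logger.log_info("Central Storage application initialized successfully")

    return app
```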
@@ -1,4 +1,4 @@
-FROM python:3.10-slim-buster
+FROM python:3.12-slim

 # docker settings
 ARG CONTAINER_APP_ROOT="/app"
@@ -6,15 +6,18 @@ from common.constants.region import UserRegion
 from backend.document.document_manager import DocumentManager
 from .content_sharepoint_manager import ContentSharePointManager
 from backend.content.constants import ContentSource
+from common.log.module_logger import ModuleLogger


 class ContentService:
     def __init__(self) -> None:
-        pass
+        self.module_logger = ModuleLogger(sender_id="ContentService")

     async def retrieve_content_directories_for_folder(
         self, folder_name: str, region: UserRegion
     ) -> List[ContentDirectory]:
+        await self.module_logger.log_info(f"Retrieving content directories for folder: {folder_name}, region: {region.name}")
+
         folder = (
             await ContentFolderDoc.find(
                 ContentFolderDoc.folder_name == folder_name,
@@ -26,12 +29,18 @@ class ContentService:
         if folder is None or folder.valid_thru.replace(
             tzinfo=timezone.utc
         ) < datetime.now(timezone.utc):
+            await self.module_logger.log_info(f"Folder cache expired or not found, fetching from SharePoint: {folder_name}")
             folder = await ContentSharePointManager().retrieve_directories_for_folder(
                 folder_name=folder_name, region=region
             )

-        return folder.content_directories if folder else None
+        result = folder.content_directories if folder else None
+        await self.module_logger.log_info(f"Successfully retrieved {len(result) if result else 0} content directories for folder: {folder_name}")
+        return result

     async def retrieve_content_as_media_data(self, document_id: str) -> Optional[str]:
+        await self.module_logger.log_info(f"Retrieving content as media data for document ID: {document_id}")
         document_manager = DocumentManager()
-        return await document_manager.retrieve_document_as_http_media(document_id)
+        result = await document_manager.retrieve_document_as_http_media(document_id)
+        await self.module_logger.log_info(f"Successfully retrieved media data for document ID: {document_id}")
+        return result
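The expiry check above compares a naive stored timestamp against an aware "now" by force-attaching UTC before the comparison. A small standalone sketch of that comparison, with illustrative values only:

```python
from datetime import datetime, timedelta, timezone

# valid_thru as it might come back from the database: naive, but known to be UTC.
valid_thru = datetime.utcnow() - timedelta(minutes=5)

# Attach UTC so comparing against an aware datetime is legal (mixing naive/aware raises TypeError).
expired = valid_thru.replace(tzinfo=timezone.utc) < datetime.now(timezone.utc)
print(expired)  # True: the cached folder would be refetched from SharePoint
```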
@@ -13,6 +13,7 @@ from webapi.providers import probes
 from webapi.providers import metrics
 from .freeleaps_app import FreeleapsApp
 from common.config.app_settings import app_settings
+from common.log.module_logger import ModuleLogger


 def create_app() -> FastAPI:
@@ -21,6 +22,10 @@ def create_app() -> FastAPI:
     app = FreeleapsApp()

     register_logger()
+
+    # Create application logger for startup logging
+    app_logger = ModuleLogger(sender_id="ApplicationBootstrap")
+
     register(app, exception_handler)
     register(app, database)
     register(app, router)
@@ -37,6 +42,15 @@ def create_app() -> FastAPI:
     # Register metrics APIs if enabled
     if app_settings.METRICS_ENABLED:
         register(app, metrics)
+
+    # Log application startup completion
+    import asyncio
+    async def log_startup():
+        await app_logger.log_info("Content application initialized successfully")
+
+    # Run the async logging
+    asyncio.create_task(log_startup())
+
     return app

@@ -15,7 +15,7 @@ RUN pip install --no-cache-dir -r requirements.txt
 COPY . .

 # Set environment variables
-ENV LOG_BASE_PATH=/app/log/devsvc
+ENV LOG_BASE_PATH=/app/log/devops

 # Create necessary directories
 RUN mkdir -p /app/log/devops
@@ -11,6 +11,7 @@ from app.providers import probes
 from app.providers import exception_handler
 from app.providers import message_queue
 from app.common.config.app_settings import app_settings
+from app.common.log.module_logger import ModuleLogger

 def create_app() -> FastAPI:
     logging.info("App initializing")
@@ -33,6 +34,16 @@ def create_app() -> FastAPI:
     # Register metrics APIs if enabled
     if app_settings.METRICS_ENABLED:
         register(app, metrics)
+
+    # Add startup logging
+    @app.on_event("startup")
+    async def startup_logging():
+        module_logger = ModuleLogger(sender_id="ApplicationBootstrap")
+        await module_logger.log_info(
+            text=f"DevOps service started successfully in {app_settings.APP_ENV} environment",
+            data={"app_name": app_settings.APP_NAME, "environment": app_settings.APP_ENV}
+        )
+
     return app

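Unlike the other two services, the DevOps service defers its startup log to an `@app.on_event("startup")` hook, which runs only after the event loop is up. On newer FastAPI versions `on_event` is deprecated in favour of a lifespan handler; a hedged sketch of the equivalent wiring, reusing the `log_info(text=..., data=...)` call from the diff, with the lifespan migration itself being an assumption rather than part of this PR:

```python
from contextlib import asynccontextmanager
from fastapi import FastAPI
from app.common.log.module_logger import ModuleLogger  # import path as shown in the diff

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup side: runs once the loop is running, before the app serves traffic.
    module_logger = ModuleLogger(sender_id="ApplicationBootstrap")
    await module_logger.log_info(
        text="DevOps service started successfully",
        data={"app_name": "devops"},
    )
    yield  # the application runs between startup and shutdown

app = FastAPI(lifespan=lifespan)
```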
@@ -2,7 +2,7 @@ from pydantic_settings import BaseSettings

 # NOTE: The values fall backs to your environment variables when not set here
 class AppSettings(BaseSettings):
-    NAME: str = "YOUR_APP_NAME"
+    NAME: str = "devops"
     APP_NAME: str = NAME
     APP_ENV: str = "alpha"
@@ -17,8 +17,8 @@ class AppSettings(BaseSettings):
     APP_MONGODB_NAME: str = "testdb"

     LOG_BASE_PATH: str = "./log"
-    BACKEND_LOG_FILE_NAME: str = APP_NAME
-    APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity"\
+    BACKEND_LOG_FILE_NAME: str = "devops"
+    APPLICATION_ACTIVITY_LOG: str = "devops-application-activity"\

     RABBITMQ_HOST: str = "localhost"
     RABBITMQ_PORT: int = 5672
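With pydantic-settings, the class defaults above apply only when the corresponding environment variable is absent; an exported variable overrides them. A quick sketch of that behaviour, reusing field names from the diff (the class name is hypothetical):

```python
import os
from pydantic_settings import BaseSettings

class AppSettingsSketch(BaseSettings):
    NAME: str = "devops"
    APP_ENV: str = "alpha"

print(AppSettingsSketch().APP_ENV)   # "alpha" when APP_ENV is not set in the environment

os.environ["APP_ENV"] = "prod"
print(AppSettingsSketch().APP_ENV)   # "prod": the environment variable wins over the class default
```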
@@ -1,4 +1,4 @@
-FROM python:3.10-slim-buster
+FROM python:3.12-slim

 # docker settings
 ARG CONTAINER_APP_ROOT=/app
@@ -14,6 +14,7 @@ from webapi.providers import metrics
 from webapi.providers import middleware
 from .freeleaps_app import FreeleapsApp
 from common.config.app_settings import app_settings
+from common.log.module_logger import ModuleLogger

 from prometheus_fastapi_instrumentator import Instrumentator

@@ -21,9 +21,15 @@ class FreeleapsApp(FastAPI):
         @self.on_event("startup")
         async def start_consumers():
             print("starting up!")
+
+            # Consumer registration first
             await self.sms_handler.register_consumer()
             await self.email_handler.register_consumer()

+            # Note: If we want startup logging for audit purposes,
+            # it should be done AFTER all critical startup is complete
+            # and database is ready. Currently keeping minimal startup.
+
         @self.on_event("shutdown")
         async def stop_consumers():
             await self.sms_handler.unregister_consumer()
@@ -1,4 +1,4 @@
 from .freeleaps_auth_middleware import FreeleapsAuthMiddleware
-from .tenant_DBConnection_middleware import TenantDBConnectionMiddleware
+from .database_middleware import DatabaseMiddleware

-__all__ = ['FreeleapsAuthMiddleware', 'TenantDBConnectionMiddleware']
+__all__ = ['FreeleapsAuthMiddleware', 'DatabaseMiddleware']
@@ -4,10 +4,10 @@ from webapi.middleware.freeleaps_auth_middleware import request_context_var
 from common.log.module_logger import ModuleLogger


-class TenantDBConnectionMiddleware:
+class DatabaseMiddleware:
     def __init__(self, app):
         self.app = app
-        self.module_logger = ModuleLogger(sender_id=TenantDBConnectionMiddleware)
+        self.module_logger = ModuleLogger(sender_id=DatabaseMiddleware)

     async def __call__(self, scope, receive, send):
         if scope["type"] != "http":
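The renamed `DatabaseMiddleware` follows the pure-ASGI middleware protocol: it wraps the downstream app and filters on `scope["type"]`. A minimal skeleton of that shape (the tenant-resolution body is elided, the class name is a stand-in, and the diff itself passes the class object as `sender_id` where this sketch simply skips the logger):

```python
class DatabaseMiddlewareSketch:
    """Pure-ASGI middleware skeleton matching the __init__/__call__ shape in the diff."""

    def __init__(self, app):
        self.app = app  # the next ASGI app in the chain

    async def __call__(self, scope, receive, send):
        if scope["type"] != "http":
            # Pass lifespan and websocket events through untouched.
            await self.app(scope, receive, send)
            return

        # ... resolve the tenant database for this request here ...
        await self.app(scope, receive, send)
```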
@@ -32,15 +32,16 @@ tenant_document_models = [

 class TenantDBCache:
     """
-    Enhanced tenant database cache that includes Beanie initialization.
-    product_id -> (AsyncIOMotorClient, AsyncIOMotorDatabase, beanie_initialized: bool)
-    Uses main_db.tenant_doc to resolve mongodb_uri; caches clients/dbs with LRU and Beanie state.
+    Enhanced tenant database cache that caches only clients, not databases.
+    product_id -> AsyncIOMotorClient
+    Uses main_db.tenant_doc to resolve mongodb_uri; caches clients with LRU.
+    Database instances are created fresh each time from cached clients.
     """

     def __init__(self, main_db: AsyncIOMotorDatabase, max_size: int = 64):
         self.main_db = main_db
         self.max_size = max_size
-        self._cache: "OrderedDict[str, tuple[AsyncIOMotorClient, AsyncIOMotorDatabase, bool]]" = OrderedDict()
+        self._cache: "OrderedDict[str, AsyncIOMotorClient]" = OrderedDict()
         self._locks: dict[str, asyncio.Lock] = {}
         self._global_lock = asyncio.Lock()
         self.module_logger = ModuleLogger(sender_id="TenantDBCache")
@@ -48,43 +49,43 @@ class TenantDBCache:
     async def get_initialized_db(self, product_id: str) -> AsyncIOMotorDatabase:
         """Get tenant database with Beanie already initialized"""

-        # fast-path
-        cached = self._cache.get(product_id)
-        if cached:
-            client, db, beanie_initialized = cached
-            await self.module_logger.log_info(f"Found cached database for {product_id}, beanie_initialized: {beanie_initialized}")
+        # fast-path: check if client is cached
+        cached_client = self._cache.get(product_id)
+        if cached_client:
+            await self.module_logger.log_info(f"Found cached client for {product_id}")
             self._cache.move_to_end(product_id)

-            if beanie_initialized:
+            # Get fresh database instance from cached client
+            db = cached_client.get_default_database()
+            if db is not None:
+                # Initialize Beanie for this fresh database instance
+                await init_beanie(database=db, document_models=tenant_document_models)
+                await self.module_logger.log_info(f"Beanie initialization completed for {product_id} using cached client")
                 return db
-            else:
-                # Initialize Beanie if not done yet
-                await init_beanie(database=db, document_models=tenant_document_models)
-                await self.module_logger.log_info(f"Beanie initialization completed for {product_id}")
-
-                # Update cache with beanie_initialized = True
-                self._cache[product_id] = (client, db, True)
-                return db
+            await self.module_logger.log_error(f"No default database found for cached client {product_id}")
+            # Remove invalid cached client
+            del self._cache[product_id]

         # double-checked under per-tenant lock
         lock = self._locks.setdefault(product_id, asyncio.Lock())
         async with lock:
-            cached = self._cache.get(product_id)
-            if cached:
-                client, db, beanie_initialized = cached
-                await self.module_logger.log_info(f"Double-check found cached database for {product_id}, beanie_initialized: {beanie_initialized}")
+            cached_client = self._cache.get(product_id)
+            if cached_client:
+                await self.module_logger.log_info(f"Double-check found cached client for {product_id}")
                 self._cache.move_to_end(product_id)

-                if beanie_initialized:
+                # Get fresh database instance from cached client
+                db = cached_client.get_default_database()
+                if db is not None:
+                    # Initialize Beanie for this fresh database instance
+                    await init_beanie(database=db, document_models=tenant_document_models)
+                    await self.module_logger.log_info(f"Beanie initialization completed for {product_id} using cached client (double-check)")
                     return db
-                else:
-                    # Initialize Beanie if not done yet
-                    await init_beanie(database=db, document_models=tenant_document_models)
-                    await self.module_logger.log_info(f"Beanie initialization completed for {product_id} (double-check)")
-
-                    # Update cache with beanie_initialized = True
-                    self._cache[product_id] = (client, db, True)
-                    return db
+                await self.module_logger.log_error(f"No default database found for cached client {product_id}")
+                # Remove invalid cached client
+                del self._cache[product_id]

             # Create new tenant connection - use raw MongoDB query since we don't have TenantDoc model
             """
@@ -127,13 +128,14 @@ class TenantDBCache:
                 detail=f"No default database found for tenant {product_id}",
                 headers={"X-Error-Message": f"No default database found for tenant {product_id}"}
             )

             # Initialize Beanie for this tenant database
             await init_beanie(database=db, document_models=tenant_document_models)
+            await self.module_logger.log_info(f"Beanie initialization completed for new tenant database {product_id}")

-            # LRU put with beanie_initialized = True
-            await self._lru_put(product_id, (client, db, True))
-            await self.module_logger.log_info(f"Tenant database {product_id} cached successfully with Beanie initialized")
+            # Cache only the client
+            await self._lru_put(product_id, client)
+            await self.module_logger.log_info(f"Tenant client {product_id} cached successfully")
             return db

     async def get_main_db_initialized(self) -> AsyncIOMotorDatabase:
@@ -143,12 +145,12 @@ class TenantDBCache:
         await self.module_logger.log_info("Beanie initialization completed for main database")
         return self.main_db

-    async def _lru_put(self, key: str, value: tuple[AsyncIOMotorClient, AsyncIOMotorDatabase, bool]):
+    async def _lru_put(self, key: str, client: AsyncIOMotorClient):
         async with self._global_lock:
-            self._cache[key] = value
+            self._cache[key] = client
             self._cache.move_to_end(key)
             if len(self._cache) > self.max_size:
-                old_key, (old_client, _, _) = self._cache.popitem(last=False)
+                old_key, old_client = self._cache.popitem(last=False)
                 await self.module_logger.log_info(f"Cache full, removing LRU tenant: {old_key}")
                 try:
                     old_client.close()
@@ -159,7 +161,7 @@ class TenantDBCache:

     async def aclose(self):
         async with self._global_lock:
-            for key, (client, _, _) in self._cache.items():
+            for key, client in self._cache.items():
                 try:
                     client.close()
                     await self.module_logger.log_info(f"Closed connection for tenant: {key}")
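After this change the cache stores only `AsyncIOMotorClient` objects keyed by `product_id`, with `OrderedDict` providing LRU behaviour: `move_to_end` on a hit, `popitem(last=False)` to evict the oldest entry, and the evicted client closed. A condensed, generic sketch of that LRU-of-clients mechanism, stripped of the Beanie and Mongo specifics (the class name is a stand-in and `close()` is assumed on the cached object):

```python
import asyncio
from collections import OrderedDict

class ClientLRUCache:
    """LRU cache of closeable clients, mirroring the OrderedDict pattern in TenantDBCache."""

    def __init__(self, max_size: int = 64):
        self.max_size = max_size
        self._cache: "OrderedDict[str, object]" = OrderedDict()
        self._lock = asyncio.Lock()

    async def put(self, key: str, client) -> None:
        async with self._lock:
            self._cache[key] = client
            self._cache.move_to_end(key)          # newest entry becomes most-recently-used
            if len(self._cache) > self.max_size:
                _, old_client = self._cache.popitem(last=False)  # evict least-recently-used
                old_client.close()                # release the evicted client's connections

    def get(self, key: str):
        client = self._cache.get(key)
        if client is not None:
            self._cache.move_to_end(key)          # touching an entry refreshes its recency
        return client
```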
@@ -1,5 +1,5 @@
 from webapi.middleware.freeleaps_auth_middleware import FreeleapsAuthMiddleware
-from webapi.middleware.tenant_DBConnection_middleware import TenantDBConnectionMiddleware
+from webapi.middleware.database_middleware import DatabaseMiddleware


 def register(app):
@@ -7,5 +7,5 @@ def register(app):
     Register middleware to FastAPI application
     """
     # Register middlewares
-    app.add_middleware(TenantDBConnectionMiddleware)
+    app.add_middleware(DatabaseMiddleware)
     app.add_middleware(FreeleapsAuthMiddleware)
@@ -1,4 +1,4 @@
-FROM python:3.10-slim-buster
+FROM python:3.12-slim

 # docker settings
 ARG CONTAINER_APP_ROOT=/app