From 50087c95be5100a43b84a42e05688fef87a04729 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 11 May 2025 18:44:05 -0700 Subject: [PATCH 01/41] Add template fastapi app, this can be used for future extension --- apps/devops/README.md | 13 ++ apps/devops/bootstrap/__init__.py | 0 apps/devops/bootstrap/application.py | 82 ++++++++++ apps/devops/common/__init__.py | 0 apps/devops/common/config/__init__.py | 0 apps/devops/common/config/app_settings.py | 29 ++++ apps/devops/common/config/log_settings.py | 16 ++ apps/devops/common/config/site_settings.py | 27 ++++ apps/devops/common/daos/__init__.py | 0 .../common/daos/hello_world/__init__.py | 3 + .../daos/hello_world/hello_world_dao.py | 30 ++++ apps/devops/common/log/__init__.py | 0 apps/devops/common/log/application_logger.py | 12 ++ apps/devops/common/log/base_logger.py | 136 +++++++++++++++++ apps/devops/common/log/json_sink.py | 85 +++++++++++ apps/devops/common/models/__init__.py | 4 + .../common/models/hello_world/__init__.py | 0 .../common/models/hello_world/hello_world.py | 17 +++ apps/devops/common/probes/__init__.py | 140 ++++++++++++++++++ apps/devops/common/probes/adapters.py | 15 ++ apps/devops/main.py | 16 ++ apps/devops/providers/__init__.py | 0 apps/devops/providers/common.py | 31 ++++ apps/devops/providers/database.py | 34 +++++ apps/devops/providers/exception_handler.py | 39 +++++ apps/devops/providers/logger.py | 8 + apps/devops/providers/metrics.py | 16 ++ apps/devops/providers/probes.py | 25 ++++ apps/devops/providers/router.py | 34 +++++ apps/devops/providers/scheduler.py | 8 + apps/devops/requirements.txt | 30 ++++ apps/devops/routes/__init__.py | 8 + apps/devops/routes/hello_world/__init__.py | 7 + apps/devops/routes/hello_world/apis.py | 22 +++ .../devops/scripts/mongodb/docker-compose.yml | 18 +++ apps/devops/test_main.http | 8 + 36 files changed, 913 insertions(+) create mode 100644 apps/devops/README.md create mode 100644 apps/devops/bootstrap/__init__.py create mode 100644 
apps/devops/bootstrap/application.py create mode 100644 apps/devops/common/__init__.py create mode 100644 apps/devops/common/config/__init__.py create mode 100644 apps/devops/common/config/app_settings.py create mode 100644 apps/devops/common/config/log_settings.py create mode 100644 apps/devops/common/config/site_settings.py create mode 100644 apps/devops/common/daos/__init__.py create mode 100644 apps/devops/common/daos/hello_world/__init__.py create mode 100644 apps/devops/common/daos/hello_world/hello_world_dao.py create mode 100644 apps/devops/common/log/__init__.py create mode 100644 apps/devops/common/log/application_logger.py create mode 100644 apps/devops/common/log/base_logger.py create mode 100644 apps/devops/common/log/json_sink.py create mode 100644 apps/devops/common/models/__init__.py create mode 100644 apps/devops/common/models/hello_world/__init__.py create mode 100644 apps/devops/common/models/hello_world/hello_world.py create mode 100644 apps/devops/common/probes/__init__.py create mode 100644 apps/devops/common/probes/adapters.py create mode 100644 apps/devops/main.py create mode 100644 apps/devops/providers/__init__.py create mode 100644 apps/devops/providers/common.py create mode 100644 apps/devops/providers/database.py create mode 100644 apps/devops/providers/exception_handler.py create mode 100644 apps/devops/providers/logger.py create mode 100644 apps/devops/providers/metrics.py create mode 100644 apps/devops/providers/probes.py create mode 100644 apps/devops/providers/router.py create mode 100644 apps/devops/providers/scheduler.py create mode 100644 apps/devops/requirements.txt create mode 100644 apps/devops/routes/__init__.py create mode 100644 apps/devops/routes/hello_world/__init__.py create mode 100644 apps/devops/routes/hello_world/apis.py create mode 100644 apps/devops/scripts/mongodb/docker-compose.yml create mode 100644 apps/devops/test_main.http diff --git a/apps/devops/README.md b/apps/devops/README.md new file mode 100644 index 
import logging
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi

from providers import common
from providers.logger import register_logger
from providers import router
from providers import database
from providers import metrics
from providers import probes
from providers import exception_handler
from common.config.app_settings import app_settings


def create_app() -> FastAPI:
    """Build and wire the FastAPI application.

    Providers are registered in dependency order: logging first, then
    exception handling, database, routes and common middleware.  Probe and
    metrics endpoints are added only when enabled in settings.

    Returns:
        The fully configured application instance.
    """
    logging.info("App initializing")

    app = FreeleapsApp()

    register_logger()
    register(app, exception_handler)
    register(app, database)
    register(app, router)
    # register(app, scheduler)  # enable when a scheduler provider is needed
    register(app, common)

    # Install the customized OpenAPI schema (adds the JWT bearer scheme).
    customize_openapi_security(app)

    # Register probe APIs if enabled
    if app_settings.PROBES_ENABLED:
        register(app, probes)

    # Register metrics APIs if enabled
    if app_settings.METRICS_ENABLED:
        register(app, metrics)
    return app


def customize_openapi_security(app: FastAPI) -> None:
    """Replace ``app.openapi`` with a generator that declares JWT bearer auth.

    The generated schema advertises OpenAPI version 3.1.0 and applies the
    ``bearerAuth`` requirement globally.  (The previous comment claimed the
    version was forced to 3.0.0, contradicting the code; the code's 3.1.0 is
    kept and the comment corrected.)
    """

    def custom_openapi():
        # The schema is generated once and cached on the app.
        if app.openapi_schema:
            return app.openapi_schema

        # Generate OpenAPI schema
        openapi_schema = get_openapi(
            title="FreeLeaps API",
            version="3.1.0",
            description="FreeLeaps API Documentation",
            routes=app.routes,
        )

        # Ensure the components section exists in the OpenAPI schema
        openapi_schema.setdefault("components", {})

        # JWT bearer tokens are the only advertised security scheme.
        openapi_schema["components"]["securitySchemes"] = {
            "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}
        }

        # Add security requirement globally
        openapi_schema["security"] = [{"bearerAuth": []}]

        app.openapi_schema = openapi_schema
        return app.openapi_schema

    app.openapi = custom_openapi


def register(app, provider):
    """Call ``provider.register(app)``, logging the provider module's name."""
    # Lazy %s args instead of string concatenation (fails on non-str names).
    logging.info("%s registering", provider.__name__)
    provider.register(app)


def boot(app, provider):
    """Call ``provider.boot(app)``, logging the provider module's name."""
    logging.info("%s booting", provider.__name__)
    provider.boot(app)


class FreeleapsApp(FastAPI):
    """Project-specific FastAPI subclass; a hook point for future overrides."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
"-application-activity" + + + class Config: + env_file = ".myapp.env" + env_file_encoding = "utf-8" + + +app_settings = AppSettings() diff --git a/apps/devops/common/config/log_settings.py b/apps/devops/common/config/log_settings.py new file mode 100644 index 0000000..2f6985c --- /dev/null +++ b/apps/devops/common/config/log_settings.py @@ -0,0 +1,16 @@ +import os +from dataclasses import dataclass +from .app_settings import app_settings + +@dataclass +class LogSettings: + LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH + LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days") + LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00") # midnight + MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5)) + LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024)) # 10 MB + APP_NAME: str = app_settings.APP_NAME + ENVIRONMENT: str = app_settings.APP_ENV + + +log_settings = LogSettings() diff --git a/apps/devops/common/config/site_settings.py b/apps/devops/common/config/site_settings.py new file mode 100644 index 0000000..76e5af1 --- /dev/null +++ b/apps/devops/common/config/site_settings.py @@ -0,0 +1,27 @@ +import os + +from pydantic_settings import BaseSettings + + +# NOTE: The values fall backs to your environment variables when not set here +class SiteSettings(BaseSettings): + NAME: str = "appname" + DEBUG: bool = True + + ENV: str = "dev" + + SERVER_HOST: str = "localhost" + SERVER_PORT: int = 8000 + + URL: str = "http://localhost" + TIME_ZONE: str = "UTC" + + BASE_PATH: str = os.path.dirname(os.path.dirname((os.path.abspath(__file__)))) + + class Config: + env_file = ".devbase-webapi.env" + env_file_encoding = "utf-8" + + +site_settings = SiteSettings() + diff --git a/apps/devops/common/daos/__init__.py b/apps/devops/common/daos/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/common/daos/hello_world/__init__.py b/apps/devops/common/daos/hello_world/__init__.py new file mode 100644 
from common.models.hello_world.hello_world import HelloWorld


class HelloWorldDao:
    """Thin data-access layer around the ``HelloWorld`` Beanie document."""

    def __init__(self):
        pass

    async def create_hello_world(self, message: str, count: int):
        """Insert a new document and return it."""
        doc = HelloWorld(message=message, count=count)
        await doc.insert()
        return doc

    async def get_hello_world(self, id: str):
        """Return the document with the given id, or None when absent."""
        return await HelloWorld.get(id)

    async def update_hello_world(self, id: str, message: str, count: int):
        """Overwrite message/count of an existing document.

        Returns the updated document, or None when the id is unknown.
        """
        doc = await HelloWorld.get(id)
        if doc is None:
            return None
        doc.message = message
        doc.count = count
        await doc.save()
        return doc

    async def delete_hello_world(self, id: str) -> bool:
        """Delete the document with the given id; True when something was removed."""
        doc = await HelloWorld.get(id)
        if doc is None:
            return False
        await doc.delete()
        return True
class LoggerBase:
    """Topic-scoped structured logger built on the shared loguru instance.

    Each distinct ``logger_name`` gets one JSON file sink plus one stderr
    sink, both filtered on the bound ``topic`` field, and the bound logger
    is cached in ``binded_loggers`` so repeated construction is cheap.
    """

    # Cache of already-bound loggers, keyed by logger name.
    binded_loggers = {}
    # Guards sink registration and the cache above.
    logger_lock = threading.Lock()
    # loguru's built-in default sink must be removed exactly once.  The
    # previous code called ``guru_logger.remove()`` on EVERY construction,
    # which also destroyed the sinks registered for earlier logger names.
    _default_sink_removed = False

    def __init__(self, logger_name: str, extra_fileds: Dict[str, Any]) -> None:
        # NOTE: "extra_fileds" (sic) is kept as the attribute/parameter name
        # for backward compatibility with existing keyword callers.
        self.__logger_name = logger_name
        self.extra_fileds = extra_fileds
        # Hold the lock for the whole setup: the old check-then-register
        # pattern let two threads racing on the same name register the
        # sinks twice.
        with LoggerBase.logger_lock:
            if self.__logger_name in LoggerBase.binded_loggers:
                self.logger = LoggerBase.binded_loggers[self.__logger_name]
                return

            log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log"
            log_level = "INFO"
            rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024)

            # Drop loguru's default stderr sink once, globally — not per
            # instance, which would wipe sibling loggers' sinks.
            if not LoggerBase._default_sink_removed:
                guru_logger.remove()
                LoggerBase._default_sink_removed = True

            file_sink = JsonSink(
                log_file_path=log_filename,
                rotation_size_bytes=rotation_bytes,
                max_backup_files=log_settings.MAX_BACKUP_FILES,
            )
            guru_logger.add(
                sink=file_sink,
                level=log_level,
                filter=lambda record: record["extra"].get("topic") == self.__logger_name,
            )

            guru_logger.add(
                sink=sys.stderr,
                level=log_level,
                format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}",
                filter=lambda record: record["extra"].get("topic") == self.__logger_name,
            )

            host_name = socket.gethostname()
            try:
                host_ip = socket.gethostbyname(host_name)
            except OSError:
                # Hosts whose own name does not resolve would otherwise
                # crash logger construction.
                host_ip = "127.0.0.1"
            self.logger = guru_logger.bind(
                topic=self.__logger_name,
                host_ip=host_ip,
                host_name=host_name,
                app=log_settings.APP_NAME,
                env=log_settings.ENVIRONMENT,
            )
            LoggerBase.binded_loggers[self.__logger_name] = self.logger

    def _get_log_context(self) -> dict:
        """Return file/line of the frame two levels up (the log_* caller's caller)."""
        frame = inspect.currentframe().f_back.f_back
        filename = os.path.basename(frame.f_code.co_filename)
        lineno = frame.f_lineno
        return {"log_file": filename, "log_line": lineno}

    def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Tuple[Dict[str, Any], str]:
        """Copy ``properties`` (never mutating the caller's dict) and JSON-encode it.

        Returns (props dict, JSON string).  The old ``-> Dict`` annotation
        was wrong — a 2-tuple has always been returned.
        """
        props = {} if properties is None else properties.copy()
        props_str = json.dumps(props, ensure_ascii=False) if props else "{}"
        return props, props_str

    async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: Optional[Dict[str, Any]], text: str = "") -> None:
        """Log a structured business event at INFO level."""
        props, props_str = self._prepare_properties(properties)
        context = self._get_log_context()
        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context)
        local_logger.info(text)

    async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
        """Log an exception with traceback (event name "exception")."""
        props, props_str = self._prepare_properties(properties)
        context = self._get_log_context()
        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context)
        local_logger.exception(text)

    async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
        """Log an informational message (event name "information")."""
        props, props_str = self._prepare_properties(properties)
        context = self._get_log_context()
        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="information", properties=props, properties_str=props_str, **context)
        local_logger.info(text)

    async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
        """Log a warning (event name "warning")."""
        props, props_str = self._prepare_properties(properties)
        context = self._get_log_context()
        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context)
        local_logger.warning(text)

    async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
        """Log an error (event name "error")."""
        props, props_str = self._prepare_properties(properties)
        context = self._get_log_context()
        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="error", properties=props, properties_str=props_str, **context)
        local_logger.error(text)

    @staticmethod
    def configure_uvicorn_logging():
        """Route stdlib/uvicorn/fastapi logging through loguru."""
        print("📢 Setting up uvicorn logging interception...")

        # Intercept logs from these loggers
        intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]

        class InterceptHandler(logging.Handler):
            def emit(self, record):
                # Map the stdlib level name onto loguru's, falling back to
                # the numeric level for custom ones.
                level = (
                    guru_logger.level(record.levelname).name
                    if guru_logger.level(record.levelname, None)
                    else record.levelno
                )
                # Walk out of the logging module so loguru reports the
                # real call site.
                frame, depth = logging.currentframe(), 2
                while frame.f_code.co_filename == logging.__file__:
                    frame = frame.f_back
                    depth += 1

                guru_logger.opt(depth=depth, exception=record.exc_info).log(
                    level,
                    f"[{record.name}] {record.getMessage()}",
                )

        # Replace default handlers
        logging.root.handlers.clear()
        logging.root.setLevel(logging.INFO)
        logging.root.handlers = [InterceptHandler()]

        # Configure specific uvicorn loggers
        for logger_name in intercept_loggers:
            logging_logger = logging.getLogger(logger_name)
            logging_logger.handlers.clear()  # Remove default handlers
            logging_logger.propagate = True  # Ensure propagation through Loguru


class ApplicationLogger(LoggerBase):
    """Logger for application-activity events.

    Args:
        application_activities: optional extra fields merged into every
            record.  (Was a mutable ``{}`` default shared across all calls
            — the classic mutable-default-argument bug; now ``None``.)
    """

    def __init__(self, application_activities: Optional[Dict[str, Any]] = None) -> None:
        extra_fileds = {}
        if application_activities:
            extra_fileds.update(application_activities)
        super().__init__(
            logger_name=app_settings.APPLICATION_ACTIVITY_LOG,
            extra_fileds=extra_fileds,
        )
class JsonSink:
    """Loguru sink writing one JSON object per line, with size-based
    rotation and a bounded number of timestamped backup files."""

    def __init__(
        self,
        log_file_path: str,
        rotation_size_bytes: int = 10 * 1024 * 1024,
        max_backup_files: int = 5,
    ):
        self.log_file_path = Path(log_file_path)
        self.rotation_size = rotation_size_bytes
        self.max_backup_files = max_backup_files
        self._open_log_file()

    def _open_log_file(self):
        # Create the parent directory on demand, then open for append.
        self.log_file_path.parent.mkdir(parents=True, exist_ok=True)
        self.log_file = self.log_file_path.open("a", encoding="utf-8")

    def _should_rotate(self) -> bool:
        active = self.log_file_path
        return active.exists() and active.stat().st_size >= self.rotation_size

    def _rotate(self):
        # Close, rename the active file with a timestamp suffix, prune old
        # backups, then reopen a fresh active file.
        self.log_file.close()
        stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        target = self.log_file_path.with_name(
            f"{self.log_file_path.stem}_{stamp}{self.log_file_path.suffix}"
        )
        self.log_file_path.rename(target)
        self._cleanup_old_backups()
        self._open_log_file()

    def _cleanup_old_backups(self):
        base = self.log_file_path
        newest_first = sorted(
            base.parent.glob(f"{base.stem}_*{base.suffix}"),
            key=lambda p: p.stat().st_mtime,
            reverse=True,
        )
        # Everything past the newest `max_backup_files` entries is deleted.
        for stale in newest_first[self.max_backup_files:]:
            try:
                stale.unlink()
            except Exception as e:
                print(f"Failed to delete old backup {stale}: {e}")

    def __call__(self, message):
        record = message.record
        if self._should_rotate():
            self._rotate()

        extra = record["extra"]
        context_keys = (
            "app", "env", "log_file", "log_line", "topic", "sender_id",
            "receiver_id", "subject", "event", "host_ip", "host_name",
        )
        entry = {
            "level": record["level"].name.lower(),
            "timestamp": int(record["time"].timestamp() * 1000),
            "text": record["message"],
            "fields": extra.get("properties", {}),
            "context": {key: extra.get(key) for key in context_keys},
            "stacktrace": None,
        }

        if record["exception"]:
            exc_type, exc_value, exc_tb = record["exception"]
            entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb)

        self.log_file.write(json.dumps(entry, ensure_ascii=False) + "\n")
        self.log_file.flush()
record["extra"].get("log_file"), + "log_line": record["extra"].get("log_line"), + "topic": record["extra"].get("topic"), + "sender_id": record["extra"].get("sender_id"), + "receiver_id": record["extra"].get("receiver_id"), + "subject": record["extra"].get("subject"), + "event": record["extra"].get("event"), + "host_ip": record["extra"].get("host_ip"), + "host_name": record["extra"].get("host_name"), + }, + "stacktrace": None + } + + if record["exception"]: + exc_type, exc_value, exc_tb = record["exception"] + log_entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb) + + self.log_file.write(json.dumps(log_entry, ensure_ascii=False) + "\n") + self.log_file.flush() diff --git a/apps/devops/common/models/__init__.py b/apps/devops/common/models/__init__.py new file mode 100644 index 0000000..a6dc240 --- /dev/null +++ b/apps/devops/common/models/__init__.py @@ -0,0 +1,4 @@ +from common.models.hello_world.hello_world import HelloWorld + +# list of beanie document models +db_models = [HelloWorld] \ No newline at end of file diff --git a/apps/devops/common/models/hello_world/__init__.py b/apps/devops/common/models/hello_world/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/common/models/hello_world/hello_world.py b/apps/devops/common/models/hello_world/hello_world.py new file mode 100644 index 0000000..55000c7 --- /dev/null +++ b/apps/devops/common/models/hello_world/hello_world.py @@ -0,0 +1,17 @@ +from datetime import datetime + +from beanie import Document + + +class HelloWorld(Document): + message: str + count: int = 0 + created_time: datetime = datetime.now() + + class Settings: + name = "hello_world" + indexes = [ + [("message", 1), ("count", 1)] + ] + + diff --git a/apps/devops/common/probes/__init__.py b/apps/devops/common/probes/__init__.py new file mode 100644 index 0000000..4071df8 --- /dev/null +++ b/apps/devops/common/probes/__init__.py @@ -0,0 +1,140 @@ +import logging +from enum import Enum +from typing 
import inspect
import logging
from datetime import datetime, timezone
from enum import Enum
from typing import Callable, Dict, Optional, Tuple


class ProbeType(Enum):
    """Kinds of Kubernetes-style health probes."""

    LIVENESS = "liveness"
    READINESS = "readiness"
    STARTUP = "startup"


class ProbeResult:
    """Outcome of a single probe check."""

    def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None):
        self.success = success
        self.message = message
        self.data = data or {}

    def to_dict(self) -> dict:
        """Serialize for the probe HTTP response payload."""
        return {
            "success": self.success,
            "message": self.message,
            "data": self.data
        }


class Probe:
    """A named check function bound to a probe type and URL path."""

    def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None):
        self.type = type
        self.path = path
        self.check_fn = check_fn
        # Fall back to a unique name derived from the probe type.
        self.name = name or f"{type.value}-{id(self)}"

    async def execute(self) -> ProbeResult:
        """Run the check; never raises.

        Sync and async check functions are both supported.  A returned
        ProbeResult passes through; a bool maps to ok/failed; any other
        value counts as success; an exception becomes a failure whose
        message is the exception text.
        """
        try:
            outcome = self.check_fn()
            if inspect.isawaitable(outcome):
                outcome = await outcome
        except Exception as exc:
            return ProbeResult(False, str(exc))
        if isinstance(outcome, ProbeResult):
            return outcome
        if isinstance(outcome, bool):
            return ProbeResult(outcome, "ok" if outcome else "failed")
        return ProbeResult(True, "ok")


class ProbeGroup:
    """All probes sharing one URL path, checked together."""

    def __init__(self, path: str):
        self.path = path
        self.probes: Dict[str, Probe] = {}

    def add_probe(self, probe: Probe):
        self.probes[probe.name] = probe

    async def check_all(self) -> Tuple[bool, dict]:
        """Execute every probe; return (all succeeded, name -> result dict)."""
        outcomes = {name: await probe.execute() for name, probe in self.probes.items()}
        overall = all(result.success for result in outcomes.values())
        return overall, {name: result.to_dict() for name, result in outcomes.items()}


class FrameworkAdapter:
    """Interface for wiring probe endpoints into a specific web framework."""

    async def handle_request(self, group: ProbeGroup):
        """Check the whole group and shape the HTTP response (body, status)."""
        all_success, results = await group.check_all()
        status_code = 200 if all_success else 503
        body = {
            "status": "ok" if all_success else "failed",
            "payload": results,
            "timestamp": int(datetime.now(timezone.utc).timestamp()),
        }
        return body, status_code

    def register_route(self, path: str, handler: Callable):
        raise NotImplementedError


class ProbeManager:
    """Registry of probes, probe groups, and framework adapters."""

    _default_paths = {
        ProbeType.LIVENESS: "/_/livez",
        ProbeType.READINESS: "/_/readyz",
        ProbeType.STARTUP: "/_/healthz"
    }

    def __init__(self):
        self.groups: Dict[str, ProbeGroup] = {}
        self.adapters: Dict[str, FrameworkAdapter] = {}
        self._startup_complete = False

    def register_adapter(self, framework: str, adapter: FrameworkAdapter):
        self.adapters[framework] = adapter
        logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}")

    def register(
        self,
        type: ProbeType,
        check_func: Optional[Callable] = None,
        path: Optional[str] = None,
        prefix: str = "",
        name: Optional[str] = None,
        frameworks: Optional[list] = None
    ):
        """Register one probe and expose its route on the given frameworks."""
        route = path or self._default_paths.get(type, "/_/healthz")
        if prefix:
            route = f"{prefix}{route}"

        # A startup probe with no explicit check reports the manager's flag.
        if type == ProbeType.STARTUP and check_func is None:
            check_func = self._default_startup_check

        probe = Probe(type, route, check_func or (lambda: True), name)
        self.groups.setdefault(route, ProbeGroup(route)).add_probe(probe)

        for framework in frameworks or ["default"]:
            self._register_route(framework, route)
            logging.info(f"Registered {type.value} probe route ({route}) for framework: {framework}")

    def _register_route(self, framework: str, path: str):
        adapter = self.adapters.get(framework)
        if adapter is None:
            # Unknown framework: silently skip, as before.
            return

        group = self.groups[path]

        async def handler():
            return await adapter.handle_request(group)

        adapter.register_route(path, handler)

    def _default_startup_check(self) -> bool:
        return self._startup_complete

    def mark_startup_complete(self):
        """Flip the startup flag so the default startup probe reports ready."""
        self._startup_complete = True
from fastapi.responses import RedirectResponse
from common.config.site_settings import site_settings
from bootstrap.application import create_app

app = create_app()


@app.get("/", status_code=301)
async def root():
    """Permanently redirect the bare root URL to the Swagger UI."""
    # BUG FIX: when a Response object is returned directly, FastAPI ignores
    # the decorator's status_code, so the original actually answered with
    # RedirectResponse's default 307.  Set 301 on the response itself, and
    # use the absolute "/docs" instead of the fragile relative "docs".
    return RedirectResponse(url="/docs", status_code=301)


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(
        "main:app",
        host=site_settings.SERVER_HOST,
        port=site_settings.SERVER_PORT,
        reload=True,
    )
import asyncio

from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient

from common.config.app_settings import app_settings
from common.models import db_models
from common.probes import ProbeResult

# Module-level client so the connection pool is shared by the whole process.
client = AsyncIOMotorClient(
    app_settings.APP_MONGODB_URI,
    serverSelectionTimeoutMS=60000,
    minPoolSize=5,  # Minimum number of connections in the pool
    maxPoolSize=20,  # Maximum number of connections in the pool
)


def register(app):
    """Provider hook: initialize Beanie when the application starts.

    BUG FIX: the original also set ``app.debug = "auth_mongo_debug"`` and
    ``app.title = "auth_mongo_name"`` — copy-paste leftovers from another
    service that clobbered the debug flag and title configured by the
    common provider (and made ``app.debug`` a truthy string).  Those
    assignments were removed; a database provider has no business mutating
    app identity.
    """

    @app.on_event("startup")
    async def start_database():
        await initiate_database()


async def check_database_initialized() -> ProbeResult:
    """Readiness check: can the MongoDB server be reached within 5 seconds?"""
    try:
        await asyncio.wait_for(client.server_info(), timeout=5)
        return ProbeResult(success=True, message="service has been initialized and ready to serve")
    except Exception:
        return ProbeResult(success=False, message="service is not initialized yet", data={"error": "database is not ready"})


async def initiate_database():
    """Bind the Beanie document models to the configured database."""
    await init_beanie(
        database=client[app_settings.APP_MONGODB_NAME], document_models=db_models
    )
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.status import (
    HTTP_400_BAD_REQUEST,
    HTTP_401_UNAUTHORIZED,
    HTTP_403_FORBIDDEN,
    HTTP_404_NOT_FOUND,
    HTTP_422_UNPROCESSABLE_ENTITY,
    HTTP_500_INTERNAL_SERVER_ERROR,
)


async def custom_http_exception_handler(request: Request, exc: HTTPException):
    """Render an HTTPException as ``{"error": detail}`` with its own status code."""
    return JSONResponse(
        status_code=exc.status_code,
        content={"error": exc.detail},
    )


async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Render request-validation failures as a 400 with the error text."""
    return JSONResponse(
        status_code=HTTP_400_BAD_REQUEST,
        content={"error": str(exc)},
    )


async def exception_handler(request: Request, exc: Exception):
    """Catch-all: render any unhandled exception as a 500."""
    return JSONResponse(
        status_code=HTTP_500_INTERNAL_SERVER_ERROR,
        content={"error": str(exc)},
    )


def register(app: FastAPI):
    """Attach the three handlers, from most specific to least specific."""
    app.add_exception_handler(HTTPException, custom_http_exception_handler)
    app.add_exception_handler(RequestValidationError, validation_exception_handler)
    app.add_exception_handler(Exception, exception_handler)
from common.probes import ProbeManager, ProbeType
from common.probes.adapters import FastAPIAdapter
from .database import check_database_initialized


def register(app):
    """Provider hook: expose liveness/readiness/startup probes under /api."""
    manager = ProbeManager()
    manager.register_adapter("fastapi", FastAPIAdapter(app))

    async def readiness_checker():
        # Ready only once MongoDB answers.
        return await check_database_initialized()

    manager.register(
        name="readiness",
        prefix="/api",
        type=ProbeType.READINESS,
        check_func=readiness_checker,
        frameworks=["fastapi"],
    )
    manager.register(name="liveness", prefix="/api", type=ProbeType.LIVENESS, frameworks=["fastapi"])
    manager.register(name="startup", prefix="/api", type=ProbeType.STARTUP, frameworks=["fastapi"])

    @app.on_event("startup")
    async def mark_startup_complete():
        # The default startup probe reports this flag once flipped.
        manager.mark_startup_complete()
api_router + +from starlette import routing + + +def register(app): + app.include_router( + api_router, + prefix="/api", + tags=["api"], + dependencies=[], + responses={404: {"description": "no page found"}}, + ) + + if app.debug: + for route in app.routes: + if not isinstance(route, routing.WebSocketRoute): + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "methods": route.methods, + } + ) + else: + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "type": "web socket route", + } + ) diff --git a/apps/devops/providers/scheduler.py b/apps/devops/providers/scheduler.py new file mode 100644 index 0000000..7ea8d6c --- /dev/null +++ b/apps/devops/providers/scheduler.py @@ -0,0 +1,8 @@ +import asyncio + + +def register(app): + @app.on_event("startup") + async def start_scheduler(): + #create your scheduler here + pass diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt new file mode 100644 index 0000000..295004e --- /dev/null +++ b/apps/devops/requirements.txt @@ -0,0 +1,30 @@ +annotated-types==0.7.0 +anyio==4.9.0 +beanie==1.29.0 +click==8.2.0 +dnspython==2.7.0 +exceptiongroup==1.3.0 +fastapi==0.115.12 +h11==0.16.0 +httptools==0.6.4 +idna==3.10 +lazy-model==0.2.0 +loguru==0.7.3 +motor==3.7.0 +prometheus-fastapi-instrumentator==7.1.0 +prometheus_client==0.21.1 +pydantic==2.11.4 +pydantic-settings==2.9.1 +pydantic_core==2.33.2 +pymongo==4.12.1 +python-dotenv==1.1.0 +PyYAML==6.0.2 +sniffio==1.3.1 +starlette==0.46.2 +toml==0.10.2 +typing-inspection==0.4.0 +typing_extensions==4.13.2 +uvicorn==0.34.2 +uvloop==0.21.0 +watchfiles==1.0.5 +websockets==15.0.1 diff --git a/apps/devops/routes/__init__.py b/apps/devops/routes/__init__.py new file mode 100644 index 0000000..d698f6b --- /dev/null +++ b/apps/devops/routes/__init__.py @@ -0,0 +1,8 @@ +from fastapi import APIRouter +from routes.hello_world import router as hello_world_router + + +api_router = APIRouter() + +# TODO: add custom 
routers here +api_router.include_router(hello_world_router, tags=["hello_world"]) diff --git a/apps/devops/routes/hello_world/__init__.py b/apps/devops/routes/hello_world/__init__.py new file mode 100644 index 0000000..70ca8e2 --- /dev/null +++ b/apps/devops/routes/hello_world/__init__.py @@ -0,0 +1,7 @@ +from fastapi import APIRouter +from .apis import router as hello_world_api + + +router = APIRouter(prefix="/hello_world") + +router.include_router(hello_world_api, tags=["hello_world"]) diff --git a/apps/devops/routes/hello_world/apis.py b/apps/devops/routes/hello_world/apis.py new file mode 100644 index 0000000..d2983c9 --- /dev/null +++ b/apps/devops/routes/hello_world/apis.py @@ -0,0 +1,22 @@ +from fastapi import APIRouter +from loguru import logger + +from common.daos.hello_world import hello_world_dao + +router = APIRouter() + +@router.get("/") +async def hello_world(): + logger.info("Hello, World! endpoint was called") + return {"message": "Hello, World!"} + + +@router.post("/insert") +async def insert_hello_world(msg: str): + """ + Insert a HelloWorld document into the database. 
+ """ + hello_world = await hello_world_dao.create_hello_world(msg, 1) + return hello_world + + diff --git a/apps/devops/scripts/mongodb/docker-compose.yml b/apps/devops/scripts/mongodb/docker-compose.yml new file mode 100644 index 0000000..8ab07c7 --- /dev/null +++ b/apps/devops/scripts/mongodb/docker-compose.yml @@ -0,0 +1,18 @@ +version: '3.8' + +services: + mongodb: + image: mongo:6.0 # You can change to the desired version + container_name: mongodb + restart: unless-stopped + ports: + - "27017:27017" + environment: + MONGO_INITDB_DATABASE: testdb # <-- This creates the initial database + volumes: + - mongodb_data:/data/db + command: ["mongod", "--noauth"] # <-- Disable authentication + + +volumes: + mongodb_data: \ No newline at end of file diff --git a/apps/devops/test_main.http b/apps/devops/test_main.http new file mode 100644 index 0000000..b847198 --- /dev/null +++ b/apps/devops/test_main.http @@ -0,0 +1,8 @@ +# Test your FastAPI endpoints + +GET http://localhost:8000/api/hello_world/ +Accept: application/json + +### +POST http://localhost:8000/api/hello_world/insert?msg=Hello%20World +Accept: application/json From c4c34e6cd76afc8ee0a6d72c6a0db2a317b7cd47 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 11 May 2025 19:15:41 -0700 Subject: [PATCH 02/41] Add template fastapi app, test, README.md, this can be used for future extension --- apps/devops/README.md | 15 +++++++--- apps/devops/{bootstrap => app}/__init__.py | 0 .../{common => app/bootstrap}/__init__.py | 0 .../devops/{ => app}/bootstrap/application.py | 16 +++++------ .../{common/config => app/common}/__init__.py | 0 .../daos => app/common/config}/__init__.py | 0 .../{ => app}/common/config/app_settings.py | 0 .../{ => app}/common/config/log_settings.py | 0 .../{ => app}/common/config/site_settings.py | 0 .../log => app/common/daos}/__init__.py | 0 .../app/common/daos/hello_world/__init__.py | 3 ++ .../daos/hello_world/hello_world_dao.py | 2 +- .../common/log}/__init__.py | 0 
.../common/log/application_logger.py | 2 +- .../{ => app}/common/log/base_logger.py | 4 +-- apps/devops/{ => app}/common/log/json_sink.py | 0 apps/devops/app/common/models/__init__.py | 4 +++ .../common/models/hello_world}/__init__.py | 0 .../common/models/hello_world/hello_world.py | 0 .../{ => app}/common/probes/__init__.py | 0 .../{ => app}/common/probes/adapters.py | 0 apps/devops/app/envs/alpha.yml | 0 apps/devops/app/envs/prod.yml | 0 apps/devops/{ => app}/main.py | 4 +-- apps/devops/app/providers/__init__.py | 0 apps/devops/{ => app}/providers/common.py | 2 +- apps/devops/{ => app}/providers/database.py | 6 ++-- .../{ => app}/providers/exception_handler.py | 0 apps/devops/{ => app}/providers/logger.py | 3 +- apps/devops/{ => app}/providers/metrics.py | 2 +- apps/devops/{ => app}/providers/probes.py | 4 +-- apps/devops/{ => app}/providers/router.py | 2 +- apps/devops/{ => app}/providers/scheduler.py | 0 apps/devops/{ => app}/routes/__init__.py | 2 +- .../{ => app}/routes/hello_world/__init__.py | 0 .../{ => app}/routes/hello_world/apis.py | 2 +- .../scripts/mongodb/docker-compose.yml | 0 .../common/daos/hello_world/__init__.py | 3 -- apps/devops/common/models/__init__.py | 4 --- apps/devops/requirements.txt | 28 +++---------------- apps/devops/tests/__init__.py | 0 41 files changed, 47 insertions(+), 61 deletions(-) rename apps/devops/{bootstrap => app}/__init__.py (100%) rename apps/devops/{common => app/bootstrap}/__init__.py (100%) rename apps/devops/{ => app}/bootstrap/application.py (86%) rename apps/devops/{common/config => app/common}/__init__.py (100%) rename apps/devops/{common/daos => app/common/config}/__init__.py (100%) rename apps/devops/{ => app}/common/config/app_settings.py (100%) rename apps/devops/{ => app}/common/config/log_settings.py (100%) rename apps/devops/{ => app}/common/config/site_settings.py (100%) rename apps/devops/{common/log => app/common/daos}/__init__.py (100%) create mode 100644 
apps/devops/app/common/daos/hello_world/__init__.py rename apps/devops/{ => app}/common/daos/hello_world/hello_world_dao.py (93%) rename apps/devops/{common/models/hello_world => app/common/log}/__init__.py (100%) rename apps/devops/{ => app}/common/log/application_logger.py (87%) rename apps/devops/{ => app}/common/log/base_logger.py (98%) rename apps/devops/{ => app}/common/log/json_sink.py (100%) create mode 100644 apps/devops/app/common/models/__init__.py rename apps/devops/{providers => app/common/models/hello_world}/__init__.py (100%) rename apps/devops/{ => app}/common/models/hello_world/hello_world.py (100%) rename apps/devops/{ => app}/common/probes/__init__.py (100%) rename apps/devops/{ => app}/common/probes/adapters.py (100%) create mode 100644 apps/devops/app/envs/alpha.yml create mode 100644 apps/devops/app/envs/prod.yml rename apps/devops/{ => app}/main.py (76%) create mode 100644 apps/devops/app/providers/__init__.py rename apps/devops/{ => app}/providers/common.py (92%) rename apps/devops/{ => app}/providers/database.py (88%) rename apps/devops/{ => app}/providers/exception_handler.py (100%) rename apps/devops/{ => app}/providers/logger.py (70%) rename apps/devops/{ => app}/providers/metrics.py (88%) rename apps/devops/{ => app}/providers/probes.py (87%) rename apps/devops/{ => app}/providers/router.py (96%) rename apps/devops/{ => app}/providers/scheduler.py (100%) rename apps/devops/{ => app}/routes/__init__.py (71%) rename apps/devops/{ => app}/routes/hello_world/__init__.py (100%) rename apps/devops/{ => app}/routes/hello_world/apis.py (88%) rename apps/devops/{ => app}/scripts/mongodb/docker-compose.yml (100%) delete mode 100644 apps/devops/common/daos/hello_world/__init__.py delete mode 100644 apps/devops/common/models/__init__.py create mode 100644 apps/devops/tests/__init__.py diff --git a/apps/devops/README.md b/apps/devops/README.md index d4f2587..588044f 100644 --- a/apps/devops/README.md +++ b/apps/devops/README.md @@ -2,12 +2,19 @@ 
This is a template backend service based on fastapi + mongodb app To start development in local, go to the root directory of the project YOUR_WORKSPACE_PATH/devops/ ```bash -cd scripts/mongodb -docker compose -f scripts/mongodb/docker-compose.yml up -d +docker compose -f app/scripts/mongodb/docker-compose.yml up -d ``` - Then run the app ```bash -uvicorn main:app --reload +uvicorn app.main:app --reload +``` + +In case a new dependency is added, run the following command to update the requirements.txt file +```bash +# optional: if you have not installed pipreqs +pip3 install pipreqs + +# generate requirements.txt +pipreqs . --force ``` diff --git a/apps/devops/bootstrap/__init__.py b/apps/devops/app/__init__.py similarity index 100% rename from apps/devops/bootstrap/__init__.py rename to apps/devops/app/__init__.py diff --git a/apps/devops/common/__init__.py b/apps/devops/app/bootstrap/__init__.py similarity index 100% rename from apps/devops/common/__init__.py rename to apps/devops/app/bootstrap/__init__.py diff --git a/apps/devops/bootstrap/application.py b/apps/devops/app/bootstrap/application.py similarity index 86% rename from apps/devops/bootstrap/application.py rename to apps/devops/app/bootstrap/application.py index 36a1616..24223b6 100644 --- a/apps/devops/bootstrap/application.py +++ b/apps/devops/app/bootstrap/application.py @@ -2,14 +2,14 @@ import logging from fastapi import FastAPI from fastapi.openapi.utils import get_openapi -from providers import common -from providers.logger import register_logger -from providers import router -from providers import database -from providers import metrics -from providers import probes -from providers import exception_handler -from common.config.app_settings import app_settings +from app.providers import common +from app.providers.logger import register_logger +from app.providers import router +from app.providers import database +from app.providers import metrics +from app.providers import probes +from app.providers 
import exception_handler +from app.common.config.app_settings import app_settings def create_app() -> FastAPI: logging.info("App initializing") diff --git a/apps/devops/common/config/__init__.py b/apps/devops/app/common/__init__.py similarity index 100% rename from apps/devops/common/config/__init__.py rename to apps/devops/app/common/__init__.py diff --git a/apps/devops/common/daos/__init__.py b/apps/devops/app/common/config/__init__.py similarity index 100% rename from apps/devops/common/daos/__init__.py rename to apps/devops/app/common/config/__init__.py diff --git a/apps/devops/common/config/app_settings.py b/apps/devops/app/common/config/app_settings.py similarity index 100% rename from apps/devops/common/config/app_settings.py rename to apps/devops/app/common/config/app_settings.py diff --git a/apps/devops/common/config/log_settings.py b/apps/devops/app/common/config/log_settings.py similarity index 100% rename from apps/devops/common/config/log_settings.py rename to apps/devops/app/common/config/log_settings.py diff --git a/apps/devops/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py similarity index 100% rename from apps/devops/common/config/site_settings.py rename to apps/devops/app/common/config/site_settings.py diff --git a/apps/devops/common/log/__init__.py b/apps/devops/app/common/daos/__init__.py similarity index 100% rename from apps/devops/common/log/__init__.py rename to apps/devops/app/common/daos/__init__.py diff --git a/apps/devops/app/common/daos/hello_world/__init__.py b/apps/devops/app/common/daos/hello_world/__init__.py new file mode 100644 index 0000000..b953163 --- /dev/null +++ b/apps/devops/app/common/daos/hello_world/__init__.py @@ -0,0 +1,3 @@ +from app.common.daos.hello_world.hello_world_dao import HelloWorldDao + +hello_world_dao = HelloWorldDao() diff --git a/apps/devops/common/daos/hello_world/hello_world_dao.py b/apps/devops/app/common/daos/hello_world/hello_world_dao.py similarity index 93% rename 
from apps/devops/common/daos/hello_world/hello_world_dao.py rename to apps/devops/app/common/daos/hello_world/hello_world_dao.py index 88d8f8c..3b3a112 100644 --- a/apps/devops/common/daos/hello_world/hello_world_dao.py +++ b/apps/devops/app/common/daos/hello_world/hello_world_dao.py @@ -1,4 +1,4 @@ -from common.models.hello_world.hello_world import HelloWorld +from app.common.models.hello_world.hello_world import HelloWorld class HelloWorldDao: def __init__(self): diff --git a/apps/devops/common/models/hello_world/__init__.py b/apps/devops/app/common/log/__init__.py similarity index 100% rename from apps/devops/common/models/hello_world/__init__.py rename to apps/devops/app/common/log/__init__.py diff --git a/apps/devops/common/log/application_logger.py b/apps/devops/app/common/log/application_logger.py similarity index 87% rename from apps/devops/common/log/application_logger.py rename to apps/devops/app/common/log/application_logger.py index 67ec321..896c044 100644 --- a/apps/devops/common/log/application_logger.py +++ b/apps/devops/app/common/log/application_logger.py @@ -1,5 +1,5 @@ from .base_logger import LoggerBase -from common.config.app_settings import app_settings +from app.common.config.app_settings import app_settings class ApplicationLogger(LoggerBase): def __init__(self, application_activities: dict[str, any] = {}) -> None: diff --git a/apps/devops/common/log/base_logger.py b/apps/devops/app/common/log/base_logger.py similarity index 98% rename from apps/devops/common/log/base_logger.py rename to apps/devops/app/common/log/base_logger.py index 24f7bb0..a370296 100644 --- a/apps/devops/common/log/base_logger.py +++ b/apps/devops/app/common/log/base_logger.py @@ -1,5 +1,5 @@ from loguru import logger as guru_logger -from common.config.log_settings import log_settings +from app.common.config.log_settings import log_settings from typing import Dict, Any, Optional import socket import json @@ -9,7 +9,7 @@ import sys import inspect import logging -from 
common.log.json_sink import JsonSink +from app.common.log.json_sink import JsonSink class LoggerBase: binded_loggers = {} diff --git a/apps/devops/common/log/json_sink.py b/apps/devops/app/common/log/json_sink.py similarity index 100% rename from apps/devops/common/log/json_sink.py rename to apps/devops/app/common/log/json_sink.py diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py new file mode 100644 index 0000000..2535cbb --- /dev/null +++ b/apps/devops/app/common/models/__init__.py @@ -0,0 +1,4 @@ +from app.common.models.hello_world.hello_world import HelloWorld + +# list of beanie document models +db_models = [HelloWorld] \ No newline at end of file diff --git a/apps/devops/providers/__init__.py b/apps/devops/app/common/models/hello_world/__init__.py similarity index 100% rename from apps/devops/providers/__init__.py rename to apps/devops/app/common/models/hello_world/__init__.py diff --git a/apps/devops/common/models/hello_world/hello_world.py b/apps/devops/app/common/models/hello_world/hello_world.py similarity index 100% rename from apps/devops/common/models/hello_world/hello_world.py rename to apps/devops/app/common/models/hello_world/hello_world.py diff --git a/apps/devops/common/probes/__init__.py b/apps/devops/app/common/probes/__init__.py similarity index 100% rename from apps/devops/common/probes/__init__.py rename to apps/devops/app/common/probes/__init__.py diff --git a/apps/devops/common/probes/adapters.py b/apps/devops/app/common/probes/adapters.py similarity index 100% rename from apps/devops/common/probes/adapters.py rename to apps/devops/app/common/probes/adapters.py diff --git a/apps/devops/app/envs/alpha.yml b/apps/devops/app/envs/alpha.yml new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/envs/prod.yml b/apps/devops/app/envs/prod.yml new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/main.py b/apps/devops/app/main.py similarity index 76% rename from 
apps/devops/main.py rename to apps/devops/app/main.py index c8bf0ba..559d7ed 100644 --- a/apps/devops/main.py +++ b/apps/devops/app/main.py @@ -1,6 +1,6 @@ from fastapi.responses import RedirectResponse -from common.config.site_settings import site_settings -from bootstrap.application import create_app +from app.common.config.site_settings import site_settings +from app.bootstrap.application import create_app app = create_app() diff --git a/apps/devops/app/providers/__init__.py b/apps/devops/app/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/providers/common.py b/apps/devops/app/providers/common.py similarity index 92% rename from apps/devops/providers/common.py rename to apps/devops/app/providers/common.py index b7b1aa5..64a9a44 100644 --- a/apps/devops/providers/common.py +++ b/apps/devops/app/providers/common.py @@ -1,5 +1,5 @@ from fastapi.middleware.cors import CORSMiddleware -from common.config.site_settings import site_settings +from app.common.config.site_settings import site_settings def register(app): diff --git a/apps/devops/providers/database.py b/apps/devops/app/providers/database.py similarity index 88% rename from apps/devops/providers/database.py rename to apps/devops/app/providers/database.py index fa311bd..8716b8e 100644 --- a/apps/devops/providers/database.py +++ b/apps/devops/app/providers/database.py @@ -1,9 +1,9 @@ import asyncio -from common.config.app_settings import app_settings +from app.common.config.app_settings import app_settings from beanie import init_beanie from motor.motor_asyncio import AsyncIOMotorClient -from common.models import db_models -from common.probes import ProbeResult +from app.common.models import db_models +from app.common.probes import ProbeResult client = AsyncIOMotorClient( app_settings.APP_MONGODB_URI, diff --git a/apps/devops/providers/exception_handler.py b/apps/devops/app/providers/exception_handler.py similarity index 100% rename from 
apps/devops/providers/exception_handler.py rename to apps/devops/app/providers/exception_handler.py diff --git a/apps/devops/providers/logger.py b/apps/devops/app/providers/logger.py similarity index 70% rename from apps/devops/providers/logger.py rename to apps/devops/app/providers/logger.py index 6eb1e22..2785603 100644 --- a/apps/devops/providers/logger.py +++ b/apps/devops/app/providers/logger.py @@ -1,5 +1,4 @@ -from loguru import logger as guru_logger -from common.log.base_logger import LoggerBase +from app.common.log.base_logger import LoggerBase def register_logger(): diff --git a/apps/devops/providers/metrics.py b/apps/devops/app/providers/metrics.py similarity index 88% rename from apps/devops/providers/metrics.py rename to apps/devops/app/providers/metrics.py index c2270b7..1ae941a 100644 --- a/apps/devops/providers/metrics.py +++ b/apps/devops/app/providers/metrics.py @@ -1,6 +1,6 @@ import logging from prometheus_fastapi_instrumentator import Instrumentator -from common.config.app_settings import app_settings +from app.common.config.app_settings import app_settings def register(app): instrumentator = ( diff --git a/apps/devops/providers/probes.py b/apps/devops/app/providers/probes.py similarity index 87% rename from apps/devops/providers/probes.py rename to apps/devops/app/providers/probes.py index 7c5b5d8..883e3d6 100644 --- a/apps/devops/providers/probes.py +++ b/apps/devops/app/providers/probes.py @@ -1,5 +1,5 @@ -from common.probes import ProbeManager, ProbeType -from common.probes.adapters import FastAPIAdapter +from app.common.probes import ProbeManager, ProbeType +from app.common.probes.adapters import FastAPIAdapter from .database import check_database_initialized def register(app): diff --git a/apps/devops/providers/router.py b/apps/devops/app/providers/router.py similarity index 96% rename from apps/devops/providers/router.py rename to apps/devops/app/providers/router.py index 5b91f75..b273eb8 100644 --- a/apps/devops/providers/router.py +++ 
b/apps/devops/app/providers/router.py @@ -1,4 +1,4 @@ -from routes import api_router +from app.routes import api_router from starlette import routing diff --git a/apps/devops/providers/scheduler.py b/apps/devops/app/providers/scheduler.py similarity index 100% rename from apps/devops/providers/scheduler.py rename to apps/devops/app/providers/scheduler.py diff --git a/apps/devops/routes/__init__.py b/apps/devops/app/routes/__init__.py similarity index 71% rename from apps/devops/routes/__init__.py rename to apps/devops/app/routes/__init__.py index d698f6b..9644f27 100644 --- a/apps/devops/routes/__init__.py +++ b/apps/devops/app/routes/__init__.py @@ -1,5 +1,5 @@ from fastapi import APIRouter -from routes.hello_world import router as hello_world_router +from app.routes.hello_world import router as hello_world_router api_router = APIRouter() diff --git a/apps/devops/routes/hello_world/__init__.py b/apps/devops/app/routes/hello_world/__init__.py similarity index 100% rename from apps/devops/routes/hello_world/__init__.py rename to apps/devops/app/routes/hello_world/__init__.py diff --git a/apps/devops/routes/hello_world/apis.py b/apps/devops/app/routes/hello_world/apis.py similarity index 88% rename from apps/devops/routes/hello_world/apis.py rename to apps/devops/app/routes/hello_world/apis.py index d2983c9..dfb8cf0 100644 --- a/apps/devops/routes/hello_world/apis.py +++ b/apps/devops/app/routes/hello_world/apis.py @@ -1,7 +1,7 @@ from fastapi import APIRouter from loguru import logger -from common.daos.hello_world import hello_world_dao +from app.common.daos.hello_world import hello_world_dao router = APIRouter() diff --git a/apps/devops/scripts/mongodb/docker-compose.yml b/apps/devops/app/scripts/mongodb/docker-compose.yml similarity index 100% rename from apps/devops/scripts/mongodb/docker-compose.yml rename to apps/devops/app/scripts/mongodb/docker-compose.yml diff --git a/apps/devops/common/daos/hello_world/__init__.py 
b/apps/devops/common/daos/hello_world/__init__.py deleted file mode 100644 index f8a7a2d..0000000 --- a/apps/devops/common/daos/hello_world/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from common.daos.hello_world.hello_world_dao import HelloWorldDao - -hello_world_dao = HelloWorldDao() diff --git a/apps/devops/common/models/__init__.py b/apps/devops/common/models/__init__.py deleted file mode 100644 index a6dc240..0000000 --- a/apps/devops/common/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from common.models.hello_world.hello_world import HelloWorld - -# list of beanie document models -db_models = [HelloWorld] \ No newline at end of file diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt index 295004e..056543d 100644 --- a/apps/devops/requirements.txt +++ b/apps/devops/requirements.txt @@ -1,30 +1,10 @@ -annotated-types==0.7.0 -anyio==4.9.0 beanie==1.29.0 -click==8.2.0 -dnspython==2.7.0 -exceptiongroup==1.3.0 fastapi==0.115.12 -h11==0.16.0 -httptools==0.6.4 -idna==3.10 -lazy-model==0.2.0 loguru==0.7.3 motor==3.7.0 -prometheus-fastapi-instrumentator==7.1.0 -prometheus_client==0.21.1 -pydantic==2.11.4 -pydantic-settings==2.9.1 -pydantic_core==2.33.2 -pymongo==4.12.1 -python-dotenv==1.1.0 -PyYAML==6.0.2 -sniffio==1.3.1 +prometheus_fastapi_instrumentator==7.1.0 +pydantic_settings==2.9.1 +pytest==7.1.2 starlette==0.46.2 -toml==0.10.2 -typing-inspection==0.4.0 -typing_extensions==4.13.2 uvicorn==0.34.2 -uvloop==0.21.0 -watchfiles==1.0.5 -websockets==15.0.1 +httpx==0.24.0 \ No newline at end of file diff --git a/apps/devops/tests/__init__.py b/apps/devops/tests/__init__.py new file mode 100644 index 0000000..e69de29 From bf96f8b301d315f1a17c0cd39081d258899ef600 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 11 May 2025 19:16:00 -0700 Subject: [PATCH 03/41] Add template fastapi app, test, README.md, this can be used for future extension --- apps/devops/tests/routes/__init__.py | 0 apps/devops/tests/routes/test_hello_world.py | 9 +++++++++ 2 
files changed, 9 insertions(+) create mode 100644 apps/devops/tests/routes/__init__.py create mode 100644 apps/devops/tests/routes/test_hello_world.py diff --git a/apps/devops/tests/routes/__init__.py b/apps/devops/tests/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/tests/routes/test_hello_world.py b/apps/devops/tests/routes/test_hello_world.py new file mode 100644 index 0000000..285d174 --- /dev/null +++ b/apps/devops/tests/routes/test_hello_world.py @@ -0,0 +1,9 @@ +from fastapi.testclient import TestClient +from app.main import app # Adjust this import if your app is in a different location + +client = TestClient(app) + +def test_hello_world(): + response = client.get("/api/hello_world/") + assert response.status_code == 200 + assert response.json() == {"message": "Hello, World!"} # Update if your endpoint returns different data \ No newline at end of file From dd8926b2ec3aeadb4b53338db962441370238529 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 11 May 2025 21:01:11 -0700 Subject: [PATCH 04/41] Add template fastapi app, test, README.md, this can be used for future extension --- apps/devops/tests/routes/test_hello_world.py | 9 ------- apps/{devops => helloworld}/README.md | 2 +- apps/{devops => helloworld}/app/__init__.py | 0 .../app/bootstrap/__init__.py | 0 .../app/bootstrap/application.py | 0 .../app/common/__init__.py | 0 .../app/common/config/__init__.py | 0 .../app/common/config/app_settings.py | 0 .../app/common/config/log_settings.py | 0 .../app/common/config/site_settings.py | 0 .../app/common/daos/__init__.py | 0 .../app/common/daos/hello_world/__init__.py | 3 +++ .../daos/hello_world/hello_world_dao.py | 0 .../app/common/log/__init__.py | 0 .../app/common/log/application_logger.py | 0 .../app/common/log/base_logger.py | 0 .../app/common/log/json_sink.py | 0 .../app/common/models/__init__.py | 0 .../app/common/models/hello_world/__init__.py | 0 .../common/models/hello_world/hello_world.py | 0 
.../app/common/probes/__init__.py | 0 .../app/common/probes/adapters.py | 0 .../{devops => helloworld}/app/envs/alpha.yml | 0 apps/{devops => helloworld}/app/envs/prod.yml | 0 apps/{devops => helloworld}/app/main.py | 0 .../app/providers/__init__.py | 0 .../app/providers/common.py | 0 .../app/providers/database.py | 0 .../app/providers/exception_handler.py | 0 .../app/providers/logger.py | 0 .../app/providers/metrics.py | 0 .../app/providers/probes.py | 0 .../app/providers/router.py | 0 .../app/providers/scheduler.py | 0 .../app/routes/__init__.py | 0 .../app/routes/hello_world/__init__.py | 0 .../app/routes/hello_world/apis.py | 11 +++++--- .../app/scripts/mongodb/docker-compose.yml | 0 apps/{devops => helloworld}/requirements.txt | 0 apps/{devops => helloworld}/tests/__init__.py | 0 .../tests/routes/__init__.py | 0 .../tests/routes/test_hello_world.py | 27 +++++++++++++++++++ .../tests}/test_main.http | 0 43 files changed, 38 insertions(+), 14 deletions(-) delete mode 100644 apps/devops/tests/routes/test_hello_world.py rename apps/{devops => helloworld}/README.md (92%) rename apps/{devops => helloworld}/app/__init__.py (100%) rename apps/{devops => helloworld}/app/bootstrap/__init__.py (100%) rename apps/{devops => helloworld}/app/bootstrap/application.py (100%) rename apps/{devops => helloworld}/app/common/__init__.py (100%) rename apps/{devops => helloworld}/app/common/config/__init__.py (100%) rename apps/{devops => helloworld}/app/common/config/app_settings.py (100%) rename apps/{devops => helloworld}/app/common/config/log_settings.py (100%) rename apps/{devops => helloworld}/app/common/config/site_settings.py (100%) rename apps/{devops => helloworld}/app/common/daos/__init__.py (100%) rename apps/{devops => helloworld}/app/common/daos/hello_world/__init__.py (59%) rename apps/{devops => helloworld}/app/common/daos/hello_world/hello_world_dao.py (100%) rename apps/{devops => helloworld}/app/common/log/__init__.py (100%) rename apps/{devops => 
helloworld}/app/common/log/application_logger.py (100%) rename apps/{devops => helloworld}/app/common/log/base_logger.py (100%) rename apps/{devops => helloworld}/app/common/log/json_sink.py (100%) rename apps/{devops => helloworld}/app/common/models/__init__.py (100%) rename apps/{devops => helloworld}/app/common/models/hello_world/__init__.py (100%) rename apps/{devops => helloworld}/app/common/models/hello_world/hello_world.py (100%) rename apps/{devops => helloworld}/app/common/probes/__init__.py (100%) rename apps/{devops => helloworld}/app/common/probes/adapters.py (100%) rename apps/{devops => helloworld}/app/envs/alpha.yml (100%) rename apps/{devops => helloworld}/app/envs/prod.yml (100%) rename apps/{devops => helloworld}/app/main.py (100%) rename apps/{devops => helloworld}/app/providers/__init__.py (100%) rename apps/{devops => helloworld}/app/providers/common.py (100%) rename apps/{devops => helloworld}/app/providers/database.py (100%) rename apps/{devops => helloworld}/app/providers/exception_handler.py (100%) rename apps/{devops => helloworld}/app/providers/logger.py (100%) rename apps/{devops => helloworld}/app/providers/metrics.py (100%) rename apps/{devops => helloworld}/app/providers/probes.py (100%) rename apps/{devops => helloworld}/app/providers/router.py (100%) rename apps/{devops => helloworld}/app/providers/scheduler.py (100%) rename apps/{devops => helloworld}/app/routes/__init__.py (100%) rename apps/{devops => helloworld}/app/routes/hello_world/__init__.py (100%) rename apps/{devops => helloworld}/app/routes/hello_world/apis.py (53%) rename apps/{devops => helloworld}/app/scripts/mongodb/docker-compose.yml (100%) rename apps/{devops => helloworld}/requirements.txt (100%) rename apps/{devops => helloworld}/tests/__init__.py (100%) rename apps/{devops => helloworld}/tests/routes/__init__.py (100%) create mode 100644 apps/helloworld/tests/routes/test_hello_world.py rename apps/{devops => helloworld/tests}/test_main.http (100%) diff --git 
a/apps/devops/tests/routes/test_hello_world.py b/apps/devops/tests/routes/test_hello_world.py deleted file mode 100644 index 285d174..0000000 --- a/apps/devops/tests/routes/test_hello_world.py +++ /dev/null @@ -1,9 +0,0 @@ -from fastapi.testclient import TestClient -from app.main import app # Adjust this import if your app is in a different location - -client = TestClient(app) - -def test_hello_world(): - response = client.get("/api/hello_world/") - assert response.status_code == 200 - assert response.json() == {"message": "Hello, World!"} # Update if your endpoint returns different data \ No newline at end of file diff --git a/apps/devops/README.md b/apps/helloworld/README.md similarity index 92% rename from apps/devops/README.md rename to apps/helloworld/README.md index 588044f..2fb84b3 100644 --- a/apps/devops/README.md +++ b/apps/helloworld/README.md @@ -1,6 +1,6 @@ This is a template backend service based on fastapi + mongodb app -To start development in local, go to the root directory of the project YOUR_WORKSPACE_PATH/devops/ +To start development in local, go to the root directory of the project YOUR_WORKSPACE_PATH/helloworld/ ```bash docker compose -f app/scripts/mongodb/docker-compose.yml up -d ``` diff --git a/apps/devops/app/__init__.py b/apps/helloworld/app/__init__.py similarity index 100% rename from apps/devops/app/__init__.py rename to apps/helloworld/app/__init__.py diff --git a/apps/devops/app/bootstrap/__init__.py b/apps/helloworld/app/bootstrap/__init__.py similarity index 100% rename from apps/devops/app/bootstrap/__init__.py rename to apps/helloworld/app/bootstrap/__init__.py diff --git a/apps/devops/app/bootstrap/application.py b/apps/helloworld/app/bootstrap/application.py similarity index 100% rename from apps/devops/app/bootstrap/application.py rename to apps/helloworld/app/bootstrap/application.py diff --git a/apps/devops/app/common/__init__.py b/apps/helloworld/app/common/__init__.py similarity index 100% rename from 
apps/devops/app/common/__init__.py rename to apps/helloworld/app/common/__init__.py diff --git a/apps/devops/app/common/config/__init__.py b/apps/helloworld/app/common/config/__init__.py similarity index 100% rename from apps/devops/app/common/config/__init__.py rename to apps/helloworld/app/common/config/__init__.py diff --git a/apps/devops/app/common/config/app_settings.py b/apps/helloworld/app/common/config/app_settings.py similarity index 100% rename from apps/devops/app/common/config/app_settings.py rename to apps/helloworld/app/common/config/app_settings.py diff --git a/apps/devops/app/common/config/log_settings.py b/apps/helloworld/app/common/config/log_settings.py similarity index 100% rename from apps/devops/app/common/config/log_settings.py rename to apps/helloworld/app/common/config/log_settings.py diff --git a/apps/devops/app/common/config/site_settings.py b/apps/helloworld/app/common/config/site_settings.py similarity index 100% rename from apps/devops/app/common/config/site_settings.py rename to apps/helloworld/app/common/config/site_settings.py diff --git a/apps/devops/app/common/daos/__init__.py b/apps/helloworld/app/common/daos/__init__.py similarity index 100% rename from apps/devops/app/common/daos/__init__.py rename to apps/helloworld/app/common/daos/__init__.py diff --git a/apps/devops/app/common/daos/hello_world/__init__.py b/apps/helloworld/app/common/daos/hello_world/__init__.py similarity index 59% rename from apps/devops/app/common/daos/hello_world/__init__.py rename to apps/helloworld/app/common/daos/hello_world/__init__.py index b953163..463a9cb 100644 --- a/apps/devops/app/common/daos/hello_world/__init__.py +++ b/apps/helloworld/app/common/daos/hello_world/__init__.py @@ -1,3 +1,6 @@ from app.common.daos.hello_world.hello_world_dao import HelloWorldDao hello_world_dao = HelloWorldDao() + +def get_hello_world_dao() -> HelloWorldDao: + return hello_world_dao diff --git a/apps/devops/app/common/daos/hello_world/hello_world_dao.py 
b/apps/helloworld/app/common/daos/hello_world/hello_world_dao.py similarity index 100% rename from apps/devops/app/common/daos/hello_world/hello_world_dao.py rename to apps/helloworld/app/common/daos/hello_world/hello_world_dao.py diff --git a/apps/devops/app/common/log/__init__.py b/apps/helloworld/app/common/log/__init__.py similarity index 100% rename from apps/devops/app/common/log/__init__.py rename to apps/helloworld/app/common/log/__init__.py diff --git a/apps/devops/app/common/log/application_logger.py b/apps/helloworld/app/common/log/application_logger.py similarity index 100% rename from apps/devops/app/common/log/application_logger.py rename to apps/helloworld/app/common/log/application_logger.py diff --git a/apps/devops/app/common/log/base_logger.py b/apps/helloworld/app/common/log/base_logger.py similarity index 100% rename from apps/devops/app/common/log/base_logger.py rename to apps/helloworld/app/common/log/base_logger.py diff --git a/apps/devops/app/common/log/json_sink.py b/apps/helloworld/app/common/log/json_sink.py similarity index 100% rename from apps/devops/app/common/log/json_sink.py rename to apps/helloworld/app/common/log/json_sink.py diff --git a/apps/devops/app/common/models/__init__.py b/apps/helloworld/app/common/models/__init__.py similarity index 100% rename from apps/devops/app/common/models/__init__.py rename to apps/helloworld/app/common/models/__init__.py diff --git a/apps/devops/app/common/models/hello_world/__init__.py b/apps/helloworld/app/common/models/hello_world/__init__.py similarity index 100% rename from apps/devops/app/common/models/hello_world/__init__.py rename to apps/helloworld/app/common/models/hello_world/__init__.py diff --git a/apps/devops/app/common/models/hello_world/hello_world.py b/apps/helloworld/app/common/models/hello_world/hello_world.py similarity index 100% rename from apps/devops/app/common/models/hello_world/hello_world.py rename to apps/helloworld/app/common/models/hello_world/hello_world.py diff 
--git a/apps/devops/app/common/probes/__init__.py b/apps/helloworld/app/common/probes/__init__.py similarity index 100% rename from apps/devops/app/common/probes/__init__.py rename to apps/helloworld/app/common/probes/__init__.py diff --git a/apps/devops/app/common/probes/adapters.py b/apps/helloworld/app/common/probes/adapters.py similarity index 100% rename from apps/devops/app/common/probes/adapters.py rename to apps/helloworld/app/common/probes/adapters.py diff --git a/apps/devops/app/envs/alpha.yml b/apps/helloworld/app/envs/alpha.yml similarity index 100% rename from apps/devops/app/envs/alpha.yml rename to apps/helloworld/app/envs/alpha.yml diff --git a/apps/devops/app/envs/prod.yml b/apps/helloworld/app/envs/prod.yml similarity index 100% rename from apps/devops/app/envs/prod.yml rename to apps/helloworld/app/envs/prod.yml diff --git a/apps/devops/app/main.py b/apps/helloworld/app/main.py similarity index 100% rename from apps/devops/app/main.py rename to apps/helloworld/app/main.py diff --git a/apps/devops/app/providers/__init__.py b/apps/helloworld/app/providers/__init__.py similarity index 100% rename from apps/devops/app/providers/__init__.py rename to apps/helloworld/app/providers/__init__.py diff --git a/apps/devops/app/providers/common.py b/apps/helloworld/app/providers/common.py similarity index 100% rename from apps/devops/app/providers/common.py rename to apps/helloworld/app/providers/common.py diff --git a/apps/devops/app/providers/database.py b/apps/helloworld/app/providers/database.py similarity index 100% rename from apps/devops/app/providers/database.py rename to apps/helloworld/app/providers/database.py diff --git a/apps/devops/app/providers/exception_handler.py b/apps/helloworld/app/providers/exception_handler.py similarity index 100% rename from apps/devops/app/providers/exception_handler.py rename to apps/helloworld/app/providers/exception_handler.py diff --git a/apps/devops/app/providers/logger.py 
b/apps/helloworld/app/providers/logger.py similarity index 100% rename from apps/devops/app/providers/logger.py rename to apps/helloworld/app/providers/logger.py diff --git a/apps/devops/app/providers/metrics.py b/apps/helloworld/app/providers/metrics.py similarity index 100% rename from apps/devops/app/providers/metrics.py rename to apps/helloworld/app/providers/metrics.py diff --git a/apps/devops/app/providers/probes.py b/apps/helloworld/app/providers/probes.py similarity index 100% rename from apps/devops/app/providers/probes.py rename to apps/helloworld/app/providers/probes.py diff --git a/apps/devops/app/providers/router.py b/apps/helloworld/app/providers/router.py similarity index 100% rename from apps/devops/app/providers/router.py rename to apps/helloworld/app/providers/router.py diff --git a/apps/devops/app/providers/scheduler.py b/apps/helloworld/app/providers/scheduler.py similarity index 100% rename from apps/devops/app/providers/scheduler.py rename to apps/helloworld/app/providers/scheduler.py diff --git a/apps/devops/app/routes/__init__.py b/apps/helloworld/app/routes/__init__.py similarity index 100% rename from apps/devops/app/routes/__init__.py rename to apps/helloworld/app/routes/__init__.py diff --git a/apps/devops/app/routes/hello_world/__init__.py b/apps/helloworld/app/routes/hello_world/__init__.py similarity index 100% rename from apps/devops/app/routes/hello_world/__init__.py rename to apps/helloworld/app/routes/hello_world/__init__.py diff --git a/apps/devops/app/routes/hello_world/apis.py b/apps/helloworld/app/routes/hello_world/apis.py similarity index 53% rename from apps/devops/app/routes/hello_world/apis.py rename to apps/helloworld/app/routes/hello_world/apis.py index dfb8cf0..9f4902a 100644 --- a/apps/devops/app/routes/hello_world/apis.py +++ b/apps/helloworld/app/routes/hello_world/apis.py @@ -1,7 +1,7 @@ -from fastapi import APIRouter +from fastapi import APIRouter, Depends from loguru import logger -from 
app.common.daos.hello_world import hello_world_dao +from app.common.daos.hello_world import get_hello_world_dao, HelloWorldDao router = APIRouter() @@ -12,11 +12,14 @@ async def hello_world(): @router.post("/insert") -async def insert_hello_world(msg: str): +async def insert_hello_world(msg: str, dao: HelloWorldDao = Depends(get_hello_world_dao)): """ Insert a HelloWorld document into the database. """ - hello_world = await hello_world_dao.create_hello_world(msg, 1) + hello_world = await dao.create_hello_world(msg, 1) return hello_world + + + diff --git a/apps/devops/app/scripts/mongodb/docker-compose.yml b/apps/helloworld/app/scripts/mongodb/docker-compose.yml similarity index 100% rename from apps/devops/app/scripts/mongodb/docker-compose.yml rename to apps/helloworld/app/scripts/mongodb/docker-compose.yml diff --git a/apps/devops/requirements.txt b/apps/helloworld/requirements.txt similarity index 100% rename from apps/devops/requirements.txt rename to apps/helloworld/requirements.txt diff --git a/apps/devops/tests/__init__.py b/apps/helloworld/tests/__init__.py similarity index 100% rename from apps/devops/tests/__init__.py rename to apps/helloworld/tests/__init__.py diff --git a/apps/devops/tests/routes/__init__.py b/apps/helloworld/tests/routes/__init__.py similarity index 100% rename from apps/devops/tests/routes/__init__.py rename to apps/helloworld/tests/routes/__init__.py diff --git a/apps/helloworld/tests/routes/test_hello_world.py b/apps/helloworld/tests/routes/test_hello_world.py new file mode 100644 index 0000000..638a4b2 --- /dev/null +++ b/apps/helloworld/tests/routes/test_hello_world.py @@ -0,0 +1,27 @@ +from unittest.mock import AsyncMock, patch +from fastapi.testclient import TestClient +from app.main import app +from app.routes.hello_world.apis import get_hello_world_dao + + +def test_hello_world(): + with TestClient(app) as client: + response = client.get("/api/hello_world/") + assert response.status_code == 200 + assert response.json() == 
{"message": "Hello, World!"} + + +# mock out initiate_database so it doesn’t run during tests +@patch("app.providers.database.initiate_database", new_callable=AsyncMock) +def test_insert_hello_world(mock_db_init): + + class MockHelloWorldDao: + async def create_hello_world(self, msg: str, user_id: int): + return {"message": msg, "user_id": user_id} + + app.dependency_overrides[get_hello_world_dao] = lambda: MockHelloWorldDao() + with TestClient(app) as client: + response = client.post("/api/hello_world/insert", params={"msg": "Test Message"}) + assert response.status_code == 200 + assert response.json() == {"message": "Test Message", "user_id": 1} + app.dependency_overrides.clear() diff --git a/apps/devops/test_main.http b/apps/helloworld/tests/test_main.http similarity index 100% rename from apps/devops/test_main.http rename to apps/helloworld/tests/test_main.http From bcf13b48f248d1a58c759846f9b9c324814260af Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 18 May 2025 22:23:44 -0700 Subject: [PATCH 05/41] Add implementation of devops service --- apps/devops/README.md | 20 +++ apps/devops/app/__init__.py | 0 apps/devops/app/bootstrap/__init__.py | 0 apps/devops/app/bootstrap/application.py | 82 +++++++++ apps/devops/app/common/__init__.py | 0 apps/devops/app/common/config/__init__.py | 0 apps/devops/app/common/config/app_settings.py | 29 ++++ apps/devops/app/common/config/log_settings.py | 16 ++ .../devops/app/common/config/site_settings.py | 27 +++ apps/devops/app/common/daos/__init__.py | 0 .../app/common/daos/deployment/__init__.py | 6 + .../common/daos/deployment/deployment_dao.py | 45 +++++ .../app/common/daos/hello_world/__init__.py | 6 + .../daos/hello_world/hello_world_dao.py | 30 ++++ apps/devops/app/common/log/__init__.py | 0 .../app/common/log/application_logger.py | 12 ++ apps/devops/app/common/log/base_logger.py | 136 +++++++++++++++ apps/devops/app/common/log/json_sink.py | 85 ++++++++++ apps/devops/app/common/models/__init__.py | 5 + 
.../app/common/models/deployment/__init__.py | 0 .../common/models/deployment/deployment.py | 52 ++++++ .../app/common/models/hello_world/__init__.py | 0 .../common/models/hello_world/hello_world.py | 17 ++ apps/devops/app/common/probes/__init__.py | 140 ++++++++++++++++ apps/devops/app/common/probes/adapters.py | 15 ++ apps/devops/app/envs/alpha.yml | 0 apps/devops/app/envs/prod.yml | 0 apps/devops/app/main.py | 16 ++ apps/devops/app/providers/__init__.py | 0 apps/devops/app/providers/common.py | 31 ++++ apps/devops/app/providers/database.py | 34 ++++ .../devops/app/providers/exception_handler.py | 39 +++++ apps/devops/app/providers/logger.py | 7 + apps/devops/app/providers/metrics.py | 16 ++ apps/devops/app/providers/probes.py | 25 +++ apps/devops/app/providers/router.py | 34 ++++ apps/devops/app/providers/scheduler.py | 8 + apps/devops/app/routes/__init__.py | 7 + apps/devops/app/routes/deployment/__init__.py | 0 apps/devops/app/routes/deployment/apis.py | 34 ++++ apps/devops/app/routes/deployment/service.py | 157 ++++++++++++++++++ .../app/scripts/mongodb/docker-compose.yml | 18 ++ apps/devops/requirements.txt | 10 ++ apps/devops/tests/__init__.py | 0 apps/devops/tests/routes/__init__.py | 0 apps/devops/tests/routes/test_hello_world.py | 27 +++ apps/devops/tests/test_main.http | 8 + 47 files changed, 1194 insertions(+) create mode 100644 apps/devops/README.md create mode 100644 apps/devops/app/__init__.py create mode 100644 apps/devops/app/bootstrap/__init__.py create mode 100644 apps/devops/app/bootstrap/application.py create mode 100644 apps/devops/app/common/__init__.py create mode 100644 apps/devops/app/common/config/__init__.py create mode 100644 apps/devops/app/common/config/app_settings.py create mode 100644 apps/devops/app/common/config/log_settings.py create mode 100644 apps/devops/app/common/config/site_settings.py create mode 100644 apps/devops/app/common/daos/__init__.py create mode 100644 apps/devops/app/common/daos/deployment/__init__.py create 
mode 100644 apps/devops/app/common/daos/deployment/deployment_dao.py create mode 100644 apps/devops/app/common/daos/hello_world/__init__.py create mode 100644 apps/devops/app/common/daos/hello_world/hello_world_dao.py create mode 100644 apps/devops/app/common/log/__init__.py create mode 100644 apps/devops/app/common/log/application_logger.py create mode 100644 apps/devops/app/common/log/base_logger.py create mode 100644 apps/devops/app/common/log/json_sink.py create mode 100644 apps/devops/app/common/models/__init__.py create mode 100644 apps/devops/app/common/models/deployment/__init__.py create mode 100644 apps/devops/app/common/models/deployment/deployment.py create mode 100644 apps/devops/app/common/models/hello_world/__init__.py create mode 100644 apps/devops/app/common/models/hello_world/hello_world.py create mode 100644 apps/devops/app/common/probes/__init__.py create mode 100644 apps/devops/app/common/probes/adapters.py create mode 100644 apps/devops/app/envs/alpha.yml create mode 100644 apps/devops/app/envs/prod.yml create mode 100644 apps/devops/app/main.py create mode 100644 apps/devops/app/providers/__init__.py create mode 100644 apps/devops/app/providers/common.py create mode 100644 apps/devops/app/providers/database.py create mode 100644 apps/devops/app/providers/exception_handler.py create mode 100644 apps/devops/app/providers/logger.py create mode 100644 apps/devops/app/providers/metrics.py create mode 100644 apps/devops/app/providers/probes.py create mode 100644 apps/devops/app/providers/router.py create mode 100644 apps/devops/app/providers/scheduler.py create mode 100644 apps/devops/app/routes/__init__.py create mode 100644 apps/devops/app/routes/deployment/__init__.py create mode 100644 apps/devops/app/routes/deployment/apis.py create mode 100644 apps/devops/app/routes/deployment/service.py create mode 100644 apps/devops/app/scripts/mongodb/docker-compose.yml create mode 100644 apps/devops/requirements.txt create mode 100644 
apps/devops/tests/__init__.py create mode 100644 apps/devops/tests/routes/__init__.py create mode 100644 apps/devops/tests/routes/test_hello_world.py create mode 100644 apps/devops/tests/test_main.http diff --git a/apps/devops/README.md b/apps/devops/README.md new file mode 100644 index 0000000..2fb84b3 --- /dev/null +++ b/apps/devops/README.md @@ -0,0 +1,20 @@ +This is a template backend service based on fastapi + mongodb app + +To start development in local, go to the root directory of the project YOUR_WORKSPACE_PATH/helloworld/ +```bash +docker compose -f app/scripts/mongodb/docker-compose.yml up -d +``` + +Then run the app +```bash +uvicorn app.main:app --reload +``` + +In case a new dependency is added, run the following command to update the requirements.txt file +```bash +# optional: if you have not installed pipreqs +pip3 install pipreqs + +# generate requirements.txt +pipreqs . --force +``` diff --git a/apps/devops/app/__init__.py b/apps/devops/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/bootstrap/__init__.py b/apps/devops/app/bootstrap/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/bootstrap/application.py b/apps/devops/app/bootstrap/application.py new file mode 100644 index 0000000..24223b6 --- /dev/null +++ b/apps/devops/app/bootstrap/application.py @@ -0,0 +1,82 @@ +import logging +from fastapi import FastAPI +from fastapi.openapi.utils import get_openapi + +from app.providers import common +from app.providers.logger import register_logger +from app.providers import router +from app.providers import database +from app.providers import metrics +from app.providers import probes +from app.providers import exception_handler +from app.common.config.app_settings import app_settings + +def create_app() -> FastAPI: + logging.info("App initializing") + + app = FreeleapsApp() + + register_logger() + register(app, exception_handler) + register(app, database) + register(app, router) 
+ # register(app, scheduler) + register(app, common) + + # Call the custom_openapi function to change the OpenAPI version + customize_openapi_security(app) + # Register probe APIs if enabled + if app_settings.PROBES_ENABLED: + register(app, probes) + + # Register metrics APIs if enabled + if app_settings.METRICS_ENABLED: + register(app, metrics) + return app + + +# This function overrides the OpenAPI schema version to 3.0.0 +def customize_openapi_security(app: FastAPI) -> None: + + def custom_openapi(): + if app.openapi_schema: + return app.openapi_schema + + # Generate OpenAPI schema + openapi_schema = get_openapi( + title="FreeLeaps API", + version="3.1.0", + description="FreeLeaps API Documentation", + routes=app.routes, + ) + + # Ensure the components section exists in the OpenAPI schema + if "components" not in openapi_schema: + openapi_schema["components"] = {} + + # Add security scheme to components + openapi_schema["components"]["securitySchemes"] = { + "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"} + } + + # Add security requirement globally + openapi_schema["security"] = [{"bearerAuth": []}] + + app.openapi_schema = openapi_schema + return app.openapi_schema + + app.openapi = custom_openapi + + +def register(app, provider): + logging.info(provider.__name__ + " registering") + provider.register(app) + + +def boot(app, provider): + logging.info(provider.__name__ + " booting") + provider.boot(app) + +class FreeleapsApp(FastAPI): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) diff --git a/apps/devops/app/common/__init__.py b/apps/devops/app/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/config/__init__.py b/apps/devops/app/common/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/config/app_settings.py b/apps/devops/app/common/config/app_settings.py new file mode 100644 index 0000000..9a73bcb --- /dev/null 
+++ b/apps/devops/app/common/config/app_settings.py @@ -0,0 +1,29 @@ +from pydantic_settings import BaseSettings + +# NOTE: The values fall backs to your environment variables when not set here +class AppSettings(BaseSettings): + NAME: str = "YOUR_APP_NAME" + APP_NAME: str = NAME + APP_ENV: str = "alpha" + + JWT_SECRET_KEY: str = "" + ACCESS_TOKEN_EXPIRE_MINUTES: int = 3600 + REFRESH_TOKEN_EXPIRE_DAYS: int = 1 + + METRICS_ENABLED: bool = False + PROBES_ENABLED: bool = True + + APP_MONGODB_URI: str = "mongodb://localhost:27017" + APP_MONGODB_NAME: str = "testdb" + + LOG_BASE_PATH: str = "./log" + BACKEND_LOG_FILE_NAME: str = APP_NAME + APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity" + + + class Config: + env_file = ".myapp.env" + env_file_encoding = "utf-8" + + +app_settings = AppSettings() diff --git a/apps/devops/app/common/config/log_settings.py b/apps/devops/app/common/config/log_settings.py new file mode 100644 index 0000000..2f6985c --- /dev/null +++ b/apps/devops/app/common/config/log_settings.py @@ -0,0 +1,16 @@ +import os +from dataclasses import dataclass +from .app_settings import app_settings + +@dataclass +class LogSettings: + LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH + LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days") + LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00") # midnight + MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5)) + LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024)) # 10 MB + APP_NAME: str = app_settings.APP_NAME + ENVIRONMENT: str = app_settings.APP_ENV + + +log_settings = LogSettings() diff --git a/apps/devops/app/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py new file mode 100644 index 0000000..76e5af1 --- /dev/null +++ b/apps/devops/app/common/config/site_settings.py @@ -0,0 +1,27 @@ +import os + +from pydantic_settings import BaseSettings + + +# NOTE: The values fall backs to your environment 
variables when not set here +class SiteSettings(BaseSettings): + NAME: str = "appname" + DEBUG: bool = True + + ENV: str = "dev" + + SERVER_HOST: str = "localhost" + SERVER_PORT: int = 8000 + + URL: str = "http://localhost" + TIME_ZONE: str = "UTC" + + BASE_PATH: str = os.path.dirname(os.path.dirname((os.path.abspath(__file__)))) + + class Config: + env_file = ".devbase-webapi.env" + env_file_encoding = "utf-8" + + +site_settings = SiteSettings() + diff --git a/apps/devops/app/common/daos/__init__.py b/apps/devops/app/common/daos/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/daos/deployment/__init__.py b/apps/devops/app/common/daos/deployment/__init__.py new file mode 100644 index 0000000..4ee4c85 --- /dev/null +++ b/apps/devops/app/common/daos/deployment/__init__.py @@ -0,0 +1,6 @@ +from app.common.daos.deployment.deployment_dao import DeploymentDao + +deployment_dao = DeploymentDao() + +def get_hello_world_dao() -> DeploymentDao: + return deployment_dao \ No newline at end of file diff --git a/apps/devops/app/common/daos/deployment/deployment_dao.py b/apps/devops/app/common/daos/deployment/deployment_dao.py new file mode 100644 index 0000000..f34565a --- /dev/null +++ b/apps/devops/app/common/daos/deployment/deployment_dao.py @@ -0,0 +1,45 @@ +from app.common.models.deployment.deployment import Deployment + + +class DeploymentDao(): + def __init__(self): + pass + + async def create_deployment(self, deployment_data: Deployment): + # Logic to create a new deployment + Deployment.insert(deployment_data) + + async def get_deployments_by_deployment_id(self, deployment_id: str): + # Logic to get a deployment by ID + pass + + async def get_deployments_by_project_id(self, project_id: str): + # Logic to get deployments by project ID + pass + + async def get_deployments_by_product_id(self, project_id: str): + # Logic to get deployments by project ID + pass + + async def get_latest_deployment_by_project_id(self, project_id: 
str): + # Logic to get the latest deployment by project ID + pass + + + + async def get_deployments_by_user_id(self, user_id: str): + # Logic to get deployments by user ID + pass + + + + async def update_deployment(self, deployment_id: str, deployment_data: dict): + # Logic to update a deployment + pass + + async def delete_deployment(self, deployment_id: str): + # Logic to delete a deployment + pass + + + diff --git a/apps/devops/app/common/daos/hello_world/__init__.py b/apps/devops/app/common/daos/hello_world/__init__.py new file mode 100644 index 0000000..463a9cb --- /dev/null +++ b/apps/devops/app/common/daos/hello_world/__init__.py @@ -0,0 +1,6 @@ +from app.common.daos.hello_world.hello_world_dao import HelloWorldDao + +hello_world_dao = HelloWorldDao() + +def get_hello_world_dao() -> HelloWorldDao: + return hello_world_dao diff --git a/apps/devops/app/common/daos/hello_world/hello_world_dao.py b/apps/devops/app/common/daos/hello_world/hello_world_dao.py new file mode 100644 index 0000000..3b3a112 --- /dev/null +++ b/apps/devops/app/common/daos/hello_world/hello_world_dao.py @@ -0,0 +1,30 @@ +from app.common.models.hello_world.hello_world import HelloWorld + +class HelloWorldDao: + def __init__(self): + pass + + async def create_hello_world(self, message: str, count: int): + hello_world = HelloWorld(message=message, count=count) + await hello_world.insert() + return hello_world + + async def get_hello_world(self, id: str): + hello_world = await HelloWorld.get(id) + return hello_world + + async def update_hello_world(self, id: str, message: str, count: int): + hello_world = await HelloWorld.get(id) + if hello_world: + hello_world.message = message + hello_world.count = count + await hello_world.save() + return hello_world + return None + + async def delete_hello_world(self, id: str): + hello_world = await HelloWorld.get(id) + if hello_world: + await hello_world.delete() + return True + return False \ No newline at end of file diff --git 
a/apps/devops/app/common/log/__init__.py b/apps/devops/app/common/log/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/log/application_logger.py b/apps/devops/app/common/log/application_logger.py new file mode 100644 index 0000000..896c044 --- /dev/null +++ b/apps/devops/app/common/log/application_logger.py @@ -0,0 +1,12 @@ +from .base_logger import LoggerBase +from app.common.config.app_settings import app_settings + +class ApplicationLogger(LoggerBase): + def __init__(self, application_activities: dict[str, any] = {}) -> None: + extra_fileds = {} + if application_activities: + extra_fileds.update(application_activities) + super().__init__( + logger_name=app_settings.APPLICATION_ACTIVITY_LOG, + extra_fileds=extra_fileds, + ) diff --git a/apps/devops/app/common/log/base_logger.py b/apps/devops/app/common/log/base_logger.py new file mode 100644 index 0000000..a370296 --- /dev/null +++ b/apps/devops/app/common/log/base_logger.py @@ -0,0 +1,136 @@ +from loguru import logger as guru_logger +from app.common.config.log_settings import log_settings +from typing import Dict, Any, Optional +import socket +import json +import threading +import os +import sys +import inspect +import logging + +from app.common.log.json_sink import JsonSink + +class LoggerBase: + binded_loggers = {} + logger_lock = threading.Lock() + + def __init__(self, logger_name: str, extra_fileds: dict[str, any]) -> None: + self.__logger_name = logger_name + self.extra_fileds = extra_fileds + with LoggerBase.logger_lock: + if self.__logger_name in LoggerBase.binded_loggers: + self.logger = LoggerBase.binded_loggers[self.__logger_name] + return + + log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log" + log_level = "INFO" + rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024) + + guru_logger.remove() + + file_sink = JsonSink( + log_file_path=log_filename, + rotation_size_bytes=rotation_bytes, + 
max_backup_files=log_settings.MAX_BACKUP_FILES + ) + guru_logger.add( + sink=file_sink, + level=log_level, + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + guru_logger.add( + sink=sys.stderr, + level=log_level, + format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}", + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + host_name = socket.gethostname() + host_ip = socket.gethostbyname(host_name) + self.logger = guru_logger.bind( + topic=self.__logger_name, + host_ip=host_ip, + host_name=host_name, + app=log_settings.APP_NAME, + env=log_settings.ENVIRONMENT, + ) + with LoggerBase.logger_lock: + LoggerBase.binded_loggers[self.__logger_name] = self.logger + + def _get_log_context(self) -> dict: + frame = inspect.currentframe().f_back.f_back + filename = os.path.basename(frame.f_code.co_filename) + lineno = frame.f_lineno + return {"log_file": filename, "log_line": lineno} + + def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Dict[str, Any]: + props = {} if properties is None else properties.copy() + props_str = json.dumps(props, ensure_ascii=False) if props else "{}" + return props, props_str + + async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: dict[str, any], text: str = "") -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context) + local_logger.info(text) + + async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = 
self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context) + local_logger.exception(text) + + async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="information", properties=props, properties_str=props_str, **context) + local_logger.info(text) + + async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context) + local_logger.warning(text) + + async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="error", properties=props, properties_str=props_str, **context) + local_logger.error(text) + + @staticmethod + def configure_uvicorn_logging(): + print("📢 Setting up uvicorn logging interception...") + + # Intercept logs from these loggers + intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"] + + class InterceptHandler(logging.Handler): + def emit(self, record): + level = ( + guru_logger.level(record.levelname).name + if guru_logger.level(record.levelname, None) + else record.levelno + ) + frame, depth = logging.currentframe(), 2 + while frame.f_code.co_filename 
== logging.__file__: + frame = frame.f_back + depth += 1 + + guru_logger.opt(depth=depth, exception=record.exc_info).log( + level, + f"[{record.name}] {record.getMessage()}", + ) + + # Replace default handlers + logging.root.handlers.clear() + logging.root.setLevel(logging.INFO) + logging.root.handlers = [InterceptHandler()] + + # Configure specific uvicorn loggers + for logger_name in intercept_loggers: + logging_logger = logging.getLogger(logger_name) + logging_logger.handlers.clear() # Remove default handlers + logging_logger.propagate = True # Ensure propagation through Loguru diff --git a/apps/devops/app/common/log/json_sink.py b/apps/devops/app/common/log/json_sink.py new file mode 100644 index 0000000..a798156 --- /dev/null +++ b/apps/devops/app/common/log/json_sink.py @@ -0,0 +1,85 @@ +import json +import datetime +import traceback +from pathlib import Path +from typing import Optional + +class JsonSink: + def __init__( + self, + log_file_path: str, + rotation_size_bytes: int = 10 * 1024 * 1024, + max_backup_files: int = 5, + ): + self.log_file_path = Path(log_file_path) + self.rotation_size = rotation_size_bytes + self.max_backup_files = max_backup_files + self._open_log_file() + + def _open_log_file(self): + # ensure the parent directory exists + parent_dir = self.log_file_path.parent + if not parent_dir.exists(): + parent_dir.mkdir(parents=True, exist_ok=True) + self.log_file = self.log_file_path.open("a", encoding="utf-8") + + def _should_rotate(self) -> bool: + return self.log_file_path.exists() and self.log_file_path.stat().st_size >= self.rotation_size + + def _rotate(self): + self.log_file.close() + timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + rotated_path = self.log_file_path.with_name(f"{self.log_file_path.stem}_{timestamp}{self.log_file_path.suffix}") + self.log_file_path.rename(rotated_path) + self._cleanup_old_backups() + self._open_log_file() + + def _cleanup_old_backups(self): + parent = self.log_file_path.parent + stem = 
self.log_file_path.stem + suffix = self.log_file_path.suffix + + backup_files = sorted( + parent.glob(f"{stem}_*{suffix}"), + key=lambda p: p.stat().st_mtime, + reverse=True, + ) + + for old_file in backup_files[self.max_backup_files:]: + try: + old_file.unlink() + except Exception as e: + print(f"Failed to delete old backup {old_file}: {e}") + + def __call__(self, message): + record = message.record + if self._should_rotate(): + self._rotate() + + log_entry = { + "level": record["level"].name.lower(), + "timestamp": int(record["time"].timestamp() * 1000), + "text": record["message"], + "fields": record["extra"].get("properties", {}), + "context": { + "app": record["extra"].get("app"), + "env": record["extra"].get("env"), + "log_file": record["extra"].get("log_file"), + "log_line": record["extra"].get("log_line"), + "topic": record["extra"].get("topic"), + "sender_id": record["extra"].get("sender_id"), + "receiver_id": record["extra"].get("receiver_id"), + "subject": record["extra"].get("subject"), + "event": record["extra"].get("event"), + "host_ip": record["extra"].get("host_ip"), + "host_name": record["extra"].get("host_name"), + }, + "stacktrace": None + } + + if record["exception"]: + exc_type, exc_value, exc_tb = record["exception"] + log_entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb) + + self.log_file.write(json.dumps(log_entry, ensure_ascii=False) + "\n") + self.log_file.flush() diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py new file mode 100644 index 0000000..35ed321 --- /dev/null +++ b/apps/devops/app/common/models/__init__.py @@ -0,0 +1,5 @@ +from app.common.models.hello_world.hello_world import HelloWorld +from app.common.models.deployment.deployment import Deployment + +# list of beanie document models +db_models = [HelloWorld, Deployment] \ No newline at end of file diff --git a/apps/devops/app/common/models/deployment/__init__.py 
b/apps/devops/app/common/models/deployment/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py new file mode 100644 index 0000000..7abff26 --- /dev/null +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -0,0 +1,52 @@ +from datetime import datetime +from typing import Literal + +from beanie import Document +from pydantic import Field +from pydantic import BaseModel + + +class Deployment(Document): + deployment_id: str = Field(alias="_id") + deployment_stage: str + deployment_status: Literal["started", "failed", "succeeded", "aborted"] + + deployment_target_env: Literal["alpha", "prod"] + deployment_ttl_hours: int = 2 + + deployment_project_id: str + deployment_project_name: str + deployment_product_id: str + deployment_product_name: str + deployment_git_url: str + deployment_git_sha256: str + deployment_reason: str + + deployed_by: str + created_at: datetime = datetime.now() + updated_at: datetime = datetime.now() + + class Settings: + name = "deployment" + indexes = [ + [("deployment_product_id", 1), ("created_at", 1)], # Compound index + [("deployment_id", 1), ("deployment_status", 1)], # Compound index + {"keys": [("deployment_id", 1), ("deployment_stage", 1)], "unique": True} # Unique compound index + ] + +class InitDeploymentRequest(BaseModel): + product_id: str + sha256: str + target_env: str + user_id: str + reason: str = "not provided" + ttl_hours: int = 3 + +class CheckDeploymentStatusRequest(BaseModel): + product_id: str + target_env: str + user_id: str + + + + diff --git a/apps/devops/app/common/models/hello_world/__init__.py b/apps/devops/app/common/models/hello_world/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/models/hello_world/hello_world.py b/apps/devops/app/common/models/hello_world/hello_world.py new file mode 100644 index 0000000..55000c7 --- /dev/null +++ 
b/apps/devops/app/common/models/hello_world/hello_world.py @@ -0,0 +1,17 @@ +from datetime import datetime + +from beanie import Document + + +class HelloWorld(Document): + message: str + count: int = 0 + created_time: datetime = datetime.now() + + class Settings: + name = "hello_world" + indexes = [ + [("message", 1), ("count", 1)] + ] + + diff --git a/apps/devops/app/common/probes/__init__.py b/apps/devops/app/common/probes/__init__.py new file mode 100644 index 0000000..4071df8 --- /dev/null +++ b/apps/devops/app/common/probes/__init__.py @@ -0,0 +1,140 @@ +import logging +from enum import Enum +from typing import Optional, Callable, Tuple, Dict +import inspect +from datetime import datetime, timezone + +# ProbeType is an Enum that defines the types of probes that can be registered. +class ProbeType(Enum): + LIVENESS = "liveness" + READINESS = "readiness" + STARTUP = "startup" + +# ProbeResult is a class that represents the result of a probe check. +class ProbeResult: + def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None): + self.success = success + self.message = message + self.data = data or {} + + def to_dict(self) -> dict: + return { + "success": self.success, + "message": self.message, + "data": self.data + } + +# Probe is a class that represents a probe that can be registered. 
+class Probe: + def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None): + self.type = type + self.path = path + self.check_fn = check_fn + self.name = name or f"{type.value}-{id(self)}" + + async def execute(self) -> ProbeResult: + try: + result = self.check_fn() + if inspect.isawaitable(result): + result = await result + + if isinstance(result, ProbeResult): + return result + elif isinstance(result, bool): + return ProbeResult(result, "ok" if result else "failed") + else: + return ProbeResult(True, "ok") + except Exception as e: + return ProbeResult(False, str(e)) + +# ProbeGroup is a class that represents a group of probes that can be checked together. +class ProbeGroup: + def __init__(self, path: str): + self.path = path + self.probes: Dict[str, Probe] = {} + + def add_probe(self, probe: Probe): + self.probes[probe.name] = probe + + async def check_all(self) -> Tuple[bool, dict]: + results = {} + all_success = True + + for name, probe in self.probes.items(): + result = await probe.execute() + results[name] = result.to_dict() + if not result.success: + all_success = False + + return all_success, results + +# FrameworkAdapter is an abstract class that defines the interface for framework-specific probe adapters. +class FrameworkAdapter: + async def handle_request(self, group: ProbeGroup): + all_success, results = await group.check_all() + status_code = 200 if all_success else 503 + return {"status": "ok" if all_success else "failed", "payload": results, "timestamp": int(datetime.now(timezone.utc).timestamp())}, status_code + + def register_route(self, path: str, handler: Callable): + raise NotImplementedError + +# ProbeManager is a class that manages the registration of probes and their corresponding framework adapters. 
+class ProbeManager: + _default_paths = { + ProbeType.LIVENESS: "/_/livez", + ProbeType.READINESS: "/_/readyz", + ProbeType.STARTUP: "/_/healthz" + } + + def __init__(self): + self.groups: Dict[str, ProbeGroup] = {} + self.adapters: Dict[str, FrameworkAdapter] = {} + self._startup_complete = False + + def register_adapter(self, framework: str, adapter: FrameworkAdapter): + self.adapters[framework] = adapter + logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}") + + def register( + self, + type: ProbeType, + check_func: Optional[Callable] = None, + path: Optional[str] = None, + prefix: str = "", + name: Optional[str] = None, + frameworks: Optional[list] = None + ): + path = path or self._default_paths.get(type, "/_/healthz") + if prefix: + path = f"{prefix}{path}" + + if type == ProbeType.STARTUP and check_func is None: + check_func = self._default_startup_check + + probe = Probe(type, path, check_func or (lambda: True), name) + + if path not in self.groups: + self.groups[path] = ProbeGroup(path) + self.groups[path].add_probe(probe) + + for framework in (frameworks or ["default"]): + self._register_route(framework, path) + logging.info(f"Registered {type.value} probe route ({path}) for framework: {framework}") + + def _register_route(self, framework: str, path: str): + if framework not in self.adapters: + return + + adapter = self.adapters[framework] + group = self.groups[path] + + async def handler(): + return await adapter.handle_request(group) + + adapter.register_route(path, handler) + + def _default_startup_check(self) -> bool: + return self._startup_complete + + def mark_startup_complete(self): + self._startup_complete = True \ No newline at end of file diff --git a/apps/devops/app/common/probes/adapters.py b/apps/devops/app/common/probes/adapters.py new file mode 100644 index 0000000..2ecd38a --- /dev/null +++ b/apps/devops/app/common/probes/adapters.py @@ -0,0 +1,15 @@ +from . 
import FrameworkAdapter +from fastapi.responses import JSONResponse +from typing import Callable + +# FastAPIAdapter is a class that implements the FrameworkAdapter interface for FastAPI. +class FastAPIAdapter(FrameworkAdapter): + def __init__(self, app): + self.app = app + + def register_route(self,path: str, handler: Callable): + async def wrapper(): + data, status_code = await handler() + return JSONResponse(content=data, status_code=status_code) + + self.app.add_api_route(path, wrapper, methods=["GET"]) diff --git a/apps/devops/app/envs/alpha.yml b/apps/devops/app/envs/alpha.yml new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/envs/prod.yml b/apps/devops/app/envs/prod.yml new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/main.py b/apps/devops/app/main.py new file mode 100644 index 0000000..559d7ed --- /dev/null +++ b/apps/devops/app/main.py @@ -0,0 +1,16 @@ +from fastapi.responses import RedirectResponse +from app.common.config.site_settings import site_settings +from app.bootstrap.application import create_app + +app = create_app() + +@app.get("/", status_code=301) +async def root(): + """ + TODO: redirect client to /doc# + """ + return RedirectResponse("docs") + +if __name__ == "__main__": + import uvicorn + uvicorn.run("main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT, reload=True) \ No newline at end of file diff --git a/apps/devops/app/providers/__init__.py b/apps/devops/app/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/providers/common.py b/apps/devops/app/providers/common.py new file mode 100644 index 0000000..64a9a44 --- /dev/null +++ b/apps/devops/app/providers/common.py @@ -0,0 +1,31 @@ +from fastapi.middleware.cors import CORSMiddleware +from app.common.config.site_settings import site_settings + + +def register(app): + app.debug = site_settings.DEBUG + app.title = site_settings.NAME + + add_global_middleware(app) + + # This hook 
ensures that a connection is opened to handle any queries + # generated by the request. + @app.on_event("startup") + def startup(): + pass + + # This hook ensures that the connection is closed when we've finished + # processing the request. + @app.on_event("shutdown") + def shutdown(): + pass + + +def add_global_middleware(app): + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) diff --git a/apps/devops/app/providers/database.py b/apps/devops/app/providers/database.py new file mode 100644 index 0000000..8716b8e --- /dev/null +++ b/apps/devops/app/providers/database.py @@ -0,0 +1,34 @@ +import asyncio +from app.common.config.app_settings import app_settings +from beanie import init_beanie +from motor.motor_asyncio import AsyncIOMotorClient +from app.common.models import db_models +from app.common.probes import ProbeResult + +client = AsyncIOMotorClient( + app_settings.APP_MONGODB_URI, + serverSelectionTimeoutMS=60000, + minPoolSize=5, # Minimum number of connections in the pool + maxPoolSize=20, # Maximum number of connections in the pool +) + +def register(app): + app.debug = "auth_mongo_debug" + app.title = "auth_mongo_name" + + @app.on_event("startup") + async def start_database(): + await initiate_database() + +async def check_database_initialized() -> ProbeResult: + try: + await asyncio.wait_for(client.server_info(), timeout=5) + return ProbeResult(success=True, message="service has been initialized and ready to serve") + except Exception: + return ProbeResult(success=False, message="service is not initialized yet", data={"error": "database is not ready"}) + + +async def initiate_database(): + await init_beanie( + database=client[app_settings.APP_MONGODB_NAME], document_models=db_models + ) diff --git a/apps/devops/app/providers/exception_handler.py b/apps/devops/app/providers/exception_handler.py new file mode 100644 index 0000000..21117a5 --- /dev/null +++ 
b/apps/devops/app/providers/exception_handler.py @@ -0,0 +1,39 @@ +from fastapi import FastAPI, HTTPException +from fastapi.exceptions import RequestValidationError +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import ( + HTTP_400_BAD_REQUEST, + HTTP_401_UNAUTHORIZED, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_422_UNPROCESSABLE_ENTITY, + HTTP_500_INTERNAL_SERVER_ERROR, +) + + +async def custom_http_exception_handler(request: Request, exc: HTTPException): + return JSONResponse( + status_code=exc.status_code, + content={"error": exc.detail}, + ) + + + +async def validation_exception_handler(request: Request, exc: RequestValidationError): + return JSONResponse( + status_code=HTTP_400_BAD_REQUEST, + content={"error": str(exc)}, + ) + +async def exception_handler(request: Request, exc: Exception): + return JSONResponse( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + content={"error": str(exc)}, + ) + + +def register(app: FastAPI): + app.add_exception_handler(HTTPException, custom_http_exception_handler) + app.add_exception_handler(RequestValidationError, validation_exception_handler) + app.add_exception_handler(Exception, exception_handler) diff --git a/apps/devops/app/providers/logger.py b/apps/devops/app/providers/logger.py new file mode 100644 index 0000000..2785603 --- /dev/null +++ b/apps/devops/app/providers/logger.py @@ -0,0 +1,7 @@ +from app.common.log.base_logger import LoggerBase + + +def register_logger(): + print("📢 Setting up logging interception...") + LoggerBase.configure_uvicorn_logging() + print("✅ Logging interception complete. 
Logs are formatted and deduplicated!") diff --git a/apps/devops/app/providers/metrics.py b/apps/devops/app/providers/metrics.py new file mode 100644 index 0000000..1ae941a --- /dev/null +++ b/apps/devops/app/providers/metrics.py @@ -0,0 +1,16 @@ +import logging +from prometheus_fastapi_instrumentator import Instrumentator +from app.common.config.app_settings import app_settings + +def register(app): + instrumentator = ( + Instrumentator().instrument( + app, + metric_namespace="freeleaps", + metric_subsystem=app_settings.APP_NAME) + ) + + @app.on_event("startup") + async def startup(): + instrumentator.expose(app, endpoint="/api/_/metrics", should_gzip=True) + logging.info("Metrics endpoint exposed at /api/_/metrics") \ No newline at end of file diff --git a/apps/devops/app/providers/probes.py b/apps/devops/app/providers/probes.py new file mode 100644 index 0000000..883e3d6 --- /dev/null +++ b/apps/devops/app/providers/probes.py @@ -0,0 +1,25 @@ +from app.common.probes import ProbeManager, ProbeType +from app.common.probes.adapters import FastAPIAdapter +from .database import check_database_initialized + +def register(app): + probes_manager = ProbeManager() + probes_manager.register_adapter("fastapi", FastAPIAdapter(app)) + + async def readiness_checker(): + return await check_database_initialized() + + probes_manager.register( + name="readiness", + prefix="/api", + type=ProbeType.READINESS, + check_func=readiness_checker, + frameworks=["fastapi"] + ) + + probes_manager.register(name="liveness", prefix="/api", type=ProbeType.LIVENESS, frameworks=["fastapi"]) + probes_manager.register(name="startup", prefix="/api", type=ProbeType.STARTUP, frameworks=["fastapi"]) + + @app.on_event("startup") + async def mark_startup_complete(): + probes_manager.mark_startup_complete() \ No newline at end of file diff --git a/apps/devops/app/providers/router.py b/apps/devops/app/providers/router.py new file mode 100644 index 0000000..b273eb8 --- /dev/null +++ 
b/apps/devops/app/providers/router.py @@ -0,0 +1,34 @@ +from app.routes import api_router + +from starlette import routing + + +def register(app): + app.include_router( + api_router, + prefix="/api", + tags=["api"], + dependencies=[], + responses={404: {"description": "no page found"}}, + ) + + if app.debug: + for route in app.routes: + if not isinstance(route, routing.WebSocketRoute): + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "methods": route.methods, + } + ) + else: + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "type": "web socket route", + } + ) diff --git a/apps/devops/app/providers/scheduler.py b/apps/devops/app/providers/scheduler.py new file mode 100644 index 0000000..7ea8d6c --- /dev/null +++ b/apps/devops/app/providers/scheduler.py @@ -0,0 +1,8 @@ +import asyncio + + +def register(app): + @app.on_event("startup") + async def start_scheduler(): + #create your scheduler here + pass diff --git a/apps/devops/app/routes/__init__.py b/apps/devops/app/routes/__init__.py new file mode 100644 index 0000000..ab02a04 --- /dev/null +++ b/apps/devops/app/routes/__init__.py @@ -0,0 +1,7 @@ +from fastapi import APIRouter +from app.routes.deployment.apis import router as deployment_api + +api_router = APIRouter() + +# TODO: add custom routers here +api_router.include_router(deployment_api, tags=["deployment"]) diff --git a/apps/devops/app/routes/deployment/__init__.py b/apps/devops/app/routes/deployment/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py new file mode 100644 index 0000000..50eff75 --- /dev/null +++ b/apps/devops/app/routes/deployment/apis.py @@ -0,0 +1,34 @@ +from datetime import datetime +from typing import List + +from fastapi import APIRouter, Depends +from loguru import logger + +from app.common.daos.hello_world import get_hello_world_dao, HelloWorldDao +from 
app.common.models.deployment.deployment import Deployment, InitDeploymentRequest +from app.routes.deployment.service import DeploymentService, get_deployment_service + +router = APIRouter(prefix="/deployment") + +@router.post("/initDeployment") +## insert a new Deployment object to db +async def init_deployment( + request: InitDeploymentRequest, + service: DeploymentService = Depends(get_deployment_service) +) -> Deployment: + return await service.init_deployment(request) + +@router.post("/updateDeploymentStatus") +async def update_deployment( + request: Deployment, + service: DeploymentService = Depends(get_deployment_service) +) -> bool: + return await service.update_deployment_status(request) + +@router.get("/checkDeploymentStatus") +async def check_deployment_status( + deployment_id: str, + service: DeploymentService = Depends(get_deployment_service) +) -> List[Deployment]: + return await service.check_deployment_status(deployment_id) + diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py new file mode 100644 index 0000000..8a74d76 --- /dev/null +++ b/apps/devops/app/routes/deployment/service.py @@ -0,0 +1,157 @@ +from collections import defaultdict +from datetime import datetime, timedelta +from typing import List + +from fastapi import HTTPException + +from app.common.models import Deployment +from app.common.models.deployment.deployment import InitDeploymentRequest + + +class DeploymentService: + + def __init__(self): + pass + + async def init_deployment( + self, + request: InitDeploymentRequest + ) -> Deployment: + """ + """ + # TODO validate permission with user_id + # currently skip + + git_url = await self._retrieve_git_url_by_product_id(request.product_id) + + product_initialized = await self._check_if_project_initialized(git_url, request.product_id) + if not product_initialized: + await self._init_product(git_url, request.product_id) + + # retrieve project name + project_name = "TODO" + + # retrieve 
product info + product_id = "TODO" + product_name = "TODO" + + + + deployment = Deployment.model_construct( + deployment_stage = "init", + deployment_status = "started", + deployment_target_env = request.target_env, + deployment_ttl_hours = request.ttl_hours, + deployment_project_id = "project_id", + deployment_project_name = "project_name", + deployment_product_id = product_id, + deployment_product_name = product_name, + deployment_git_url = git_url, + deployment_git_sha256 = request.sha256, + deployment_reason = request.reason, + deployed_by = request.user_id, + created_at = datetime.now(), + updated_at = datetime.now(), + ) + + await self._start_deployment(deployment) + + res = await deployment.insert() + return res + + async def check_deployment_status( + self, + product_id: str, + ) -> List[Deployment]: + """ + Check the deployment status of the application, only check past 48 hours + """ + # TODO implement this function + time_threshold = datetime.now() - timedelta(hours=48) + deployment_records = await Deployment.find( + Deployment.deployment_product_id == product_id, + Deployment.created_at >= time_threshold + ).to_list() + grouped = defaultdict(list) + for deployment in deployment_records: + grouped[deployment.deployment_product_id].append(deployment) + for deployment_list in grouped.values(): + deployment_list.sort(key=lambda d: (d.created_at, d.updated_at), reverse=True) + + latest_deployments = [deployments[-1] for deployments in grouped.values()] + return latest_deployments + + async def update_deployment_status( + self, + deployment: Deployment + ) -> bool: + latest_record = await Deployment.find_one( + Deployment.deployment_id == deployment.deployment_id, + sort=[("created_at", -1)] + ) + + if not latest_record: + raise HTTPException(status_code=404, detail="No record found, please initiate deployment first") + + # TODO add more sanity check logic here + + if deployment.deployment_stage == latest_record.deployment_status: + # update existing record + 
latest_record.deployment_status = deployment.deployment_status + latest_record.updated_at = deployment.updated_at or datetime.now() + await latest_record.save() + else: + # create new record + deployment.deployment_id = latest_record.deployment_id + deployment.created_at = latest_record.created_at + deployment.updated_at = datetime.now() + await deployment.insert() + + return True + + async def _retrieve_git_url_by_product_id( + self, + product_id: str + ) -> str: + """ + Retrieve git url by product id + """ + # TODO implement this function + pass + + async def _check_if_project_initialized( + self, + git_url: str, + product_id: str + ) -> bool: + """ + Check if the project has been initialized + """ + # TODO implement this function + pass + + async def _init_product( + self, + git_url: str, + product_id: str + ) -> bool: + """ + Initialize the product + """ + # TODO implement this function + pass + + async def _start_deployment( + self, + deployment: Deployment + ) -> bool: + """ + Start the deployment + """ + # TODO implement this function + pass + +deployment_service = DeploymentService() + +def get_deployment_service() -> DeploymentService: + return deployment_service \ No newline at end of file diff --git a/apps/devops/app/scripts/mongodb/docker-compose.yml b/apps/devops/app/scripts/mongodb/docker-compose.yml new file mode 100644 index 0000000..8ab07c7 --- /dev/null +++ b/apps/devops/app/scripts/mongodb/docker-compose.yml @@ -0,0 +1,18 @@ +version: '3.8' + +services: + mongodb: + image: mongo:6.0 # You can change to the desired version + container_name: mongodb + restart: unless-stopped + ports: + - "27017:27017" + environment: + MONGO_INITDB_DATABASE: testdb # <-- This creates the initial database + volumes: + - mongodb_data:/data/db + command: ["mongod", "--noauth"] # <-- Disable authentication + + +volumes: + mongodb_data: \ No newline at end of file diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt new file mode 100644 index 
0000000..056543d --- /dev/null +++ b/apps/devops/requirements.txt @@ -0,0 +1,10 @@ +beanie==1.29.0 +fastapi==0.115.12 +loguru==0.7.3 +motor==3.7.0 +prometheus_fastapi_instrumentator==7.1.0 +pydantic_settings==2.9.1 +pytest==7.1.2 +starlette==0.46.2 +uvicorn==0.34.2 +httpx==0.24.0 \ No newline at end of file diff --git a/apps/devops/tests/__init__.py b/apps/devops/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/tests/routes/__init__.py b/apps/devops/tests/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/tests/routes/test_hello_world.py b/apps/devops/tests/routes/test_hello_world.py new file mode 100644 index 0000000..638a4b2 --- /dev/null +++ b/apps/devops/tests/routes/test_hello_world.py @@ -0,0 +1,27 @@ +from unittest.mock import AsyncMock, patch +from fastapi.testclient import TestClient +from app.main import app +from app.routes.hello_world.apis import get_hello_world_dao + + +def test_hello_world(): + with TestClient(app) as client: + response = client.get("/api/hello_world/") + assert response.status_code == 200 + assert response.json() == {"message": "Hello, World!"} + + +# mock out initiate_database so it doesn’t run during tests +@patch("app.providers.database.initiate_database", new_callable=AsyncMock) +def test_insert_hello_world(mock_db_init): + + class MockHelloWorldDao: + async def create_hello_world(self, msg: str, user_id: int): + return {"message": msg, "user_id": user_id} + + app.dependency_overrides[get_hello_world_dao] = lambda: MockHelloWorldDao() + with TestClient(app) as client: + response = client.post("/api/hello_world/insert", params={"msg": "Test Message"}) + assert response.status_code == 200 + assert response.json() == {"message": "Test Message", "user_id": 1} + app.dependency_overrides.clear() diff --git a/apps/devops/tests/test_main.http b/apps/devops/tests/test_main.http new file mode 100644 index 0000000..b847198 --- /dev/null +++ 
b/apps/devops/tests/test_main.http @@ -0,0 +1,8 @@ +# Test your FastAPI endpoints + +GET http://localhost:8000/api/hello_world/ +Accept: application/json + +### +POST http://localhost:8000/api/hello_world/insert?msg=Hello%20World +Accept: application/json From 43ec8ec01e65202bbf946d638bf002f47725d50e Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 18 May 2025 23:09:39 -0700 Subject: [PATCH 06/41] Clean up --- .../app/common/daos/hello_world/__init__.py | 6 ---- .../daos/hello_world/hello_world_dao.py | 30 ------------------- apps/devops/app/common/models/__init__.py | 3 +- .../common/models/deployment/deployment.py | 12 ++++++-- .../app/common/models/hello_world/__init__.py | 0 .../common/models/hello_world/hello_world.py | 17 ----------- apps/devops/app/routes/deployment/apis.py | 1 - 7 files changed, 11 insertions(+), 58 deletions(-) delete mode 100644 apps/devops/app/common/daos/hello_world/__init__.py delete mode 100644 apps/devops/app/common/daos/hello_world/hello_world_dao.py delete mode 100644 apps/devops/app/common/models/hello_world/__init__.py delete mode 100644 apps/devops/app/common/models/hello_world/hello_world.py diff --git a/apps/devops/app/common/daos/hello_world/__init__.py b/apps/devops/app/common/daos/hello_world/__init__.py deleted file mode 100644 index 463a9cb..0000000 --- a/apps/devops/app/common/daos/hello_world/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from app.common.daos.hello_world.hello_world_dao import HelloWorldDao - -hello_world_dao = HelloWorldDao() - -def get_hello_world_dao() -> HelloWorldDao: - return hello_world_dao diff --git a/apps/devops/app/common/daos/hello_world/hello_world_dao.py b/apps/devops/app/common/daos/hello_world/hello_world_dao.py deleted file mode 100644 index 3b3a112..0000000 --- a/apps/devops/app/common/daos/hello_world/hello_world_dao.py +++ /dev/null @@ -1,30 +0,0 @@ -from app.common.models.hello_world.hello_world import HelloWorld - -class HelloWorldDao: - def __init__(self): - pass - - async def 
create_hello_world(self, message: str, count: int): - hello_world = HelloWorld(message=message, count=count) - await hello_world.insert() - return hello_world - - async def get_hello_world(self, id: str): - hello_world = await HelloWorld.get(id) - return hello_world - - async def update_hello_world(self, id: str, message: str, count: int): - hello_world = await HelloWorld.get(id) - if hello_world: - hello_world.message = message - hello_world.count = count - await hello_world.save() - return hello_world - return None - - async def delete_hello_world(self, id: str): - hello_world = await HelloWorld.get(id) - if hello_world: - await hello_world.delete() - return True - return False \ No newline at end of file diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py index 35ed321..dd9cfe5 100644 --- a/apps/devops/app/common/models/__init__.py +++ b/apps/devops/app/common/models/__init__.py @@ -1,5 +1,4 @@ -from app.common.models.hello_world.hello_world import HelloWorld from app.common.models.deployment.deployment import Deployment # list of beanie document models -db_models = [HelloWorld, Deployment] \ No newline at end of file +db_models = [Deployment] \ No newline at end of file diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index 7abff26..c29feeb 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -2,12 +2,18 @@ from datetime import datetime from typing import Literal from beanie import Document -from pydantic import Field +from bson import ObjectId +from pydantic import Field, field_validator from pydantic import BaseModel class Deployment(Document): deployment_id: str = Field(alias="_id") + @field_validator("deployment_id", mode="before") + @classmethod + def convert_object_id(cls, v): + return str(v) + deployment_stage: str deployment_status: Literal["started", 
"failed", "succeeded", "aborted"] @@ -31,7 +37,9 @@ class Deployment(Document): indexes = [ [("deployment_product_id", 1), ("created_at", 1)], # Compound index [("deployment_id", 1), ("deployment_status", 1)], # Compound index - {"keys": [("deployment_id", 1), ("deployment_stage", 1)], "unique": True} # Unique compound index + + # somehow combo + unique errors out + # {"keys": [("deployment_id", 1), ("deployment_stage", 1)], "unique": True} # Unique compound index ] class InitDeploymentRequest(BaseModel): diff --git a/apps/devops/app/common/models/hello_world/__init__.py b/apps/devops/app/common/models/hello_world/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/devops/app/common/models/hello_world/hello_world.py b/apps/devops/app/common/models/hello_world/hello_world.py deleted file mode 100644 index 55000c7..0000000 --- a/apps/devops/app/common/models/hello_world/hello_world.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime - -from beanie import Document - - -class HelloWorld(Document): - message: str - count: int = 0 - created_time: datetime = datetime.now() - - class Settings: - name = "hello_world" - indexes = [ - [("message", 1), ("count", 1)] - ] - - diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index 50eff75..c6ebefc 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -4,7 +4,6 @@ from typing import List from fastapi import APIRouter, Depends from loguru import logger -from app.common.daos.hello_world import get_hello_world_dao, HelloWorldDao from app.common.models.deployment.deployment import Deployment, InitDeploymentRequest from app.routes.deployment.service import DeploymentService, get_deployment_service From 217f33fc176a936f90b769e0be215c5779624217 Mon Sep 17 00:00:00 2001 From: dongli Date: Mon, 19 May 2025 14:55:56 -0700 Subject: [PATCH 07/41] fix check deployment status bug --- 
.../app/common/models/deployment/deployment.py | 15 +++++---------- apps/devops/app/routes/deployment/service.py | 12 +++++++----- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index c29feeb..2bcbdda 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -5,14 +5,11 @@ from beanie import Document from bson import ObjectId from pydantic import Field, field_validator from pydantic import BaseModel +from pymongo import IndexModel class Deployment(Document): - deployment_id: str = Field(alias="_id") - @field_validator("deployment_id", mode="before") - @classmethod - def convert_object_id(cls, v): - return str(v) + deployment_id: str deployment_stage: str deployment_status: Literal["started", "failed", "succeeded", "aborted"] @@ -35,11 +32,9 @@ class Deployment(Document): class Settings: name = "deployment" indexes = [ - [("deployment_product_id", 1), ("created_at", 1)], # Compound index - [("deployment_id", 1), ("deployment_status", 1)], # Compound index - - # somehow combo + unique errors out - # {"keys": [("deployment_id", 1), ("deployment_stage", 1)], "unique": True} # Unique compound index + IndexModel([("deployment_product_id", 1), ("created_at", 1)]), + IndexModel([("deployment_id", 1), ("deployment_status", 1)]), + IndexModel([("deployment_id", 1), ("deployment_stage", 1)], unique=True) ] class InitDeploymentRequest(BaseModel): diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 8a74d76..9093d77 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -1,3 +1,4 @@ +import uuid from collections import defaultdict from datetime import datetime, timedelta from typing import List @@ -32,12 +33,13 @@ class DeploymentService: project_name = "TODO" # 
retrieve product info - product_id = "TODO" + product_id = request.product_id product_name = "TODO" deployment = Deployment.model_construct( + deployment_id = str(uuid.uuid4()), deployment_stage = "init", deployment_status = "started", deployment_target_env = request.target_env, @@ -74,11 +76,11 @@ class DeploymentService: ).to_list() grouped = defaultdict(list) for deployment in deployment_records: - grouped[deployment.deployment_product_id].append(deployment) + grouped[deployment.deployment_id].append(deployment) for deployment_list in grouped.values(): deployment_list.sort(key=lambda d: (d.created_at, d.updated_at), reverse=True) - latest_deployments = [deployments[-1] for deployments in grouped.values()] + return latest_deployments async def update_deployment_status( @@ -117,7 +119,7 @@ class DeploymentService: Retrieve git url by product id """ # TODO implement this function - pass + return "TODO-git_url" async def _check_if_project_initialized( self, @@ -128,7 +130,7 @@ class DeploymentService: Check if the project has been initialized """ # TODO implement this function - pass + return True async def _init_product( self, From 308f82740d5b234132241a179d709dd2082b0127 Mon Sep 17 00:00:00 2001 From: dongli Date: Sat, 7 Jun 2025 11:32:48 -0700 Subject: [PATCH 08/41] More impl for devops --- .../devops/app/common/config/site_settings.py | 5 + .../app/common/daos/code_depot/__init__.py | 6 ++ .../common/daos/code_depot/code_depot_dao.py | 19 ++++ .../app/common/daos/deployment/__init__.py | 2 +- .../common/daos/deployment/deployment_dao.py | 5 +- apps/devops/app/common/models/__init__.py | 6 +- apps/devops/app/routes/deployment/apis.py | 14 +++ apps/devops/app/routes/deployment/service.py | 100 +++++++++++------- 8 files changed, 112 insertions(+), 45 deletions(-) create mode 100644 apps/devops/app/common/daos/code_depot/__init__.py create mode 100644 apps/devops/app/common/daos/code_depot/code_depot_dao.py diff --git 
a/apps/devops/app/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py index 76e5af1..9a96914 100644 --- a/apps/devops/app/common/config/site_settings.py +++ b/apps/devops/app/common/config/site_settings.py @@ -18,6 +18,11 @@ class SiteSettings(BaseSettings): BASE_PATH: str = os.path.dirname(os.path.dirname((os.path.abspath(__file__)))) + BASE_GITEA_URL: str = "https://gitea.freeleaps.mathmast.com" + + # TODO: confirm with Zhenyu + BASE_RECONSILE_URL: str = "https://reconcile.freeleaps.mathmast.com" + class Config: env_file = ".devbase-webapi.env" env_file_encoding = "utf-8" diff --git a/apps/devops/app/common/daos/code_depot/__init__.py b/apps/devops/app/common/daos/code_depot/__init__.py new file mode 100644 index 0000000..7a4a4dc --- /dev/null +++ b/apps/devops/app/common/daos/code_depot/__init__.py @@ -0,0 +1,6 @@ +from app.common.daos.code_depot.code_depot_dao import CodeDepotDao + +code_depot_dao = CodeDepotDao() + +def get_code_depot_dao() -> CodeDepotDao: + return code_depot_dao \ No newline at end of file diff --git a/apps/devops/app/common/daos/code_depot/code_depot_dao.py b/apps/devops/app/common/daos/code_depot/code_depot_dao.py new file mode 100644 index 0000000..46539ad --- /dev/null +++ b/apps/devops/app/common/daos/code_depot/code_depot_dao.py @@ -0,0 +1,19 @@ +from app.common.models.code_depot.code_depot import CodeDepotDoc + + +class CodeDepotDao(): + + def __init__(self): + pass + + async def get_code_depot_by_product_id(self, product_id: str) -> CodeDepotDoc: + """ + Retrieve code depot by product id + """ + return await CodeDepotDoc.find_one({"product_id": product_id}) + + async def insert_code_depot(self, code_depot: CodeDepotDoc) -> CodeDepotDoc: + """ + Insert a new code depot into the database + """ + return await CodeDepotDoc.insert_one(code_depot) \ No newline at end of file diff --git a/apps/devops/app/common/daos/deployment/__init__.py b/apps/devops/app/common/daos/deployment/__init__.py index 4ee4c85..c97fd9d 
100644 --- a/apps/devops/app/common/daos/deployment/__init__.py +++ b/apps/devops/app/common/daos/deployment/__init__.py @@ -2,5 +2,5 @@ from app.common.daos.deployment.deployment_dao import DeploymentDao deployment_dao = DeploymentDao() -def get_hello_world_dao() -> DeploymentDao: +def get_deployment_dao() -> DeploymentDao: return deployment_dao \ No newline at end of file diff --git a/apps/devops/app/common/daos/deployment/deployment_dao.py b/apps/devops/app/common/daos/deployment/deployment_dao.py index f34565a..ef8b18c 100644 --- a/apps/devops/app/common/daos/deployment/deployment_dao.py +++ b/apps/devops/app/common/daos/deployment/deployment_dao.py @@ -5,9 +5,10 @@ class DeploymentDao(): def __init__(self): pass - async def create_deployment(self, deployment_data: Deployment): + async def create_deployment(self, deployment_data: Deployment) -> Deployment: # Logic to create a new deployment - Deployment.insert(deployment_data) + return await Deployment.insert(deployment_data) + async def get_deployments_by_deployment_id(self, deployment_id: str): # Logic to get a deployment by ID diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py index dd9cfe5..20aacb2 100644 --- a/apps/devops/app/common/models/__init__.py +++ b/apps/devops/app/common/models/__init__.py @@ -1,4 +1,6 @@ +from app.common.models.code_depot.code_depot import CodeDepotDoc from app.common.models.deployment.deployment import Deployment -# list of beanie document models -db_models = [Deployment] \ No newline at end of file +# list of beanie document models, +# must add here so that the mongo db collection can be automatically created +db_models = [Deployment, CodeDepotDoc] \ No newline at end of file diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index c6ebefc..a247742 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -4,6 +4,7 @@ from typing import 
List from fastapi import APIRouter, Depends from loguru import logger +from app.common.models import CodeDepotDoc from app.common.models.deployment.deployment import Deployment, InitDeploymentRequest from app.routes.deployment.service import DeploymentService, get_deployment_service @@ -31,3 +32,16 @@ async def check_deployment_status( ) -> List[Deployment]: return await service.check_deployment_status(deployment_id) +@router.post("/createDummyCodeDepot") +async def create_dummy_code_depot( + service: DeploymentService = Depends(get_deployment_service) +) -> CodeDepotDoc: + """ + Create a dummy code depot for testing purposes. + """ + try: + depot_name = await service.create_dummy_code_depot() + return depot_name + except Exception as e: + logger.error(f"Failed to create dummy code depot: {e}") + raise e diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 9093d77..b0f27bf 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -3,9 +3,14 @@ from collections import defaultdict from datetime import datetime, timedelta from typing import List -from fastapi import HTTPException +import httpx +from fastapi import HTTPException, Depends +from app.common.config.site_settings import site_settings +from app.common.daos.code_depot import get_code_depot_dao, CodeDepotDao +from app.common.daos.deployment import DeploymentDao, get_deployment_dao from app.common.models import Deployment +from app.common.models.code_depot.code_depot import CodeDepotDoc, DepotStatus from app.common.models.deployment.deployment import InitDeploymentRequest @@ -16,27 +21,25 @@ class DeploymentService: async def init_deployment( self, - request: InitDeploymentRequest + request: InitDeploymentRequest, + dao: DeploymentDao = Depends(get_deployment_dao) ) -> Deployment: """ """ # TODO validate permission with user_id # currently skip - git_url = await 
self._retrieve_git_url_by_product_id(request.product_id) + code_depot = await self._get_code_depot_by_product_id(request.product_id) + + git_url = await self._compose_git_url(code_depot.depot_name) - product_initialized = await self._check_if_project_initialized(git_url, request.product_id) - if not product_initialized: - await self._init_product(git_url, request.product_id) # retrieve project name project_name = "TODO" - # retrieve product info + # retrieve product info, depot name should be the same as product name product_id = request.product_id - product_name = "TODO" - - + product_name = code_depot.depot_name deployment = Deployment.model_construct( deployment_id = str(uuid.uuid4()), @@ -57,8 +60,8 @@ class DeploymentService: ) await self._start_deployment(deployment) + res = await dao.create_deployment(deployment) - res = await deployment.insert() return res async def check_deployment_status( @@ -111,47 +114,64 @@ class DeploymentService: return True - async def _retrieve_git_url_by_product_id( + async def _get_code_depot_by_product_id( self, - product_id: str + product_id: str, + code_depot_dao: CodeDepotDao = Depends(get_code_depot_dao) + ) -> CodeDepotDoc: + """ + Retrieve code depot by product id + """ + code_depot = await code_depot_dao.get_code_depot_by_product_id(product_id) + if not code_depot: + raise HTTPException(status_code=404, + detail="Code depot not found for the given product id, " + "please initialize the product first" + ) + return code_depot + + async def _compose_git_url( + self, + code_depot_name: str, + gitea_base_url: str = site_settings.BASE_GITEA_URL ) -> str: """ Retrieve git url by product id """ - # TODO implement this function - return "TODO-git_url" - - async def _check_if_project_initialized( - self, - git_url: str, - product_id: str - ) -> bool: - """ - Check if the project has been initialized - """ - # TODO implement this function - return True - - async def _init_product( - self, - git_url: str, - product_id: str - ) -> 
bool: - """ - Initialize the product - """ - # TODO implement this function - pass + return f"{gitea_base_url}/prodcuts/{code_depot_name.lower()}.git" async def _start_deployment( self, - deployment: Deployment + deployment: Deployment, + reconsile_base_url: str = site_settings.BASE_RECONSILE_URL, ) -> bool: """ Start the deployment """ - # TODO implement this function - pass + async with httpx.AsyncClient() as client: + response = await client.post( + f"{reconsile_base_url}/api/devops/reconcile", + json=deployment.model_dump() + ) + if response.status_code != 200: + raise HTTPException(status_code=response.status_code, detail=response.text) + return True + + async def create_dummy_code_depot( + self, + code_depot_dao: CodeDepotDao = Depends(get_code_depot_dao) + ) -> CodeDepotDoc: + """ + Create a dummy code depot for testing purposes. + """ + depot_name = f"dummy-depot-{uuid.uuid4()}" + code_depot = CodeDepotDoc( + depot_name=depot_name, + product_id="dummy-product-id", + depot_status=DepotStatus.CREATED + ) + + return await code_depot.insert_one(code_depot) deployment_service = DeploymentService() From ab7e6728b5041bce6dfb1f8c750bee553d5bb27c Mon Sep 17 00:00:00 2001 From: dongli Date: Sat, 7 Jun 2025 11:32:53 -0700 Subject: [PATCH 09/41] More impl for devops --- .../app/common/models/code_depot/__init__.py | 0 .../common/models/code_depot/code_depot.py | 39 +++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 apps/devops/app/common/models/code_depot/__init__.py create mode 100644 apps/devops/app/common/models/code_depot/code_depot.py diff --git a/apps/devops/app/common/models/code_depot/__init__.py b/apps/devops/app/common/models/code_depot/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/devops/app/common/models/code_depot/code_depot.py b/apps/devops/app/common/models/code_depot/code_depot.py new file mode 100644 index 0000000..c3d8ffc --- /dev/null +++ b/apps/devops/app/common/models/code_depot/code_depot.py @@ 
-0,0 +1,39 @@ +from datetime import datetime, timezone +from typing import List, Optional, Dict + +from beanie import Document +from enum import IntEnum + +from pymongo import IndexModel + + +class DepotStatus(IntEnum): + TO_BE_CREATED = 0 + CREATED = 1 + DELETED = 2 + + +class UserAccountStatus(IntEnum): + TO_BE_CREATED = 0 + CREATED = 1 + DELETED = 2 + DEACTIVATED = 3 + +class CodeDepotDoc(Document): + depot_name: str + product_id: str + depot_status: DepotStatus + collaborators: List[str] = [] + total_commits: Optional[int] = 0 + last_commiter: Optional[str] = "" + last_update: Optional[datetime] = datetime.now(timezone.utc) + weekly_commits: Optional[Dict[str, int]] = {} + + class Settings: + name = "code_depot" + indexes = [ + IndexModel([("product_id", 1)]) + ] + + + From b83f5fd606c2fd9d6c1079860255d50838ab06dc Mon Sep 17 00:00:00 2001 From: dongli Date: Sat, 7 Jun 2025 12:34:34 -0700 Subject: [PATCH 10/41] Depends only works with fastApi methods, so don't use Depends for other methods --- apps/devops/app/routes/deployment/service.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index b0f27bf..609e743 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -22,7 +22,7 @@ class DeploymentService: async def init_deployment( self, request: InitDeploymentRequest, - dao: DeploymentDao = Depends(get_deployment_dao) + dao: DeploymentDao = get_deployment_dao() ) -> Deployment: """ """ @@ -117,7 +117,7 @@ class DeploymentService: async def _get_code_depot_by_product_id( self, product_id: str, - code_depot_dao: CodeDepotDao = Depends(get_code_depot_dao) + code_depot_dao: CodeDepotDao = get_code_depot_dao() ) -> CodeDepotDoc: """ Retrieve code depot by product id @@ -147,16 +147,18 @@ class DeploymentService: ) -> bool: """ Start the deployment + Return true atm, modify calling 
reconsile service later """ - async with httpx.AsyncClient() as client: - response = await client.post( - f"{reconsile_base_url}/api/devops/reconcile", - json=deployment.model_dump() - ) - if response.status_code != 200: - raise HTTPException(status_code=response.status_code, detail=response.text) + # async with httpx.AsyncClient() as client: + # response = await client.post( + # f"{reconsile_base_url}/api/devops/reconcile", + # json=deployment.model_dump() + # ) + # if response.status_code != 200: + # raise HTTPException(status_code=response.status_code, detail=response.text) return True +# TODO: dummy test code, remove later async def create_dummy_code_depot( self, code_depot_dao: CodeDepotDao = Depends(get_code_depot_dao) From 7955b5884a03ec2a0b0314d3700d5f47ff58b53f Mon Sep 17 00:00:00 2001 From: dongli Date: Sat, 7 Jun 2025 13:11:22 -0700 Subject: [PATCH 11/41] Fix some bugs --- apps/devops/app/routes/deployment/apis.py | 2 +- apps/devops/app/routes/deployment/service.py | 17 ++++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index a247742..853439c 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -22,7 +22,7 @@ async def init_deployment( async def update_deployment( request: Deployment, service: DeploymentService = Depends(get_deployment_service) -) -> bool: +) -> Deployment: return await service.update_deployment_status(request) @router.get("/checkDeploymentStatus") diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 609e743..a4ea867 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -60,7 +60,7 @@ class DeploymentService: ) await self._start_deployment(deployment) - res = await dao.create_deployment(deployment) + res = await Deployment.insert(deployment) return res @@ 
-89,7 +89,7 @@ class DeploymentService: async def update_deployment_status( self, deployment: Deployment - ) -> bool: + ) -> Deployment: latest_record = await Deployment.find_one( Deployment.deployment_id == deployment.deployment_id, sort=[("created_at", -1)] @@ -100,19 +100,22 @@ class DeploymentService: # TODO add more sanity check logic here - if deployment.deployment_stage == latest_record.deployment_status: + # if updating the same stage, just update the status and timestamp + # else, create a new record with the same deployment_id + res = None + if deployment.deployment_stage == latest_record.deployment_stage: # update existing record latest_record.deployment_status = deployment.deployment_status latest_record.updated_at = deployment.updated_at or datetime.now() - await latest_record.save() + res = await latest_record.save() else: # create new record deployment.deployment_id = latest_record.deployment_id - deployment.created_at = latest_record.created_at + deployment.created_at = datetime.now() deployment.updated_at = datetime.now() - await deployment.insert() + res = await deployment.insert() - return True + return res async def _get_code_depot_by_product_id( self, From 6254d1309832c639ce4018d67c026b6e7bc6011d Mon Sep 17 00:00:00 2001 From: dongli Date: Sat, 7 Jun 2025 13:17:37 -0700 Subject: [PATCH 12/41] Clean up --- apps/devops/app/common/daos/__init__.py | 0 .../app/common/daos/code_depot/__init__.py | 6 --- .../common/daos/code_depot/code_depot_dao.py | 19 -------- .../app/common/daos/deployment/__init__.py | 6 --- .../common/daos/deployment/deployment_dao.py | 46 ------------------- apps/devops/app/routes/deployment/service.py | 9 +--- 6 files changed, 2 insertions(+), 84 deletions(-) delete mode 100644 apps/devops/app/common/daos/__init__.py delete mode 100644 apps/devops/app/common/daos/code_depot/__init__.py delete mode 100644 apps/devops/app/common/daos/code_depot/code_depot_dao.py delete mode 100644 
apps/devops/app/common/daos/deployment/__init__.py delete mode 100644 apps/devops/app/common/daos/deployment/deployment_dao.py diff --git a/apps/devops/app/common/daos/__init__.py b/apps/devops/app/common/daos/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/devops/app/common/daos/code_depot/__init__.py b/apps/devops/app/common/daos/code_depot/__init__.py deleted file mode 100644 index 7a4a4dc..0000000 --- a/apps/devops/app/common/daos/code_depot/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from app.common.daos.code_depot.code_depot_dao import CodeDepotDao - -code_depot_dao = CodeDepotDao() - -def get_code_depot_dao() -> CodeDepotDao: - return code_depot_dao \ No newline at end of file diff --git a/apps/devops/app/common/daos/code_depot/code_depot_dao.py b/apps/devops/app/common/daos/code_depot/code_depot_dao.py deleted file mode 100644 index 46539ad..0000000 --- a/apps/devops/app/common/daos/code_depot/code_depot_dao.py +++ /dev/null @@ -1,19 +0,0 @@ -from app.common.models.code_depot.code_depot import CodeDepotDoc - - -class CodeDepotDao(): - - def __init__(self): - pass - - async def get_code_depot_by_product_id(self, product_id: str) -> CodeDepotDoc: - """ - Retrieve code depot by product id - """ - return await CodeDepotDoc.find_one({"product_id": product_id}) - - async def insert_code_depot(self, code_depot: CodeDepotDoc) -> CodeDepotDoc: - """ - Insert a new code depot into the database - """ - return await CodeDepotDoc.insert_one(code_depot) \ No newline at end of file diff --git a/apps/devops/app/common/daos/deployment/__init__.py b/apps/devops/app/common/daos/deployment/__init__.py deleted file mode 100644 index c97fd9d..0000000 --- a/apps/devops/app/common/daos/deployment/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from app.common.daos.deployment.deployment_dao import DeploymentDao - -deployment_dao = DeploymentDao() - -def get_deployment_dao() -> DeploymentDao: - return deployment_dao \ No newline at end of file diff --git 
a/apps/devops/app/common/daos/deployment/deployment_dao.py b/apps/devops/app/common/daos/deployment/deployment_dao.py deleted file mode 100644 index ef8b18c..0000000 --- a/apps/devops/app/common/daos/deployment/deployment_dao.py +++ /dev/null @@ -1,46 +0,0 @@ -from app.common.models.deployment.deployment import Deployment - - -class DeploymentDao(): - def __init__(self): - pass - - async def create_deployment(self, deployment_data: Deployment) -> Deployment: - # Logic to create a new deployment - return await Deployment.insert(deployment_data) - - - async def get_deployments_by_deployment_id(self, deployment_id: str): - # Logic to get a deployment by ID - pass - - async def get_deployments_by_project_id(self, project_id: str): - # Logic to get deployments by project ID - pass - - async def get_deployments_by_product_id(self, project_id: str): - # Logic to get deployments by project ID - pass - - async def get_latest_deployment_by_project_id(self, project_id: str): - # Logic to get the latest deployment by project ID - pass - - - - async def get_deployments_by_user_id(self, user_id: str): - # Logic to get deployments by user ID - pass - - - - async def update_deployment(self, deployment_id: str, deployment_data: dict): - # Logic to update a deployment - pass - - async def delete_deployment(self, deployment_id: str): - # Logic to delete a deployment - pass - - - diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index a4ea867..476718e 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -7,8 +7,6 @@ import httpx from fastapi import HTTPException, Depends from app.common.config.site_settings import site_settings -from app.common.daos.code_depot import get_code_depot_dao, CodeDepotDao -from app.common.daos.deployment import DeploymentDao, get_deployment_dao from app.common.models import Deployment from app.common.models.code_depot.code_depot import 
CodeDepotDoc, DepotStatus from app.common.models.deployment.deployment import InitDeploymentRequest @@ -22,7 +20,6 @@ class DeploymentService: async def init_deployment( self, request: InitDeploymentRequest, - dao: DeploymentDao = get_deployment_dao() ) -> Deployment: """ """ @@ -120,12 +117,11 @@ class DeploymentService: async def _get_code_depot_by_product_id( self, product_id: str, - code_depot_dao: CodeDepotDao = get_code_depot_dao() ) -> CodeDepotDoc: """ Retrieve code depot by product id """ - code_depot = await code_depot_dao.get_code_depot_by_product_id(product_id) + code_depot = await CodeDepotDoc.find_one(CodeDepotDoc.product_id == product_id) if not code_depot: raise HTTPException(status_code=404, detail="Code depot not found for the given product id, " @@ -164,7 +160,6 @@ class DeploymentService: # TODO: dummy test code, remove later async def create_dummy_code_depot( self, - code_depot_dao: CodeDepotDao = Depends(get_code_depot_dao) ) -> CodeDepotDoc: """ Create a dummy code depot for testing purposes. 
@@ -176,7 +171,7 @@ class DeploymentService: depot_status=DepotStatus.CREATED ) - return await code_depot.insert_one(code_depot) + return await CodeDepotDoc.insert_one(code_depot) deployment_service = DeploymentService() From c09ad7f5c9623dd40f9d6e014bda6ef2da149e3c Mon Sep 17 00:00:00 2001 From: dongli Date: Thu, 12 Jun 2025 17:44:13 -0700 Subject: [PATCH 13/41] minor --- apps/devops/app/routes/deployment/apis.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index 853439c..b1b098e 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -18,6 +18,18 @@ async def init_deployment( ) -> Deployment: return await service.init_deployment(request) +@router.get('/getLatestDeployment') +async def get_latest_deployment( + product_id: str, + service: DeploymentService = Depends(get_deployment_service) +) -> Deployment: + """ + Get the latest deployment for a given product ID. 
+ """ + # return await service.get_latest_deployment(product_id) + return None + + @router.post("/updateDeploymentStatus") async def update_deployment( request: Deployment, From 92b2aba960ec809772c9ea2dea35f64289ac7550 Mon Sep 17 00:00:00 2001 From: dongli Date: Mon, 16 Jun 2025 23:18:26 -0700 Subject: [PATCH 14/41] Add check_application_logs --- .../devops/app/common/config/site_settings.py | 3 ++ .../common/models/deployment/deployment.py | 25 ++++++++- apps/devops/app/routes/deployment/apis.py | 19 ++++++- apps/devops/app/routes/deployment/service.py | 51 ++++++++++++++++++- apps/devops/requirements.txt | 6 ++- 5 files changed, 99 insertions(+), 5 deletions(-) diff --git a/apps/devops/app/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py index 9a96914..c313636 100644 --- a/apps/devops/app/common/config/site_settings.py +++ b/apps/devops/app/common/config/site_settings.py @@ -23,6 +23,9 @@ class SiteSettings(BaseSettings): # TODO: confirm with Zhenyu BASE_RECONSILE_URL: str = "https://reconcile.freeleaps.mathmast.com" + # TODO: modify this with actual Loki URL + BASE_LOKI_URL: str = "http://localhost:3100" + class Config: env_file = ".devbase-webapi.env" env_file_encoding = "utf-8" diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index 2bcbdda..4d9a9e1 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -1,5 +1,5 @@ -from datetime import datetime -from typing import Literal +from datetime import datetime, timedelta +from typing import Literal, List from beanie import Document from bson import ObjectId @@ -50,6 +50,27 @@ class CheckDeploymentStatusRequest(BaseModel): target_env: str user_id: str +class CheckApplicationLogsRequest(BaseModel): + product_id: str + target_env: Literal["alpha", "prod"] = "alpha" + user_id: str = '' + log_level: List[Literal["info", "error", "debug"]] = 
Field(default_factory=lambda: ["info"]) + start_time: datetime = datetime.now() - timedelta(minutes=5) + end_time: datetime = datetime.now() + limit: int = 1000 + +class CheckApplicationLogsResponse(BaseModel): + product_id: str + target_env: Literal["alpha", "prod"] + user_id: str = '' + log_level: List[Literal["info", "error", "debug"]] + start_time: datetime + end_time: datetime + limit: int + logs: list[str] + + + diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index b1b098e..d514127 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -5,7 +5,8 @@ from fastapi import APIRouter, Depends from loguru import logger from app.common.models import CodeDepotDoc -from app.common.models.deployment.deployment import Deployment, InitDeploymentRequest +from app.common.models.deployment.deployment import Deployment, InitDeploymentRequest, CheckDeploymentStatusRequest, \ + CheckApplicationLogsRequest, CheckApplicationLogsResponse from app.routes.deployment.service import DeploymentService, get_deployment_service router = APIRouter(prefix="/deployment") @@ -57,3 +58,19 @@ async def create_dummy_code_depot( except Exception as e: logger.error(f"Failed to create dummy code depot: {e}") raise e + +@router.post("/checkApplicationLogs") +async def check_application_logs( + request: CheckApplicationLogsRequest, + service: DeploymentService = Depends(get_deployment_service) +) -> CheckApplicationLogsResponse: + """ + Check application logs for a given deployment. 
+ """ + try: + res = await service.check_application_logs(request) + return res + except Exception as e: + logger.error(f"Failed to check application logs: {e}") + raise e + diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 476718e..66e13f4 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -4,12 +4,14 @@ from datetime import datetime, timedelta from typing import List import httpx +import requests from fastapi import HTTPException, Depends from app.common.config.site_settings import site_settings from app.common.models import Deployment from app.common.models.code_depot.code_depot import CodeDepotDoc, DepotStatus -from app.common.models.deployment.deployment import InitDeploymentRequest +from app.common.models.deployment.deployment import InitDeploymentRequest, CheckApplicationLogsRequest, \ + CheckApplicationLogsResponse class DeploymentService: @@ -157,6 +159,53 @@ class DeploymentService: # raise HTTPException(status_code=response.status_code, detail=response.text) return True + async def check_application_logs( + self, + request: CheckApplicationLogsRequest, + loki_url: str = site_settings.BASE_LOKI_URL, + ) -> CheckApplicationLogsResponse: + # Convert to nanoseconds since epoch + start_ns = int(request.start_time.timestamp() * 1e9) + end_ns = int(request.end_time.timestamp() * 1e9) + + # TODO: convert product_id to application name if needed + base_query = f'{{application="{request.product_id}", environment="{request.target_env}"}}' + log_level = '|'.join(request.log_level) if request.log_level else '' + loki_query = f'{base_query} |~ "{log_level}"' + + params = { + "query": loki_query, + "limit": request.limit, + "start": start_ns, + "end": end_ns, + } + + url = f"{loki_url}/loki/api/v1/query_range" + response = requests.get(url, params=params) + + if response.status_code != 200: + raise Exception(f"Query failed: {response.status_code} - 
{response.text}") + + data = response.json() + streams = data.get("data", {}).get("result", []) + + logs = [] + for stream in streams: + for ts, log in stream.get("values", []): + timestamp = datetime.fromtimestamp(int(ts) / 1e9) + logs.append(f"[{timestamp}] {log.strip()}") + + return CheckApplicationLogsResponse( + product_id=request.product_id, + target_env=request.target_env, + user_id=request.user_id, + log_level=request.log_level, + start_time=request.start_time, + end_time=request.end_time, + limit=request.limit, + logs=logs + ) + # TODO: dummy test code, remove later async def create_dummy_code_depot( self, diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt index 056543d..c593732 100644 --- a/apps/devops/requirements.txt +++ b/apps/devops/requirements.txt @@ -7,4 +7,8 @@ pydantic_settings==2.9.1 pytest==7.1.2 starlette==0.46.2 uvicorn==0.34.2 -httpx==0.24.0 \ No newline at end of file +httpx==0.24.0 +pydantic-settings~=2.9.1 +pymongo~=4.12.1 +pydantic~=2.11.4 +requests~=2.32.3 \ No newline at end of file From d660002076f70bff64770ea598ca0837931a9b4f Mon Sep 17 00:00:00 2001 From: dongli Date: Mon, 16 Jun 2025 23:19:09 -0700 Subject: [PATCH 15/41] minor --- apps/devops/app/bootstrap/application.py | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/devops/app/bootstrap/application.py b/apps/devops/app/bootstrap/application.py index 24223b6..81db6f5 100644 --- a/apps/devops/app/bootstrap/application.py +++ b/apps/devops/app/bootstrap/application.py @@ -20,7 +20,6 @@ def create_app() -> FastAPI: register(app, exception_handler) register(app, database) register(app, router) - # register(app, scheduler) register(app, common) # Call the custom_openapi function to change the OpenAPI version From aff12753dab4339c6152251172a60b1948e202dd Mon Sep 17 00:00:00 2001 From: dongli Date: Thu, 19 Jun 2025 21:08:08 -0700 Subject: [PATCH 16/41] Add deployment_app_url --- apps/devops/app/common/models/deployment/deployment.py | 1 + 1 file changed, 1 
insertion(+) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index 4d9a9e1..b9a3619 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -24,6 +24,7 @@ class Deployment(Document): deployment_git_url: str deployment_git_sha256: str deployment_reason: str + deployment_app_url: str = "" # URL to access the deployed application, keep it empty to be filled later deployed_by: str created_at: datetime = datetime.now() From 2934c647c6841551530082ce1a85e9c75640b27d Mon Sep 17 00:00:00 2001 From: dongli Date: Fri, 20 Jun 2025 15:09:27 -0700 Subject: [PATCH 17/41] make sha256 optional --- apps/devops/app/common/models/deployment/deployment.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index b9a3619..d4b9982 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta from typing import Literal, List from beanie import Document -from bson import ObjectId from pydantic import Field, field_validator from pydantic import BaseModel from pymongo import IndexModel @@ -40,8 +39,8 @@ class Deployment(Document): class InitDeploymentRequest(BaseModel): product_id: str - sha256: str - target_env: str + sha256: str = "" + target_env: Literal["alpha", "prod"] user_id: str reason: str = "not provided" ttl_hours: int = 3 @@ -53,7 +52,7 @@ class CheckDeploymentStatusRequest(BaseModel): class CheckApplicationLogsRequest(BaseModel): product_id: str - target_env: Literal["alpha", "prod"] = "alpha" + target_env: Literal["alpha", "prod"] user_id: str = '' log_level: List[Literal["info", "error", "debug"]] = Field(default_factory=lambda: ["info"]) start_time: datetime = 
datetime.now() - timedelta(minutes=5) From 979fcd298cff36354eeaba099732405788f8beef Mon Sep 17 00:00:00 2001 From: zhenyus Date: Mon, 23 Jun 2025 16:46:06 +0800 Subject: [PATCH 18/41] Add DevOps reconciliation models and integrate into deployment service Signed-off-by: zhenyus --- apps/devops/app/common/models/__init__.py | 1 + .../common/models/deployment/deployment.py | 23 +++++++++++--- apps/devops/app/routes/deployment/service.py | 31 +++++++++++++------ 3 files changed, 41 insertions(+), 14 deletions(-) diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py index 20aacb2..10e5320 100644 --- a/apps/devops/app/common/models/__init__.py +++ b/apps/devops/app/common/models/__init__.py @@ -1,5 +1,6 @@ from app.common.models.code_depot.code_depot import CodeDepotDoc from app.common.models.deployment.deployment import Deployment +from app.common.models.deployment.deployment import DevOpsReconcileRequest, DevOpsReconcileOperationType # list of beanie document models, # must add here so that the mongo db collection can be automatically created diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index 4d9a9e1..b47c423 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -1,5 +1,7 @@ from datetime import datetime, timedelta -from typing import Literal, List +from typing import Literal, List, Optional +from dataclasses import dataclass +from enum import Enum from beanie import Document from bson import ObjectId @@ -69,8 +71,19 @@ class CheckApplicationLogsResponse(BaseModel): limit: int logs: list[str] +class DevOpsReconcileOperationType(Enum): + START = "start" + TERMINATE = "terminate" + RESTART = "restart" - - - - +@dataclass +class DevOpsReconcileRequest(BaseModel): + operation: DevOpsReconcileOperationType + id: str + devops_proj_id: str + triggered_user_id: str + causes: 
str + commit_sha256: Optional[str] = None + target_env: Literal["alpha", "prod"] + ttl_controled: bool = False + ttl: int = 10800 \ No newline at end of file diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 66e13f4..c535a24 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -8,7 +8,7 @@ import requests from fastapi import HTTPException, Depends from app.common.config.site_settings import site_settings -from app.common.models import Deployment +from app.common.models import Deployment, DevOpsReconcileRequest, DevOpsReconcileOperationType from app.common.models.code_depot.code_depot import CodeDepotDoc, DepotStatus from app.common.models.deployment.deployment import InitDeploymentRequest, CheckApplicationLogsRequest, \ CheckApplicationLogsResponse @@ -148,15 +148,28 @@ class DeploymentService: ) -> bool: """ Start the deployment - Return true atm, modify calling reconsile service later + Return true atm, modify calling reconcile service later """ - # async with httpx.AsyncClient() as client: - # response = await client.post( - # f"{reconsile_base_url}/api/devops/reconcile", - # json=deployment.model_dump() - # ) - # if response.status_code != 200: - # raise HTTPException(status_code=response.status_code, detail=response.text) + # construct request body + request = DevOpsReconcileRequest( + operation=DevOpsReconcileOperationType.START, + id=deployment.deployment_id, + devops_proj_id=deployment.deployment_project_id, + triggered_user_id=deployment.deployed_by, + causes=deployment.deployment_reason, + target_env=deployment.deployment_target_env, + ttl_controled=True, + ttl=deployment.deployment_ttl_hours, + commit_sha256=deployment.deployment_git_sha256, + ) + # send request to reoncile service + async with httpx.AsyncClient() as client: + response = await client.post( + f"{reconsile_base_url}/api/devops/reconcile", + json=request.model_dump() + ) + 
if response.status_code != 200: + raise HTTPException(status_code=response.status_code, detail=response.text) return True async def check_application_logs( From 921633dc040e93dd40b0f5a83178be72f1a47626 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 13 Jul 2025 11:38:57 -0700 Subject: [PATCH 19/41] Implement get_latest_deployment --- apps/devops/app/routes/deployment/apis.py | 9 ++++----- apps/devops/app/routes/deployment/service.py | 21 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index d514127..a86c305 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List +from typing import List, Optional from fastapi import APIRouter, Depends from loguru import logger @@ -22,14 +22,13 @@ async def init_deployment( @router.get('/getLatestDeployment') async def get_latest_deployment( product_id: str, + target_env: str = "alpha", service: DeploymentService = Depends(get_deployment_service) -) -> Deployment: +) -> Optional[Deployment]: """ Get the latest deployment for a given product ID. 
""" - # return await service.get_latest_deployment(product_id) - return None - + return await service.get_latest_deployment(product_id, target_env) @router.post("/updateDeploymentStatus") async def update_deployment( diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index c535a24..8c2f258 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -13,6 +13,8 @@ from app.common.models.code_depot.code_depot import CodeDepotDoc, DepotStatus from app.common.models.deployment.deployment import InitDeploymentRequest, CheckApplicationLogsRequest, \ CheckApplicationLogsResponse +from loguru import logger + class DeploymentService: @@ -63,6 +65,25 @@ class DeploymentService: return res + async def get_latest_deployment( + self, + product_id: str, + target_env: str, + ) -> Deployment: + time_threshold = datetime.now() - timedelta(hours=168) # 7 days + deployment_records = await Deployment.find( + Deployment.deployment_product_id == product_id, + Deployment.deployment_target_env == target_env, + Deployment.updated_at >= time_threshold + ).to_list() + + if not deployment_records or len(deployment_records) == 0: + logger.warning(f"No deployment records found for product ID: {product_id} in the last 7 days") + return None + + latest_deployment = max(deployment_records, key=lambda d: (d.updated_at, d.created_at)) + return latest_deployment + async def check_deployment_status( self, product_id: str, From ccc995f599eb4002253502552a1c6091bf4e6284 Mon Sep 17 00:00:00 2001 From: sunhaolou Date: Mon, 21 Jul 2025 12:50:51 +0800 Subject: [PATCH 20/41] refractor: a current working version before cleaning up. 
--- .env | 24 ++ apps/payment/.env | 6 +- .../backend/application/payment_hub.py | 3 - .../backend/business/payment_manager.py | 22 +- .../backend/business/stripe_manager.py | 310 +++++++++++++++--- .../backend/infra/payment/constants.py | 1 - apps/payment/backend/infra/payment/models.py | 15 + apps/payment/backend/models/__init__.py | 4 +- apps/payment/backend/models/payment/models.py | 5 +- .../backend/services/payment/constants.py | 1 - .../backend/services/payment/models.py | 1 - apps/payment/common/log/base_logger.py | 15 +- .../payment/payment_manager_controller.py | 13 - .../payment/stripe_manager_controller.py | 24 +- 14 files changed, 360 insertions(+), 84 deletions(-) create mode 100644 .env diff --git a/.env b/.env new file mode 100644 index 0000000..1834048 --- /dev/null +++ b/.env @@ -0,0 +1,24 @@ +APP_NAME=payment +export SERVICE_API_ACCESS_HOST=0.0.0.0 +export SERVICE_API_ACCESS_PORT=8006 +export CONTAINER_APP_ROOT=/app +export LOG_BASE_PATH=$CONTAINER_APP_ROOT/log/$APP_NAME +export BACKEND_LOG_FILE_NAME=$APP_NAME +export APPLICATION_ACTIVITY_LOG=$APP_NAME-activity +export MONGODB_NAME=freeleaps2 +export MONGODB_PORT=27017 +GIT_REPO_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub +CODEBASE_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/apps/payment +SITE_DEPLOY_FOLDER=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/sites/payment/deploy +#!/bin/bash +export VENV_DIR=venv_t +export VENV_ACTIVATE=venv_t/bin/activate +export DOCKER_HOME=/var/lib/docker +export DOCKER_APP_HOME=$DOCKER_HOME/app +export DOCKER_BACKEND_HOME=$DOCKER_APP_HOME/$APP_NAME +export DOCKER_BACKEND_LOG_HOME=$DOCKER_BACKEND_HOME/log +export MONGODB_URI=mongodb://localhost:27017/ +export FREELEAPS_ENV=local +export SITE_URL_ROOT=http://localhost:5173/ +export LOG_BASE_PATH=${CODEBASE_ROOT}/log +export STRIPE_API_KEY=sk_test_51Ogsw5B0IyqaSJBrwczlr820jnmvA1qQQGoLZ2XxOsIzikpmXo4pRLjw4XVMTEBR8DdVTYySiAv1XX53Zv5xqynF00GfMqttFd diff --git 
a/apps/payment/.env b/apps/payment/.env index 6b56dca..1834048 100644 --- a/apps/payment/.env +++ b/apps/payment/.env @@ -7,9 +7,9 @@ export BACKEND_LOG_FILE_NAME=$APP_NAME export APPLICATION_ACTIVITY_LOG=$APP_NAME-activity export MONGODB_NAME=freeleaps2 export MONGODB_PORT=27017 -GIT_REPO_ROOT=/mnt/freeleaps/freeleaps-service-hub -CODEBASE_ROOT=/mnt/freeleaps/freeleaps-service-hub/apps/payment -SITE_DEPLOY_FOLDER=/mnt/freeleaps/freeleaps-service-hub/sites/payment/deploy +GIT_REPO_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub +CODEBASE_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/apps/payment +SITE_DEPLOY_FOLDER=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/sites/payment/deploy #!/bin/bash export VENV_DIR=venv_t export VENV_ACTIVATE=venv_t/bin/activate diff --git a/apps/payment/backend/application/payment_hub.py b/apps/payment/backend/application/payment_hub.py index 07cce96..8f18ba0 100644 --- a/apps/payment/backend/application/payment_hub.py +++ b/apps/payment/backend/application/payment_hub.py @@ -9,9 +9,6 @@ class PaymentHub: self.stripe_manager = StripeManager() return - async def fetch_wechat_qr_code(self, project_id: str) -> Optional[Dict[str, any]]: - return await self.payment_manager.fetch_wechat_qr_code(project_id) - async def fetch_stripe_account_id(self, user_id: str) -> Optional[str]: return await self.payment_manager.fetch_stripe_account_id(user_id) diff --git a/apps/payment/backend/business/payment_manager.py b/apps/payment/backend/business/payment_manager.py index 6a5d6d3..bb2b955 100644 --- a/apps/payment/backend/business/payment_manager.py +++ b/apps/payment/backend/business/payment_manager.py @@ -9,18 +9,6 @@ class PaymentManager: def __init__(self) -> None: self.module_logger = ModuleLogger(sender_id=PaymentManager) - async def fetch_wechat_qr_code(self, project_id: str) -> Optional[Dict[str, any]]: - project = await ProjectDoc.get(project_id) - proposer = project.proposer_id - income_profile = 
await IncomeProfileDoc.find_one( - IncomeProfileDoc.user_id == proposer - ) - if income_profile: - return income_profile.bank_account.money_collecting_methods[ - 0 - ].wechat_qr_code - return None - async def fetch_stripe_account_id(self, user_id: str) -> Optional[str]: income_profile = await IncomeProfileDoc.find_one(IncomeProfileDoc.user_id == user_id) if income_profile: @@ -44,7 +32,7 @@ class PaymentManager: } }} ) - + if not payment_profile: await self.module_logger.log_warning( warning="No payment profile found for Stripe account", @@ -54,7 +42,7 @@ class PaymentManager: } ) return False - + # Update the stripe method status updated = False # Need to check if money_collecting_methods exists and is not empty @@ -66,7 +54,7 @@ class PaymentManager: method.last_update_time = int(datetime.now().timestamp()) updated = True break # Exit loop once found and updated - + if updated: await payment_profile.save() await self.module_logger.log_info( @@ -79,7 +67,7 @@ class PaymentManager: } ) return True - + # Log warning with more context await self.module_logger.log_warning( warning="Stripe account not found in payment methods", @@ -91,7 +79,7 @@ class PaymentManager: } ) return False - + except Exception as e: await self.module_logger.log_exception( exception=e, diff --git a/apps/payment/backend/business/stripe_manager.py b/apps/payment/backend/business/stripe_manager.py index 656f884..92b28e4 100644 --- a/apps/payment/backend/business/stripe_manager.py +++ b/apps/payment/backend/business/stripe_manager.py @@ -8,6 +8,7 @@ from stripe.error import SignatureVerificationError from common.log.module_logger import ModuleLogger from decimal import Decimal import json +import httpx stripe.api_key = app_settings.STRIPE_API_KEY @@ -15,7 +16,7 @@ stripe.api_key = app_settings.STRIPE_API_KEY class StripeManager: def __init__(self) -> None: - self.site_url_root = app_settings.SITE_URL_ROOT.rstrip("/") + self.site_url_root = "http://localhost:8888" self.module_logger = 
ModuleLogger(sender_id="StripeManager") async def create_stripe_account(self) -> Optional[str]: @@ -25,26 +26,18 @@ class StripeManager: async def create_account_link(self, account_id: str, link_type: str = "account_onboarding") -> Optional[str]: account = stripe.Account.retrieve(account_id) # For account_update, try to show dashboard if TOS is accepted - - self.module_logger.log_info("create_account_link urls", - { - "redirect_url": "{}/work".format(self.site_url_root), - "refresh_url": "{}/front-door".format(self.site_url_root), - "return_url": "{}/work".format(self.site_url_root) - } - ) if link_type == "account_update" and account.tos_acceptance.date: login_link = stripe.Account.create_login_link( account_id, - redirect_url="{}/work".format(self.site_url_root) + redirect_url="http://localhost:8888/work" ) return login_link.url - + # Otherwise show onboarding account_link = stripe.AccountLink.create( account=account_id, - refresh_url="{}/front-door".format(self.site_url_root), - return_url="{}/work".format(self.site_url_root), + refresh_url="http://localhost:8888/front-door", + return_url="http://localhost:8888/work", type="account_onboarding", ) return account_link.url @@ -78,10 +71,20 @@ class StripeManager: async def __fetch_transaction_by_session_id( self, session_id: str ) -> Optional[StripeTransactionDoc]: + await self.module_logger.log_info( + f"Looking up transaction for session_id: {session_id}", + properties={"session_id": session_id} + ) + transactions = await StripeTransactionDoc.find( StripeTransactionDoc.stripe_checkout_session_id == session_id ).to_list() + await self.module_logger.log_info( + f"Found {len(transactions)} transactions for session_id: {session_id}", + properties={"session_id": session_id, "transaction_count": len(transactions)} + ) + if len(transactions) > 1: await self.module_logger.log_error( error="More than one transaction found for session_id: {}".format( @@ -90,9 +93,24 @@ class StripeManager: properties={"session_id": 
session_id}, ) elif len(transactions) == 0: + await self.module_logger.log_error( + error="No transaction found for session_id: {}".format(session_id), + properties={"session_id": session_id}, + ) return None - return transactions[0] + transaction = transactions[0] + await self.module_logger.log_info( + f"Found transaction: project_id={transaction.project_id}, milestone_index={transaction.milestone_index}, status={transaction.status}", + properties={ + "session_id": session_id, + "project_id": transaction.project_id, + "milestone_index": transaction.milestone_index, + "status": transaction.status + } + ) + + return transaction async def fetch_transaction_by_session_id( self, session_id: str @@ -203,19 +221,25 @@ class StripeManager: transaction.stripe_price_id = price.id await transaction.save() - payment_link = stripe.PaymentLink.create( - line_items=[ + # Prepare payment link parameters with conditional application_fee_amount + payment_link_params = { + "line_items": [ { "price": transaction.stripe_price_id, "quantity": 1, } ], - application_fee_amount=transaction.application_fee_amount, - on_behalf_of=transaction.to_stripe_account_id, - transfer_data={ + "on_behalf_of": transaction.to_stripe_account_id, + "transfer_data": { "destination": transaction.to_stripe_account_id, }, - ) + } + + # Only add application_fee_amount if it's greater than 0 + if transaction.application_fee_amount and transaction.application_fee_amount > 0: + payment_link_params["application_fee_amount"] = transaction.application_fee_amount + + payment_link = stripe.PaymentLink.create(**payment_link_params) if payment_link: transaction.stripe_payment_link = payment_link.url @@ -276,27 +300,37 @@ class StripeManager: transaction.stripe_price_id = price.id await transaction.save() - session = stripe.checkout.Session.create( - payment_method_types=["card"], - line_items=[ + # Prepare payment_intent_data with conditional application_fee_amount + payment_intent_data = { + "on_behalf_of": 
transaction.to_stripe_account_id, + "transfer_data": { + "destination": transaction.to_stripe_account_id, + }, + } + + # Only add application_fee_amount if it's greater than 0 + if transaction.application_fee_amount and transaction.application_fee_amount > 0: + payment_intent_data["application_fee_amount"] = transaction.application_fee_amount + + + + session_params = { + "payment_method_types": ["card"], + "line_items": [ { "price": transaction.stripe_price_id, "quantity": 1, } ], - payment_intent_data={ - "on_behalf_of": transaction.to_stripe_account_id, - "application_fee_amount": transaction.application_fee_amount, - "transfer_data": { - "destination": transaction.to_stripe_account_id, - }, - }, - mode="payment", - success_url="{}/projects".format( - self.site_url_root - ), # needs to be set, local: http://localhost/ - cancel_url="{}/projects".format(self.site_url_root), - ) + "payment_intent_data": payment_intent_data, + "mode": "payment", + "success_url": "http://localhost:8888/projects", + "cancel_url": "http://localhost:8888/projects", + } + + + + session = stripe.checkout.Session.create(**session_params) if session: transaction.stripe_checkout_session_id = session.id @@ -335,18 +369,220 @@ class StripeManager: # Handle the checkout.session.completed event if event["type"] == "checkout.session.completed": session = event["data"]["object"] + await self.module_logger.log_info( + f"Processing checkout.session.completed webhook for session_id: {session['id']}", + properties={"session_id": session["id"]} + ) + transaction = await self.__fetch_transaction_by_session_id(session["id"]) if not transaction: await self.module_logger.log_error( error="Transaction not found for session_id: {}".format(session["id"]), properties={"session_id": session["id"]}, ) - return False + return False, None, None + # Update transaction status transaction.status = TransactionStatus.COMPLETED transaction.updated_time = datetime.now(timezone.utc) await transaction.save() + await 
self.module_logger.log_info( + f"Successfully updated transaction status to COMPLETED for project_id: {transaction.project_id}, milestone_index: {transaction.milestone_index}", + properties={ + "project_id": transaction.project_id, + "milestone_index": transaction.milestone_index, + "session_id": session["id"] + } + ) + + # Save payment method information + payment_method_saved = False + try: + print("=" * 50) + print("STARTING PAYMENT METHOD PROCESSING") + print("=" * 50) + print(f"Starting payment method processing for session {session['id']}") + + # Get the Stripe session to extract payment method details + try: + stripe_session = stripe.checkout.Session.retrieve(session["id"]) + print(f"Successfully retrieved Stripe session: {session['id']}") + except Exception as session_error: + print(f"Failed to retrieve Stripe session {session['id']}: {session_error}") + raise session_error + + payment_intent_id = stripe_session.get('payment_intent') + print(f"Payment intent ID from session: {payment_intent_id}") + + if payment_intent_id: + try: + payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id) + print(f"Successfully retrieved payment intent: {payment_intent_id}") + except Exception as pi_error: + print(f"Failed to retrieve payment intent {payment_intent_id}: {pi_error}") + raise pi_error + + payment_method_id = payment_intent.get('payment_method') + print(f"Payment method ID from payment intent: {payment_method_id}") + + if payment_method_id: + try: + payment_method = stripe.PaymentMethod.retrieve(payment_method_id) + print(f"Successfully retrieved payment method: {payment_method_id}") + except Exception as pm_error: + print(f"Failed to retrieve payment method {payment_method_id}: {pm_error}") + raise pm_error + + card_details = payment_method.get('card', {}) + print(f"Card details: {card_details}") + + # Get user email (use a fallback since we don't have access to user profile) + user_email = f"user_{transaction.from_user}@freeleaps.com" + print(f"User 
email for customer creation: {user_email}") + + # Get or create customer for the user + # Try to find existing customer first + customer_id = None + try: + # Search for existing customers by email + customers = stripe.Customer.list(email=user_email, limit=1) + if customers.data: + customer_id = customers.data[0].id + print(f"Found existing customer: {customer_id}") + else: + # Create new customer + customer = stripe.Customer.create( + email=user_email, + metadata={"user_id": transaction.from_user} + ) + customer_id = customer.id + print(f"Created new customer: {customer_id}") + except Exception as customer_error: + print(f"Error creating/finding customer: {customer_error}") + # Use a fallback customer ID or skip payment method saving + customer_id = None + + if customer_id: + try: + # Check if payment method is already attached to a customer + payment_method_obj = stripe.PaymentMethod.retrieve(payment_method_id) + if payment_method_obj.customer: + print(f"Payment method {payment_method_id} already attached to customer {payment_method_obj.customer}") + # Use the existing customer ID + customer_id = payment_method_obj.customer + else: + # Try to attach payment method to customer in Stripe + stripe.PaymentMethod.attach( + payment_method_id, + customer=customer_id + ) + print(f"Successfully attached payment method {payment_method_id} to customer {customer_id}") + + # Check if payment method already exists in our database + from backend.infra.payment.models import StripePaymentMethodDoc + existing_payment_method = await StripePaymentMethodDoc.find_one( + StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id + ) + + if existing_payment_method: + print(f"Payment method {payment_method_id} already exists in database, skipping save") + payment_method_saved = True + else: + # Save to our database only if it doesn't exist + payment_method_doc = StripePaymentMethodDoc( + user_id=transaction.from_user, + stripe_customer_id=customer_id, + 
stripe_payment_method_id=payment_method_id, + card_last4=card_details.get('last4'), + card_brand=card_details.get('brand'), + card_exp_month=card_details.get('exp_month'), + card_exp_year=card_details.get('exp_year'), + created_time=datetime.now(timezone.utc), + updated_time=datetime.now(timezone.utc), + ) + await payment_method_doc.save() + payment_method_saved = True + print(f"Successfully saved payment method {payment_method_id} for user {transaction.from_user}") + except stripe.error.InvalidRequestError as attach_error: + if "already attached" in str(attach_error).lower(): + print(f"Payment method {payment_method_id} already attached to customer {customer_id}") + # Check if payment method already exists in our database + from backend.infra.payment.models import StripePaymentMethodDoc + existing_payment_method = await StripePaymentMethodDoc.find_one( + StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id + ) + + if existing_payment_method: + print(f"Payment method {payment_method_id} already exists in database, skipping save") + payment_method_saved = True + else: + # Still save to our database since it's already attached + payment_method_doc = StripePaymentMethodDoc( + user_id=transaction.from_user, + stripe_customer_id=customer_id, + stripe_payment_method_id=payment_method_id, + card_last4=card_details.get('last4'), + card_brand=card_details.get('brand'), + card_exp_month=card_details.get('exp_month'), + card_exp_year=card_details.get('exp_year'), + created_time=datetime.now(timezone.utc), + updated_time=datetime.now(timezone.utc), + ) + await payment_method_doc.save() + payment_method_saved = True + print(f"Successfully saved payment method {payment_method_id} for user {transaction.from_user}") + elif "may not be used again" in str(attach_error).lower(): + print(f"Payment method {payment_method_id} was already used and cannot be attached to customer") + print(f"This is normal for one-time payment methods. 
Saving card details to database anyway.") + # Check if payment method already exists in our database + from backend.infra.payment.models import StripePaymentMethodDoc + existing_payment_method = await StripePaymentMethodDoc.find_one( + StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id + ) + + if existing_payment_method: + print(f"Payment method {payment_method_id} already exists in database, skipping save") + payment_method_saved = True + else: + # Save to our database even though it can't be attached + payment_method_doc = StripePaymentMethodDoc( + user_id=transaction.from_user, + stripe_customer_id=customer_id, + stripe_payment_method_id=payment_method_id, + card_last4=card_details.get('last4'), + card_brand=card_details.get('brand'), + card_exp_month=card_details.get('exp_month'), + card_exp_year=card_details.get('exp_year'), + created_time=datetime.now(timezone.utc), + updated_time=datetime.now(timezone.utc), + ) + await payment_method_doc.save() + payment_method_saved = True + print(f"Successfully saved payment method {payment_method_id} for user {transaction.from_user} (one-time use)") + else: + print(f"Error attaching payment method: {attach_error}") + except Exception as save_error: + print(f"Error saving payment method to database: {save_error}") + else: + print(f"Could not create customer for user {transaction.from_user}, skipping payment method save") + else: + print(f"No payment method found in payment intent {payment_intent_id}") + else: + print(f"No payment intent found in session {session['id']}") + + except Exception as payment_method_error: + print(f"Error processing payment method: {payment_method_error}") + import traceback + print(f"Full traceback for payment method error:") + print(traceback.format_exc()) + # Don't fail the webhook if payment method saving fails, but log it + + print(f"Payment method saved: {payment_method_saved}") return True, transaction.project_id, transaction.milestone_index + await 
self.module_logger.log_info( + f"Received non-checkout.session.completed webhook event: {event['type']}", + properties={"event_type": event["type"]} + ) return False, None, None diff --git a/apps/payment/backend/infra/payment/constants.py b/apps/payment/backend/infra/payment/constants.py index 25d4254..310f8ff 100644 --- a/apps/payment/backend/infra/payment/constants.py +++ b/apps/payment/backend/infra/payment/constants.py @@ -12,7 +12,6 @@ class MoneyCollectionType(IntEnum): UNSPECIFIED = 0 MARKED_AS_PAID = 1 UPLOAD_PROOF = 2 - WECHAT_QR_CODE = 3 STRIPE_CHECKOUT = 4 diff --git a/apps/payment/backend/infra/payment/models.py b/apps/payment/backend/infra/payment/models.py index 8d556dc..e6a2df4 100644 --- a/apps/payment/backend/infra/payment/models.py +++ b/apps/payment/backend/infra/payment/models.py @@ -24,3 +24,18 @@ class StripeTransactionDoc(Document): class Settings: name = "stripe_transaction" + + +class StripePaymentMethodDoc(Document): + user_id: str + stripe_customer_id: str + stripe_payment_method_id: str + card_last4: Optional[str] = None + card_brand: Optional[str] = None + card_exp_month: Optional[int] = None + card_exp_year: Optional[int] = None + created_time: datetime + updated_time: datetime + + class Settings: + name = "stripe_payment_method" diff --git a/apps/payment/backend/models/__init__.py b/apps/payment/backend/models/__init__.py index 077c1f5..b5a31b5 100644 --- a/apps/payment/backend/models/__init__.py +++ b/apps/payment/backend/models/__init__.py @@ -5,9 +5,9 @@ # TODO: Add all models to backend_models from backend.services.payment.models import IncomeProfileDoc, PaymentProfileDoc from backend.services.project.models import ProjectDoc -from backend.infra.payment.models import StripeTransactionDoc +from backend.infra.payment.models import StripeTransactionDoc, StripePaymentMethodDoc -backend_models = [IncomeProfileDoc, PaymentProfileDoc, ProjectDoc, StripeTransactionDoc] +backend_models = [IncomeProfileDoc, PaymentProfileDoc, ProjectDoc, 
StripeTransactionDoc, StripePaymentMethodDoc] # backend_models.extend(code_models) # backend_models.extend(user_models) # backend_models.extend(profile_models) diff --git a/apps/payment/backend/models/payment/models.py b/apps/payment/backend/models/payment/models.py index 387c59e..5ad97e0 100644 --- a/apps/payment/backend/models/payment/models.py +++ b/apps/payment/backend/models/payment/models.py @@ -2,6 +2,7 @@ from typing import List, Dict, Optional from decimal import Decimal from beanie import Document from pydantic import BaseModel +from datetime import datetime from backend.services.payment.constants import PaymentGateway from backend.infra.payment.constants import MoneyCollectionType, PaymentLocation @@ -23,7 +24,6 @@ class MoneyCollectingMethod(BaseModel): location: Optional[PaymentLocation] priority: int = 0 # less number has high priority to be used. stripe_account_id: Optional[str] - wechat_qr_code: Optional[str] last_update_time: Optional[int] = None @@ -61,3 +61,6 @@ class PaymentProfileDoc(Document): class Settings: name = "payment_profile" + + + diff --git a/apps/payment/backend/services/payment/constants.py b/apps/payment/backend/services/payment/constants.py index 202cba6..1b5d568 100644 --- a/apps/payment/backend/services/payment/constants.py +++ b/apps/payment/backend/services/payment/constants.py @@ -3,4 +3,3 @@ from enum import IntEnum class PaymentGateway(IntEnum): STRIP = 1 - WECHAT = 2 diff --git a/apps/payment/backend/services/payment/models.py b/apps/payment/backend/services/payment/models.py index 606668e..05d79bf 100644 --- a/apps/payment/backend/services/payment/models.py +++ b/apps/payment/backend/services/payment/models.py @@ -27,7 +27,6 @@ class MoneyCollectingMethod(BaseModel): location: Optional[PaymentLocation] priority: int = 0 # less number has high priority to be used. 
stripe_account_id: Optional[str] - wechat_qr_code: Optional[str] last_update_time: Optional[int] = None diff --git a/apps/payment/common/log/base_logger.py b/apps/payment/common/log/base_logger.py index 24f7bb0..d87356b 100644 --- a/apps/payment/common/log/base_logger.py +++ b/apps/payment/common/log/base_logger.py @@ -30,7 +30,7 @@ class LoggerBase: guru_logger.remove() file_sink = JsonSink( - log_file_path=log_filename, + log_file_path=log_filename, rotation_size_bytes=rotation_bytes, max_backup_files=log_settings.MAX_BACKUP_FILES ) @@ -47,8 +47,17 @@ class LoggerBase: filter=lambda record: record["extra"].get("topic") == self.__logger_name, ) - host_name = socket.gethostname() - host_ip = socket.gethostbyname(host_name) + try: + host_name = socket.gethostname() + host_ip = socket.gethostbyname(host_name) + except socket.gaierror: + # Fallback if hostname resolution fails + host_name = "localhost" + host_ip = "127.0.0.1" + except Exception: + # Generic fallback + host_name = "localhost" + host_ip = "127.0.0.1" self.logger = guru_logger.bind( topic=self.__logger_name, host_ip=host_ip, diff --git a/apps/payment/webapi/routes/payment/payment_manager_controller.py b/apps/payment/webapi/routes/payment/payment_manager_controller.py index e9500ce..eb21bd1 100644 --- a/apps/payment/webapi/routes/payment/payment_manager_controller.py +++ b/apps/payment/webapi/routes/payment/payment_manager_controller.py @@ -6,19 +6,6 @@ from fastapi.encoders import jsonable_encoder router = APIRouter() payment_hub = PaymentHub() -# Web API -# Fetch wechat qr code -@router.get( - "/fetch_wechat_qr_code/{project_id}", - operation_id="fetch_wechat_qr_code", - summary="Fetch wechat qr code", - description="Fetch wechat qr code", -) -async def fetch_wechat_qr_code( - project_id: str -): - return await payment_hub.fetch_wechat_qr_code(project_id) - # Web API # Fetch stripe account id @router.get( diff --git a/apps/payment/webapi/routes/payment/stripe_manager_controller.py 
b/apps/payment/webapi/routes/payment/stripe_manager_controller.py index ee95dea..c8c353a 100644 --- a/apps/payment/webapi/routes/payment/stripe_manager_controller.py +++ b/apps/payment/webapi/routes/payment/stripe_manager_controller.py @@ -204,8 +204,28 @@ async def handle_account_webhook( details_submitted=session["details_submitted"], payouts_enabled=session["payouts_enabled"], charges_enabled=session["charges_enabled"] - ) + ) except Exception as e: raise HTTPException(status_code=400, detail=str(e)) - + return JSONResponse(content={"status": "success"}) + + +# Web API +# Detach payment method +@router.delete( + "/detach_payment_method/{payment_method_id}", + operation_id="detach_payment_method", + summary="Detach payment method from customer", + description="Detach a payment method from a Stripe customer", +) +async def detach_payment_method(payment_method_id: str): + try: + # Detach the payment method from Stripe + stripe.PaymentMethod.detach(payment_method_id) + return JSONResponse(content={"success": True, "message": "Payment method detached successfully"}) + except Exception as e: + return JSONResponse( + status_code=400, + content={"success": False, "message": f"Failed to detach payment method: {str(e)}"} + ) From a8f0a714ae06b692638ba3d35d0fbb6e128dffcc Mon Sep 17 00:00:00 2001 From: sunhaolou Date: Mon, 21 Jul 2025 15:23:19 +0800 Subject: [PATCH 21/41] refractor: clean up the codes for online services --- .env | 24 ---- apps/payment/.env | 6 +- .../backend/business/stripe_manager.py | 123 +++--------------- 3 files changed, 21 insertions(+), 132 deletions(-) delete mode 100644 .env diff --git a/.env b/.env deleted file mode 100644 index 1834048..0000000 --- a/.env +++ /dev/null @@ -1,24 +0,0 @@ -APP_NAME=payment -export SERVICE_API_ACCESS_HOST=0.0.0.0 -export SERVICE_API_ACCESS_PORT=8006 -export CONTAINER_APP_ROOT=/app -export LOG_BASE_PATH=$CONTAINER_APP_ROOT/log/$APP_NAME -export BACKEND_LOG_FILE_NAME=$APP_NAME -export 
APPLICATION_ACTIVITY_LOG=$APP_NAME-activity -export MONGODB_NAME=freeleaps2 -export MONGODB_PORT=27017 -GIT_REPO_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub -CODEBASE_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/apps/payment -SITE_DEPLOY_FOLDER=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/sites/payment/deploy -#!/bin/bash -export VENV_DIR=venv_t -export VENV_ACTIVATE=venv_t/bin/activate -export DOCKER_HOME=/var/lib/docker -export DOCKER_APP_HOME=$DOCKER_HOME/app -export DOCKER_BACKEND_HOME=$DOCKER_APP_HOME/$APP_NAME -export DOCKER_BACKEND_LOG_HOME=$DOCKER_BACKEND_HOME/log -export MONGODB_URI=mongodb://localhost:27017/ -export FREELEAPS_ENV=local -export SITE_URL_ROOT=http://localhost:5173/ -export LOG_BASE_PATH=${CODEBASE_ROOT}/log -export STRIPE_API_KEY=sk_test_51Ogsw5B0IyqaSJBrwczlr820jnmvA1qQQGoLZ2XxOsIzikpmXo4pRLjw4XVMTEBR8DdVTYySiAv1XX53Zv5xqynF00GfMqttFd diff --git a/apps/payment/.env b/apps/payment/.env index 1834048..6b56dca 100644 --- a/apps/payment/.env +++ b/apps/payment/.env @@ -7,9 +7,9 @@ export BACKEND_LOG_FILE_NAME=$APP_NAME export APPLICATION_ACTIVITY_LOG=$APP_NAME-activity export MONGODB_NAME=freeleaps2 export MONGODB_PORT=27017 -GIT_REPO_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub -CODEBASE_ROOT=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/apps/payment -SITE_DEPLOY_FOLDER=/Users/sunhaolou/Downloads/Freeleaps/freeleaps-service-hub/sites/payment/deploy +GIT_REPO_ROOT=/mnt/freeleaps/freeleaps-service-hub +CODEBASE_ROOT=/mnt/freeleaps/freeleaps-service-hub/apps/payment +SITE_DEPLOY_FOLDER=/mnt/freeleaps/freeleaps-service-hub/sites/payment/deploy #!/bin/bash export VENV_DIR=venv_t export VENV_ACTIVATE=venv_t/bin/activate diff --git a/apps/payment/backend/business/stripe_manager.py b/apps/payment/backend/business/stripe_manager.py index 92b28e4..8f7c892 100644 --- a/apps/payment/backend/business/stripe_manager.py +++ b/apps/payment/backend/business/stripe_manager.py 
@@ -16,7 +16,7 @@ stripe.api_key = app_settings.STRIPE_API_KEY class StripeManager: def __init__(self) -> None: - self.site_url_root = "http://localhost:8888" + self.site_url_root = app_settings.SITE_URL_ROOT.rstrip("/") self.module_logger = ModuleLogger(sender_id="StripeManager") async def create_stripe_account(self) -> Optional[str]: @@ -29,15 +29,15 @@ class StripeManager: if link_type == "account_update" and account.tos_acceptance.date: login_link = stripe.Account.create_login_link( account_id, - redirect_url="http://localhost:8888/work" + redirect_url="{}/work".format(self.site_url_root) ) return login_link.url # Otherwise show onboarding account_link = stripe.AccountLink.create( account=account_id, - refresh_url="http://localhost:8888/front-door", - return_url="http://localhost:8888/work", + refresh_url="{}/front-door".format(self.site_url_root), + return_url="{}/work".format(self.site_url_root), type="account_onboarding", ) return account_link.url @@ -71,20 +71,10 @@ class StripeManager: async def __fetch_transaction_by_session_id( self, session_id: str ) -> Optional[StripeTransactionDoc]: - await self.module_logger.log_info( - f"Looking up transaction for session_id: {session_id}", - properties={"session_id": session_id} - ) - transactions = await StripeTransactionDoc.find( StripeTransactionDoc.stripe_checkout_session_id == session_id ).to_list() - await self.module_logger.log_info( - f"Found {len(transactions)} transactions for session_id: {session_id}", - properties={"session_id": session_id, "transaction_count": len(transactions)} - ) - if len(transactions) > 1: await self.module_logger.log_error( error="More than one transaction found for session_id: {}".format( @@ -99,18 +89,7 @@ class StripeManager: ) return None - transaction = transactions[0] - await self.module_logger.log_info( - f"Found transaction: project_id={transaction.project_id}, milestone_index={transaction.milestone_index}, status={transaction.status}", - properties={ - "session_id": 
session_id, - "project_id": transaction.project_id, - "milestone_index": transaction.milestone_index, - "status": transaction.status - } - ) - - return transaction + return transactions[0] async def fetch_transaction_by_session_id( self, session_id: str @@ -324,8 +303,8 @@ class StripeManager: ], "payment_intent_data": payment_intent_data, "mode": "payment", - "success_url": "http://localhost:8888/projects", - "cancel_url": "http://localhost:8888/projects", + "success_url": "{}/projects".format(self.site_url_root), + "cancel_url": "{}/projects".format(self.site_url_root), } @@ -369,10 +348,6 @@ class StripeManager: # Handle the checkout.session.completed event if event["type"] == "checkout.session.completed": session = event["data"]["object"] - await self.module_logger.log_info( - f"Processing checkout.session.completed webhook for session_id: {session['id']}", - properties={"session_id": session["id"]} - ) transaction = await self.__fetch_transaction_by_session_id(session["id"]) if not transaction: @@ -387,69 +362,31 @@ class StripeManager: transaction.updated_time = datetime.now(timezone.utc) await transaction.save() - await self.module_logger.log_info( - f"Successfully updated transaction status to COMPLETED for project_id: {transaction.project_id}, milestone_index: {transaction.milestone_index}", - properties={ - "project_id": transaction.project_id, - "milestone_index": transaction.milestone_index, - "session_id": session["id"] - } - ) - # Save payment method information payment_method_saved = False try: - print("=" * 50) - print("STARTING PAYMENT METHOD PROCESSING") - print("=" * 50) - print(f"Starting payment method processing for session {session['id']}") - # Get the Stripe session to extract payment method details - try: - stripe_session = stripe.checkout.Session.retrieve(session["id"]) - print(f"Successfully retrieved Stripe session: {session['id']}") - except Exception as session_error: - print(f"Failed to retrieve Stripe session {session['id']}: 
{session_error}") - raise session_error - + stripe_session = stripe.checkout.Session.retrieve(session["id"]) payment_intent_id = stripe_session.get('payment_intent') - print(f"Payment intent ID from session: {payment_intent_id}") if payment_intent_id: - try: - payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id) - print(f"Successfully retrieved payment intent: {payment_intent_id}") - except Exception as pi_error: - print(f"Failed to retrieve payment intent {payment_intent_id}: {pi_error}") - raise pi_error - + payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id) payment_method_id = payment_intent.get('payment_method') - print(f"Payment method ID from payment intent: {payment_method_id}") if payment_method_id: - try: - payment_method = stripe.PaymentMethod.retrieve(payment_method_id) - print(f"Successfully retrieved payment method: {payment_method_id}") - except Exception as pm_error: - print(f"Failed to retrieve payment method {payment_method_id}: {pm_error}") - raise pm_error - + payment_method = stripe.PaymentMethod.retrieve(payment_method_id) card_details = payment_method.get('card', {}) - print(f"Card details: {card_details}") # Get user email (use a fallback since we don't have access to user profile) user_email = f"user_{transaction.from_user}@freeleaps.com" - print(f"User email for customer creation: {user_email}") # Get or create customer for the user - # Try to find existing customer first customer_id = None try: # Search for existing customers by email customers = stripe.Customer.list(email=user_email, limit=1) if customers.data: customer_id = customers.data[0].id - print(f"Found existing customer: {customer_id}") else: # Create new customer customer = stripe.Customer.create( @@ -457,9 +394,7 @@ class StripeManager: metadata={"user_id": transaction.from_user} ) customer_id = customer.id - print(f"Created new customer: {customer_id}") except Exception as customer_error: - print(f"Error creating/finding customer: {customer_error}") 
# Use a fallback customer ID or skip payment method saving customer_id = None @@ -468,7 +403,6 @@ class StripeManager: # Check if payment method is already attached to a customer payment_method_obj = stripe.PaymentMethod.retrieve(payment_method_id) if payment_method_obj.customer: - print(f"Payment method {payment_method_id} already attached to customer {payment_method_obj.customer}") # Use the existing customer ID customer_id = payment_method_obj.customer else: @@ -477,7 +411,6 @@ class StripeManager: payment_method_id, customer=customer_id ) - print(f"Successfully attached payment method {payment_method_id} to customer {customer_id}") # Check if payment method already exists in our database from backend.infra.payment.models import StripePaymentMethodDoc @@ -486,7 +419,6 @@ class StripeManager: ) if existing_payment_method: - print(f"Payment method {payment_method_id} already exists in database, skipping save") payment_method_saved = True else: # Save to our database only if it doesn't exist @@ -503,10 +435,8 @@ class StripeManager: ) await payment_method_doc.save() payment_method_saved = True - print(f"Successfully saved payment method {payment_method_id} for user {transaction.from_user}") except stripe.error.InvalidRequestError as attach_error: if "already attached" in str(attach_error).lower(): - print(f"Payment method {payment_method_id} already attached to customer {customer_id}") # Check if payment method already exists in our database from backend.infra.payment.models import StripePaymentMethodDoc existing_payment_method = await StripePaymentMethodDoc.find_one( @@ -514,7 +444,6 @@ class StripeManager: ) if existing_payment_method: - print(f"Payment method {payment_method_id} already exists in database, skipping save") payment_method_saved = True else: # Still save to our database since it's already attached @@ -531,10 +460,7 @@ class StripeManager: ) await payment_method_doc.save() payment_method_saved = True - print(f"Successfully saved payment method 
{payment_method_id} for user {transaction.from_user}") elif "may not be used again" in str(attach_error).lower(): - print(f"Payment method {payment_method_id} was already used and cannot be attached to customer") - print(f"This is normal for one-time payment methods. Saving card details to database anyway.") # Check if payment method already exists in our database from backend.infra.payment.models import StripePaymentMethodDoc existing_payment_method = await StripePaymentMethodDoc.find_one( @@ -542,7 +468,6 @@ class StripeManager: ) if existing_payment_method: - print(f"Payment method {payment_method_id} already exists in database, skipping save") payment_method_saved = True else: # Save to our database even though it can't be attached @@ -559,30 +484,18 @@ class StripeManager: ) await payment_method_doc.save() payment_method_saved = True - print(f"Successfully saved payment method {payment_method_id} for user {transaction.from_user} (one-time use)") - else: - print(f"Error attaching payment method: {attach_error}") except Exception as save_error: - print(f"Error saving payment method to database: {save_error}") - else: - print(f"Could not create customer for user {transaction.from_user}, skipping payment method save") - else: - print(f"No payment method found in payment intent {payment_intent_id}") - else: - print(f"No payment intent found in session {session['id']}") + await self.module_logger.log_error( + error=f"Error saving payment method to database: {save_error}", + properties={"payment_method_id": payment_method_id, "user_id": transaction.from_user} + ) except Exception as payment_method_error: - print(f"Error processing payment method: {payment_method_error}") - import traceback - print(f"Full traceback for payment method error:") - print(traceback.format_exc()) + await self.module_logger.log_error( + error=f"Error processing payment method: {payment_method_error}", + properties={"session_id": session["id"], "user_id": transaction.from_user} + ) # Don't 
fail the webhook if payment method saving fails, but log it - - print(f"Payment method saved: {payment_method_saved}") return True, transaction.project_id, transaction.milestone_index - await self.module_logger.log_info( - f"Received non-checkout.session.completed webhook event: {event['type']}", - properties={"event_type": event["type"]} - ) return False, None, None From d01468f89a1ee7e489bf8f005be78d3cf1a0df51 Mon Sep 17 00:00:00 2001 From: sunhaolou Date: Tue, 22 Jul 2025 13:16:14 +0800 Subject: [PATCH 22/41] fix: refractor the length functions and add comments, and address the issues --- .../backend/business/stripe_manager.py | 338 +++++++++++------- apps/payment/common/log/base_logger.py | 13 +- 2 files changed, 202 insertions(+), 149 deletions(-) diff --git a/apps/payment/backend/business/stripe_manager.py b/apps/payment/backend/business/stripe_manager.py index 8f7c892..4503370 100644 --- a/apps/payment/backend/business/stripe_manager.py +++ b/apps/payment/backend/business/stripe_manager.py @@ -26,6 +26,14 @@ class StripeManager: async def create_account_link(self, account_id: str, link_type: str = "account_onboarding") -> Optional[str]: account = stripe.Account.retrieve(account_id) # For account_update, try to show dashboard if TOS is accepted + + self.module_logger.log_info("create_account_link urls", + { + "redirect_url": "{}/work".format(self.site_url_root), + "refresh_url": "{}/front-door".format(self.site_url_root), + "return_url": "{}/work".format(self.site_url_root) + } + ) if link_type == "account_update" and account.tos_acceptance.date: login_link = stripe.Account.create_login_link( account_id, @@ -345,10 +353,14 @@ class StripeManager: async def invoke_checkout_session_webhook( self, event: dict ) -> Tuple[bool, Optional[str], Optional[str]]: - # Handle the checkout.session.completed event + """ + Handle checkout.session.completed webhook events from Stripe. + Updates transaction status and saves payment method information for future use. 
+ """ if event["type"] == "checkout.session.completed": session = event["data"]["object"] + # Find and validate the transaction transaction = await self.__fetch_transaction_by_session_id(session["id"]) if not transaction: await self.module_logger.log_error( @@ -357,145 +369,195 @@ class StripeManager: ) return False, None, None - # Update transaction status - transaction.status = TransactionStatus.COMPLETED - transaction.updated_time = datetime.now(timezone.utc) - await transaction.save() + # Update transaction status to completed + await self.__update_transaction_status(transaction) - # Save payment method information - payment_method_saved = False - try: - # Get the Stripe session to extract payment method details - stripe_session = stripe.checkout.Session.retrieve(session["id"]) - payment_intent_id = stripe_session.get('payment_intent') + # Process and save payment method information + await self.__process_payment_method(session, transaction) - if payment_intent_id: - payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id) - payment_method_id = payment_intent.get('payment_method') - - if payment_method_id: - payment_method = stripe.PaymentMethod.retrieve(payment_method_id) - card_details = payment_method.get('card', {}) - - # Get user email (use a fallback since we don't have access to user profile) - user_email = f"user_{transaction.from_user}@freeleaps.com" - - # Get or create customer for the user - customer_id = None - try: - # Search for existing customers by email - customers = stripe.Customer.list(email=user_email, limit=1) - if customers.data: - customer_id = customers.data[0].id - else: - # Create new customer - customer = stripe.Customer.create( - email=user_email, - metadata={"user_id": transaction.from_user} - ) - customer_id = customer.id - except Exception as customer_error: - # Use a fallback customer ID or skip payment method saving - customer_id = None - - if customer_id: - try: - # Check if payment method is already attached to a 
customer - payment_method_obj = stripe.PaymentMethod.retrieve(payment_method_id) - if payment_method_obj.customer: - # Use the existing customer ID - customer_id = payment_method_obj.customer - else: - # Try to attach payment method to customer in Stripe - stripe.PaymentMethod.attach( - payment_method_id, - customer=customer_id - ) - - # Check if payment method already exists in our database - from backend.infra.payment.models import StripePaymentMethodDoc - existing_payment_method = await StripePaymentMethodDoc.find_one( - StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id - ) - - if existing_payment_method: - payment_method_saved = True - else: - # Save to our database only if it doesn't exist - payment_method_doc = StripePaymentMethodDoc( - user_id=transaction.from_user, - stripe_customer_id=customer_id, - stripe_payment_method_id=payment_method_id, - card_last4=card_details.get('last4'), - card_brand=card_details.get('brand'), - card_exp_month=card_details.get('exp_month'), - card_exp_year=card_details.get('exp_year'), - created_time=datetime.now(timezone.utc), - updated_time=datetime.now(timezone.utc), - ) - await payment_method_doc.save() - payment_method_saved = True - except stripe.error.InvalidRequestError as attach_error: - if "already attached" in str(attach_error).lower(): - # Check if payment method already exists in our database - from backend.infra.payment.models import StripePaymentMethodDoc - existing_payment_method = await StripePaymentMethodDoc.find_one( - StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id - ) - - if existing_payment_method: - payment_method_saved = True - else: - # Still save to our database since it's already attached - payment_method_doc = StripePaymentMethodDoc( - user_id=transaction.from_user, - stripe_customer_id=customer_id, - stripe_payment_method_id=payment_method_id, - card_last4=card_details.get('last4'), - card_brand=card_details.get('brand'), - 
card_exp_month=card_details.get('exp_month'), - card_exp_year=card_details.get('exp_year'), - created_time=datetime.now(timezone.utc), - updated_time=datetime.now(timezone.utc), - ) - await payment_method_doc.save() - payment_method_saved = True - elif "may not be used again" in str(attach_error).lower(): - # Check if payment method already exists in our database - from backend.infra.payment.models import StripePaymentMethodDoc - existing_payment_method = await StripePaymentMethodDoc.find_one( - StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id - ) - - if existing_payment_method: - payment_method_saved = True - else: - # Save to our database even though it can't be attached - payment_method_doc = StripePaymentMethodDoc( - user_id=transaction.from_user, - stripe_customer_id=customer_id, - stripe_payment_method_id=payment_method_id, - card_last4=card_details.get('last4'), - card_brand=card_details.get('brand'), - card_exp_month=card_details.get('exp_month'), - card_exp_year=card_details.get('exp_year'), - created_time=datetime.now(timezone.utc), - updated_time=datetime.now(timezone.utc), - ) - await payment_method_doc.save() - payment_method_saved = True - except Exception as save_error: - await self.module_logger.log_error( - error=f"Error saving payment method to database: {save_error}", - properties={"payment_method_id": payment_method_id, "user_id": transaction.from_user} - ) - - except Exception as payment_method_error: - await self.module_logger.log_error( - error=f"Error processing payment method: {payment_method_error}", - properties={"session_id": session["id"], "user_id": transaction.from_user} - ) - # Don't fail the webhook if payment method saving fails, but log it return True, transaction.project_id, transaction.milestone_index return False, None, None + + async def __update_transaction_status(self, transaction: StripeTransactionDoc) -> None: + """ + Update transaction status to completed and save to database. 
+ """ + transaction.status = TransactionStatus.COMPLETED + transaction.updated_time = datetime.now(timezone.utc) + await transaction.save() + + async def __process_payment_method(self, session: dict, transaction: StripeTransactionDoc) -> None: + """ + Extract payment method details from Stripe session and save to database. + Creates or finds customer and attaches payment method for future use. + """ + try: + # Get payment method details from Stripe + payment_method_info = await self.__extract_payment_method_info(session) + if not payment_method_info: + return + + payment_method_id, card_details = payment_method_info + + # Get or create Stripe customer for the user + customer_id = await self.__get_or_create_customer(transaction.from_user) + if not customer_id: + return + + # Attach payment method to customer and save to database + await self.__attach_and_save_payment_method( + payment_method_id, card_details, customer_id, transaction.from_user + ) + + except Exception as payment_method_error: + await self.module_logger.log_error( + error=f"Error processing payment method: {payment_method_error}", + properties={"session_id": session["id"], "user_id": transaction.from_user} + ) + + async def __extract_payment_method_info(self, session: dict) -> Optional[Tuple[str, dict]]: + """ + Extract payment method ID and card details from Stripe session. + Returns tuple of (payment_method_id, card_details) or None if not found. 
+ """ + try: + # Get the Stripe session to extract payment method details + stripe_session = stripe.checkout.Session.retrieve(session["id"]) + payment_intent_id = stripe_session.get('payment_intent') + + if not payment_intent_id: + return None + + payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id) + payment_method_id = payment_intent.get('payment_method') + + if not payment_method_id: + return None + + payment_method = stripe.PaymentMethod.retrieve(payment_method_id) + card_details = payment_method.get('card', {}) + + return payment_method_id, card_details + + except Exception as e: + await self.module_logger.log_error( + error=f"Error extracting payment method info: {e}", + properties={"session_id": session["id"]} + ) + return None + + async def __get_or_create_customer(self, user_id: str) -> Optional[str]: + """ + Find existing Stripe customer by email or create new one. + Returns customer ID or None if creation fails. + """ + try: + # Generate email for user (fallback since we don't have access to user profile) + user_email = f"user_{user_id}@freeleaps.com" + + # Search for existing customers by email + customers = stripe.Customer.list(email=user_email, limit=1) + if customers.data: + return customers.data[0].id + + # Create new customer if not found + customer = stripe.Customer.create( + email=user_email, + metadata={"user_id": user_id} + ) + return customer.id + + except Exception as customer_error: + await self.module_logger.log_error( + error=f"Error getting/creating customer: {customer_error}", + properties={"user_id": user_id} + ) + return None + + async def __attach_and_save_payment_method( + self, payment_method_id: str, card_details: dict, customer_id: str, user_id: str + ) -> None: + """ + Attach payment method to Stripe customer and save details to database. + Handles various error scenarios gracefully. 
+ """ + try: + # Check if payment method is already attached to a customer + payment_method_obj = stripe.PaymentMethod.retrieve(payment_method_id) + if payment_method_obj.customer: + # Use the existing customer ID + customer_id = payment_method_obj.customer + else: + # Try to attach payment method to customer in Stripe + stripe.PaymentMethod.attach( + payment_method_id, + customer=customer_id + ) + + # Save to database + await self.__save_payment_method_to_db( + payment_method_id, card_details, customer_id, user_id + ) + + except stripe.error.InvalidRequestError as attach_error: + # Handle specific Stripe attachment errors + await self.__handle_attachment_error( + attach_error, payment_method_id, card_details, customer_id, user_id + ) + except Exception as save_error: + await self.module_logger.log_error( + error=f"Error attaching payment method: {save_error}", + properties={"payment_method_id": payment_method_id, "user_id": user_id} + ) + + async def __save_payment_method_to_db( + self, payment_method_id: str, card_details: dict, customer_id: str, user_id: str + ) -> None: + """ + Save payment method details to database if it doesn't already exist. 
+ """ + from backend.infra.payment.models import StripePaymentMethodDoc + + # Check if payment method already exists in our database + existing_payment_method = await StripePaymentMethodDoc.find_one( + StripePaymentMethodDoc.stripe_payment_method_id == payment_method_id + ) + + if existing_payment_method: + return # Already saved + + # Save to our database + payment_method_doc = StripePaymentMethodDoc( + user_id=user_id, + stripe_customer_id=customer_id, + stripe_payment_method_id=payment_method_id, + card_last4=card_details.get('last4'), + card_brand=card_details.get('brand'), + card_exp_month=card_details.get('exp_month'), + card_exp_year=card_details.get('exp_year'), + created_time=datetime.now(timezone.utc), + updated_time=datetime.now(timezone.utc), + ) + await payment_method_doc.save() + + async def __handle_attachment_error( + self, attach_error: stripe.error.InvalidRequestError, + payment_method_id: str, card_details: dict, customer_id: str, user_id: str + ) -> None: + """ + Handle specific Stripe attachment errors and still save to database when possible. 
+ """ + error_message = str(attach_error).lower() + + if "already attached" in error_message or "may not be used again" in error_message: + # Payment method can't be attached but we can still save to database + await self.__save_payment_method_to_db( + payment_method_id, card_details, customer_id, user_id + ) + else: + # Log other attachment errors + await self.module_logger.log_error( + error=f"Error attaching payment method: {attach_error}", + properties={"payment_method_id": payment_method_id, "user_id": user_id} + ) diff --git a/apps/payment/common/log/base_logger.py b/apps/payment/common/log/base_logger.py index d87356b..2470ebe 100644 --- a/apps/payment/common/log/base_logger.py +++ b/apps/payment/common/log/base_logger.py @@ -47,17 +47,8 @@ class LoggerBase: filter=lambda record: record["extra"].get("topic") == self.__logger_name, ) - try: - host_name = socket.gethostname() - host_ip = socket.gethostbyname(host_name) - except socket.gaierror: - # Fallback if hostname resolution fails - host_name = "localhost" - host_ip = "127.0.0.1" - except Exception: - # Generic fallback - host_name = "localhost" - host_ip = "127.0.0.1" + host_name = socket.gethostname() + host_ip = socket.gethostbyname(host_name) self.logger = guru_logger.bind( topic=self.__logger_name, host_ip=host_ip, From d0aa2ba14b99985ecfc40326f72d7ea7e9d17a43 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Wed, 30 Jul 2025 10:50:22 +0800 Subject: [PATCH 23/41] Add message queue registration in application bootstrap and update dependencies Signed-off-by: zhenyus --- .gitignore | 3 +- .../app/backend/infra/rabbitmq/__init__.py | 1 + .../backend/infra/rabbitmq/async_client.py | 64 ++++++ .../infra/rabbitmq/async_subscriber.py | 84 +++++++ .../deployment_status_update_service.py | 100 ++++++++ apps/devops/app/bootstrap/application.py | 2 + apps/devops/app/common/log/module_logger.py | 46 ++++ .../app/common/models/deployment/heartbeat.py | 17 ++ apps/devops/app/providers/message_queue.py | 30 +++ 
apps/devops/requirements.txt | 4 +- .../test_deployment_status_update_service.py | 216 ++++++++++++++++++ 11 files changed, 565 insertions(+), 2 deletions(-) create mode 100644 apps/devops/app/backend/infra/rabbitmq/__init__.py create mode 100644 apps/devops/app/backend/infra/rabbitmq/async_client.py create mode 100644 apps/devops/app/backend/infra/rabbitmq/async_subscriber.py create mode 100644 apps/devops/app/backend/services/deployment_status_update_service.py create mode 100644 apps/devops/app/common/log/module_logger.py create mode 100644 apps/devops/app/common/models/deployment/heartbeat.py create mode 100644 apps/devops/app/providers/message_queue.py create mode 100644 apps/devops/tests/services/test_deployment_status_update_service.py diff --git a/.gitignore b/.gitignore index b022725..c6032c1 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,5 @@ *.pyc freedev.code-workspace .idea/ -.pytest_cache/ \ No newline at end of file +.pytest_cache/ +CLAUDE.md diff --git a/apps/devops/app/backend/infra/rabbitmq/__init__.py b/apps/devops/app/backend/infra/rabbitmq/__init__.py new file mode 100644 index 0000000..881d42a --- /dev/null +++ b/apps/devops/app/backend/infra/rabbitmq/__init__.py @@ -0,0 +1 @@ +# RabbitMQ infrastructure for DevOps service \ No newline at end of file diff --git a/apps/devops/app/backend/infra/rabbitmq/async_client.py b/apps/devops/app/backend/infra/rabbitmq/async_client.py new file mode 100644 index 0000000..a48fc58 --- /dev/null +++ b/apps/devops/app/backend/infra/rabbitmq/async_client.py @@ -0,0 +1,64 @@ +from app.common.config.app_settings import app_settings +from app.common.log.module_logger import ModuleLogger +import asyncio +from asyncio import AbstractEventLoop +import aio_pika + + +class AsyncMQClient: + exchange_name_format = "freeleaps.devops.exchange.{}" + exchange_type = "direct" + + def __init__(self, channel_name: str) -> None: + self.exchange_name_format = AsyncMQClient.exchange_name_format + self.channel_name = 
channel_name + self.exchange_type = AsyncMQClient.exchange_type + self.exchange_name = self.exchange_name_format.format(self.channel_name) + self.process_callable = None + self.routing_key = self.channel_name + self.module_logger = ModuleLogger(sender_id="AsyncMQClient") + + async def bind(self, max_retries=10, event_loop: AbstractEventLoop = None): + retry_count = 0 + retry_interval = 1 + + while retry_count < max_retries: + try: + self.connection = await aio_pika.connect_robust( + host=app_settings.RABBITMQ_HOST, + port=int(app_settings.RABBITMQ_PORT), + login=app_settings.RABBITMQ_USERNAME, + password=app_settings.RABBITMQ_PASSWORD, + virtualhost=app_settings.RABBITMQ_VIRTUAL_HOST, + loop=event_loop, + ) + self.channel = await self.connection.channel() + self.exchange = await self.channel.declare_exchange( + name=self.exchange_name, type="direct", auto_delete=False + ) + + self.queue = await self.channel.declare_queue( + name=None, exclusive=True, auto_delete=True, durable=False + ) + await self.queue.bind( + exchange=self.exchange, routing_key=self.routing_key + ) + break + except Exception as e: + await self.module_logger.log_exception( + exception=e, + text=f"Reconnection attempt {retry_count + 1}/{max_retries} failed: {e}", + ) + await asyncio.sleep(retry_interval) + retry_interval = min(retry_interval * 2, 60) + retry_count += 1 + + if retry_count >= max_retries: + raise ConnectionError( + "Unable to connect to RabbitMQ after multiple retries." 
+ ) + + async def close(self): + """Unbind the queue and close the connection gracefully.""" + await self.queue.unbind(self.exchange, self.routing_key) + await self.connection.close() \ No newline at end of file diff --git a/apps/devops/app/backend/infra/rabbitmq/async_subscriber.py b/apps/devops/app/backend/infra/rabbitmq/async_subscriber.py new file mode 100644 index 0000000..05ef7e4 --- /dev/null +++ b/apps/devops/app/backend/infra/rabbitmq/async_subscriber.py @@ -0,0 +1,84 @@ +from asyncio import AbstractEventLoop +from app.common.log.module_logger import ModuleLogger +import json +import asyncio +from .async_client import AsyncMQClient + + +class AsyncMQSubscriber(AsyncMQClient): + def __init__(self, channel_name: str) -> None: + super().__init__(channel_name=channel_name) + self.process_callable = None + self.routing_key = self.channel_name + self.consumer_callbacks = {} + self.consumer_callbacks_lock = asyncio.Lock() + self.module_logger = ModuleLogger(sender_id="AsyncMQSubscriber") + + async def process_incoming_message(self, message): + """Processing incoming message from RabbitMQ""" + await message.ack() + body = message.body + if body: + async with self.consumer_callbacks_lock: + for registry_key, callback_info in self.consumer_callbacks.items(): + try: + await callback_info["method"]( + registry_key, json.loads(body), callback_info["args"] + ) + except Exception as err: + await self.module_logger.log_exception( + exception=err, + text=f"Error processing message for consumer '{registry_key}'", + ) + + async def subscribe(self, max_retries=10, event_loop: AbstractEventLoop = None): + """Attempts to bind and consume messages, with retry mechanism.""" + retries = 0 + while retries < max_retries: + try: + await self.bind(max_retries=5, event_loop=event_loop) + await self.queue.consume( + no_ack=False, exclusive=True, callback=self.process_incoming_message + ) + break + except Exception as e: + await self.module_logger.log_exception( + exception=e, + 
text=f"Failed to subscribe at {retries} time, will retry", + ) + retries += 1 + await asyncio.sleep(5) + else: + await self.module_logger.log_exception( + exception=ConnectionError( + f"Exceeded max retries ({max_retries}) for subscription." + ), + text=f"Subscription failed for {self.channel_name} after {max_retries} attempts.", + ) + + async def register_consumer( + self, + registry_key: str, + callback_method, + args: dict, + ): + """Register a consumer callback with a unique key.""" + async with self.consumer_callbacks_lock: + self.consumer_callbacks[registry_key] = { + "method": callback_method, + "args": args, + } + + async def unregister_consumer( + self, + registry_key: str, + ): + """Unregister a consumer callback by its key.""" + async with self.consumer_callbacks_lock: + if registry_key in self.consumer_callbacks: + del self.consumer_callbacks[registry_key] + + async def clear_all_consumers(self): + """Unregister all consumer callbacks.""" + async with self.consumer_callbacks_lock: + self.consumer_callbacks.clear() \ No newline at end of file diff --git a/apps/devops/app/backend/services/deployment_status_update_service.py b/apps/devops/app/backend/services/deployment_status_update_service.py new file mode 100644 index 0000000..2dfa53a --- /dev/null +++ b/apps/devops/app/backend/services/deployment_status_update_service.py @@ -0,0 +1,100 @@ +from datetime import datetime +from typing import Dict, Literal +from app.common.log.module_logger import ModuleLogger +from app.common.models.deployment.deployment import Deployment +from app.common.models.deployment.heartbeat import DevOpsReconcileJobHeartbeatMessage + + +class DeploymentStatusUpdateService: + def __init__(self): + self.module_logger = ModuleLogger(sender_id="DeploymentStatusUpdateService") + + # Status mapping from heartbeat to deployment model + self.status_mapping: Dict[str, Literal["started", "failed", "succeeded", "aborted"]] = { + "running": "started", + "success": "succeeded", + "failed": 
"failed", + "terminated": "aborted" + } + + # Phase to stage mapping for more detailed tracking + self.phase_to_stage_mapping: Dict[str, str] = { + "initializing": "initialization", + "jenkins_build": "build", + "building": "build", + "deploying": "deployment", + "finished": "completed" + } + + async def process_heartbeat_message(self, registry_key: str, message_data: dict, args: dict): + """Process incoming heartbeat message and update deployment status""" + # registry_key and args are provided by the message queue framework but not used in this implementation + _ = registry_key, args + try: + # Parse the message using our Pydantic model + heartbeat_message = DevOpsReconcileJobHeartbeatMessage(**message_data) + payload = heartbeat_message.payload + + await self.module_logger.log_info( + text=f"Processing heartbeat for deployment {payload.id}: {payload.status} - {payload.phase}", + data={"deployment_id": payload.id, "status": payload.status, "phase": payload.phase} + ) + + # Find the deployment by ID + deployment = await Deployment.find_one(Deployment.deployment_id == payload.id) + if not deployment: + await self.module_logger.log_warning( + text=f"Deployment not found: {payload.id}", + data={"deployment_id": payload.id} + ) + return + + # Map heartbeat status to deployment status + if payload.status in self.status_mapping: + deployment.deployment_status = self.status_mapping[payload.status] + else: + await self.module_logger.log_warning( + text=f"Unknown status received: {payload.status}", + data={"deployment_id": payload.id, "status": payload.status} + ) + return + + # Map phase to deployment stage + if payload.phase in self.phase_to_stage_mapping: + deployment.deployment_stage = self.phase_to_stage_mapping[payload.phase] + else: + deployment.deployment_stage = payload.phase + + # Update app URL if provided and deployment is successful + if payload.url and payload.status == "success": + deployment.deployment_app_url = payload.url + + # Update timestamp + 
deployment.updated_at = datetime.now() + + # Save the updated deployment + await deployment.save() + + await self.module_logger.log_info( + text=f"Updated deployment {payload.id}: status={deployment.deployment_status}, stage={deployment.deployment_stage}", + data={ + "deployment_id": payload.id, + "status": deployment.deployment_status, + "stage": deployment.deployment_stage, + "app_url": deployment.deployment_app_url, + "error": payload.error + } + ) + + # Log errors if present + if payload.error: + await self.module_logger.log_error( + text=f"Deployment {payload.id} failed: {payload.error}", + data={"deployment_id": payload.id, "error": payload.error, "phase": payload.phase} + ) + + except Exception as e: + await self.module_logger.log_exception( + exception=e, + text=f"Error processing heartbeat message: {message_data}", + ) \ No newline at end of file diff --git a/apps/devops/app/bootstrap/application.py b/apps/devops/app/bootstrap/application.py index 81db6f5..3cf635b 100644 --- a/apps/devops/app/bootstrap/application.py +++ b/apps/devops/app/bootstrap/application.py @@ -9,6 +9,7 @@ from app.providers import database from app.providers import metrics from app.providers import probes from app.providers import exception_handler +from app.providers import message_queue from app.common.config.app_settings import app_settings def create_app() -> FastAPI: @@ -21,6 +22,7 @@ def create_app() -> FastAPI: register(app, database) register(app, router) register(app, common) + register(app, message_queue) # Call the custom_openapi function to change the OpenAPI version customize_openapi_security(app) diff --git a/apps/devops/app/common/log/module_logger.py b/apps/devops/app/common/log/module_logger.py new file mode 100644 index 0000000..3e82f74 --- /dev/null +++ b/apps/devops/app/common/log/module_logger.py @@ -0,0 +1,46 @@ +from .application_logger import ApplicationLogger + + +class ModuleLogger(ApplicationLogger): + def __init__(self, sender_id: str) -> None: + 
super().__init__() + self.event_sender_id = sender_id + self.event_receiver_id = "ModuleLogger" + self.event_subject = "module" + + async def log_exception(self, exception: Exception, text: str = "Exception", properties: dict[str, any] = None) -> None: + return await super().log_exception( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + exception=exception, + text=text, + properties=properties, + ) + + async def log_info(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_info( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_warning(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_warning( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_error(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_error( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) \ No newline at end of file diff --git a/apps/devops/app/common/models/deployment/heartbeat.py b/apps/devops/app/common/models/deployment/heartbeat.py new file mode 100644 index 0000000..7c7a624 --- /dev/null +++ b/apps/devops/app/common/models/deployment/heartbeat.py @@ -0,0 +1,17 @@ +from typing import Literal, Optional +from pydantic import BaseModel + + +class DevOpsReconcileJobHeartbeatPayload(BaseModel): + operation: Literal["heartbeat"] = "heartbeat" + id: str + status: Literal["running", "success", "failed", "terminated"] + phase: Literal["initializing", "jenkins_build", "building", "deploying", "finished"] + phase_message: str + error: Optional[str] = None + url: Optional[str] = None + + +class DevOpsReconcileJobHeartbeatMessage(BaseModel): + event_type: 
Literal["DevOpsReconcileJobHeartbeat"] + payload: DevOpsReconcileJobHeartbeatPayload \ No newline at end of file diff --git a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py new file mode 100644 index 0000000..03ecb00 --- /dev/null +++ b/apps/devops/app/providers/message_queue.py @@ -0,0 +1,30 @@ +import asyncio +from app.backend.infra.rabbitmq.async_subscriber import AsyncMQSubscriber +from app.backend.services.deployment_status_update_service import DeploymentStatusUpdateService + + +def register(app): + # Initialize the message subscriber and status update service + app.deployment_heartbeat_subscriber = AsyncMQSubscriber("devops_reconcile_heartbeat") + app.deployment_status_service = DeploymentStatusUpdateService() + + @app.on_event("startup") + async def start_message_consumers(): + # Register the heartbeat processor + await app.deployment_heartbeat_subscriber.register_consumer( + registry_key="deployment_heartbeat_processor", + callback_method=app.deployment_status_service.process_heartbeat_message, + args={} + ) + + # Start the subscriber + loop = asyncio.get_running_loop() + await loop.create_task( + app.deployment_heartbeat_subscriber.subscribe(max_retries=5, event_loop=loop) + ) + + @app.on_event("shutdown") + async def stop_message_consumers(): + # Clear consumers and close connection + await app.deployment_heartbeat_subscriber.clear_all_consumers() + await app.deployment_heartbeat_subscriber.close() \ No newline at end of file diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt index c593732..a83128e 100644 --- a/apps/devops/requirements.txt +++ b/apps/devops/requirements.txt @@ -11,4 +11,6 @@ httpx==0.24.0 pydantic-settings~=2.9.1 pymongo~=4.12.1 pydantic~=2.11.4 -requests~=2.32.3 \ No newline at end of file +requests~=2.32.3 +aio-pika==9.4.3 +pytest-asyncio==0.24.0 \ No newline at end of file diff --git a/apps/devops/tests/services/test_deployment_status_update_service.py 
b/apps/devops/tests/services/test_deployment_status_update_service.py new file mode 100644 index 0000000..8bd4a01 --- /dev/null +++ b/apps/devops/tests/services/test_deployment_status_update_service.py @@ -0,0 +1,216 @@ +import pytest +from unittest.mock import AsyncMock +from datetime import datetime +from app.backend.services.deployment_status_update_service import DeploymentStatusUpdateService +from app.common.models.deployment.deployment import Deployment + + +@pytest.fixture +def status_update_service(): + return DeploymentStatusUpdateService() + + +@pytest.fixture +def sample_heartbeat_message(): + return { + "event_type": "DevOpsReconcileJobHeartbeat", + "payload": { + "operation": "heartbeat", + "id": "deployment-123-abc", + "status": "running", + "phase": "building", + "phase_message": "Building container image", + "error": None, + "url": None + } + } + + +@pytest.fixture +def sample_success_message(): + return { + "event_type": "DevOpsReconcileJobHeartbeat", + "payload": { + "operation": "heartbeat", + "id": "deployment-789-ghi", + "status": "success", + "phase": "finished", + "phase_message": "Deployment completed successfully", + "error": None, + "url": "https://my-app-alpha.freeleaps.com" + } + } + + +@pytest.fixture +def sample_failed_message(): + return { + "event_type": "DevOpsReconcileJobHeartbeat", + "payload": { + "operation": "heartbeat", + "id": "deployment-456-def", + "status": "failed", + "phase": "jenkins_build", + "phase_message": "Build failed due to compilation errors", + "error": "Build step 'Invoke top-level Maven targets' marked build as failure", + "url": None + } + } + + +@pytest.fixture +def mock_deployment(): + from unittest.mock import AsyncMock + + class MockDeployment: + def __init__(self): + self.deployment_id = "deployment-123-abc" + self.deployment_status = "started" + self.deployment_stage = "initialization" + self.deployment_app_url = "" + self.updated_at = datetime.now() + self.save = AsyncMock() + + return 
MockDeployment() + + +class TestDeploymentStatusUpdateService: + + @pytest.mark.asyncio + async def test_status_mapping(self, status_update_service): + """Test that status mapping works correctly""" + assert status_update_service.status_mapping["running"] == "started" + assert status_update_service.status_mapping["success"] == "succeeded" + assert status_update_service.status_mapping["failed"] == "failed" + assert status_update_service.status_mapping["terminated"] == "aborted" + + @pytest.mark.asyncio + async def test_phase_to_stage_mapping(self, status_update_service): + """Test that phase to stage mapping works correctly""" + assert status_update_service.phase_to_stage_mapping["initializing"] == "initialization" + assert status_update_service.phase_to_stage_mapping["jenkins_build"] == "build" + assert status_update_service.phase_to_stage_mapping["building"] == "build" + assert status_update_service.phase_to_stage_mapping["deploying"] == "deployment" + assert status_update_service.phase_to_stage_mapping["finished"] == "completed" + + @pytest.mark.asyncio + async def test_process_running_heartbeat_message(self, status_update_service, sample_heartbeat_message, mock_deployment, monkeypatch): + """Test processing a running status heartbeat""" + # Mock Deployment.find_one to return our mock deployment + async def mock_find_one(query): + _ = query # Parameter required by interface but not used in mock + return mock_deployment + + # Mock the logger methods to avoid actual logging during tests + status_update_service.module_logger.log_info = AsyncMock() + status_update_service.module_logger.log_warning = AsyncMock() + status_update_service.module_logger.log_error = AsyncMock() + status_update_service.module_logger.log_exception = AsyncMock() + + # Mock the Beanie query mechanism properly + mock_deployment_class = AsyncMock() + mock_deployment_class.find_one = mock_find_one + monkeypatch.setattr("app.backend.services.deployment_status_update_service.Deployment", 
mock_deployment_class) + + await status_update_service.process_heartbeat_message( + "test_key", sample_heartbeat_message, {} + ) + + # Verify the deployment was updated correctly + assert mock_deployment.deployment_status == "started" + assert mock_deployment.deployment_stage == "build" + mock_deployment.save.assert_called_once() + + @pytest.mark.asyncio + async def test_process_success_heartbeat_message(self, status_update_service, sample_success_message, mock_deployment, monkeypatch): + """Test processing a success status heartbeat with URL""" + async def mock_find_one(query): + _ = query # Parameter required by interface but not used in mock + return mock_deployment + + # Mock the logger methods + status_update_service.module_logger.log_info = AsyncMock() + status_update_service.module_logger.log_warning = AsyncMock() + status_update_service.module_logger.log_error = AsyncMock() + status_update_service.module_logger.log_exception = AsyncMock() + + # Mock the Beanie query mechanism properly + mock_deployment_class = AsyncMock() + mock_deployment_class.find_one = mock_find_one + monkeypatch.setattr("app.backend.services.deployment_status_update_service.Deployment", mock_deployment_class) + + await status_update_service.process_heartbeat_message( + "test_key", sample_success_message, {} + ) + + # Verify the deployment was updated correctly + assert mock_deployment.deployment_status == "succeeded" + assert mock_deployment.deployment_stage == "completed" + assert mock_deployment.deployment_app_url == "https://my-app-alpha.freeleaps.com" + mock_deployment.save.assert_called_once() + + @pytest.mark.asyncio + async def test_process_failed_heartbeat_message(self, status_update_service, sample_failed_message, mock_deployment, monkeypatch): + """Test processing a failed status heartbeat""" + async def mock_find_one(query): + _ = query # Parameter required by interface but not used in mock + return mock_deployment + + # Mock the logger methods + 
status_update_service.module_logger.log_info = AsyncMock() + status_update_service.module_logger.log_warning = AsyncMock() + status_update_service.module_logger.log_error = AsyncMock() + status_update_service.module_logger.log_exception = AsyncMock() + + # Mock the Beanie query mechanism properly + mock_deployment_class = AsyncMock() + mock_deployment_class.find_one = mock_find_one + monkeypatch.setattr("app.backend.services.deployment_status_update_service.Deployment", mock_deployment_class) + + await status_update_service.process_heartbeat_message( + "test_key", sample_failed_message, {} + ) + + # Verify the deployment was updated correctly + assert mock_deployment.deployment_status == "failed" + assert mock_deployment.deployment_stage == "build" + mock_deployment.save.assert_called_once() + + @pytest.mark.asyncio + async def test_deployment_not_found(self, status_update_service, sample_heartbeat_message, monkeypatch): + """Test handling when deployment is not found""" + async def mock_find_one(query): + _ = query # Parameter required by interface but not used in mock + return None + + # Mock the logger methods + status_update_service.module_logger.log_info = AsyncMock() + status_update_service.module_logger.log_warning = AsyncMock() + status_update_service.module_logger.log_error = AsyncMock() + status_update_service.module_logger.log_exception = AsyncMock() + + # Mock the Beanie query mechanism properly + mock_deployment_class = AsyncMock() + mock_deployment_class.find_one = mock_find_one + monkeypatch.setattr("app.backend.services.deployment_status_update_service.Deployment", mock_deployment_class) + + # Should not raise an exception + await status_update_service.process_heartbeat_message( + "test_key", sample_heartbeat_message, {} + ) + + @pytest.mark.asyncio + async def test_invalid_message_format(self, status_update_service): + """Test handling invalid message format""" + invalid_message = {"invalid": "format"} + + # Mock the logger methods + 
status_update_service.module_logger.log_info = AsyncMock() + status_update_service.module_logger.log_warning = AsyncMock() + status_update_service.module_logger.log_error = AsyncMock() + status_update_service.module_logger.log_exception = AsyncMock() + + # Should not raise an exception due to try/catch in the method + await status_update_service.process_heartbeat_message( + "test_key", invalid_message, {} + ) \ No newline at end of file From d74967db55609336e6e0fd9476240a242ae630db Mon Sep 17 00:00:00 2001 From: zhenyus Date: Fri, 1 Aug 2025 00:15:06 +0800 Subject: [PATCH 24/41] feat: add RabbitMQ configuration to app settings Signed-off-by: zhenyus --- apps/devops/app/common/config/app_settings.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/apps/devops/app/common/config/app_settings.py b/apps/devops/app/common/config/app_settings.py index 9a73bcb..d7b3c7d 100644 --- a/apps/devops/app/common/config/app_settings.py +++ b/apps/devops/app/common/config/app_settings.py @@ -18,7 +18,13 @@ class AppSettings(BaseSettings): LOG_BASE_PATH: str = "./log" BACKEND_LOG_FILE_NAME: str = APP_NAME - APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity" + APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity" + + RABBITMQ_HOST: str = "localhost" + RABBITMQ_PORT: int = 5672 + RABBITMQ_USERNAME: str = "guest" + RABBITMQ_PASSWORD: str = "guest" + RABBITMQ_VIRTUAL_HOST: str = "/" class Config: From 14eb5abda6161ca2d4e52a3d3bb5e213b4d03521 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Fri, 1 Aug 2025 09:03:39 +0800 Subject: [PATCH 25/41] refactor: remove unnecessary @dataclass decorator from DevOpsReconcileRequest Signed-off-by: zhenyus --- apps/devops/app/common/models/deployment/deployment.py | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index dc50780..e217eae 100644 --- 
a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -76,7 +76,6 @@ class DevOpsReconcileOperationType(Enum): TERMINATE = "terminate" RESTART = "restart" -@dataclass class DevOpsReconcileRequest(BaseModel): operation: DevOpsReconcileOperationType id: str From ebc37bbd0d772eb736f7ba6de74a8177c43c21fd Mon Sep 17 00:00:00 2001 From: zhenyus Date: Fri, 1 Aug 2025 09:06:42 +0800 Subject: [PATCH 26/41] refactor: streamline DevOpsReconcileRequest by removing redundant code Signed-off-by: zhenyus --- apps/devops/Dockerfile | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 apps/devops/Dockerfile diff --git a/apps/devops/Dockerfile b/apps/devops/Dockerfile new file mode 100644 index 0000000..1d96106 --- /dev/null +++ b/apps/devops/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY . . 
+ +# Set environment variables +ENV LOG_BASE_PATH=/app/log/devops + +# Create necessary directories +RUN mkdir -p /app/log/devops + +# Expose port +EXPOSE 8014 + +# Health check +HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8014/api/_/healthz || exit 1 + +# Start the application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8014", "--reload"] \ No newline at end of file From 113bc2bc7510a0097bebbabea493c97c44d35f00 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Fri, 1 Aug 2025 09:12:19 +0800 Subject: [PATCH 27/41] fix: ensure consistent formatting in requirements.txt by removing redundant line Signed-off-by: zhenyus --- apps/devops/requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt index a83128e..f6f1fa8 100644 --- a/apps/devops/requirements.txt +++ b/apps/devops/requirements.txt @@ -12,5 +12,4 @@ pydantic-settings~=2.9.1 pymongo~=4.12.1 pydantic~=2.11.4 requests~=2.32.3 -aio-pika==9.4.3 -pytest-asyncio==0.24.0 \ No newline at end of file +aio-pika==9.4.3 \ No newline at end of file From 9283e52e487d0f842fb81bb5065fbfb85cf6b1fe Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 02:38:33 +0800 Subject: [PATCH 28/41] feat: enhance message consumer startup and shutdown logging in message_queue.py Added print statements to log the status of message consumers during startup and shutdown processes, improving traceability and debugging capabilities. 
Signed-off-by: zhenyus --- apps/devops/app/providers/message_queue.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py index 03ecb00..b272db4 100644 --- a/apps/devops/app/providers/message_queue.py +++ b/apps/devops/app/providers/message_queue.py @@ -10,21 +10,27 @@ def register(app): @app.on_event("startup") async def start_message_consumers(): + print("Starting message consumers") # Register the heartbeat processor await app.deployment_heartbeat_subscriber.register_consumer( registry_key="deployment_heartbeat_processor", callback_method=app.deployment_status_service.process_heartbeat_message, args={} ) + print("Registered deployment heartbeat processor") # Start the subscriber loop = asyncio.get_running_loop() await loop.create_task( app.deployment_heartbeat_subscriber.subscribe(max_retries=5, event_loop=loop) ) + print("Started deployment heartbeat subscriber") @app.on_event("shutdown") async def stop_message_consumers(): # Clear consumers and close connection + print("Stopping message consumers") await app.deployment_heartbeat_subscriber.clear_all_consumers() - await app.deployment_heartbeat_subscriber.close() \ No newline at end of file + print("Cleared all consumers") + await app.deployment_heartbeat_subscriber.close() + print("Closed deployment heartbeat subscriber") \ No newline at end of file From 95b6560ffd75b91f1b844f2ac5983b828401976a Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 02:48:55 +0800 Subject: [PATCH 29/41] refactor: improve clarity of subscriber startup process in message_queue.py Updated comment to specify that the subscriber starts in the background, enhancing code readability and understanding of the asynchronous behavior. 
Signed-off-by: zhenyus --- apps/devops/app/providers/message_queue.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py index b272db4..51cb071 100644 --- a/apps/devops/app/providers/message_queue.py +++ b/apps/devops/app/providers/message_queue.py @@ -19,9 +19,9 @@ def register(app): ) print("Registered deployment heartbeat processor") - # Start the subscriber + # Start the subscriber in the background loop = asyncio.get_running_loop() - await loop.create_task( + loop.create_task( app.deployment_heartbeat_subscriber.subscribe(max_retries=5, event_loop=loop) ) print("Started deployment heartbeat subscriber") From 2dd73c0734f1b33f98f6ec283648ac8b1a1fad55 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 03:01:11 +0800 Subject: [PATCH 30/41] refactor: enhance lazy initialization and error handling in message consumer startup Updated the message consumer initialization to occur lazily during the startup event, improving application startup performance. Added detailed logging for service initialization, registration, and error handling to enhance traceability and robustness during startup and shutdown processes. 
Signed-off-by: zhenyus --- apps/devops/app/providers/message_queue.py | 63 ++++++++++++++-------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py index 51cb071..978e232 100644 --- a/apps/devops/app/providers/message_queue.py +++ b/apps/devops/app/providers/message_queue.py @@ -4,33 +4,54 @@ from app.backend.services.deployment_status_update_service import DeploymentStat def register(app): - # Initialize the message subscriber and status update service - app.deployment_heartbeat_subscriber = AsyncMQSubscriber("devops_reconcile_heartbeat") - app.deployment_status_service = DeploymentStatusUpdateService() + # Initialize the message subscriber and status update service lazily to avoid blocking startup + app.deployment_heartbeat_subscriber = None + app.deployment_status_service = None @app.on_event("startup") async def start_message_consumers(): - print("Starting message consumers") - # Register the heartbeat processor - await app.deployment_heartbeat_subscriber.register_consumer( - registry_key="deployment_heartbeat_processor", - callback_method=app.deployment_status_service.process_heartbeat_message, - args={} - ) - print("Registered deployment heartbeat processor") + print("🚀 Starting message consumers...") - # Start the subscriber in the background - loop = asyncio.get_running_loop() - loop.create_task( - app.deployment_heartbeat_subscriber.subscribe(max_retries=5, event_loop=loop) - ) - print("Started deployment heartbeat subscriber") + try: + # Initialize services during startup to avoid blocking app initialization + print("🔧 Initializing services...") + app.deployment_heartbeat_subscriber = AsyncMQSubscriber("devops_reconcile_heartbeat") + app.deployment_status_service = DeploymentStatusUpdateService() + print("✅ Services initialized") + + # Register the heartbeat processor + print("📝 Registering deployment heartbeat processor...") + await 
app.deployment_heartbeat_subscriber.register_consumer( + registry_key="deployment_heartbeat_processor", + callback_method=app.deployment_status_service.process_heartbeat_message, + args={} + ) + print("✅ Registered deployment heartbeat processor") + + # Start the subscriber in the background + print("🔄 Starting subscriber in background...") + loop = asyncio.get_running_loop() + loop.create_task( + app.deployment_heartbeat_subscriber.subscribe(max_retries=5, event_loop=loop) + ) + print("✅ Started deployment heartbeat subscriber") + print("🎉 Message consumers startup complete!") + except Exception as e: + print(f"❌ Error in message consumer startup: {e}") + # Don't raise the exception to prevent app startup failure + print("⚠️ App will continue without message queue functionality") @app.on_event("shutdown") async def stop_message_consumers(): # Clear consumers and close connection print("Stopping message consumers") - await app.deployment_heartbeat_subscriber.clear_all_consumers() - print("Cleared all consumers") - await app.deployment_heartbeat_subscriber.close() - print("Closed deployment heartbeat subscriber") \ No newline at end of file + if app.deployment_heartbeat_subscriber: + try: + await app.deployment_heartbeat_subscriber.clear_all_consumers() + print("Cleared all consumers") + await app.deployment_heartbeat_subscriber.close() + print("Closed deployment heartbeat subscriber") + except Exception as e: + print(f"Error during shutdown: {e}") + else: + print("No message consumers to stop") \ No newline at end of file From 6ebac252b0d10f34714afeaaaaca6c2f2ad9759e Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 03:28:06 +0800 Subject: [PATCH 31/41] refactor: change DevOpsReconcileOperationType to inherit from str and Enum Updated the DevOpsReconcileOperationType class to inherit from both str and Enum, enhancing its usability as a string enumeration in the application. 
Signed-off-by: zhenyus --- apps/devops/app/common/models/deployment/deployment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index e217eae..360d890 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -71,7 +71,7 @@ class CheckApplicationLogsResponse(BaseModel): limit: int logs: list[str] -class DevOpsReconcileOperationType(Enum): +class DevOpsReconcileOperationType(str, Enum): START = "start" TERMINATE = "terminate" RESTART = "restart" From e41aa3d983024aadbb5bb05db5640d97768b2c25 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 03:41:26 +0800 Subject: [PATCH 32/41] fix: correct typo in BASE_RECONCILE_URL in site_settings.py Signed-off-by: zhenyus --- apps/devops/app/common/config/site_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/devops/app/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py index c313636..0bf92eb 100644 --- a/apps/devops/app/common/config/site_settings.py +++ b/apps/devops/app/common/config/site_settings.py @@ -21,7 +21,7 @@ class SiteSettings(BaseSettings): BASE_GITEA_URL: str = "https://gitea.freeleaps.mathmast.com" # TODO: confirm with Zhenyu - BASE_RECONSILE_URL: str = "https://reconcile.freeleaps.mathmast.com" + BASE_RECONCILE_URL: str = "https://reconcile.freeleaps.mathmast.com" # TODO: modify this with actual Loki URL BASE_LOKI_URL: str = "http://localhost:3100" From 5f3368cbdcf26b2a4a1588359915e534bbc4e40e Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 03:51:15 +0800 Subject: [PATCH 33/41] fix: correct typo in BASE_RECONCILE_URL parameter in DeploymentService Updated the parameter name in the _start_deployment method to match the corrected constant in site_settings.py, ensuring consistency across the codebase. 
Signed-off-by: zhenyus --- apps/devops/app/routes/deployment/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 8c2f258..142ab3b 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -165,7 +165,7 @@ class DeploymentService: async def _start_deployment( self, deployment: Deployment, - reconsile_base_url: str = site_settings.BASE_RECONSILE_URL, + reconsile_base_url: str = site_settings.BASE_RECONCILE_URL, ) -> bool: """ Start the deployment From 6c60a0b6a59184840fdd173121803d52caef8eea Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 04:04:19 +0800 Subject: [PATCH 34/41] fix: update deployment service to use correct product ID Changed the parameter name from deployment_project_id to deployment_product_id in the DevOpsReconcileRequest to ensure accurate referencing of the product associated with the deployment. 
Signed-off-by: zhenyus --- apps/devops/app/common/models/deployment/deployment.py | 2 +- apps/devops/app/routes/deployment/service.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py index 360d890..b2c719a 100644 --- a/apps/devops/app/common/models/deployment/deployment.py +++ b/apps/devops/app/common/models/deployment/deployment.py @@ -84,5 +84,5 @@ class DevOpsReconcileRequest(BaseModel): causes: str commit_sha256: Optional[str] = None target_env: Literal["alpha", "prod"] - ttl_controled: bool = False + ttl_control: bool = False ttl: int = 10800 \ No newline at end of file diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 142ab3b..d2c2e0c 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -175,12 +175,12 @@ class DeploymentService: request = DevOpsReconcileRequest( operation=DevOpsReconcileOperationType.START, id=deployment.deployment_id, - devops_proj_id=deployment.deployment_project_id, + devops_proj_id=deployment.deployment_product_id, triggered_user_id=deployment.deployed_by, causes=deployment.deployment_reason, target_env=deployment.deployment_target_env, - ttl_controled=True, - ttl=deployment.deployment_ttl_hours, + ttl_control=True, + ttl=deployment.deployment_ttl_hours * 60, commit_sha256=deployment.deployment_git_sha256, ) # send request to reoncile service From 879e45eeb5a4fadce4b9fb61982f6d086106d0c5 Mon Sep 17 00:00:00 2001 From: zhenyus Date: Sun, 3 Aug 2025 12:22:24 +0800 Subject: [PATCH 35/41] fix: correct deployment TTL calculation in DeploymentService Updated the TTL calculation in the DeploymentService to multiply by 3600 instead of 60, ensuring accurate time-to-live settings for deployments. 
Signed-off-by: zhenyus --- apps/devops/app/routes/deployment/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index d2c2e0c..28769fe 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -180,7 +180,7 @@ class DeploymentService: causes=deployment.deployment_reason, target_env=deployment.deployment_target_env, ttl_control=True, - ttl=deployment.deployment_ttl_hours * 60, + ttl=deployment.deployment_ttl_hours * 60 * 60, commit_sha256=deployment.deployment_git_sha256, ) # send request to reoncile service From d680b9e015c8fe4c58c84eafc3cc404c130ca354 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 3 Aug 2025 21:39:39 -0700 Subject: [PATCH 36/41] Fix check_deployment_status --- apps/devops/app/routes/deployment/apis.py | 4 ++-- apps/devops/app/routes/deployment/service.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py index a86c305..31d61d3 100644 --- a/apps/devops/app/routes/deployment/apis.py +++ b/apps/devops/app/routes/deployment/apis.py @@ -39,10 +39,10 @@ async def update_deployment( @router.get("/checkDeploymentStatus") async def check_deployment_status( - deployment_id: str, + product_id: str, service: DeploymentService = Depends(get_deployment_service) ) -> List[Deployment]: - return await service.check_deployment_status(deployment_id) + return await service.check_deployment_status(product_id) @router.post("/createDummyCodeDepot") async def create_dummy_code_depot( diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py index 28769fe..650dd8d 100644 --- a/apps/devops/app/routes/deployment/service.py +++ b/apps/devops/app/routes/deployment/service.py @@ -101,7 +101,7 @@ class DeploymentService: for deployment in deployment_records: 
grouped[deployment.deployment_id].append(deployment) for deployment_list in grouped.values(): - deployment_list.sort(key=lambda d: (d.created_at, d.updated_at), reverse=True) + deployment_list.sort(key=lambda d: (d.created_at, d.updated_at)) latest_deployments = [deployments[-1] for deployments in grouped.values()] return latest_deployments From b5ca0cf0f55634e191efdcc51c3c9cf4b864700f Mon Sep 17 00:00:00 2001 From: Nicolas Date: Thu, 7 Aug 2025 18:29:59 +0800 Subject: [PATCH 37/41] fix: update devops service to subscribe to correct reconciler heartbeat queue --- apps/devops/app/providers/message_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py index 978e232..61e5c23 100644 --- a/apps/devops/app/providers/message_queue.py +++ b/apps/devops/app/providers/message_queue.py @@ -15,7 +15,7 @@ def register(app): try: # Initialize services during startup to avoid blocking app initialization print("🔧 Initializing services...") - app.deployment_heartbeat_subscriber = AsyncMQSubscriber("devops_reconcile_heartbeat") + app.deployment_heartbeat_subscriber = AsyncMQSubscriber("reconciler.output") app.deployment_status_service = DeploymentStatusUpdateService() print("✅ Services initialized") From e4fe9394b1a71ae0df946007c78cc29bdabf0e26 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Fri, 8 Aug 2025 12:01:47 +0800 Subject: [PATCH 38/41] fix: use environment variable for RabbitMQ output queue name - Add RABBITMQ_OUTPUT_QUEUE_NAME environment variable support - Fix hardcoded queue name 'reconciler.output' to use configurable queue name - Default to 'freeleaps.devops.reconciler.output' if env var not set - Add debug logging to show which queue name is being used - This fixes the issue where 42 messages were stuck in the output queue due to queue name mismatch --- apps/devops/app/providers/message_queue.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/apps/devops/app/providers/message_queue.py b/apps/devops/app/providers/message_queue.py index 61e5c23..b2d87cb 100644 --- a/apps/devops/app/providers/message_queue.py +++ b/apps/devops/app/providers/message_queue.py @@ -1,4 +1,5 @@ import asyncio +import os from app.backend.infra.rabbitmq.async_subscriber import AsyncMQSubscriber from app.backend.services.deployment_status_update_service import DeploymentStatusUpdateService @@ -15,7 +16,9 @@ def register(app): try: # Initialize services during startup to avoid blocking app initialization print("🔧 Initializing services...") - app.deployment_heartbeat_subscriber = AsyncMQSubscriber("reconciler.output") + output_queue_name = os.getenv("RABBITMQ_OUTPUT_QUEUE_NAME", "freeleaps.devops.reconciler.output") + print(f"Using output queue: {output_queue_name}") + app.deployment_heartbeat_subscriber = AsyncMQSubscriber(output_queue_name) app.deployment_status_service = DeploymentStatusUpdateService() print("✅ Services initialized") From 4340949f5734f60eac280b0ef524e53039b5d7a3 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Fri, 8 Aug 2025 12:33:14 +0800 Subject: [PATCH 39/41] fix: make startup and shutdown event handlers async in common provider - Fix TypeError: object NoneType can't be used in 'await' expression - FastAPI requires event handlers to be async functions - This was blocking the entire application startup sequence - Fixes the issue where message queue consumers were not starting properly --- apps/devops/app/providers/common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/devops/app/providers/common.py b/apps/devops/app/providers/common.py index 64a9a44..656bcc3 100644 --- a/apps/devops/app/providers/common.py +++ b/apps/devops/app/providers/common.py @@ -11,13 +11,13 @@ def register(app): # This hook ensures that a connection is opened to handle any queries # generated by the request. 
@app.on_event("startup") - def startup(): + async def startup(): pass # This hook ensures that the connection is closed when we've finished # processing the request. @app.on_event("shutdown") - def shutdown(): + async def shutdown(): pass From 04acd78d78359d791d6a7187a2eab4bf03cbea23 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Fri, 8 Aug 2025 12:45:21 +0800 Subject: [PATCH 40/41] fix: connect to existing named queue instead of creating anonymous queue - Change AsyncMQClient to connect to existing persistent queue by name - Fix issue where DevOps Service created temporary anonymous queues instead of consuming from the correct named queue - This allows consuming the 42 backlogged messages in freeleaps.devops.reconciler.output queue - Change queue properties: exclusive=False, auto_delete=False, durable=True - Resolves the core issue where messages were split between persistent and temporary queues --- apps/devops/app/backend/infra/rabbitmq/async_client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/devops/app/backend/infra/rabbitmq/async_client.py b/apps/devops/app/backend/infra/rabbitmq/async_client.py index a48fc58..39e9f28 100644 --- a/apps/devops/app/backend/infra/rabbitmq/async_client.py +++ b/apps/devops/app/backend/infra/rabbitmq/async_client.py @@ -37,8 +37,10 @@ class AsyncMQClient: name=self.exchange_name, type="direct", auto_delete=False ) + # Connect to existing named queue instead of creating anonymous queue + # channel_name already contains the full queue name from environment variable self.queue = await self.channel.declare_queue( - name=None, exclusive=True, auto_delete=True, durable=False + name=self.channel_name, exclusive=False, auto_delete=False, durable=True ) await self.queue.bind( exchange=self.exchange, routing_key=self.routing_key From 8caa8e6b31c46e205b0179024e080e2aac81c162 Mon Sep 17 00:00:00 2001 From: dongli Date: Sun, 10 Aug 2025 18:21:39 -0700 Subject: [PATCH 41/41] Add dummy change to trigger ci/cd --- 
apps/devops/app/main.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/devops/app/main.py b/apps/devops/app/main.py index 559d7ed..1bfc3d4 100644 --- a/apps/devops/app/main.py +++ b/apps/devops/app/main.py @@ -13,4 +13,5 @@ async def root(): if __name__ == "__main__": import uvicorn + print("Starting FastAPI server...") uvicorn.run("main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT, reload=True) \ No newline at end of file