diff --git a/apps/devops/README.md b/apps/devops/README.md
new file mode 100644
index 0000000..2fb84b3
--- /dev/null
+++ b/apps/devops/README.md
@@ -0,0 +1,20 @@
+This is a template backend service built on FastAPI and MongoDB.
+
+To start developing locally, go to the project root directory (YOUR_WORKSPACE_PATH/helloworld/) and bring up MongoDB:
+```bash
+docker compose -f app/scripts/mongodb/docker-compose.yml up -d
+```
+
+Then run the app:
+```bash
+uvicorn app.main:app --reload
+```
+
+Whenever a new dependency is added, regenerate the requirements.txt file:
+```bash
+# optional: install pipreqs if you do not have it yet
+pip3 install pipreqs
+
+# generate requirements.txt
+pipreqs . --force
+```
diff --git a/apps/devops/app/__init__.py b/apps/devops/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/bootstrap/__init__.py b/apps/devops/app/bootstrap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/bootstrap/application.py b/apps/devops/app/bootstrap/application.py
new file mode 100644
index 0000000..24223b6
--- /dev/null
+++ b/apps/devops/app/bootstrap/application.py
@@ -0,0 +1,82 @@
+import logging
+from fastapi import FastAPI
+from fastapi.openapi.utils import get_openapi
+
+from app.providers import common
+from app.providers.logger import register_logger
+from app.providers import router
+from app.providers import database
+from app.providers import metrics
+from app.providers import probes
+from app.providers import exception_handler
+from app.common.config.app_settings import app_settings
+
+def create_app() -> FastAPI:
+    logging.info("App initializing")
+
+    app = FreeleapsApp()
+
+    register_logger()
+    register(app, exception_handler)
+    register(app, database)
+    register(app, router)
+    # register(app, scheduler)
+    register(app, common)
+
+    # Install a customized OpenAPI schema with a global bearer-auth scheme
+    customize_openapi_security(app)
+
+    # Register probe APIs if enabled
+    if app_settings.PROBES_ENABLED:
+        register(app, probes)
+
+    # Register metrics APIs if enabled
+    if app_settings.METRICS_ENABLED:
+        register(app, metrics)
+    return app
+
+
+# Overrides the generated OpenAPI schema to declare JWT bearer auth globally
+def customize_openapi_security(app: FastAPI) -> None:
+
+    def custom_openapi():
+        if app.openapi_schema:
+            return app.openapi_schema
+
+        # Generate OpenAPI schema
+        openapi_schema = get_openapi(
+            title="FreeLeaps API",
+            version="3.1.0",
+            description="FreeLeaps API Documentation",
+            routes=app.routes,
+        )
+
+        # Ensure the components section exists in the OpenAPI schema
+        if "components" not in openapi_schema:
+            openapi_schema["components"] = {}
+
+        # Add security scheme to components
+        openapi_schema["components"]["securitySchemes"] = {
+            "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}
+        }
+
+        # Add security requirement globally
+        openapi_schema["security"] = [{"bearerAuth": []}]
+
+        app.openapi_schema = openapi_schema
+        return app.openapi_schema
+
+    app.openapi = custom_openapi
+
+
+def register(app, provider):
+    logging.info(f"{provider.__name__} registering")
+    provider.register(app)
+
+
+def boot(app, provider):
+    logging.info(f"{provider.__name__} booting")
+    provider.boot(app)
+
+class FreeleapsApp(FastAPI):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
diff --git a/apps/devops/app/common/__init__.py b/apps/devops/app/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/config/__init__.py b/apps/devops/app/common/config/__init__.py
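The bootstrap above wires features in through a small provider convention: anything with a `__name__` and a `register(app)` callable can be passed to `register`. A minimal sketch of that contract (the `greeting` provider here is hypothetical, for illustration only):

```python
# Sketch of the provider convention used by create_app(); the "greeting"
# provider is hypothetical and exists only to illustrate the duck typing.
import logging
from types import SimpleNamespace

from fastapi import FastAPI


def register(app, provider):
    # same contract as app/bootstrap/application.py
    logging.info(f"{provider.__name__} registering")
    provider.register(app)


greeting = SimpleNamespace(
    __name__="greeting",
    register=lambda app: app.add_api_route("/greet", lambda: {"hello": "world"}),
)

app = FastAPI()
register(app, greeting)  # the app now serves GET /greet
```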
b/apps/devops/app/common/config/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/config/app_settings.py b/apps/devops/app/common/config/app_settings.py
new file mode 100644
index 0000000..9a73bcb
--- /dev/null
+++ b/apps/devops/app/common/config/app_settings.py
@@ -0,0 +1,29 @@
+from pydantic_settings import BaseSettings
+
+# NOTE: Values fall back to your environment variables when not set here
+class AppSettings(BaseSettings):
+    NAME: str = "YOUR_APP_NAME"
+    APP_NAME: str = NAME
+    APP_ENV: str = "alpha"
+
+    JWT_SECRET_KEY: str = ""
+    ACCESS_TOKEN_EXPIRE_MINUTES: int = 3600
+    REFRESH_TOKEN_EXPIRE_DAYS: int = 1
+
+    METRICS_ENABLED: bool = False
+    PROBES_ENABLED: bool = True
+
+    APP_MONGODB_URI: str = "mongodb://localhost:27017"
+    APP_MONGODB_NAME: str = "testdb"
+
+    LOG_BASE_PATH: str = "./log"
+    BACKEND_LOG_FILE_NAME: str = APP_NAME
+    APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity"
+
+    class Config:
+        env_file = ".myapp.env"
+        env_file_encoding = "utf-8"
+
+
+app_settings = AppSettings()
diff --git a/apps/devops/app/common/config/log_settings.py b/apps/devops/app/common/config/log_settings.py
new file mode 100644
index 0000000..2f6985c
--- /dev/null
+++ b/apps/devops/app/common/config/log_settings.py
@@ -0,0 +1,16 @@
+import os
+from dataclasses import dataclass
+from .app_settings import app_settings
+
+@dataclass
+class LogSettings:
+    LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH
+    LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days")
+    LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00")  # midnight
+    MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5))
+    LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024))  # 10 MB
+    APP_NAME: str = app_settings.APP_NAME
+    ENVIRONMENT: str = app_settings.APP_ENV
+
+
+log_settings = LogSettings()
diff --git a/apps/devops/app/common/config/site_settings.py b/apps/devops/app/common/config/site_settings.py
new file mode 100644
index 0000000..76e5af1
--- /dev/null
+++ b/apps/devops/app/common/config/site_settings.py
@@ -0,0 +1,27 @@
+import os
+
+from pydantic_settings import BaseSettings
+
+
+# NOTE: Values fall back to your environment variables when not set here
+class SiteSettings(BaseSettings):
+    NAME: str = "appname"
+    DEBUG: bool = True
+
+    ENV: str = "dev"
+
+    SERVER_HOST: str = "localhost"
+    SERVER_PORT: int = 8000
+
+    URL: str = "http://localhost"
+    TIME_ZONE: str = "UTC"
+
+    BASE_PATH: str = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+    class Config:
+        env_file = ".devbase-webapi.env"
+        env_file_encoding = "utf-8"
+
+
+site_settings = SiteSettings()
diff --git a/apps/devops/app/common/daos/__init__.py b/apps/devops/app/common/daos/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/daos/deployment/__init__.py b/apps/devops/app/common/daos/deployment/__init__.py
new file mode 100644
index 0000000..4ee4c85
--- /dev/null
+++ b/apps/devops/app/common/daos/deployment/__init__.py
@@ -0,0 +1,6 @@
+from app.common.daos.deployment.deployment_dao import DeploymentDao
+
+deployment_dao = DeploymentDao()
+
+def get_deployment_dao() -> DeploymentDao:
+    return deployment_dao
\ No newline at end of file
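Because `AppSettings` and `SiteSettings` extend pydantic's `BaseSettings`, real environment variables take precedence over the env file, which takes precedence over the defaults above. A quick sanity check of that resolution order (a sketch; it assumes the `app` package is importable):

```python
# Sketch: observe pydantic-settings precedence for AppSettings.
# Environment variables win over .myapp.env, which wins over class defaults.
import os

os.environ["APP_MONGODB_NAME"] = "override_db"  # simulate a deploy-time override

from app.common.config.app_settings import AppSettings

settings = AppSettings()
assert settings.APP_MONGODB_NAME == "override_db"
print(settings.APP_ENV)  # "alpha" unless overridden in env or .myapp.env
```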
diff --git a/apps/devops/app/common/daos/deployment/deployment_dao.py b/apps/devops/app/common/daos/deployment/deployment_dao.py
new file mode 100644
index 0000000..f34565a
--- /dev/null
+++ b/apps/devops/app/common/daos/deployment/deployment_dao.py
@@ -0,0 +1,45 @@
+from app.common.models.deployment.deployment import Deployment
+
+
+class DeploymentDao:
+    def __init__(self):
+        pass
+
+    async def create_deployment(self, deployment_data: Deployment):
+        # Persist a new deployment document via Beanie
+        await deployment_data.insert()
+
+    async def get_deployments_by_deployment_id(self, deployment_id: str):
+        # Logic to get a deployment by ID
+        pass
+
+    async def get_deployments_by_project_id(self, project_id: str):
+        # Logic to get deployments by project ID
+        pass
+
+    async def get_deployments_by_product_id(self, product_id: str):
+        # Logic to get deployments by product ID
+        pass
+
+    async def get_latest_deployment_by_project_id(self, project_id: str):
+        # Logic to get the latest deployment by project ID
+        pass
+
+    async def get_deployments_by_user_id(self, user_id: str):
+        # Logic to get deployments by user ID
+        pass
+
+    async def update_deployment(self, deployment_id: str, deployment_data: dict):
+        # Logic to update a deployment
+        pass
+
+    async def delete_deployment(self, deployment_id: str):
+        # Logic to delete a deployment
+        pass
diff --git a/apps/devops/app/common/daos/hello_world/__init__.py b/apps/devops/app/common/daos/hello_world/__init__.py
new file mode 100644
index 0000000..463a9cb
--- /dev/null
+++ b/apps/devops/app/common/daos/hello_world/__init__.py
@@ -0,0 +1,6 @@
+from app.common.daos.hello_world.hello_world_dao import HelloWorldDao
+
+hello_world_dao = HelloWorldDao()
+
+def get_hello_world_dao() -> HelloWorldDao:
+    return hello_world_dao
diff --git a/apps/devops/app/common/daos/hello_world/hello_world_dao.py b/apps/devops/app/common/daos/hello_world/hello_world_dao.py
new file mode 100644
index 0000000..3b3a112
--- /dev/null
+++ b/apps/devops/app/common/daos/hello_world/hello_world_dao.py
@@ -0,0 +1,30 @@
+from app.common.models.hello_world.hello_world import HelloWorld
+
+class HelloWorldDao:
+    def __init__(self):
+        pass
+
+    async def create_hello_world(self, message: str, count: int):
+        hello_world = HelloWorld(message=message, count=count)
+        await hello_world.insert()
+        return hello_world
+
+    async def get_hello_world(self, id: str):
+        hello_world = await HelloWorld.get(id)
+        return hello_world
+
+    async def update_hello_world(self, id: str, message: str, count: int):
+        hello_world = await HelloWorld.get(id)
+        if hello_world:
+            hello_world.message = message
+            hello_world.count = count
+            await hello_world.save()
+            return hello_world
+        return None
+
+    async def delete_hello_world(self, id: str):
+        hello_world = await HelloWorld.get(id)
+        if hello_world:
+            await hello_world.delete()
+            return True
+        return False
\ No newline at end of file
diff --git a/apps/devops/app/common/log/__init__.py b/apps/devops/app/common/log/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/log/application_logger.py b/apps/devops/app/common/log/application_logger.py
new file mode 100644
index 0000000..896c044
--- /dev/null
+++ b/apps/devops/app/common/log/application_logger.py
@@ -0,0 +1,12 @@
+from typing import Any
+
+from .base_logger import LoggerBase
+from app.common.config.app_settings import app_settings
+
+class ApplicationLogger(LoggerBase):
+    def __init__(self, application_activities: dict[str, Any] = {}) -> None:
+        extra_fields = {}
+        if application_activities:
+            extra_fields.update(application_activities)
+        super().__init__(
+            logger_name=app_settings.APPLICATION_ACTIVITY_LOG,
+            extra_fields=extra_fields,
+        )
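Both DAOs are thin wrappers over Beanie documents, so they only work after `init_beanie` has run (see `app/providers/database.py` later in this diff). A hedged standalone sketch, assuming a local MongoDB as configured in `app_settings`:

```python
# Sketch: exercising HelloWorldDao outside FastAPI.
# Assumes the MongoDB from app_settings is reachable; run with asyncio.
import asyncio

from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient

from app.common.config.app_settings import app_settings
from app.common.daos.hello_world import get_hello_world_dao
from app.common.models import db_models


async def main() -> None:
    client = AsyncIOMotorClient(app_settings.APP_MONGODB_URI)
    await init_beanie(database=client[app_settings.APP_MONGODB_NAME], document_models=db_models)

    dao = get_hello_world_dao()
    doc = await dao.create_hello_world(message="hi", count=1)
    fetched = await dao.get_hello_world(doc.id)
    print(fetched.message)  # "hi"


asyncio.run(main())
```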
diff --git a/apps/devops/app/common/log/base_logger.py b/apps/devops/app/common/log/base_logger.py
new file mode 100644
index 0000000..a370296
--- /dev/null
+++ b/apps/devops/app/common/log/base_logger.py
@@ -0,0 +1,136 @@
+from loguru import logger as guru_logger
+from app.common.config.log_settings import log_settings
+from typing import Dict, Any, Optional
+import socket
+import json
+import threading
+import os
+import sys
+import inspect
+import logging
+
+from app.common.log.json_sink import JsonSink
+
+class LoggerBase:
+    binded_loggers = {}
+    logger_lock = threading.Lock()
+    default_handler_removed = False
+
+    def __init__(self, logger_name: str, extra_fields: Dict[str, Any]) -> None:
+        self.__logger_name = logger_name
+        self.extra_fields = extra_fields
+        with LoggerBase.logger_lock:
+            if self.__logger_name in LoggerBase.binded_loggers:
+                self.logger = LoggerBase.binded_loggers[self.__logger_name]
+                return
+
+        log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log"
+        log_level = "INFO"
+        rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024)
+
+        # Remove loguru's default stderr handler only once; calling remove()
+        # unconditionally here would also drop the sinks registered for
+        # previously created logger names.
+        if not LoggerBase.default_handler_removed:
+            guru_logger.remove()
+            LoggerBase.default_handler_removed = True
+
+        file_sink = JsonSink(
+            log_file_path=log_filename,
+            rotation_size_bytes=rotation_bytes,
+            max_backup_files=log_settings.MAX_BACKUP_FILES
+        )
+        guru_logger.add(
+            sink=file_sink,
+            level=log_level,
+            filter=lambda record: record["extra"].get("topic") == self.__logger_name,
+        )
+
+        guru_logger.add(
+            sink=sys.stderr,
+            level=log_level,
+            format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}",
+            filter=lambda record: record["extra"].get("topic") == self.__logger_name,
+        )
+
+        host_name = socket.gethostname()
+        host_ip = socket.gethostbyname(host_name)
+        self.logger = guru_logger.bind(
+            topic=self.__logger_name,
+            host_ip=host_ip,
+            host_name=host_name,
+            app=log_settings.APP_NAME,
+            env=log_settings.ENVIRONMENT,
+        )
+        with LoggerBase.logger_lock:
+            LoggerBase.binded_loggers[self.__logger_name] = self.logger
+
+    def _get_log_context(self) -> dict:
+        frame = inspect.currentframe().f_back.f_back
+        filename = os.path.basename(frame.f_code.co_filename)
+        lineno = frame.f_lineno
+        return {"log_file": filename, "log_line": lineno}
+
+    def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+        props = {} if properties is None else properties.copy()
+        props_str = json.dumps(props, ensure_ascii=False) if props else "{}"
+        return props, props_str
+
+    async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: Dict[str, Any], text: str = "") -> None:
+        props, props_str = self._prepare_properties(properties)
+        context = self._get_log_context()
+        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context)
+        local_logger.info(text)
+
+    async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
+        props, props_str = self._prepare_properties(properties)
+        context = self._get_log_context()
+        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context)
+        local_logger.exception(text)
+
+    async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
+        props, props_str = self._prepare_properties(properties)
+        context = self._get_log_context()
+        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="information", properties=props, properties_str=props_str, **context)
+        local_logger.info(text)
+    async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
+        props, props_str = self._prepare_properties(properties)
+        context = self._get_log_context()
+        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context)
+        local_logger.warning(text)
+
+    async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: Optional[Dict[str, Any]] = None) -> None:
+        props, props_str = self._prepare_properties(properties)
+        context = self._get_log_context()
+        local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="error", properties=props, properties_str=props_str, **context)
+        local_logger.error(text)
+
+    @staticmethod
+    def configure_uvicorn_logging():
+        print("📢 Setting up uvicorn logging interception...")
+
+        # Intercept logs from these loggers
+        intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]
+
+        class InterceptHandler(logging.Handler):
+            def emit(self, record):
+                level = (
+                    guru_logger.level(record.levelname).name
+                    if guru_logger.level(record.levelname, None)
+                    else record.levelno
+                )
+                frame, depth = logging.currentframe(), 2
+                while frame.f_code.co_filename == logging.__file__:
+                    frame = frame.f_back
+                    depth += 1
+
+                guru_logger.opt(depth=depth, exception=record.exc_info).log(
+                    level,
+                    f"[{record.name}] {record.getMessage()}",
+                )
+
+        # Replace default handlers
+        logging.root.handlers.clear()
+        logging.root.setLevel(logging.INFO)
+        logging.root.handlers = [InterceptHandler()]
+
+        # Configure specific uvicorn loggers
+        for logger_name in intercept_loggers:
+            logging_logger = logging.getLogger(logger_name)
+            logging_logger.handlers.clear()  # Remove default handlers
+            logging_logger.propagate = True  # Propagate to root, where InterceptHandler forwards to Loguru
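`LoggerBase` is meant to be consumed through subclasses such as `ApplicationLogger`; every `log_*` method is async and binds the structured context that `JsonSink` (the next file) serializes. A small usage sketch, with illustrative field values:

```python
# Sketch: emitting structured application events.
# sender/receiver/subject values are illustrative placeholders.
import asyncio

from app.common.log.application_logger import ApplicationLogger

logger = ApplicationLogger()


async def main() -> None:
    await logger.log_event(
        sender_id="devops-api",
        receiver_id="ops-team",
        subject="deployment",
        event="init",
        properties={"product_id": "demo", "target_env": "alpha"},
        text="deployment initialized",
    )
    await logger.log_error(
        sender_id="devops-api",
        receiver_id="ops-team",
        subject="deployment",
        text="deployment failed",
        properties={"stage": "init"},
    )


asyncio.run(main())
```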
diff --git a/apps/devops/app/common/log/json_sink.py b/apps/devops/app/common/log/json_sink.py
new file mode 100644
index 0000000..a798156
--- /dev/null
+++ b/apps/devops/app/common/log/json_sink.py
@@ -0,0 +1,85 @@
+import json
+import datetime
+import traceback
+from pathlib import Path
+
+class JsonSink:
+    def __init__(
+        self,
+        log_file_path: str,
+        rotation_size_bytes: int = 10 * 1024 * 1024,
+        max_backup_files: int = 5,
+    ):
+        self.log_file_path = Path(log_file_path)
+        self.rotation_size = rotation_size_bytes
+        self.max_backup_files = max_backup_files
+        self._open_log_file()
+
+    def _open_log_file(self):
+        # ensure the parent directory exists
+        parent_dir = self.log_file_path.parent
+        if not parent_dir.exists():
+            parent_dir.mkdir(parents=True, exist_ok=True)
+        self.log_file = self.log_file_path.open("a", encoding="utf-8")
+
+    def _should_rotate(self) -> bool:
+        return self.log_file_path.exists() and self.log_file_path.stat().st_size >= self.rotation_size
+
+    def _rotate(self):
+        self.log_file.close()
+        timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+        rotated_path = self.log_file_path.with_name(f"{self.log_file_path.stem}_{timestamp}{self.log_file_path.suffix}")
+        self.log_file_path.rename(rotated_path)
+        self._cleanup_old_backups()
+        self._open_log_file()
+
+    def _cleanup_old_backups(self):
+        parent = self.log_file_path.parent
+        stem = self.log_file_path.stem
+        suffix = self.log_file_path.suffix
+
+        backup_files = sorted(
+            parent.glob(f"{stem}_*{suffix}"),
+            key=lambda p: p.stat().st_mtime,
+            reverse=True,
+        )
+
+        for old_file in backup_files[self.max_backup_files:]:
+            try:
+                old_file.unlink()
+            except Exception as e:
+                print(f"Failed to delete old backup {old_file}: {e}")
+
+    def __call__(self, message):
+        record = message.record
+        if self._should_rotate():
+            self._rotate()
+
+        log_entry = {
+            "level": record["level"].name.lower(),
+            "timestamp": int(record["time"].timestamp() * 1000),
+            "text": record["message"],
+            "fields": record["extra"].get("properties", {}),
+            "context": {
+                "app": record["extra"].get("app"),
+                "env": record["extra"].get("env"),
+                "log_file": record["extra"].get("log_file"),
+                "log_line": record["extra"].get("log_line"),
+                "topic": record["extra"].get("topic"),
+                "sender_id": record["extra"].get("sender_id"),
+                "receiver_id": record["extra"].get("receiver_id"),
+                "subject": record["extra"].get("subject"),
+                "event": record["extra"].get("event"),
+                "host_ip": record["extra"].get("host_ip"),
+                "host_name": record["extra"].get("host_name"),
+            },
+            "stacktrace": None
+        }
+
+        if record["exception"]:
+            exc_type, exc_value, exc_tb = record["exception"]
+            log_entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb)
+
+        self.log_file.write(json.dumps(log_entry, ensure_ascii=False) + "\n")
+        self.log_file.flush()
diff --git a/apps/devops/app/common/models/__init__.py b/apps/devops/app/common/models/__init__.py
new file mode 100644
index 0000000..35ed321
--- /dev/null
+++ b/apps/devops/app/common/models/__init__.py
@@ -0,0 +1,5 @@
+from app.common.models.hello_world.hello_world import HelloWorld
+from app.common.models.deployment.deployment import Deployment
+
+# list of beanie document models
+db_models = [HelloWorld, Deployment]
\ No newline at end of file
diff --git a/apps/devops/app/common/models/deployment/__init__.py b/apps/devops/app/common/models/deployment/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/models/deployment/deployment.py b/apps/devops/app/common/models/deployment/deployment.py
new file mode 100644
index 0000000..7abff26
--- /dev/null
+++ b/apps/devops/app/common/models/deployment/deployment.py
@@ -0,0 +1,52 @@
+from datetime import datetime
+from typing import Literal
+
+from beanie import Document
+from pydantic import Field
+from pydantic import BaseModel
+from pymongo import IndexModel
+
+
+class Deployment(Document):
+    deployment_id: str = Field(alias="_id")
+    deployment_stage: str
+    deployment_status: Literal["started", "failed", "succeeded", "aborted"]
+
+    deployment_target_env: Literal["alpha", "prod"]
+    deployment_ttl_hours: int = 2
+
+    deployment_project_id: str
+    deployment_project_name: str
+    deployment_product_id: str
+    deployment_product_name: str
+    deployment_git_url: str
+    deployment_git_sha256: str
+    deployment_reason: str
+
+    deployed_by: str
+    # default_factory gives each document a fresh timestamp, instead of a
+    # single value captured once at import time
+    created_at: datetime = Field(default_factory=datetime.now)
+    updated_at: datetime = Field(default_factory=datetime.now)
+
+    class Settings:
+        name = "deployment"
+        indexes = [
+            [("deployment_product_id", 1), ("created_at", 1)],  # compound index
+            [("deployment_id", 1), ("deployment_status", 1)],  # compound index
+            IndexModel([("deployment_id", 1), ("deployment_stage", 1)], unique=True),  # unique compound index
+        ]
+
+class InitDeploymentRequest(BaseModel):
+    product_id: str
+    sha256: str
+    target_env: str
+    user_id: str
+    reason: str = "not provided"
+    ttl_hours: int = 3
+
+class CheckDeploymentStatusRequest(BaseModel):
+    product_id: str
+    target_env: str
+    user_id: str
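With the `default_factory` timestamps, each `Deployment` receives fresh `created_at`/`updated_at` values at construction time. A sketch of creating and querying one (assumes Beanie is already initialized and that documents populate by field name, as Beanie configures; all IDs and names are illustrative):

```python
# Sketch: creating and querying a Deployment document.
# Assumes init_beanie has already run; IDs/names are illustrative.
from app.common.models.deployment.deployment import Deployment

deployment = Deployment(
    deployment_id="dep-123",
    deployment_stage="init",
    deployment_status="started",
    deployment_target_env="alpha",
    deployment_project_id="proj-1",
    deployment_project_name="demo-project",
    deployment_product_id="prod-1",
    deployment_product_name="demo-product",
    deployment_git_url="https://example.com/repo.git",
    deployment_git_sha256="abc123",
    deployment_reason="first deploy",
    deployed_by="user-1",
)


async def save_and_find() -> None:
    await deployment.insert()
    latest = await Deployment.find(
        Deployment.deployment_product_id == "prod-1"
    ).sort(-Deployment.created_at).first_or_none()
    print(latest.deployment_status)  # "started"
```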
diff --git a/apps/devops/app/common/models/hello_world/__init__.py b/apps/devops/app/common/models/hello_world/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/common/models/hello_world/hello_world.py b/apps/devops/app/common/models/hello_world/hello_world.py
new file mode 100644
index 0000000..55000c7
--- /dev/null
+++ b/apps/devops/app/common/models/hello_world/hello_world.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+
+from beanie import Document
+from pydantic import Field
+
+
+class HelloWorld(Document):
+    message: str
+    count: int = 0
+    created_time: datetime = Field(default_factory=datetime.now)
+
+    class Settings:
+        name = "hello_world"
+        indexes = [
+            [("message", 1), ("count", 1)]
+        ]
diff --git a/apps/devops/app/common/probes/__init__.py b/apps/devops/app/common/probes/__init__.py
new file mode 100644
index 0000000..4071df8
--- /dev/null
+++ b/apps/devops/app/common/probes/__init__.py
@@ -0,0 +1,140 @@
+import logging
+from enum import Enum
+from typing import Optional, Callable, Tuple, Dict
+import inspect
+from datetime import datetime, timezone
+
+# ProbeType is an Enum that defines the types of probes that can be registered.
+class ProbeType(Enum):
+    LIVENESS = "liveness"
+    READINESS = "readiness"
+    STARTUP = "startup"
+
+# ProbeResult is a class that represents the result of a probe check.
+class ProbeResult:
+    def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None):
+        self.success = success
+        self.message = message
+        self.data = data or {}
+
+    def to_dict(self) -> dict:
+        return {
+            "success": self.success,
+            "message": self.message,
+            "data": self.data
+        }
+
+# Probe is a class that represents a probe that can be registered.
+class Probe:
+    def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None):
+        self.type = type
+        self.path = path
+        self.check_fn = check_fn
+        self.name = name or f"{type.value}-{id(self)}"
+
+    async def execute(self) -> ProbeResult:
+        try:
+            result = self.check_fn()
+            if inspect.isawaitable(result):
+                result = await result
+
+            if isinstance(result, ProbeResult):
+                return result
+            elif isinstance(result, bool):
+                return ProbeResult(result, "ok" if result else "failed")
+            else:
+                return ProbeResult(True, "ok")
+        except Exception as e:
+            return ProbeResult(False, str(e))
+
+# ProbeGroup is a class that represents a group of probes that can be checked together.
+class ProbeGroup:
+    def __init__(self, path: str):
+        self.path = path
+        self.probes: Dict[str, Probe] = {}
+
+    def add_probe(self, probe: Probe):
+        self.probes[probe.name] = probe
+
+    async def check_all(self) -> Tuple[bool, dict]:
+        results = {}
+        all_success = True
+
+        for name, probe in self.probes.items():
+            result = await probe.execute()
+            results[name] = result.to_dict()
+            if not result.success:
+                all_success = False
+
+        return all_success, results
+
+# FrameworkAdapter is an abstract class that defines the interface for framework-specific probe adapters.
+class FrameworkAdapter:
+    async def handle_request(self, group: ProbeGroup):
+        all_success, results = await group.check_all()
+        status_code = 200 if all_success else 503
+        return {"status": "ok" if all_success else "failed", "payload": results, "timestamp": int(datetime.now(timezone.utc).timestamp())}, status_code
+
+    def register_route(self, path: str, handler: Callable):
+        raise NotImplementedError
+
+# ProbeManager manages the registration of probes and their corresponding framework adapters.
+class ProbeManager:
+    _default_paths = {
+        ProbeType.LIVENESS: "/_/livez",
+        ProbeType.READINESS: "/_/readyz",
+        ProbeType.STARTUP: "/_/healthz"
+    }
+
+    def __init__(self):
+        self.groups: Dict[str, ProbeGroup] = {}
+        self.adapters: Dict[str, FrameworkAdapter] = {}
+        self._startup_complete = False
+
+    def register_adapter(self, framework: str, adapter: FrameworkAdapter):
+        self.adapters[framework] = adapter
+        logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}")
+
+    def register(
+        self,
+        type: ProbeType,
+        check_func: Optional[Callable] = None,
+        path: Optional[str] = None,
+        prefix: str = "",
+        name: Optional[str] = None,
+        frameworks: Optional[list] = None
+    ):
+        path = path or self._default_paths.get(type, "/_/healthz")
+        if prefix:
+            path = f"{prefix}{path}"
+
+        if type == ProbeType.STARTUP and check_func is None:
+            check_func = self._default_startup_check
+
+        probe = Probe(type, path, check_func or (lambda: True), name)
+
+        if path not in self.groups:
+            self.groups[path] = ProbeGroup(path)
+        self.groups[path].add_probe(probe)
+
+        for framework in (frameworks or ["default"]):
+            self._register_route(framework, path)
+            logging.info(f"Registered {type.value} probe route ({path}) for framework: {framework}")
+
+    def _register_route(self, framework: str, path: str):
+        if framework not in self.adapters:
+            return
+
+        adapter = self.adapters[framework]
+        group = self.groups[path]
+
+        async def handler():
+            return await adapter.handle_request(group)
+
+        adapter.register_route(path, handler)
+
+    def _default_startup_check(self) -> bool:
+        return self._startup_complete
+
+    def mark_startup_complete(self):
+        self._startup_complete = True
\ No newline at end of file
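`ProbeManager` only materializes routes for frameworks that have a registered adapter; a check may be sync or async and may return a `bool` or a `ProbeResult`. A standalone sketch using the FastAPI adapter from the next file (the cache check is hypothetical):

```python
# Sketch: wiring a custom readiness check through ProbeManager.
# FastAPIAdapter (next file) supplies route registration; the
# cache_is_warm check is hypothetical.
from fastapi import FastAPI

from app.common.probes import ProbeManager, ProbeResult, ProbeType
from app.common.probes.adapters import FastAPIAdapter

app = FastAPI()
manager = ProbeManager()
manager.register_adapter("fastapi", FastAPIAdapter(app))


def cache_is_warm() -> ProbeResult:
    return ProbeResult(success=True, message="cache warm")


manager.register(
    type=ProbeType.READINESS,
    check_func=cache_is_warm,
    name="cache",
    frameworks=["fastapi"],
)
# GET /_/readyz now returns 200 with per-probe results, or 503 on failure.
```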
diff --git a/apps/devops/app/common/probes/adapters.py b/apps/devops/app/common/probes/adapters.py
new file mode 100644
index 0000000..2ecd38a
--- /dev/null
+++ b/apps/devops/app/common/probes/adapters.py
@@ -0,0 +1,15 @@
+from . import FrameworkAdapter
+from fastapi.responses import JSONResponse
+from typing import Callable
+
+# FastAPIAdapter implements the FrameworkAdapter interface for FastAPI.
+class FastAPIAdapter(FrameworkAdapter):
+    def __init__(self, app):
+        self.app = app
+
+    def register_route(self, path: str, handler: Callable):
+        async def wrapper():
+            data, status_code = await handler()
+            return JSONResponse(content=data, status_code=status_code)
+
+        self.app.add_api_route(path, wrapper, methods=["GET"])
diff --git a/apps/devops/app/envs/alpha.yml b/apps/devops/app/envs/alpha.yml
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/envs/prod.yml b/apps/devops/app/envs/prod.yml
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/app/main.py b/apps/devops/app/main.py
new file mode 100644
index 0000000..559d7ed
--- /dev/null
+++ b/apps/devops/app/main.py
@@ -0,0 +1,16 @@
+from fastapi.responses import RedirectResponse
+from app.common.config.site_settings import site_settings
+from app.bootstrap.application import create_app
+
+app = create_app()
+
+@app.get("/", status_code=301)
+async def root():
+    """
+    Redirect clients to the interactive API docs at /docs
+    """
+    return RedirectResponse(url="/docs", status_code=301)
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run("app.main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT, reload=True)
\ No newline at end of file
diff --git a/apps/devops/app/providers/__init__.py b/apps/devops/app/providers/__init__.py
new file mode 100644
index 0000000..e69de29
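Because `root()` returns a `RedirectResponse` directly, the status code set on the response object is what clients see. A quick check with the test client (a sketch; startup runs `init_beanie`, so it assumes the compose MongoDB from the README is reachable):

```python
# Sketch: verifying the root route redirects to the interactive docs.
# Assumes the local MongoDB is up, since app startup initializes Beanie.
from fastapi.testclient import TestClient

from app.main import app

with TestClient(app) as client:
    response = client.get("/", follow_redirects=False)
    assert response.status_code == 301
    assert response.headers["location"] == "/docs"
```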
+ @app.on_event("shutdown") + def shutdown(): + pass + + +def add_global_middleware(app): + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) diff --git a/apps/devops/app/providers/database.py b/apps/devops/app/providers/database.py new file mode 100644 index 0000000..8716b8e --- /dev/null +++ b/apps/devops/app/providers/database.py @@ -0,0 +1,34 @@ +import asyncio +from app.common.config.app_settings import app_settings +from beanie import init_beanie +from motor.motor_asyncio import AsyncIOMotorClient +from app.common.models import db_models +from app.common.probes import ProbeResult + +client = AsyncIOMotorClient( + app_settings.APP_MONGODB_URI, + serverSelectionTimeoutMS=60000, + minPoolSize=5, # Minimum number of connections in the pool + maxPoolSize=20, # Maximum number of connections in the pool +) + +def register(app): + app.debug = "auth_mongo_debug" + app.title = "auth_mongo_name" + + @app.on_event("startup") + async def start_database(): + await initiate_database() + +async def check_database_initialized() -> ProbeResult: + try: + await asyncio.wait_for(client.server_info(), timeout=5) + return ProbeResult(success=True, message="service has been initialized and ready to serve") + except Exception: + return ProbeResult(success=False, message="service is not initialized yet", data={"error": "database is not ready"}) + + +async def initiate_database(): + await init_beanie( + database=client[app_settings.APP_MONGODB_NAME], document_models=db_models + ) diff --git a/apps/devops/app/providers/exception_handler.py b/apps/devops/app/providers/exception_handler.py new file mode 100644 index 0000000..21117a5 --- /dev/null +++ b/apps/devops/app/providers/exception_handler.py @@ -0,0 +1,39 @@ +from fastapi import FastAPI, HTTPException +from fastapi.exceptions import RequestValidationError +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import ( + HTTP_400_BAD_REQUEST, + HTTP_401_UNAUTHORIZED, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_422_UNPROCESSABLE_ENTITY, + HTTP_500_INTERNAL_SERVER_ERROR, +) + + +async def custom_http_exception_handler(request: Request, exc: HTTPException): + return JSONResponse( + status_code=exc.status_code, + content={"error": exc.detail}, + ) + + + +async def validation_exception_handler(request: Request, exc: RequestValidationError): + return JSONResponse( + status_code=HTTP_400_BAD_REQUEST, + content={"error": str(exc)}, + ) + +async def exception_handler(request: Request, exc: Exception): + return JSONResponse( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + content={"error": str(exc)}, + ) + + +def register(app: FastAPI): + app.add_exception_handler(HTTPException, custom_http_exception_handler) + app.add_exception_handler(RequestValidationError, validation_exception_handler) + app.add_exception_handler(Exception, exception_handler) diff --git a/apps/devops/app/providers/logger.py b/apps/devops/app/providers/logger.py new file mode 100644 index 0000000..2785603 --- /dev/null +++ b/apps/devops/app/providers/logger.py @@ -0,0 +1,7 @@ +from app.common.log.base_logger import LoggerBase + + +def register_logger(): + print("📢 Setting up logging interception...") + LoggerBase.configure_uvicorn_logging() + print("✅ Logging interception complete. 
diff --git a/apps/devops/app/providers/logger.py b/apps/devops/app/providers/logger.py
new file mode 100644
index 0000000..2785603
--- /dev/null
+++ b/apps/devops/app/providers/logger.py
@@ -0,0 +1,7 @@
+from app.common.log.base_logger import LoggerBase
+
+
+def register_logger():
+    print("📢 Setting up logging interception...")
+    LoggerBase.configure_uvicorn_logging()
+    print("✅ Logging interception complete. Logs are formatted and deduplicated!")
diff --git a/apps/devops/app/providers/metrics.py b/apps/devops/app/providers/metrics.py
new file mode 100644
index 0000000..1ae941a
--- /dev/null
+++ b/apps/devops/app/providers/metrics.py
@@ -0,0 +1,16 @@
+import logging
+from prometheus_fastapi_instrumentator import Instrumentator
+from app.common.config.app_settings import app_settings
+
+def register(app):
+    instrumentator = (
+        Instrumentator().instrument(
+            app,
+            metric_namespace="freeleaps",
+            metric_subsystem=app_settings.APP_NAME)
+    )
+
+    @app.on_event("startup")
+    async def startup():
+        instrumentator.expose(app, endpoint="/api/_/metrics", should_gzip=True)
+        logging.info("Metrics endpoint exposed at /api/_/metrics")
\ No newline at end of file
diff --git a/apps/devops/app/providers/probes.py b/apps/devops/app/providers/probes.py
new file mode 100644
index 0000000..883e3d6
--- /dev/null
+++ b/apps/devops/app/providers/probes.py
@@ -0,0 +1,25 @@
+from app.common.probes import ProbeManager, ProbeType
+from app.common.probes.adapters import FastAPIAdapter
+from .database import check_database_initialized
+
+def register(app):
+    probes_manager = ProbeManager()
+    probes_manager.register_adapter("fastapi", FastAPIAdapter(app))
+
+    async def readiness_checker():
+        return await check_database_initialized()
+
+    probes_manager.register(
+        name="readiness",
+        prefix="/api",
+        type=ProbeType.READINESS,
+        check_func=readiness_checker,
+        frameworks=["fastapi"]
+    )
+
+    probes_manager.register(name="liveness", prefix="/api", type=ProbeType.LIVENESS, frameworks=["fastapi"])
+    probes_manager.register(name="startup", prefix="/api", type=ProbeType.STARTUP, frameworks=["fastapi"])
+
+    @app.on_event("startup")
+    async def mark_startup_complete():
+        probes_manager.mark_startup_complete()
\ No newline at end of file
diff --git a/apps/devops/app/providers/router.py b/apps/devops/app/providers/router.py
new file mode 100644
index 0000000..b273eb8
--- /dev/null
+++ b/apps/devops/app/providers/router.py
@@ -0,0 +1,34 @@
+from app.routes import api_router
+
+from starlette import routing
+
+
+def register(app):
+    app.include_router(
+        api_router,
+        prefix="/api",
+        tags=["api"],
+        dependencies=[],
+        responses={404: {"description": "no page found"}},
+    )
+
+    if app.debug:
+        for route in app.routes:
+            if not isinstance(route, routing.WebSocketRoute):
+                print(
+                    {
+                        "path": route.path,
+                        "endpoint": route.endpoint,
+                        "name": route.name,
+                        "methods": route.methods,
+                    }
+                )
+            else:
+                print(
+                    {
+                        "path": route.path,
+                        "endpoint": route.endpoint,
+                        "name": route.name,
+                        "type": "web socket route",
+                    }
+                )
diff --git a/apps/devops/app/providers/scheduler.py b/apps/devops/app/providers/scheduler.py
new file mode 100644
index 0000000..7ea8d6c
--- /dev/null
+++ b/apps/devops/app/providers/scheduler.py
@@ -0,0 +1,8 @@
+import asyncio
+
+
+def register(app):
+    @app.on_event("startup")
+    async def start_scheduler():
+        # create your scheduler here
+        pass
diff --git a/apps/devops/app/routes/__init__.py b/apps/devops/app/routes/__init__.py
new file mode 100644
index 0000000..ab02a04
--- /dev/null
+++ b/apps/devops/app/routes/__init__.py
@@ -0,0 +1,7 @@
+from fastapi import APIRouter
+from app.routes.deployment.apis import router as deployment_api
+
+api_router = APIRouter()
+
+# TODO: add custom routers here
+api_router.include_router(deployment_api, tags=["deployment"])
diff --git a/apps/devops/app/routes/deployment/__init__.py b/apps/devops/app/routes/deployment/__init__.py
new file mode 100644
index 0000000..e69de29
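Once the probes provider is registered under the `/api` prefix, liveness answers as soon as the app boots, while readiness additionally requires MongoDB. A sketch (again assumes the local stack is up):

```python
# Sketch: hitting the probe endpoints registered by the probes provider.
# Liveness should pass on boot; readiness requires a reachable database.
from fastapi.testclient import TestClient

from app.main import app

with TestClient(app) as client:
    live = client.get("/api/_/livez")
    print(live.status_code, live.json()["status"])  # expected: 200 ok

    ready = client.get("/api/_/readyz")
    print(ready.status_code)  # 200 if MongoDB is reachable, else 503
```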
diff --git a/apps/devops/app/routes/deployment/apis.py b/apps/devops/app/routes/deployment/apis.py
new file mode 100644
index 0000000..50eff75
--- /dev/null
+++ b/apps/devops/app/routes/deployment/apis.py
@@ -0,0 +1,34 @@
+from typing import List
+
+from fastapi import APIRouter, Depends
+
+from app.common.models.deployment.deployment import Deployment, InitDeploymentRequest
+from app.routes.deployment.service import DeploymentService, get_deployment_service
+
+router = APIRouter(prefix="/deployment")
+
+@router.post("/initDeployment")
+async def init_deployment(
+    request: InitDeploymentRequest,
+    service: DeploymentService = Depends(get_deployment_service)
+) -> Deployment:
+    """Insert a new Deployment record into the database."""
+    return await service.init_deployment(request)
+
+@router.post("/updateDeploymentStatus")
+async def update_deployment(
+    request: Deployment,
+    service: DeploymentService = Depends(get_deployment_service)
+) -> bool:
+    return await service.update_deployment_status(request)
+
+@router.get("/checkDeploymentStatus")
+async def check_deployment_status(
+    product_id: str,
+    service: DeploymentService = Depends(get_deployment_service)
+) -> List[Deployment]:
+    return await service.check_deployment_status(product_id)
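End to end, initializing a deployment is a single POST carrying an `InitDeploymentRequest` body. A hedged sketch with httpx (field values are illustrative, and the service below still contains TODO stubs such as `_retrieve_git_url_by_product_id`, so a real call will not succeed until those are implemented):

```python
# Sketch: calling the deployment API with httpx.
# Field values are illustrative; the service still has TODO stubs,
# so a real call will not succeed until they are implemented.
import httpx

payload = {
    "product_id": "prod-1",
    "sha256": "abc123",
    "target_env": "alpha",
    "user_id": "user-1",
    "reason": "first deploy",
    "ttl_hours": 3,
}

response = httpx.post("http://localhost:8000/api/deployment/initDeployment", json=payload)
print(response.status_code, response.json())
```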
diff --git a/apps/devops/app/routes/deployment/service.py b/apps/devops/app/routes/deployment/service.py
new file mode 100644
index 0000000..8a74d76
--- /dev/null
+++ b/apps/devops/app/routes/deployment/service.py
@@ -0,0 +1,157 @@
+from collections import defaultdict
+from datetime import datetime, timedelta
+from typing import List
+
+from fastapi import HTTPException
+
+from app.common.models import Deployment
+from app.common.models.deployment.deployment import InitDeploymentRequest
+
+
+class DeploymentService:
+
+    def __init__(self):
+        pass
+
+    async def init_deployment(
+        self,
+        request: InitDeploymentRequest
+    ) -> Deployment:
+        """
+        Create and persist a new deployment record, initializing the product first if needed.
+        """
+        # TODO: validate permission with user_id (currently skipped)
+
+        git_url = await self._retrieve_git_url_by_product_id(request.product_id)
+
+        product_initialized = await self._check_if_project_initialized(git_url, request.product_id)
+        if not product_initialized:
+            await self._init_product(git_url, request.product_id)
+
+        # TODO: retrieve the real project and product metadata
+        project_id = "TODO"
+        project_name = "TODO"
+        product_name = "TODO"
+
+        deployment = Deployment.model_construct(
+            deployment_stage = "init",
+            deployment_status = "started",
+            deployment_target_env = request.target_env,
+            deployment_ttl_hours = request.ttl_hours,
+            deployment_project_id = project_id,
+            deployment_project_name = project_name,
+            deployment_product_id = request.product_id,
+            deployment_product_name = product_name,
+            deployment_git_url = git_url,
+            deployment_git_sha256 = request.sha256,
+            deployment_reason = request.reason,
+            deployed_by = request.user_id,
+            created_at = datetime.now(),
+            updated_at = datetime.now(),
+        )
+
+        await self._start_deployment(deployment)
+
+        res = await deployment.insert()
+        return res
+
+    async def check_deployment_status(
+        self,
+        product_id: str,
+    ) -> List[Deployment]:
+        """
+        Check the deployment status for a product, looking back over the past 48 hours only.
+        """
+        time_threshold = datetime.now() - timedelta(hours=48)
+        deployment_records = await Deployment.find(
+            Deployment.deployment_product_id == product_id,
+            Deployment.created_at >= time_threshold
+        ).to_list()
+        grouped = defaultdict(list)
+        for deployment in deployment_records:
+            grouped[deployment.deployment_product_id].append(deployment)
+        for deployment_list in grouped.values():
+            deployment_list.sort(key=lambda d: (d.created_at, d.updated_at), reverse=True)
+
+        # each list is sorted newest-first, so the latest record is at index 0
+        latest_deployments = [deployments[0] for deployments in grouped.values()]
+        return latest_deployments
+
+    async def update_deployment_status(
+        self,
+        deployment: Deployment
+    ) -> bool:
+        latest_record = await Deployment.find_one(
+            Deployment.deployment_id == deployment.deployment_id,
+            sort=[("created_at", -1)]
+        )
+
+        if not latest_record:
+            raise HTTPException(status_code=404, detail="No record found, please initiate deployment first")
+
+        # TODO: add more sanity-check logic here
+
+        if deployment.deployment_stage == latest_record.deployment_stage:
+            # same stage: update the existing record in place
+            latest_record.deployment_status = deployment.deployment_status
+            latest_record.updated_at = deployment.updated_at or datetime.now()
+            await latest_record.save()
+        else:
+            # new stage: create a new record
+            deployment.deployment_id = latest_record.deployment_id
+            deployment.created_at = latest_record.created_at
+            deployment.updated_at = datetime.now()
+            await deployment.insert()
+
+        return True
+
+    async def _retrieve_git_url_by_product_id(
+        self,
+        product_id: str
+    ) -> str:
+        """
+        Retrieve git url by product id
+        """
+        # TODO implement this function
+        pass
+
+    async def _check_if_project_initialized(
+        self,
+        git_url: str,
+        product_id: str
+    ) -> bool:
+        """
+        Check if the project has been initialized
+        """
+        # TODO implement this function
+        pass
+
+    async def _init_product(
+        self,
+        git_url: str,
+        product_id: str
+    ) -> bool:
+        """
+        Initialize the product
+        """
+        # TODO implement this function
+        pass
+
+    async def _start_deployment(
+        self,
+        deployment: Deployment
+    ) -> bool:
+        """
+        Start the deployment
+        """
+        # TODO implement this function
+        pass
+
+deployment_service = DeploymentService()
+
+def get_deployment_service() -> DeploymentService:
+    return deployment_service
\ No newline at end of file
diff --git a/apps/devops/app/scripts/mongodb/docker-compose.yml b/apps/devops/app/scripts/mongodb/docker-compose.yml
new file mode 100644
index 0000000..8ab07c7
--- /dev/null
+++ b/apps/devops/app/scripts/mongodb/docker-compose.yml
@@ -0,0 +1,18 @@
+version: '3.8'
+
+services:
+  mongodb:
+    image: mongo:6.0  # change to the desired version if needed
+    container_name: mongodb
+    restart: unless-stopped
+    ports:
+      - "27017:27017"
+    environment:
+      MONGO_INITDB_DATABASE: testdb  # <-- creates the initial database
+    volumes:
+      - mongodb_data:/data/db
+    command: ["mongod", "--noauth"]  # <-- disable authentication (local dev only)
+
+
+volumes:
+  mongodb_data:
\ No newline at end of file
diff --git a/apps/devops/requirements.txt b/apps/devops/requirements.txt
new file mode 100644
index 0000000..056543d
--- /dev/null
+++ b/apps/devops/requirements.txt
@@ -0,0 +1,10 @@
+beanie==1.29.0
+fastapi==0.115.12
+loguru==0.7.3
+motor==3.7.0
+prometheus_fastapi_instrumentator==7.1.0
+pydantic_settings==2.9.1
+pytest==7.1.2
+starlette==0.46.2
+uvicorn==0.34.2
+httpx==0.24.0
\ No newline at end of file
diff --git a/apps/devops/tests/__init__.py b/apps/devops/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/devops/tests/routes/__init__.py b/apps/devops/tests/routes/__init__.py
new file mode 100644
index 0000000..e69de29
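Before running the app or the tests against the compose stack above, it can help to confirm that the local MongoDB answers. A small sketch using the same motor driver the app uses:

```python
# Sketch: ping the docker-compose MongoDB before running the app or tests.
import asyncio

from motor.motor_asyncio import AsyncIOMotorClient


async def ping() -> None:
    client = AsyncIOMotorClient("mongodb://localhost:27017", serverSelectionTimeoutMS=3000)
    info = await client.server_info()  # raises if the server is unreachable
    print("MongoDB is up, version", info["version"])


asyncio.run(ping())
```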
diff --git a/apps/devops/tests/routes/test_hello_world.py b/apps/devops/tests/routes/test_hello_world.py
new file mode 100644
index 0000000..638a4b2
--- /dev/null
+++ b/apps/devops/tests/routes/test_hello_world.py
@@ -0,0 +1,27 @@
+from unittest.mock import AsyncMock, patch
+from fastapi.testclient import TestClient
+from app.main import app
+# NOTE: these tests assume a hello_world router is registered under /api;
+# the DAO accessor lives in app.common.daos.hello_world
+from app.common.daos.hello_world import get_hello_world_dao
+
+
+def test_hello_world():
+    with TestClient(app) as client:
+        response = client.get("/api/hello_world/")
+        assert response.status_code == 200
+        assert response.json() == {"message": "Hello, World!"}
+
+
+# mock out initiate_database so it doesn't run during tests
+@patch("app.providers.database.initiate_database", new_callable=AsyncMock)
+def test_insert_hello_world(mock_db_init):
+
+    class MockHelloWorldDao:
+        async def create_hello_world(self, msg: str, user_id: int):
+            return {"message": msg, "user_id": user_id}
+
+    app.dependency_overrides[get_hello_world_dao] = lambda: MockHelloWorldDao()
+    with TestClient(app) as client:
+        response = client.post("/api/hello_world/insert", params={"msg": "Test Message"})
+        assert response.status_code == 200
+        assert response.json() == {"message": "Test Message", "user_id": 1}
+    app.dependency_overrides.clear()
diff --git a/apps/devops/tests/test_main.http b/apps/devops/tests/test_main.http
new file mode 100644
index 0000000..b847198
--- /dev/null
+++ b/apps/devops/tests/test_main.http
@@ -0,0 +1,8 @@
+# Test your FastAPI endpoints
+
+GET http://localhost:8000/api/hello_world/
+Accept: application/json
+
+###
+POST http://localhost:8000/api/hello_world/insert?msg=Hello%20World
+Accept: application/json
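The dependency-override pattern from `test_insert_hello_world` carries over to the deployment routes added in this diff. A hedged sketch (the stub service bypasses MongoDB entirely):

```python
# Sketch: testing /api/deployment/checkDeploymentStatus with a stubbed service.
# The stub returns an empty history, so no database is needed.
from unittest.mock import AsyncMock, patch

from fastapi.testclient import TestClient

from app.main import app
from app.routes.deployment.service import get_deployment_service


class StubDeploymentService:
    async def check_deployment_status(self, product_id: str):
        return []  # pretend there is no deployment history


@patch("app.providers.database.initiate_database", new_callable=AsyncMock)
def test_check_deployment_status(mock_db_init):
    app.dependency_overrides[get_deployment_service] = lambda: StubDeploymentService()
    with TestClient(app) as client:
        response = client.get(
            "/api/deployment/checkDeploymentStatus",
            params={"product_id": "prod-1"},
        )
    assert response.status_code == 200
    assert response.json() == []
    app.dependency_overrides.clear()
```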