From 44f08eee68fc35d0fa87928a833eacb48e7938ac Mon Sep 17 00:00:00 2001 From: weicao Date: Thu, 11 Sep 2025 17:35:20 +0800 Subject: [PATCH 01/15] Add metrics service with user registration API endpoints - Add complete metrics microservice structure - Implement StarRocks database integration - Add user registration data query APIs: - Daily registered users by date range - Recent N days registration data - Registration data by start date and days - Registration summary statistics - Add comprehensive error handling and logging - Include test scripts and documentation --- apps/metrics/.gitignore | 75 ++++++ apps/metrics/Dockerfile | 37 +++ apps/metrics/__init__.py | 1 + apps/metrics/backend/__init__.py | 1 + apps/metrics/backend/annotation/__init__.py | 0 apps/metrics/backend/application/__init__.py | 0 apps/metrics/backend/business/__init__.py | 0 apps/metrics/backend/infra/__init__.py | 0 .../metrics/backend/infra/starrocks_client.py | 90 +++++++ apps/metrics/backend/models/__init__.py | 0 .../backend/models/registered_users.py | 26 ++ apps/metrics/backend/services/__init__.py | 0 .../backend/services/registration_service.py | 125 ++++++++++ apps/metrics/common/__init__.py | 0 apps/metrics/requirements.txt | 16 ++ apps/metrics/start_fastapi.sh | 38 +++ apps/metrics/test_registration_api.py | 106 ++++++++ apps/metrics/tests/__init__.py | 0 apps/metrics/webapi/__init__.py | 1 + apps/metrics/webapi/bootstrap/__init__.py | 0 apps/metrics/webapi/bootstrap/application.py | 69 ++++++ apps/metrics/webapi/config/__init__.py | 0 apps/metrics/webapi/config/site_settings.py | 41 ++++ apps/metrics/webapi/main.py | 36 +++ apps/metrics/webapi/providers/__init__.py | 0 apps/metrics/webapi/routes/__init__.py | 0 apps/metrics/webapi/routes/registration.py | 229 ++++++++++++++++++ 27 files changed, 891 insertions(+) create mode 100644 apps/metrics/.gitignore create mode 100644 apps/metrics/Dockerfile create mode 100644 apps/metrics/__init__.py create mode 100644 
apps/metrics/backend/__init__.py create mode 100644 apps/metrics/backend/annotation/__init__.py create mode 100644 apps/metrics/backend/application/__init__.py create mode 100644 apps/metrics/backend/business/__init__.py create mode 100644 apps/metrics/backend/infra/__init__.py create mode 100644 apps/metrics/backend/infra/starrocks_client.py create mode 100644 apps/metrics/backend/models/__init__.py create mode 100644 apps/metrics/backend/models/registered_users.py create mode 100644 apps/metrics/backend/services/__init__.py create mode 100644 apps/metrics/backend/services/registration_service.py create mode 100644 apps/metrics/common/__init__.py create mode 100644 apps/metrics/requirements.txt create mode 100755 apps/metrics/start_fastapi.sh create mode 100755 apps/metrics/test_registration_api.py create mode 100644 apps/metrics/tests/__init__.py create mode 100644 apps/metrics/webapi/__init__.py create mode 100644 apps/metrics/webapi/bootstrap/__init__.py create mode 100644 apps/metrics/webapi/bootstrap/application.py create mode 100644 apps/metrics/webapi/config/__init__.py create mode 100644 apps/metrics/webapi/config/site_settings.py create mode 100644 apps/metrics/webapi/main.py create mode 100644 apps/metrics/webapi/providers/__init__.py create mode 100644 apps/metrics/webapi/routes/__init__.py create mode 100644 apps/metrics/webapi/routes/registration.py diff --git a/apps/metrics/.gitignore b/apps/metrics/.gitignore new file mode 100644 index 0000000..365d8bf --- /dev/null +++ b/apps/metrics/.gitignore @@ -0,0 +1,75 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Virtual environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Logs +logs/ +*.log + +# OS +.DS_Store +.DS_Store? 
+._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Test coverage +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# Environments +.env.local +.env.development.local +.env.test.local +.env.production.local diff --git a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile new file mode 100644 index 0000000..4c40152 --- /dev/null +++ b/apps/metrics/Dockerfile @@ -0,0 +1,37 @@ +FROM python:3.10-slim-bullseye + +# docker settings +ARG CONTAINER_APP_ROOT="/app" +ENV APP_NAME="metrics" + +# Service dependencies +ENV DEVSVC_WEBAPI_URL_BASE="http://devsvc:8007/api/devsvc" +ENV NOTIFICATION_WEBAPI_URL_BASE="http://notification:8003/api/notification/" + +# JWT settings +ENV JWT_SECRET_KEY="8f87ca8c3c9c3df09a9c78e0adb0927855568f6072d9efc892534aee35f5867b" +ENV JWT_ALGORITHM="HS256" + +# Site settings +ENV SERVICE_API_ACCESS_HOST=0.0.0.0 +ENV SERVICE_API_ACCESS_PORT=8009 +ENV MONGODB_NAME=freeleaps2 +ENV MONGODB_PORT=27017 +ENV MONGODB_URI="mongodb://localhost:27017/" + +# Log settings +ENV LOG_BASE_PATH=$CONTAINER_APP_ROOT/log/$APP_NAME +ENV BACKEND_LOG_FILE_NAME=$APP_NAME +ENV APPLICATION_ACTIVITY_LOG=$APP_NAME-activity + +WORKDIR ${CONTAINER_APP_ROOT} +COPY requirements.txt . + +RUN pip install --upgrade pip +RUN pip install --no-cache-dir -r requirements.txt + +COPY . 
${CONTAINER_APP_ROOT} + +EXPOSE ${SERVICE_API_ACCESS_PORT} +# Using shell to expand environment to ensure pass the actual environment value to uvicorn +CMD uvicorn webapi.main:app --reload --port=$SERVICE_API_ACCESS_PORT --host=$SERVICE_API_ACCESS_HOST diff --git a/apps/metrics/__init__.py b/apps/metrics/__init__.py new file mode 100644 index 0000000..648659c --- /dev/null +++ b/apps/metrics/__init__.py @@ -0,0 +1 @@ +# Metrics Service diff --git a/apps/metrics/backend/__init__.py b/apps/metrics/backend/__init__.py new file mode 100644 index 0000000..f4e0926 --- /dev/null +++ b/apps/metrics/backend/__init__.py @@ -0,0 +1 @@ +# Backend module diff --git a/apps/metrics/backend/annotation/__init__.py b/apps/metrics/backend/annotation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/application/__init__.py b/apps/metrics/backend/application/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/business/__init__.py b/apps/metrics/backend/business/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/infra/__init__.py b/apps/metrics/backend/infra/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/infra/starrocks_client.py b/apps/metrics/backend/infra/starrocks_client.py new file mode 100644 index 0000000..af7fb97 --- /dev/null +++ b/apps/metrics/backend/infra/starrocks_client.py @@ -0,0 +1,90 @@ +import pymysql +from typing import List, Dict, Any, Optional +from datetime import date +from loguru import logger +from webapi.config.site_settings import site_settings + + +class StarRocksClient: + """StarRocks database client for querying user registration data""" + + def __init__(self): + self.host = site_settings.STARROCKS_HOST + self.port = site_settings.STARROCKS_PORT + self.user = site_settings.STARROCKS_USER + self.password = site_settings.STARROCKS_PASSWORD + self.database = site_settings.STARROCKS_DATABASE + 
self.connection = None + + def connect(self) -> bool: + """Establish connection to StarRocks database""" + try: + self.connection = pymysql.connect( + host=self.host, + port=self.port, + user=self.user, + password=self.password, + database=self.database, + charset='utf8mb4', + autocommit=True + ) + logger.info(f"Successfully connected to StarRocks at {self.host}:{self.port}") + return True + except Exception as e: + logger.error(f"Failed to connect to StarRocks: {e}") + return False + + def disconnect(self): + """Close database connection""" + if self.connection: + self.connection.close() + self.connection = None + logger.info("Disconnected from StarRocks") + + def execute_query(self, query: str, params: Optional[tuple] = None) -> List[Dict[str, Any]]: + """Execute SQL query and return results""" + if not self.connection: + if not self.connect(): + raise Exception("Failed to connect to StarRocks database") + + try: + with self.connection.cursor(pymysql.cursors.DictCursor) as cursor: + cursor.execute(query, params) + results = cursor.fetchall() + logger.info(f"Query executed successfully, returned {len(results)} rows") + return results + except Exception as e: + logger.error(f"Query execution failed: {e}") + raise e + + def get_daily_registered_users( + self, + start_date: date, + end_date: date, + product_id: str = "freeleaps" + ) -> List[Dict[str, Any]]: + """Query daily registered users from StarRocks""" + query = """ + SELECT + date_id, + product_id, + registered_cnt, + updated_at + FROM dws_daily_registered_users + WHERE date_id >= %s + AND date_id <= %s + AND product_id = %s + ORDER BY date_id ASC + """ + + params = (start_date, end_date, product_id) + return self.execute_query(query, params) + + def __enter__(self): + """Context manager entry""" + self.connect() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit""" + self.disconnect() diff --git a/apps/metrics/backend/models/__init__.py 
b/apps/metrics/backend/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/models/registered_users.py b/apps/metrics/backend/models/registered_users.py new file mode 100644 index 0000000..64bebcb --- /dev/null +++ b/apps/metrics/backend/models/registered_users.py @@ -0,0 +1,26 @@ +from pydantic import BaseModel +from datetime import date, datetime +from typing import List, Optional + + +class DailyRegisteredUsers(BaseModel): + """Daily registered users data model""" + date_id: date + product_id: str = "freeleaps" + registered_cnt: int + updated_at: Optional[datetime] = None + + +class UserRegistrationQuery(BaseModel): + """Query parameters for user registration data""" + start_date: date + end_date: date + product_id: str = "freeleaps" + + +class UserRegistrationResponse(BaseModel): + """Response model for user registration data""" + dates: List[str] + counts: List[int] + total_registrations: int + query_period: str diff --git a/apps/metrics/backend/services/__init__.py b/apps/metrics/backend/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/backend/services/registration_service.py b/apps/metrics/backend/services/registration_service.py new file mode 100644 index 0000000..29432c4 --- /dev/null +++ b/apps/metrics/backend/services/registration_service.py @@ -0,0 +1,125 @@ +from typing import List, Dict, Any +from datetime import date, timedelta +from loguru import logger +from backend.infra.starrocks_client import StarRocksClient +from backend.models.registered_users import UserRegistrationResponse, DailyRegisteredUsers + + +class RegistrationService: + """Service for handling user registration data queries""" + + def __init__(self): + self.starrocks_client = StarRocksClient() + + def get_daily_registered_users( + self, + start_date: date, + end_date: date, + product_id: str = "freeleaps" + ) -> UserRegistrationResponse: + """ + Get daily registered users count for a date range + + 
Args: + start_date: Start date for the query + end_date: End date for the query + product_id: Product identifier (default: freeleaps) + + Returns: + UserRegistrationResponse with dates and counts + """ + try: + # Query data from StarRocks + raw_data = self.starrocks_client.get_daily_registered_users( + start_date, end_date, product_id + ) + + # Convert to DailyRegisteredUsers objects + daily_data = [ + DailyRegisteredUsers( + date_id=row['date_id'], + product_id=row['product_id'], + registered_cnt=row['registered_cnt'], + updated_at=row.get('updated_at') + ) + for row in raw_data + ] + + # Create date-to-count mapping + data_dict = {str(item.date_id): item.registered_cnt for item in daily_data} + + # Generate complete date range + dates = [] + counts = [] + current_date = start_date + + while current_date <= end_date: + date_str = str(current_date) + dates.append(date_str) + counts.append(data_dict.get(date_str, 0)) + current_date += timedelta(days=1) + + # Calculate total registrations + total_registrations = sum(counts) + + logger.info( + f"Retrieved registration data for {len(dates)} days, " + f"total registrations: {total_registrations}" + ) + + return UserRegistrationResponse( + dates=dates, + counts=counts, + total_registrations=total_registrations, + query_period=f"{start_date} to {end_date}" + ) + + except Exception as e: + logger.error(f"Failed to get daily registered users: {e}") + raise e + + def get_registration_summary( + self, + start_date: date, + end_date: date, + product_id: str = "freeleaps" + ) -> Dict[str, Any]: + """ + Get summary statistics for user registrations + + Args: + start_date: Start date for the query + end_date: End date for the query + product_id: Product identifier + + Returns: + Dictionary with summary statistics + """ + try: + response = self.get_daily_registered_users(start_date, end_date, product_id) + + if not response.counts: + return { + "total_registrations": 0, + "average_daily": 0, + "max_daily": 0, + "min_daily": 0, + 
"days_with_registrations": 0, + "total_days": len(response.dates) + } + + counts = response.counts + non_zero_counts = [c for c in counts if c > 0] + + return { + "total_registrations": response.total_registrations, + "average_daily": round(sum(counts) / len(counts), 2), + "max_daily": max(counts), + "min_daily": min(counts), + "days_with_registrations": len(non_zero_counts), + "total_days": len(response.dates) + } + + except Exception as e: + logger.error(f"Failed to get registration summary: {e}") + raise e diff --git a/apps/metrics/common/__init__.py b/apps/metrics/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/requirements.txt b/apps/metrics/requirements.txt new file mode 100644 index 0000000..96d4bfd --- /dev/null +++ b/apps/metrics/requirements.txt @@ -0,0 +1,16 @@ +fastapi==0.114.0 +pydantic==2.9.2 +loguru==0.7.2 +uvicorn==0.23.2 +beanie==1.21.0 +pika==1.3.2 +aio-pika +httpx +pydantic-settings +python-jose +passlib[bcrypt] +prometheus-fastapi-instrumentator==7.0.2 +pytest==8.4.1 +pytest-asyncio==0.21.2 +pymysql==1.1.0 +sqlalchemy==2.0.23 diff --git a/apps/metrics/start_fastapi.sh b/apps/metrics/start_fastapi.sh new file mode 100755 index 0000000..7c469ba --- /dev/null +++ b/apps/metrics/start_fastapi.sh @@ -0,0 +1,38 @@ +#!/bin/bash +rp=$(dirname "$(realpath '$1')") +pushd $rp + +APP_NAME=metrics +APP_PARENT_FOLDER=apps + +GIT_REPO_ROOT=$(git rev-parse --show-toplevel) +CODEBASE_ROOT=$GIT_REPO_ROOT/$APP_PARENT_FOLDER/$APP_NAME +SITE_DEPLOY_FOLDER=$GIT_REPO_ROOT/sites/$APP_NAME/deploy + +echo APP_NAME=$APP_NAME > .env +cat $SITE_DEPLOY_FOLDER/common/.env >> .env +echo GIT_REPO_ROOT=$(git rev-parse --show-toplevel) >> .env +echo CODEBASE_ROOT=$GIT_REPO_ROOT/$APP_PARENT_FOLDER/$APP_NAME >> .env +echo SITE_DEPLOY_FOLDER=$GIT_REPO_ROOT/sites/$APP_NAME/deploy >> .env +cat $SITE_DEPLOY_FOLDER/common/.host.env >> .env +cat $SITE_DEPLOY_FOLDER/local/.env >> .env + +. 
.env + +if [ -d "$VENV_DIR" ] +then + echo "Folder $VENV_DIR exists. Proceed to next steps" +else + echo "Folder $VENV_DIR doesn't exist. create it" + sudo apt install python3-pip + python3 -m pip install virtualenv + python3 -m virtualenv $VENV_DIR +fi + +source $VENV_DIR/bin/activate +pip install --upgrade pip +pip install -r requirements.txt + +uvicorn webapi.main:app --reload --host 0.0.0.0 --port $SERVICE_API_ACCESS_PORT + +popd diff --git a/apps/metrics/test_registration_api.py b/apps/metrics/test_registration_api.py new file mode 100755 index 0000000..a181b0b --- /dev/null +++ b/apps/metrics/test_registration_api.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +""" +Test script for registration API endpoints +""" +import requests +import json +from datetime import date, timedelta + +# API base URL +BASE_URL = "http://localhost:8009" + +def test_daily_registered_users(): + """Test the daily registered users endpoint""" + print("Testing daily registered users endpoint...") + + # Test with last 7 days + end_date = date.today() + start_date = end_date - timedelta(days=6) + + url = f"{BASE_URL}/api/metrics/daily-registered-users" + params = { + "start_date": str(start_date), + "end_date": str(end_date), + "product_id": "freeleaps" + } + + try: + response = requests.get(url, params=params) + print(f"Status Code: {response.status_code}") + + if response.status_code == 200: + data = response.json() + print(f"Response: {json.dumps(data, indent=2)}") + print(f"Number of days: {len(data['dates'])}") + print(f"Total registrations: {data['total_registrations']}") + else: + print(f"Error: {response.text}") + + except Exception as e: + print(f"Request failed: {e}") + +def test_registration_summary(): + """Test the registration summary endpoint""" + print("\nTesting registration summary endpoint...") + + end_date = date.today() + start_date = end_date - timedelta(days=6) + + url = f"{BASE_URL}/api/metrics/registration-summary" + params = { + "start_date": str(start_date), + 
"end_date": str(end_date), + "product_id": "freeleaps" + } + + try: + response = requests.get(url, params=params) + print(f"Status Code: {response.status_code}") + + if response.status_code == 200: + data = response.json() + print(f"Summary: {json.dumps(data, indent=2)}") + else: + print(f"Error: {response.text}") + + except Exception as e: + print(f"Request failed: {e}") + +def test_post_method(): + """Test the POST method for daily registered users""" + print("\nTesting POST method for daily registered users...") + + end_date = date.today() + start_date = end_date - timedelta(days=6) + + url = f"{BASE_URL}/api/metrics/daily-registered-users" + payload = { + "start_date": str(start_date), + "end_date": str(end_date), + "product_id": "freeleaps" + } + + try: + response = requests.post(url, json=payload) + print(f"Status Code: {response.status_code}") + + if response.status_code == 200: + data = response.json() + print(f"Response: {json.dumps(data, indent=2)}") + else: + print(f"Error: {response.text}") + + except Exception as e: + print(f"Request failed: {e}") + +if __name__ == "__main__": + print("Starting registration API tests...") + print(f"Testing against: {BASE_URL}") + print("=" * 50) + + test_daily_registered_users() + test_registration_summary() + test_post_method() + + print("\n" + "=" * 50) + print("Tests completed!") diff --git a/apps/metrics/tests/__init__.py b/apps/metrics/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/__init__.py b/apps/metrics/webapi/__init__.py new file mode 100644 index 0000000..b5778ef --- /dev/null +++ b/apps/metrics/webapi/__init__.py @@ -0,0 +1 @@ +# WebAPI module diff --git a/apps/metrics/webapi/bootstrap/__init__.py b/apps/metrics/webapi/bootstrap/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/bootstrap/application.py b/apps/metrics/webapi/bootstrap/application.py new file mode 100644 index 0000000..bfe3e11 --- /dev/null +++ 
b/apps/metrics/webapi/bootstrap/application.py @@ -0,0 +1,69 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from prometheus_fastapi_instrumentator import Instrumentator +from webapi.config.site_settings import site_settings +from loguru import logger +import os + + +def create_app() -> FastAPI: + """ + Create and configure the FastAPI application + """ + app = FastAPI( + title="Metrics Service API", + description="Metrics Service for Freeleaps Platform", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc" + ) + + # Add CORS middleware + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Setup logging + setup_logging() + + # Setup Prometheus metrics + Instrumentator().instrument(app).expose(app) + + # Include routers + # from webapi.routes import health, api + # app.include_router(health.router, prefix="/health", tags=["health"]) + # app.include_router(api.router, prefix="/api/metrics", tags=["metrics"]) + # Note: Registration router is included in main.py + + return app + + +def setup_logging(): + """ + Setup logging configuration + """ + # Create log directory if it doesn't exist + log_dir = site_settings.LOG_BASE_PATH + os.makedirs(log_dir, exist_ok=True) + + # Configure loguru + logger.add( + f"{log_dir}/{site_settings.BACKEND_LOG_FILE_NAME}.log", + rotation="1 day", + retention="30 days", + level="INFO", + format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}" + ) + + logger.add( + f"{log_dir}/{site_settings.APPLICATION_ACTIVITY_LOG}.log", + rotation="1 day", + retention="30 days", + level="INFO", + format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}", + filter=lambda record: record["level"].name == "INFO" + ) diff --git a/apps/metrics/webapi/config/__init__.py b/apps/metrics/webapi/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/apps/metrics/webapi/config/site_settings.py b/apps/metrics/webapi/config/site_settings.py new file mode 100644 index 0000000..098d36f --- /dev/null +++ b/apps/metrics/webapi/config/site_settings.py @@ -0,0 +1,41 @@ +from pydantic_settings import BaseSettings +from typing import Optional + + +class SiteSettings(BaseSettings): + # Server settings + SERVER_HOST: str = "0.0.0.0" + SERVER_PORT: int = 8009 + SERVICE_API_ACCESS_HOST: str = "0.0.0.0" + SERVICE_API_ACCESS_PORT: int = 8009 + + # Database settings + MONGODB_URI: str = "mongodb://localhost:27017/" + MONGODB_NAME: str = "freeleaps2" + MONGODB_PORT: int = 27017 + + # JWT settings + JWT_SECRET_KEY: str = "8f87ca8c3c9c3df09a9c78e0adb0927855568f6072d9efc892534aee35f5867b" + JWT_ALGORITHM: str = "HS256" + + # Log settings + LOG_BASE_PATH: str = "./logs" + BACKEND_LOG_FILE_NAME: str = "metrics" + APPLICATION_ACTIVITY_LOG: str = "metrics-activity" + + # Service dependencies + DEVSVC_WEBAPI_URL_BASE: str = "http://devsvc:8007/api/devsvc" + NOTIFICATION_WEBAPI_URL_BASE: str = "http://notification:8003/api/notification/" + + # StarRocks database settings + STARROCKS_HOST: str = "freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc" + STARROCKS_PORT: int = 9030 + STARROCKS_USER: str = "root" + STARROCKS_PASSWORD: str = "" + STARROCKS_DATABASE: str = "freeleaps" + + class Config: + env_file = ".env" + + +site_settings = SiteSettings() diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py new file mode 100644 index 0000000..fb977b6 --- /dev/null +++ b/apps/metrics/webapi/main.py @@ -0,0 +1,36 @@ +from webapi.bootstrap.application import create_app +from webapi.config.site_settings import site_settings +from fastapi.responses import RedirectResponse +import uvicorn +from typing import Any +from webapi.routes import registration + + +app = create_app() + +# Include routers +app.include_router(registration.router) + + +@app.get("/", status_code=301) +async def root(): + """ + TODO: redirect 
client to /docs + """ + return RedirectResponse("docs") + + +if __name__ == "__main__": + uvicorn.run( + app="main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT + ) + + +def get_context() -> Any: + # Define your context function. This is where you can set up authentication, database connections, etc. + return {} + + +def get_root_value() -> Any: + # Define your root value function. This is where you can set up the root value for GraphQL. + return {} diff --git a/apps/metrics/webapi/providers/__init__.py b/apps/metrics/webapi/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/routes/__init__.py b/apps/metrics/webapi/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/routes/registration.py b/apps/metrics/webapi/routes/registration.py new file mode 100644 index 0000000..327067d --- /dev/null +++ b/apps/metrics/webapi/routes/registration.py @@ -0,0 +1,229 @@ +from fastapi import APIRouter, HTTPException, Query +from datetime import date, datetime, timedelta +from typing import Optional +from loguru import logger +from backend.services.registration_service import RegistrationService +from backend.models.registered_users import UserRegistrationResponse, UserRegistrationQuery + +router = APIRouter(prefix="/api/metrics", tags=["registration"]) + +# Initialize service +registration_service = RegistrationService() + + +@router.get("/daily-registered-users", response_model=UserRegistrationResponse) +async def get_daily_registered_users( + start_date: date = Query(..., description="Start date in YYYY-MM-DD format"), + end_date: date = Query(..., description="End date in YYYY-MM-DD format"), + product_id: str = Query("freeleaps", description="Product identifier") +): + """ + Get daily registered users count for a date range + + Returns two lists: + - dates: List of dates in YYYY-MM-DD format + - counts: List of daily registration counts + + Example: + - GET 
/api/metrics/daily-registered-users?start_date=2024-01-01&end_date=2024-01-07 + """ + try: + # Validate date range + if start_date > end_date: + raise HTTPException( + status_code=400, + detail="Start date must be before or equal to end date" + ) + + # Check date range is not too large (max 1 year) + if (end_date - start_date).days > 365: + raise HTTPException( + status_code=400, + detail="Date range cannot exceed 365 days" + ) + + logger.info(f"Querying registration data from {start_date} to {end_date} for product {product_id}") + + # Get data from service + result = registration_service.get_daily_registered_users( + start_date, end_date, product_id + ) + + logger.info(f"Successfully retrieved data for {len(result.dates)} days") + return result + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to get daily registered users: {e}") + raise HTTPException( + status_code=500, + detail=f"Internal server error: {str(e)}" + ) + + +@router.get("/registration-summary") +async def get_registration_summary( + start_date: date = Query(..., description="Start date in YYYY-MM-DD format"), + end_date: date = Query(..., description="End date in YYYY-MM-DD format"), + product_id: str = Query("freeleaps", description="Product identifier") +): + """ + Get summary statistics for user registrations in a date range + + Returns summary statistics including: + - total_registrations: Total number of registrations + - average_daily: Average daily registrations + - max_daily: Maximum daily registrations + - min_daily: Minimum daily registrations + - days_with_registrations: Number of days with registrations + - total_days: Total number of days in range + """ + try: + # Validate date range + if start_date > end_date: + raise HTTPException( + status_code=400, + detail="Start date must be before or equal to end date" + ) + + if (end_date - start_date).days > 365: + raise HTTPException( + status_code=400, + detail="Date range cannot exceed 365 days" + ) + + 
logger.info(f"Querying registration summary from {start_date} to {end_date} for product {product_id}") + + # Get summary from service + summary = registration_service.get_registration_summary( + start_date, end_date, product_id + ) + + return summary + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to get registration summary: {e}") + raise HTTPException( + status_code=500, + detail=f"Internal server error: {str(e)}" + ) + + +@router.get("/recent-registered-users", response_model=UserRegistrationResponse) +async def get_recent_registered_users( + days: int = Query(7, ge=1, le=365, description="Number of recent days to query"), + product_id: str = Query("freeleaps", description="Product identifier") +): + """ + Get daily registered users count for recent N days + + Returns registration data for the last N days from today + + Example: + - GET /api/metrics/recent-registered-users?days=7 + - GET /api/metrics/recent-registered-users?days=30&product_id=freeleaps + """ + try: + # Calculate date range + end_date = date.today() + start_date = end_date - timedelta(days=days-1) + + logger.info(f"Querying recent {days} days registration data from {start_date} to {end_date} for product {product_id}") + + # Get data from service + result = registration_service.get_daily_registered_users( + start_date, end_date, product_id + ) + + logger.info(f"Successfully retrieved recent {days} days data, total registrations: {result.total_registrations}") + return result + + except Exception as e: + logger.error(f"Failed to get recent registered users: {e}") + raise HTTPException( + status_code=500, + detail=f"Internal server error: {str(e)}" + ) + + +@router.get("/registered-users-by-days", response_model=UserRegistrationResponse) +async def get_registered_users_by_days( + start_date: date = Query(..., description="Start date in YYYY-MM-DD format"), + days: int = Query(..., ge=1, le=365, description="Number of days from start date"), + product_id: str = 
Query("freeleaps", description="Product identifier") +): + """ + Get daily registered users count starting from a specific date for N days + + Returns registration data for N days starting from the specified start date + + Example: + - GET /api/metrics/registered-users-by-days?start_date=2024-01-01&days=7 + - GET /api/metrics/registered-users-by-days?start_date=2024-09-01&days=30&product_id=freeleaps + """ + try: + # Calculate end date + end_date = start_date + timedelta(days=days-1) + + logger.info(f"Querying registration data from {start_date} for {days} days (until {end_date}) for product {product_id}") + + # Get data from service + result = registration_service.get_daily_registered_users( + start_date, end_date, product_id + ) + + logger.info(f"Successfully retrieved {days} days data from {start_date}, total registrations: {result.total_registrations}") + return result + + except Exception as e: + logger.error(f"Failed to get registered users by days: {e}") + raise HTTPException( + status_code=500, + detail=f"Internal server error: {str(e)}" + ) + + +@router.post("/daily-registered-users", response_model=UserRegistrationResponse) +async def get_daily_registered_users_post( + query: UserRegistrationQuery +): + """ + Get daily registered users count for a date range (POST method) + + Same as GET method but accepts parameters in request body + """ + try: + # Validate date range + if query.start_date > query.end_date: + raise HTTPException( + status_code=400, + detail="Start date must be before or equal to end date" + ) + + if (query.end_date - query.start_date).days > 365: + raise HTTPException( + status_code=400, + detail="Date range cannot exceed 365 days" + ) + + logger.info(f"Querying registration data from {query.start_date} to {query.end_date} for product {query.product_id}") + + # Get data from service + result = registration_service.get_daily_registered_users( + query.start_date, query.end_date, query.product_id + ) + + logger.info(f"Successfully retrieved 
data for {len(result.dates)} days") + return result + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to get daily registered users: {e}") + raise HTTPException( + status_code=500, + detail=f"Internal server error: {str(e)}" + ) From 79a1b62f688a3b4b34f13797fd8e908e888dafb7 Mon Sep 17 00:00:00 2001 From: weicao Date: Thu, 11 Sep 2025 17:40:18 +0800 Subject: [PATCH 02/15] Add .env configuration file for metrics service --- apps/metrics/.env | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 apps/metrics/.env diff --git a/apps/metrics/.env b/apps/metrics/.env new file mode 100644 index 0000000..f88d5c1 --- /dev/null +++ b/apps/metrics/.env @@ -0,0 +1,16 @@ +# Local environment configuration for Metrics service +SERVICE_API_ACCESS_PORT=8009 +SERVICE_API_ACCESS_HOST=0.0.0.0 + +# Local database settings +MONGODB_URI=mongodb://localhost:27017/ +MONGODB_NAME=freeleaps2 + +# Local service URLs +DEVSVC_WEBAPI_URL_BASE=http://localhost:8007/api/devsvc +NOTIFICATION_WEBAPI_URL_BASE=http://localhost:8003/api/notification/ + +# Log settings +LOG_BASE_PATH=./logs +BACKEND_LOG_FILE_NAME=metrics +APPLICATION_ACTIVITY_LOG=metrics-activity \ No newline at end of file From b490ced2f0992e17ee414a1b7e8f86745ffb9423 Mon Sep 17 00:00:00 2001 From: weicao Date: Mon, 15 Sep 2025 15:22:52 +0800 Subject: [PATCH 03/15] Clean up metrics service: remove unnecessary files, update Dockerfile, and add README --- apps/metrics/.env | 16 - apps/metrics/.gitignore | 9 + apps/metrics/Dockerfile | 43 +-- apps/metrics/README.md | 335 ++++++++++++++++++ apps/metrics/backend/annotation/__init__.py | 0 apps/metrics/backend/application/__init__.py | 0 apps/metrics/backend/business/__init__.py | 0 .../metrics/backend/infra/starrocks_client.py | 9 + .../backend/services/registration_service.py | 9 + apps/metrics/local.env | 17 + apps/metrics/start_fastapi.sh | 38 -- apps/metrics/test_registration_api.py | 106 ------ apps/metrics/tests/__init__.py | 0 
apps/metrics/webapi/config/site_settings.py | 15 +- apps/metrics/webapi/providers/__init__.py | 0 15 files changed, 393 insertions(+), 204 deletions(-) delete mode 100644 apps/metrics/.env create mode 100644 apps/metrics/README.md delete mode 100644 apps/metrics/backend/annotation/__init__.py delete mode 100644 apps/metrics/backend/application/__init__.py delete mode 100644 apps/metrics/backend/business/__init__.py create mode 100644 apps/metrics/local.env delete mode 100755 apps/metrics/start_fastapi.sh delete mode 100755 apps/metrics/test_registration_api.py delete mode 100644 apps/metrics/tests/__init__.py delete mode 100644 apps/metrics/webapi/providers/__init__.py diff --git a/apps/metrics/.env b/apps/metrics/.env deleted file mode 100644 index f88d5c1..0000000 --- a/apps/metrics/.env +++ /dev/null @@ -1,16 +0,0 @@ -# Local environment configuration for Metrics service -SERVICE_API_ACCESS_PORT=8009 -SERVICE_API_ACCESS_HOST=0.0.0.0 - -# Local database settings -MONGODB_URI=mongodb://localhost:27017/ -MONGODB_NAME=freeleaps2 - -# Local service URLs -DEVSVC_WEBAPI_URL_BASE=http://localhost:8007/api/devsvc -NOTIFICATION_WEBAPI_URL_BASE=http://localhost:8003/api/notification/ - -# Log settings -LOG_BASE_PATH=./logs -BACKEND_LOG_FILE_NAME=metrics -APPLICATION_ACTIVITY_LOG=metrics-activity \ No newline at end of file diff --git a/apps/metrics/.gitignore b/apps/metrics/.gitignore index 365d8bf..0cf956f 100644 --- a/apps/metrics/.gitignore +++ b/apps/metrics/.gitignore @@ -73,3 +73,12 @@ coverage.xml .env.development.local .env.test.local .env.production.local + + + + + + + + + diff --git a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile index 4c40152..b090349 100644 --- a/apps/metrics/Dockerfile +++ b/apps/metrics/Dockerfile @@ -1,37 +1,20 @@ -FROM python:3.10-slim-bullseye +# download image here: https://docker.aityp.com/image/docker.io/python:3.12-slim +FROM swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/python:3.12-slim -# docker settings -ARG 
CONTAINER_APP_ROOT="/app" -ENV APP_NAME="metrics" +# Set working directory +WORKDIR /app -# Service dependencies -ENV DEVSVC_WEBAPI_URL_BASE="http://devsvc:8007/api/devsvc" -ENV NOTIFICATION_WEBAPI_URL_BASE="http://notification:8003/api/notification/" - -# JWT settings -ENV JWT_SECRET_KEY="8f87ca8c3c9c3df09a9c78e0adb0927855568f6072d9efc892534aee35f5867b" -ENV JWT_ALGORITHM="HS256" - -# Site settings -ENV SERVICE_API_ACCESS_HOST=0.0.0.0 -ENV SERVICE_API_ACCESS_PORT=8009 -ENV MONGODB_NAME=freeleaps2 -ENV MONGODB_PORT=27017 -ENV MONGODB_URI="mongodb://localhost:27017/" - -# Log settings -ENV LOG_BASE_PATH=$CONTAINER_APP_ROOT/log/$APP_NAME -ENV BACKEND_LOG_FILE_NAME=$APP_NAME -ENV APPLICATION_ACTIVITY_LOG=$APP_NAME-activity - -WORKDIR ${CONTAINER_APP_ROOT} +# Copy requirements file COPY requirements.txt . -RUN pip install --upgrade pip +# Install dependencies RUN pip install --no-cache-dir -r requirements.txt -COPY . ${CONTAINER_APP_ROOT} +# Copy application code +COPY . . -EXPOSE ${SERVICE_API_ACCESS_PORT} -# Using shell to expand environment to ensure pass the actual environment value to uvicorn -CMD uvicorn webapi.main:app --reload --port=$SERVICE_API_ACCESS_PORT --host=$SERVICE_API_ACCESS_HOST +# Expose port +EXPOSE 8009 + +# Start command +CMD ["uvicorn", "webapi.main:app", "--host", "0.0.0.0", "--port", "8009"] diff --git a/apps/metrics/README.md b/apps/metrics/README.md new file mode 100644 index 0000000..ac893d0 --- /dev/null +++ b/apps/metrics/README.md @@ -0,0 +1,335 @@ +# Metrics Service + +A FastAPI microservice for user registration statistics and data analytics in the Freeleaps platform. + +## ๐Ÿš€ Overview + +The Metrics service provides comprehensive APIs for querying and analyzing user registration data from the StarRocks database. It offers flexible querying options and statistical summaries for better insights into user growth patterns. 
+ +## โœจ Features + +### ๐Ÿ“Š User Registration Statistics APIs +- **Date Range Query** - Query registration data for specific date ranges +- **Recent N Days Query** - Get registration data for the last N days +- **Start Date + Days Query** - Query N days starting from a specified date +- **Statistics Summary** - Get comprehensive statistics and analytics +- **POST Method Support** - JSON request body support for complex queries + +### ๐Ÿ—„๏ธ Database Integration +- **StarRocks Database Connection** + - Host: `freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc` + - Port: `9030` + - Database: `freeleaps` + - Table: `dws_daily_registered_users` + +### ๐Ÿ”ง Technical Features +- **Data Models**: Pydantic validation for data integrity +- **Connection Management**: Automatic database connection and disconnection +- **Error Handling**: Comprehensive exception handling with user-friendly error messages +- **Logging**: Structured logging using Loguru +- **API Documentation**: Auto-generated Swagger/OpenAPI documentation + +## ๐Ÿ“ Project Structure + +``` +apps/metrics/ +โ”œโ”€โ”€ backend/ +โ”‚ โ”œโ”€โ”€ infra/starrocks_client.py # StarRocks database client +โ”‚ โ”œโ”€โ”€ models/registered_users.py # Data model definitions +โ”‚ โ””โ”€โ”€ services/registration_service.py # Business logic service +โ”œโ”€โ”€ webapi/ +โ”‚ โ”œโ”€โ”€ routes/registration.py # API route definitions +โ”‚ โ”œโ”€โ”€ config/site_settings.py # Configuration management +โ”‚ โ”œโ”€โ”€ bootstrap/application.py # Application initialization +โ”‚ โ””โ”€โ”€ main.py # FastAPI main application +โ”œโ”€โ”€ common/ # Shared utilities +โ”œโ”€โ”€ tests/ # Test files +โ”œโ”€โ”€ requirements.txt # Python dependencies +โ”œโ”€โ”€ Dockerfile # Simplified Docker configuration (Python 3.12-slim) +โ”œโ”€โ”€ start_fastapi.sh # Startup script +โ”œโ”€โ”€ test_registration_api.py # API test script +โ”œโ”€โ”€ .env # Environment configuration +โ””โ”€โ”€ README.md # This file +``` + +## ๐Ÿš€ Quick Start + +### 
Prerequisites +- Python 3.12+ +- Access to StarRocks database +- pip package manager + +### Installation + +1. **Clone the repository** + ```bash + git clone + cd freeleaps-service-hub/apps/metrics + ``` + +2. **Install dependencies** + ```bash + pip install -r requirements.txt + ``` + +3. **Configure environment** + ```bash + cp .env.example .env # If available + # Edit .env with your configuration + ``` + +4. **Start the service** + + **Option A: Direct Python execution** + ```bash + python3 -m uvicorn webapi.main:app --host 0.0.0.0 --port 8009 --reload + ``` + + **Option B: Using Docker** + ```bash + # Build Docker image + docker build -t metrics:latest . + + # Run container + docker run --rm -p 8009:8009 metrics:latest + ``` + +5. **Access API documentation** + ``` + http://localhost:8009/docs + ``` + +## ๐Ÿ“Š API Endpoints + +### 1. Daily Registered Users (Date Range) +```http +GET /api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20&product_id=freeleaps +``` + +**Parameters:** +- `start_date` (required): Start date in YYYY-MM-DD format +- `end_date` (required): End date in YYYY-MM-DD format +- `product_id` (optional): Product identifier (default: "freeleaps") + +### 2. Recent Registered Users +```http +GET /api/metrics/recent-registered-users?days=7&product_id=freeleaps +``` + +**Parameters:** +- `days` (optional): Number of recent days (default: 7, max: 365) +- `product_id` (optional): Product identifier (default: "freeleaps") + +### 3. Registered Users by Days +```http +GET /api/metrics/registered-users-by-days?start_date=2024-09-01&days=5&product_id=freeleaps +``` + +**Parameters:** +- `start_date` (required): Start date in YYYY-MM-DD format +- `days` (required): Number of days from start date (max: 365) +- `product_id` (optional): Product identifier (default: "freeleaps") + +### 4. 
Registration Summary +```http +GET /api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20&product_id=freeleaps +``` + +**Parameters:** +- `start_date` (required): Start date in YYYY-MM-DD format +- `end_date` (required): End date in YYYY-MM-DD format +- `product_id` (optional): Product identifier (default: "freeleaps") + +### 5. Daily Registered Users (POST) +```http +POST /api/metrics/daily-registered-users +Content-Type: application/json + +{ + "start_date": "2024-09-10", + "end_date": "2024-09-20", + "product_id": "freeleaps" +} +``` + +## ๐Ÿ“ˆ Response Format + +### Standard Response +```json +{ + "dates": ["2024-09-10", "2024-09-11", "2024-09-12"], + "counts": [39, 38, 31], + "total_registrations": 108, + "query_period": "2024-09-10 to 2024-09-12" +} +``` + +### Summary Response +```json +{ + "total_registrations": 282, + "average_daily": 25.64, + "max_daily": 39, + "min_daily": 8, + "days_with_registrations": 10, + "total_days": 11 +} +``` + +## ๐Ÿงช Testing + +### Run API Tests +```bash +python test_registration_api.py +``` + +### Manual Testing Examples +```bash +# Test recent 7 days +curl "http://localhost:8009/api/metrics/recent-registered-users?days=7" + +# Test date range +curl "http://localhost:8009/api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20" + +# Test summary +curl "http://localhost:8009/api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20" +``` + +## โš™๏ธ Configuration + +### Environment Variables +```bash +# Server settings +SERVICE_API_ACCESS_HOST=0.0.0.0 +SERVICE_API_ACCESS_PORT=8009 + +# Database settings +MONGODB_URI=mongodb://localhost:27017/ +MONGODB_NAME=freeleaps2 + +# StarRocks settings +STARROCKS_HOST=freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc +STARROCKS_PORT=9030 +STARROCKS_USER=root +STARROCKS_PASSWORD= +STARROCKS_DATABASE=freeleaps + +# Log settings +LOG_BASE_PATH=./logs +BACKEND_LOG_FILE_NAME=metrics 
+APPLICATION_ACTIVITY_LOG=metrics-activity +``` + +### Docker Deployment + +**Prerequisites:** +- Docker installed and running +- Local Python 3.12+ image available + +**Build and Run:** +```bash +# Build image (using local Python 3.12-slim image) +docker build -t metrics:latest . + +# Run container with port mapping +docker run --rm -p 8009:8009 metrics:latest + +# Run with environment file (if available) +docker run --rm -p 8009:8009 --env-file .env metrics:latest + +# Run in background +docker run -d --name metrics-service -p 8009:8009 metrics:latest +``` + +**Docker Image Details:** +- Base Image: `swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/python:3.12-slim` +- Port: 8009 +- Working Directory: `/app` +- Auto-reload: Disabled (production mode) + +## ๐Ÿ”ง Development + +### Project Setup +```bash +# Create virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt + +# Run with auto-reload +python -m uvicorn webapi.main:app --reload +``` + +### Code Structure +- **Backend**: Business logic and data access layer +- **WebAPI**: FastAPI routes and application setup +- **Models**: Pydantic data models for validation +- **Services**: Business logic services +- **Infra**: Database clients and infrastructure code + +## ๐Ÿ“ API Documentation + +Once the service is running, you can access the interactive API documentation at: +- **Swagger UI**: `http://localhost:8009/docs` +- **ReDoc**: `http://localhost:8009/redoc` +- **OpenAPI JSON**: `http://localhost:8009/openapi.json` + +## โš ๏ธ Important Notes + +- All APIs support `product_id` parameter (defaults to "freeleaps") +- Date format must be `YYYY-MM-DD` +- Query range is limited to 365 days maximum +- Service requires network access to StarRocks cluster +- Logs are written to `./logs/` directory + +## ๐Ÿ› Troubleshooting + +### Common Issues + +1. 
**Database Connection Failed** + - Verify StarRocks cluster accessibility + - Check network connectivity + - Validate database credentials + +2. **Import Errors** + - Ensure all dependencies are installed + - Check Python path configuration + - Verify virtual environment activation + +3. **Port Already in Use** + - Change port in configuration + - Stop existing service on port 8009 + - For Docker: `docker stop $(docker ps -q --filter ancestor=metrics:latest)` + +4. **Docker Build Issues** + - Ensure local Python image is available: `docker images | grep python` + - Check Docker daemon is running: `docker ps` + - Verify Dockerfile syntax and paths + +5. **Docker Container Issues** + - Check container logs: `docker logs ` + - Verify port mapping: `docker ps` + - Check container status: `docker ps -a` + +### Logs +Check application logs in the `./logs/` directory for detailed error information. + +## ๐Ÿ“ž Support + +For issues and questions: +- Check the API documentation at `/docs` +- Review logs for error details +- Contact the development team + +## ๐Ÿ“„ License + +This project is part of the Freeleaps platform. 
+ + + + + + + diff --git a/apps/metrics/backend/annotation/__init__.py b/apps/metrics/backend/annotation/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/backend/application/__init__.py b/apps/metrics/backend/application/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/backend/business/__init__.py b/apps/metrics/backend/business/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/backend/infra/starrocks_client.py b/apps/metrics/backend/infra/starrocks_client.py index af7fb97..db58f97 100644 --- a/apps/metrics/backend/infra/starrocks_client.py +++ b/apps/metrics/backend/infra/starrocks_client.py @@ -88,3 +88,12 @@ class StarRocksClient: def __exit__(self, exc_type, exc_val, exc_tb): """Context manager exit""" self.disconnect() + + + + + + + + + diff --git a/apps/metrics/backend/services/registration_service.py b/apps/metrics/backend/services/registration_service.py index 29432c4..8d786e9 100644 --- a/apps/metrics/backend/services/registration_service.py +++ b/apps/metrics/backend/services/registration_service.py @@ -123,3 +123,12 @@ class RegistrationService: except Exception as e: logger.error(f"Failed to get registration summary: {e}") raise e + + + + + + + + + diff --git a/apps/metrics/local.env b/apps/metrics/local.env new file mode 100644 index 0000000..616515e --- /dev/null +++ b/apps/metrics/local.env @@ -0,0 +1,17 @@ +# Local environment configuration for Metrics service +SERVER_HOST=0.0.0.0 +SERVER_PORT=8009 +SERVICE_API_ACCESS_PORT=8009 +SERVICE_API_ACCESS_HOST=0.0.0.0 + +# starrocks settings +STARROCKS_HOST=freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc +STARROCKS_PORT=9030 +STARROCKS_USER=root +STARROCKS_PASSWORD="" +STARROCKS_DATABASE=freeleaps + +# log settings +LOG_BASE_PATH=./logs +BACKEND_LOG_FILE_NAME=metrics +APPLICATION_ACTIVITY_LOG=metrics-activity \ No newline at end of file diff --git a/apps/metrics/start_fastapi.sh 
b/apps/metrics/start_fastapi.sh deleted file mode 100755 index 7c469ba..0000000 --- a/apps/metrics/start_fastapi.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -rp=$(dirname "$(realpath '$1')") -pushd $rp - -APP_NAME=metrics -APP_PARENT_FOLDER=apps - -GIT_REPO_ROOT=$(git rev-parse --show-toplevel) -CODEBASE_ROOT=$GIT_REPO_ROOT/$APP_PARENT_FOLDER/$APP_NAME -SITE_DEPLOY_FOLDER=$GIT_REPO_ROOT/sites/$APP_NAME/deploy - -echo APP_NAME=$APP_NAME > .env -cat $SITE_DEPLOY_FOLDER/common/.env >> .env -echo GIT_REPO_ROOT=$(git rev-parse --show-toplevel) >> .env -echo CODEBASE_ROOT=$GIT_REPO_ROOT/$APP_PARENT_FOLDER/$APP_NAME >> .env -echo SITE_DEPLOY_FOLDER=$GIT_REPO_ROOT/sites/$APP_NAME/deploy >> .env -cat $SITE_DEPLOY_FOLDER/common/.host.env >> .env -cat $SITE_DEPLOY_FOLDER/local/.env >> .env - -. .env - -if [ -d "$VENV_DIR" ] -then - echo "Folder $VENV_DIR exists. Proceed to next steps" -else - echo "Folder $VENV_DIR doesn't exist. create it" - sudo apt install python3-pip - python3 -m pip install virtualenv - python3 -m virtualenv $VENV_DIR -fi - -source $VENV_DIR/bin/activate -pip install --upgrade pip -pip install -r requirements.txt - -uvicorn webapi.main:app --reload --host 0.0.0.0 --port $SERVICE_API_ACCESS_PORT - -popd diff --git a/apps/metrics/test_registration_api.py b/apps/metrics/test_registration_api.py deleted file mode 100755 index a181b0b..0000000 --- a/apps/metrics/test_registration_api.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 -""" -Test script for registration API endpoints -""" -import requests -import json -from datetime import date, timedelta - -# API base URL -BASE_URL = "http://localhost:8009" - -def test_daily_registered_users(): - """Test the daily registered users endpoint""" - print("Testing daily registered users endpoint...") - - # Test with last 7 days - end_date = date.today() - start_date = end_date - timedelta(days=6) - - url = f"{BASE_URL}/api/metrics/daily-registered-users" - params = { - "start_date": str(start_date), - 
"end_date": str(end_date), - "product_id": "freeleaps" - } - - try: - response = requests.get(url, params=params) - print(f"Status Code: {response.status_code}") - - if response.status_code == 200: - data = response.json() - print(f"Response: {json.dumps(data, indent=2)}") - print(f"Number of days: {len(data['dates'])}") - print(f"Total registrations: {data['total_registrations']}") - else: - print(f"Error: {response.text}") - - except Exception as e: - print(f"Request failed: {e}") - -def test_registration_summary(): - """Test the registration summary endpoint""" - print("\nTesting registration summary endpoint...") - - end_date = date.today() - start_date = end_date - timedelta(days=6) - - url = f"{BASE_URL}/api/metrics/registration-summary" - params = { - "start_date": str(start_date), - "end_date": str(end_date), - "product_id": "freeleaps" - } - - try: - response = requests.get(url, params=params) - print(f"Status Code: {response.status_code}") - - if response.status_code == 200: - data = response.json() - print(f"Summary: {json.dumps(data, indent=2)}") - else: - print(f"Error: {response.text}") - - except Exception as e: - print(f"Request failed: {e}") - -def test_post_method(): - """Test the POST method for daily registered users""" - print("\nTesting POST method for daily registered users...") - - end_date = date.today() - start_date = end_date - timedelta(days=6) - - url = f"{BASE_URL}/api/metrics/daily-registered-users" - payload = { - "start_date": str(start_date), - "end_date": str(end_date), - "product_id": "freeleaps" - } - - try: - response = requests.post(url, json=payload) - print(f"Status Code: {response.status_code}") - - if response.status_code == 200: - data = response.json() - print(f"Response: {json.dumps(data, indent=2)}") - else: - print(f"Error: {response.text}") - - except Exception as e: - print(f"Request failed: {e}") - -if __name__ == "__main__": - print("Starting registration API tests...") - print(f"Testing against: {BASE_URL}") - 
print("=" * 50) - - test_daily_registered_users() - test_registration_summary() - test_post_method() - - print("\n" + "=" * 50) - print("Tests completed!") diff --git a/apps/metrics/tests/__init__.py b/apps/metrics/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/webapi/config/site_settings.py b/apps/metrics/webapi/config/site_settings.py index 098d36f..fac31e6 100644 --- a/apps/metrics/webapi/config/site_settings.py +++ b/apps/metrics/webapi/config/site_settings.py @@ -9,24 +9,11 @@ class SiteSettings(BaseSettings): SERVICE_API_ACCESS_HOST: str = "0.0.0.0" SERVICE_API_ACCESS_PORT: int = 8009 - # Database settings - MONGODB_URI: str = "mongodb://localhost:27017/" - MONGODB_NAME: str = "freeleaps2" - MONGODB_PORT: int = 27017 - - # JWT settings - JWT_SECRET_KEY: str = "8f87ca8c3c9c3df09a9c78e0adb0927855568f6072d9efc892534aee35f5867b" - JWT_ALGORITHM: str = "HS256" - # Log settings LOG_BASE_PATH: str = "./logs" BACKEND_LOG_FILE_NAME: str = "metrics" APPLICATION_ACTIVITY_LOG: str = "metrics-activity" - # Service dependencies - DEVSVC_WEBAPI_URL_BASE: str = "http://devsvc:8007/api/devsvc" - NOTIFICATION_WEBAPI_URL_BASE: str = "http://notification:8003/api/notification/" - # StarRocks database settings STARROCKS_HOST: str = "freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc" STARROCKS_PORT: int = 9030 @@ -35,7 +22,7 @@ class SiteSettings(BaseSettings): STARROCKS_DATABASE: str = "freeleaps" class Config: - env_file = ".env" + env_file = "local.env" site_settings = SiteSettings() diff --git a/apps/metrics/webapi/providers/__init__.py b/apps/metrics/webapi/providers/__init__.py deleted file mode 100644 index e69de29..0000000 From 35fbda6954e0a13f7f9d1f09ab7c458e49b068f0 Mon Sep 17 00:00:00 2001 From: weicao Date: Mon, 15 Sep 2025 16:31:20 +0800 Subject: [PATCH 04/15] Refactor metrics service: rename files for better readability - Rename starrocks_client.py -> database_client.py - Rename daily_registered_users.py 
-> user_registration_models.py - Rename daily_registration_service.py -> registration_analytics_service.py - Rename daily_registration.py -> registration_metrics.py - Rename site_settings.py -> app_settings.py - Rename application.py -> app_factory.py - Update all import statements and references - Update README.md with new file structure --- apps/metrics/README.md | 299 ++++++------------ ...starrocks_client.py => database_client.py} | 2 +- ...d_users.py => user_registration_models.py} | 0 ...e.py => registration_analytics_service.py} | 4 +- .../{application.py => app_factory.py} | 2 +- .../{site_settings.py => app_settings.py} | 0 apps/metrics/webapi/main.py | 8 +- ...egistration.py => registration_metrics.py} | 4 +- 8 files changed, 108 insertions(+), 211 deletions(-) rename apps/metrics/backend/infra/{starrocks_client.py => database_client.py} (98%) rename apps/metrics/backend/models/{registered_users.py => user_registration_models.py} (100%) rename apps/metrics/backend/services/{registration_service.py => registration_analytics_service.py} (96%) rename apps/metrics/webapi/bootstrap/{application.py => app_factory.py} (97%) rename apps/metrics/webapi/config/{site_settings.py => app_settings.py} (100%) rename apps/metrics/webapi/routes/{registration.py => registration_metrics.py} (97%) diff --git a/apps/metrics/README.md b/apps/metrics/README.md index ac893d0..54d367d 100644 --- a/apps/metrics/README.md +++ b/apps/metrics/README.md @@ -1,10 +1,12 @@ -# Metrics Service +# ๐Ÿ“Š Metrics Service -A FastAPI microservice for user registration statistics and data analytics in the Freeleaps platform. 
+> A lightweight FastAPI microservice for user registration analytics and statistics -## ๐Ÿš€ Overview +[![Python](https://img.shields.io/badge/Python-3.12+-blue.svg)](https://python.org) +[![FastAPI](https://img.shields.io/badge/FastAPI-0.114+-green.svg)](https://fastapi.tiangolo.com) +[![Docker](https://img.shields.io/badge/Docker-Ready-blue.svg)](https://docker.com) -The Metrics service provides comprehensive APIs for querying and analyzing user registration data from the StarRocks database. It offers flexible querying options and statistical summaries for better insights into user growth patterns. +The Metrics service provides real-time APIs for querying user registration data from StarRocks database, offering flexible analytics and insights into user growth patterns. ## โœจ Features @@ -32,125 +34,84 @@ The Metrics service provides comprehensive APIs for querying and analyzing user ## ๐Ÿ“ Project Structure ``` -apps/metrics/ -โ”œโ”€โ”€ backend/ -โ”‚ โ”œโ”€โ”€ infra/starrocks_client.py # StarRocks database client -โ”‚ โ”œโ”€โ”€ models/registered_users.py # Data model definitions -โ”‚ โ””โ”€โ”€ services/registration_service.py # Business logic service -โ”œโ”€โ”€ webapi/ -โ”‚ โ”œโ”€โ”€ routes/registration.py # API route definitions -โ”‚ โ”œโ”€โ”€ config/site_settings.py # Configuration management -โ”‚ โ”œโ”€โ”€ bootstrap/application.py # Application initialization -โ”‚ โ””โ”€โ”€ main.py # FastAPI main application -โ”œโ”€โ”€ common/ # Shared utilities -โ”œโ”€โ”€ tests/ # Test files -โ”œโ”€โ”€ requirements.txt # Python dependencies -โ”œโ”€โ”€ Dockerfile # Simplified Docker configuration (Python 3.12-slim) -โ”œโ”€โ”€ start_fastapi.sh # Startup script -โ”œโ”€โ”€ test_registration_api.py # API test script -โ”œโ”€โ”€ .env # Environment configuration -โ””โ”€โ”€ README.md # This file +metrics/ +โ”œโ”€โ”€ backend/ # Business logic layer +โ”‚ โ”œโ”€โ”€ infra/ # Infrastructure components +โ”‚ โ”‚ โ””โ”€โ”€ database_client.py +โ”‚ โ”œโ”€โ”€ models/ # Data models +โ”‚ โ”‚ 
โ””โ”€โ”€ user_registration_models.py +โ”‚ โ””โ”€โ”€ services/ # Business services +โ”‚ โ””โ”€โ”€ registration_analytics_service.py +โ”œโ”€โ”€ webapi/ # API layer +โ”‚ โ”œโ”€โ”€ routes/ # API endpoints +โ”‚ โ”‚ โ””โ”€โ”€ registration_metrics.py +โ”‚ โ”œโ”€โ”€ config/ # Configuration +โ”‚ โ”‚ โ””โ”€โ”€ app_settings.py +โ”‚ โ”œโ”€โ”€ bootstrap/ # App initialization +โ”‚ โ”‚ โ””โ”€โ”€ app_factory.py +โ”‚ โ””โ”€โ”€ main.py # FastAPI app entry point +โ”œโ”€โ”€ common/ # Shared utilities +โ”œโ”€โ”€ requirements.txt # Dependencies +โ”œโ”€โ”€ Dockerfile # Container config +โ”œโ”€โ”€ local.env # Environment variables +โ””โ”€โ”€ README.md # Documentation ``` ## ๐Ÿš€ Quick Start ### Prerequisites -- Python 3.12+ +- Python 3.12+ or Docker - Access to StarRocks database -- pip package manager -### Installation +### ๐Ÿ Python Setup -1. **Clone the repository** - ```bash - git clone - cd freeleaps-service-hub/apps/metrics - ``` +```bash +# 1. Install dependencies +pip install -r requirements.txt -2. **Install dependencies** - ```bash - pip install -r requirements.txt - ``` +# 2. Start the service +python3 -m uvicorn webapi.main:app --host 0.0.0.0 --port 8009 --reload +``` -3. **Configure environment** - ```bash - cp .env.example .env # If available - # Edit .env with your configuration - ``` +### ๐Ÿณ Docker Setup -4. **Start the service** - - **Option A: Direct Python execution** - ```bash - python3 -m uvicorn webapi.main:app --host 0.0.0.0 --port 8009 --reload - ``` - - **Option B: Using Docker** - ```bash - # Build Docker image - docker build -t metrics:latest . - - # Run container - docker run --rm -p 8009:8009 metrics:latest - ``` +```bash +# 1. Build image +docker build -t metrics:latest . -5. **Access API documentation** - ``` - http://localhost:8009/docs - ``` +# 2. Run container +docker run --rm -p 8009:8009 metrics:latest +``` + +### ๐Ÿ“– Access Documentation +Visit `http://localhost:8009/docs` for interactive API documentation. ## ๐Ÿ“Š API Endpoints -### 1. 
Daily Registered Users (Date Range) -```http -GET /api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20&product_id=freeleaps +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/api/metrics/daily-registered-users` | GET/POST | Query registration data by date range | +| `/api/metrics/recent-registered-users` | GET | Get recent N days data | +| `/api/metrics/registered-users-by-days` | GET | Query N days from start date | +| `/api/metrics/registration-summary` | GET | Get statistical summary | + +### Example Requests + +```bash +# Get last 7 days +curl "http://localhost:8009/api/metrics/recent-registered-users?days=7" + +# Get date range +curl "http://localhost:8009/api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20" + +# Get summary statistics +curl "http://localhost:8009/api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20" ``` -**Parameters:** -- `start_date` (required): Start date in YYYY-MM-DD format -- `end_date` (required): End date in YYYY-MM-DD format -- `product_id` (optional): Product identifier (default: "freeleaps") - -### 2. Recent Registered Users -```http -GET /api/metrics/recent-registered-users?days=7&product_id=freeleaps -``` - -**Parameters:** -- `days` (optional): Number of recent days (default: 7, max: 365) -- `product_id` (optional): Product identifier (default: "freeleaps") - -### 3. Registered Users by Days -```http -GET /api/metrics/registered-users-by-days?start_date=2024-09-01&days=5&product_id=freeleaps -``` - -**Parameters:** -- `start_date` (required): Start date in YYYY-MM-DD format -- `days` (required): Number of days from start date (max: 365) -- `product_id` (optional): Product identifier (default: "freeleaps") - -### 4. 
Registration Summary -```http -GET /api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20&product_id=freeleaps -``` - -**Parameters:** -- `start_date` (required): Start date in YYYY-MM-DD format -- `end_date` (required): End date in YYYY-MM-DD format -- `product_id` (optional): Product identifier (default: "freeleaps") - -### 5. Daily Registered Users (POST) -```http -POST /api/metrics/daily-registered-users -Content-Type: application/json - -{ - "start_date": "2024-09-10", - "end_date": "2024-09-20", - "product_id": "freeleaps" -} -``` +### Parameters +- `start_date` / `end_date`: Date in `YYYY-MM-DD` format +- `days`: Number of days (max: 365) +- `product_id`: Product identifier (default: "freeleaps") ## ๐Ÿ“ˆ Response Format @@ -178,158 +139,94 @@ Content-Type: application/json ## ๐Ÿงช Testing -### Run API Tests +### Quick Test ```bash -python test_registration_api.py -``` +# Health check +curl http://localhost:8009/ -### Manual Testing Examples -```bash -# Test recent 7 days +# Test recent registrations curl "http://localhost:8009/api/metrics/recent-registered-users?days=7" - -# Test date range -curl "http://localhost:8009/api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20" - -# Test summary -curl "http://localhost:8009/api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20" ``` +### Interactive Testing +Visit `http://localhost:8009/docs` for the Swagger UI interface where you can test all endpoints directly. 
+ ## โš™๏ธ Configuration ### Environment Variables ```bash -# Server settings +# Server Configuration SERVICE_API_ACCESS_HOST=0.0.0.0 SERVICE_API_ACCESS_PORT=8009 -# Database settings -MONGODB_URI=mongodb://localhost:27017/ -MONGODB_NAME=freeleaps2 - -# StarRocks settings +# StarRocks Database STARROCKS_HOST=freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc STARROCKS_PORT=9030 STARROCKS_USER=root STARROCKS_PASSWORD= STARROCKS_DATABASE=freeleaps -# Log settings +# Logging LOG_BASE_PATH=./logs BACKEND_LOG_FILE_NAME=metrics APPLICATION_ACTIVITY_LOG=metrics-activity ``` -### Docker Deployment +> ๐Ÿ’ก **Tip**: Copy `local.env` to `.env` and modify as needed for your environment. -**Prerequisites:** -- Docker installed and running -- Local Python 3.12+ image available +### ๐Ÿณ Docker Deployment -**Build and Run:** ```bash -# Build image (using local Python 3.12-slim image) +# Build and run docker build -t metrics:latest . - -# Run container with port mapping docker run --rm -p 8009:8009 metrics:latest -# Run with environment file (if available) -docker run --rm -p 8009:8009 --env-file .env metrics:latest +# Run with custom environment +docker run --rm -p 8009:8009 --env-file local.env metrics:latest # Run in background docker run -d --name metrics-service -p 8009:8009 metrics:latest ``` -**Docker Image Details:** -- Base Image: `swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/python:3.12-slim` +**Image Details:** +- Base: Python 3.12-slim - Port: 8009 -- Working Directory: `/app` -- Auto-reload: Disabled (production mode) +- Working Dir: `/app` ## ๐Ÿ”ง Development -### Project Setup ```bash -# Create virtual environment +# Setup development environment python -m venv venv -source venv/bin/activate # On Windows: venv\Scripts\activate - -# Install dependencies +source venv/bin/activate # Windows: venv\Scripts\activate pip install -r requirements.txt # Run with auto-reload python -m uvicorn webapi.main:app --reload ``` -### Code Structure -- 
**Backend**: Business logic and data access layer -- **WebAPI**: FastAPI routes and application setup -- **Models**: Pydantic data models for validation -- **Services**: Business logic services -- **Infra**: Database clients and infrastructure code - ## ๐Ÿ“ API Documentation -Once the service is running, you can access the interactive API documentation at: - **Swagger UI**: `http://localhost:8009/docs` - **ReDoc**: `http://localhost:8009/redoc` - **OpenAPI JSON**: `http://localhost:8009/openapi.json` ## โš ๏ธ Important Notes -- All APIs support `product_id` parameter (defaults to "freeleaps") -- Date format must be `YYYY-MM-DD` -- Query range is limited to 365 days maximum -- Service requires network access to StarRocks cluster -- Logs are written to `./logs/` directory +- Date format: `YYYY-MM-DD` +- Max query range: 365 days +- Default `product_id`: "freeleaps" +- Requires StarRocks database access ## ๐Ÿ› Troubleshooting -### Common Issues - -1. **Database Connection Failed** - - Verify StarRocks cluster accessibility - - Check network connectivity - - Validate database credentials - -2. **Import Errors** - - Ensure all dependencies are installed - - Check Python path configuration - - Verify virtual environment activation - -3. **Port Already in Use** - - Change port in configuration - - Stop existing service on port 8009 - - For Docker: `docker stop $(docker ps -q --filter ancestor=metrics:latest)` - -4. **Docker Build Issues** - - Ensure local Python image is available: `docker images | grep python` - - Check Docker daemon is running: `docker ps` - - Verify Dockerfile syntax and paths - -5. **Docker Container Issues** - - Check container logs: `docker logs ` - - Verify port mapping: `docker ps` - - Check container status: `docker ps -a` - -### Logs -Check application logs in the `./logs/` directory for detailed error information. 
- -## ๐Ÿ“ž Support - -For issues and questions: -- Check the API documentation at `/docs` -- Review logs for error details -- Contact the development team +| Issue | Solution | +|-------|----------| +| Port in use | `docker stop $(docker ps -q --filter ancestor=metrics:latest)` | +| Import errors | Check dependencies: `pip install -r requirements.txt` | +| DB connection | Verify StarRocks cluster accessibility | +| Container issues | Check logs: `docker logs ` | ## ๐Ÿ“„ License -This project is part of the Freeleaps platform. - - - - - - - +Part of the Freeleaps platform. \ No newline at end of file diff --git a/apps/metrics/backend/infra/starrocks_client.py b/apps/metrics/backend/infra/database_client.py similarity index 98% rename from apps/metrics/backend/infra/starrocks_client.py rename to apps/metrics/backend/infra/database_client.py index db58f97..b55119d 100644 --- a/apps/metrics/backend/infra/starrocks_client.py +++ b/apps/metrics/backend/infra/database_client.py @@ -2,7 +2,7 @@ import pymysql from typing import List, Dict, Any, Optional from datetime import date from loguru import logger -from webapi.config.site_settings import site_settings +from webapi.config.app_settings import site_settings class StarRocksClient: diff --git a/apps/metrics/backend/models/registered_users.py b/apps/metrics/backend/models/user_registration_models.py similarity index 100% rename from apps/metrics/backend/models/registered_users.py rename to apps/metrics/backend/models/user_registration_models.py diff --git a/apps/metrics/backend/services/registration_service.py b/apps/metrics/backend/services/registration_analytics_service.py similarity index 96% rename from apps/metrics/backend/services/registration_service.py rename to apps/metrics/backend/services/registration_analytics_service.py index 8d786e9..1579286 100644 --- a/apps/metrics/backend/services/registration_service.py +++ b/apps/metrics/backend/services/registration_analytics_service.py @@ -1,8 +1,8 @@ from typing 
import List, Dict, Any from datetime import date, timedelta from loguru import logger -from backend.infra.starrocks_client import StarRocksClient -from backend.models.registered_users import UserRegistrationResponse, DailyRegisteredUsers +from backend.infra.database_client import StarRocksClient +from backend.models.user_registration_models import UserRegistrationResponse, DailyRegisteredUsers class RegistrationService: diff --git a/apps/metrics/webapi/bootstrap/application.py b/apps/metrics/webapi/bootstrap/app_factory.py similarity index 97% rename from apps/metrics/webapi/bootstrap/application.py rename to apps/metrics/webapi/bootstrap/app_factory.py index bfe3e11..4e9e805 100644 --- a/apps/metrics/webapi/bootstrap/application.py +++ b/apps/metrics/webapi/bootstrap/app_factory.py @@ -1,7 +1,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from prometheus_fastapi_instrumentator import Instrumentator -from webapi.config.site_settings import site_settings +from webapi.config.app_settings import site_settings from loguru import logger import os diff --git a/apps/metrics/webapi/config/site_settings.py b/apps/metrics/webapi/config/app_settings.py similarity index 100% rename from apps/metrics/webapi/config/site_settings.py rename to apps/metrics/webapi/config/app_settings.py diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py index fb977b6..10467a3 100644 --- a/apps/metrics/webapi/main.py +++ b/apps/metrics/webapi/main.py @@ -1,15 +1,15 @@ -from webapi.bootstrap.application import create_app -from webapi.config.site_settings import site_settings +from webapi.bootstrap.app_factory import create_app +from webapi.config.app_settings import site_settings from fastapi.responses import RedirectResponse import uvicorn from typing import Any -from webapi.routes import registration +from webapi.routes import registration_metrics app = create_app() # Include routers -app.include_router(registration.router) 
+app.include_router(registration_metrics.router) @app.get("/", status_code=301) diff --git a/apps/metrics/webapi/routes/registration.py b/apps/metrics/webapi/routes/registration_metrics.py similarity index 97% rename from apps/metrics/webapi/routes/registration.py rename to apps/metrics/webapi/routes/registration_metrics.py index 327067d..1c430de 100644 --- a/apps/metrics/webapi/routes/registration.py +++ b/apps/metrics/webapi/routes/registration_metrics.py @@ -2,8 +2,8 @@ from fastapi import APIRouter, HTTPException, Query from datetime import date, datetime, timedelta from typing import Optional from loguru import logger -from backend.services.registration_service import RegistrationService -from backend.models.registered_users import UserRegistrationResponse, UserRegistrationQuery +from backend.services.registration_analytics_service import RegistrationService +from backend.models.user_registration_models import UserRegistrationResponse, UserRegistrationQuery router = APIRouter(prefix="/api/metrics", tags=["registration"]) From 7027e8c3f76eb3a50e4bf3a142fe4f54ce86103c Mon Sep 17 00:00:00 2001 From: icecheng Date: Tue, 16 Sep 2025 15:11:53 +0800 Subject: [PATCH 05/15] refactor: refactor the metric project structure --- apps/metrics/backend/infra/database_client.py | 12 +- .../infra/external_service}/__init__.py | 0 apps/metrics/common/config/__init__.py | 0 .../{webapi => common}/config/app_settings.py | 19 +-- apps/metrics/common/config/log_settings.py | 17 +++ apps/metrics/common/config/site_settings.py | 26 ++++ apps/metrics/common/log/__init__.py | 0 apps/metrics/common/log/application_logger.py | 12 ++ apps/metrics/common/log/base_logger.py | 136 +++++++++++++++++ apps/metrics/common/log/json_sink.py | 84 +++++++++++ apps/metrics/common/log/module_logger.py | 46 ++++++ apps/metrics/common/probes/__init__.py | 140 ++++++++++++++++++ apps/metrics/common/probes/adapters.py | 15 ++ apps/metrics/webapi/bootstrap/app_factory.py | 2 +- 
apps/metrics/webapi/bootstrap/application.py | 77 ++++++++++ apps/metrics/webapi/main.py | 17 +-- apps/metrics/webapi/providers/__init__.py | 0 apps/metrics/webapi/providers/common.py | 31 ++++ .../webapi/providers/exception_handler.py | 39 +++++ apps/metrics/webapi/providers/logger.py | 7 + apps/metrics/webapi/providers/metrics.py | 16 ++ apps/metrics/webapi/providers/probes.py | 24 +++ apps/metrics/webapi/providers/router.py | 34 +++++ apps/metrics/webapi/routes/__init__.py | 5 + .../metrics/webapi/routes/metrics/__init__.py | 5 + .../{ => metrics}/registration_metrics.py | 2 +- 26 files changed, 735 insertions(+), 31 deletions(-) rename apps/metrics/{webapi/config => backend/infra/external_service}/__init__.py (100%) create mode 100644 apps/metrics/common/config/__init__.py rename apps/metrics/{webapi => common}/config/app_settings.py (65%) create mode 100644 apps/metrics/common/config/log_settings.py create mode 100644 apps/metrics/common/config/site_settings.py create mode 100644 apps/metrics/common/log/__init__.py create mode 100644 apps/metrics/common/log/application_logger.py create mode 100644 apps/metrics/common/log/base_logger.py create mode 100644 apps/metrics/common/log/json_sink.py create mode 100644 apps/metrics/common/log/module_logger.py create mode 100644 apps/metrics/common/probes/__init__.py create mode 100644 apps/metrics/common/probes/adapters.py create mode 100644 apps/metrics/webapi/bootstrap/application.py create mode 100644 apps/metrics/webapi/providers/__init__.py create mode 100644 apps/metrics/webapi/providers/common.py create mode 100644 apps/metrics/webapi/providers/exception_handler.py create mode 100644 apps/metrics/webapi/providers/logger.py create mode 100644 apps/metrics/webapi/providers/metrics.py create mode 100644 apps/metrics/webapi/providers/probes.py create mode 100644 apps/metrics/webapi/providers/router.py create mode 100644 apps/metrics/webapi/routes/metrics/__init__.py rename apps/metrics/webapi/routes/{ => 
metrics}/registration_metrics.py (99%) diff --git a/apps/metrics/backend/infra/database_client.py b/apps/metrics/backend/infra/database_client.py index b55119d..84db909 100644 --- a/apps/metrics/backend/infra/database_client.py +++ b/apps/metrics/backend/infra/database_client.py @@ -2,18 +2,18 @@ import pymysql from typing import List, Dict, Any, Optional from datetime import date from loguru import logger -from webapi.config.app_settings import site_settings +from common.config.app_settings import app_settings class StarRocksClient: """StarRocks database client for querying user registration data""" def __init__(self): - self.host = site_settings.STARROCKS_HOST - self.port = site_settings.STARROCKS_PORT - self.user = site_settings.STARROCKS_USER - self.password = site_settings.STARROCKS_PASSWORD - self.database = site_settings.STARROCKS_DATABASE + self.host = app_settings.STARROCKS_HOST + self.port = app_settings.STARROCKS_PORT + self.user = app_settings.STARROCKS_USER + self.password = app_settings.STARROCKS_PASSWORD + self.database = app_settings.STARROCKS_DATABASE self.connection = None def connect(self) -> bool: diff --git a/apps/metrics/webapi/config/__init__.py b/apps/metrics/backend/infra/external_service/__init__.py similarity index 100% rename from apps/metrics/webapi/config/__init__.py rename to apps/metrics/backend/infra/external_service/__init__.py diff --git a/apps/metrics/common/config/__init__.py b/apps/metrics/common/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/config/app_settings.py b/apps/metrics/common/config/app_settings.py similarity index 65% rename from apps/metrics/webapi/config/app_settings.py rename to apps/metrics/common/config/app_settings.py index fac31e6..3575b8d 100644 --- a/apps/metrics/webapi/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -2,13 +2,7 @@ from pydantic_settings import BaseSettings from typing import Optional -class 
SiteSettings(BaseSettings): - # Server settings - SERVER_HOST: str = "0.0.0.0" - SERVER_PORT: int = 8009 - SERVICE_API_ACCESS_HOST: str = "0.0.0.0" - SERVICE_API_ACCESS_PORT: int = 8009 - +class AppSettings(BaseSettings): # Log settings LOG_BASE_PATH: str = "./logs" BACKEND_LOG_FILE_NAME: str = "metrics" @@ -21,8 +15,15 @@ class SiteSettings(BaseSettings): STARROCKS_PASSWORD: str = "" STARROCKS_DATABASE: str = "freeleaps" + # Prometheus settings + PROMETHEUS_ENDPOINT: str = "http://localhost:9090" + + METRICS_ENABLED: bool = False + PROBES_ENABLED: bool = True + + class Config: - env_file = "local.env" + env_file = "local.env" -site_settings = SiteSettings() +app_settings = AppSettings() diff --git a/apps/metrics/common/config/log_settings.py b/apps/metrics/common/config/log_settings.py new file mode 100644 index 0000000..633c75a --- /dev/null +++ b/apps/metrics/common/config/log_settings.py @@ -0,0 +1,17 @@ +import os +from dataclasses import dataclass +from .app_settings import app_settings +from .site_settings import site_settings + +@dataclass +class LogSettings: + LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH + LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days") + LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00") # midnight + MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5)) + LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024)) # 10 MB + APP_NAME: str = site_settings.NAME + ENVIRONMENT: str = site_settings.ENV + + +log_settings = LogSettings() diff --git a/apps/metrics/common/config/site_settings.py b/apps/metrics/common/config/site_settings.py new file mode 100644 index 0000000..c415f9f --- /dev/null +++ b/apps/metrics/common/config/site_settings.py @@ -0,0 +1,26 @@ +import os + +from pydantic_settings import BaseSettings + + +# NOTE: The values fall backs to your environment variables when not set here +class SiteSettings(BaseSettings): + NAME: str = "FREELEAPS-METRICS" + DEBUG: bool 
= True + + ENV: str = "dev" + + SERVER_HOST: str = "localhost" + SERVER_PORT: int = 9000 + + URL: str = "http://localhost" + TIME_ZONE: str = "UTC" + + BASE_PATH: str = os.path.dirname(os.path.dirname((os.path.abspath(__file__)))) + + class Config: + env_file = ".devbase-webapi.env" + env_file_encoding = "utf-8" + + +site_settings = SiteSettings() diff --git a/apps/metrics/common/log/__init__.py b/apps/metrics/common/log/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/common/log/application_logger.py b/apps/metrics/common/log/application_logger.py new file mode 100644 index 0000000..896c044 --- /dev/null +++ b/apps/metrics/common/log/application_logger.py @@ -0,0 +1,12 @@ +from .base_logger import LoggerBase +from app.common.config.app_settings import app_settings + +class ApplicationLogger(LoggerBase): + def __init__(self, application_activities: dict[str, any] = {}) -> None: + extra_fileds = {} + if application_activities: + extra_fileds.update(application_activities) + super().__init__( + logger_name=app_settings.APPLICATION_ACTIVITY_LOG, + extra_fileds=extra_fileds, + ) diff --git a/apps/metrics/common/log/base_logger.py b/apps/metrics/common/log/base_logger.py new file mode 100644 index 0000000..24f7bb0 --- /dev/null +++ b/apps/metrics/common/log/base_logger.py @@ -0,0 +1,136 @@ +from loguru import logger as guru_logger +from common.config.log_settings import log_settings +from typing import Dict, Any, Optional +import socket +import json +import threading +import os +import sys +import inspect +import logging + +from common.log.json_sink import JsonSink + +class LoggerBase: + binded_loggers = {} + logger_lock = threading.Lock() + + def __init__(self, logger_name: str, extra_fileds: dict[str, any]) -> None: + self.__logger_name = logger_name + self.extra_fileds = extra_fileds + with LoggerBase.logger_lock: + if self.__logger_name in LoggerBase.binded_loggers: + self.logger = LoggerBase.binded_loggers[self.__logger_name] + 
return + + log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log" + log_level = "INFO" + rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024) + + guru_logger.remove() + + file_sink = JsonSink( + log_file_path=log_filename, + rotation_size_bytes=rotation_bytes, + max_backup_files=log_settings.MAX_BACKUP_FILES + ) + guru_logger.add( + sink=file_sink, + level=log_level, + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + guru_logger.add( + sink=sys.stderr, + level=log_level, + format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}", + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + host_name = socket.gethostname() + host_ip = socket.gethostbyname(host_name) + self.logger = guru_logger.bind( + topic=self.__logger_name, + host_ip=host_ip, + host_name=host_name, + app=log_settings.APP_NAME, + env=log_settings.ENVIRONMENT, + ) + with LoggerBase.logger_lock: + LoggerBase.binded_loggers[self.__logger_name] = self.logger + + def _get_log_context(self) -> dict: + frame = inspect.currentframe().f_back.f_back + filename = os.path.basename(frame.f_code.co_filename) + lineno = frame.f_lineno + return {"log_file": filename, "log_line": lineno} + + def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Dict[str, Any]: + props = {} if properties is None else properties.copy() + props_str = json.dumps(props, ensure_ascii=False) if props else "{}" + return props, props_str + + async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: dict[str, any], text: str = "") -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context) + local_logger.info(text) + + 
async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context) + local_logger.exception(text) + + async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="information", properties=props, properties_str=props_str, **context) + local_logger.info(text) + + async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context) + local_logger.warning(text) + + async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="error", properties=props, properties_str=props_str, **context) + local_logger.error(text) + + @staticmethod + def configure_uvicorn_logging(): + print("๐Ÿ“ข Setting up uvicorn logging interception...") + + # Intercept logs from these loggers + intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"] + 
+ class InterceptHandler(logging.Handler): + def emit(self, record): + level = ( + guru_logger.level(record.levelname).name + if guru_logger.level(record.levelname, None) + else record.levelno + ) + frame, depth = logging.currentframe(), 2 + while frame.f_code.co_filename == logging.__file__: + frame = frame.f_back + depth += 1 + + guru_logger.opt(depth=depth, exception=record.exc_info).log( + level, + f"[{record.name}] {record.getMessage()}", + ) + + # Replace default handlers + logging.root.handlers.clear() + logging.root.setLevel(logging.INFO) + logging.root.handlers = [InterceptHandler()] + + # Configure specific uvicorn loggers + for logger_name in intercept_loggers: + logging_logger = logging.getLogger(logger_name) + logging_logger.handlers.clear() # Remove default handlers + logging_logger.propagate = True # Ensure propagation through Loguru diff --git a/apps/metrics/common/log/json_sink.py b/apps/metrics/common/log/json_sink.py new file mode 100644 index 0000000..2379095 --- /dev/null +++ b/apps/metrics/common/log/json_sink.py @@ -0,0 +1,84 @@ +import json +import datetime +import traceback +from pathlib import Path + +class JsonSink: + def __init__( + self, + log_file_path: str, + rotation_size_bytes: int = 10 * 1024 * 1024, + max_backup_files: int = 5, + ): + self.log_file_path = Path(log_file_path) + self.rotation_size = rotation_size_bytes + self.max_backup_files = max_backup_files + self._open_log_file() + + def _open_log_file(self): + # ensure the parent directory exists + parent_dir = self.log_file_path.parent + if not parent_dir.exists(): + parent_dir.mkdir(parents=True, exist_ok=True) + self.log_file = self.log_file_path.open("a", encoding="utf-8") + + def _should_rotate(self) -> bool: + return self.log_file_path.exists() and self.log_file_path.stat().st_size >= self.rotation_size + + def _rotate(self): + self.log_file.close() + timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + rotated_path = 
self.log_file_path.with_name(f"{self.log_file_path.stem}_{timestamp}{self.log_file_path.suffix}") + self.log_file_path.rename(rotated_path) + self._cleanup_old_backups() + self._open_log_file() + + def _cleanup_old_backups(self): + parent = self.log_file_path.parent + stem = self.log_file_path.stem + suffix = self.log_file_path.suffix + + backup_files = sorted( + parent.glob(f"{stem}_*{suffix}"), + key=lambda p: p.stat().st_mtime, + reverse=True, + ) + + for old_file in backup_files[self.max_backup_files:]: + try: + old_file.unlink() + except Exception as e: + print(f"Failed to delete old backup {old_file}: {e}") + + def __call__(self, message): + record = message.record + if self._should_rotate(): + self._rotate() + + log_entry = { + "level": record["level"].name.lower(), + "timestamp": int(record["time"].timestamp() * 1000), + "text": record["message"], + "fields": record["extra"].get("properties", {}), + "context": { + "app": record["extra"].get("app"), + "env": record["extra"].get("env"), + "log_file": record["extra"].get("log_file"), + "log_line": record["extra"].get("log_line"), + "topic": record["extra"].get("topic"), + "sender_id": record["extra"].get("sender_id"), + "receiver_id": record["extra"].get("receiver_id"), + "subject": record["extra"].get("subject"), + "event": record["extra"].get("event"), + "host_ip": record["extra"].get("host_ip"), + "host_name": record["extra"].get("host_name"), + }, + "stacktrace": None + } + + if record["exception"]: + exc_type, exc_value, exc_tb = record["exception"] + log_entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb) + + self.log_file.write(json.dumps(log_entry, ensure_ascii=False) + "\n") + self.log_file.flush() diff --git a/apps/metrics/common/log/module_logger.py b/apps/metrics/common/log/module_logger.py new file mode 100644 index 0000000..3e82f74 --- /dev/null +++ b/apps/metrics/common/log/module_logger.py @@ -0,0 +1,46 @@ +from .application_logger import ApplicationLogger + + +class 
ModuleLogger(ApplicationLogger): + def __init__(self, sender_id: str) -> None: + super().__init__() + self.event_sender_id = sender_id + self.event_receiver_id = "ModuleLogger" + self.event_subject = "module" + + async def log_exception(self, exception: Exception, text: str = "Exception", properties: dict[str, any] = None) -> None: + return await super().log_exception( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + exception=exception, + text=text, + properties=properties, + ) + + async def log_info(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_info( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_warning(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_warning( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_error(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_error( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) \ No newline at end of file diff --git a/apps/metrics/common/probes/__init__.py b/apps/metrics/common/probes/__init__.py new file mode 100644 index 0000000..4071df8 --- /dev/null +++ b/apps/metrics/common/probes/__init__.py @@ -0,0 +1,140 @@ +import logging +from enum import Enum +from typing import Optional, Callable, Tuple, Dict +import inspect +from datetime import datetime, timezone + +# ProbeType is an Enum that defines the types of probes that can be registered. +class ProbeType(Enum): + LIVENESS = "liveness" + READINESS = "readiness" + STARTUP = "startup" + +# ProbeResult is a class that represents the result of a probe check. 
+class ProbeResult: + def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None): + self.success = success + self.message = message + self.data = data or {} + + def to_dict(self) -> dict: + return { + "success": self.success, + "message": self.message, + "data": self.data + } + +# Probe is a class that represents a probe that can be registered. +class Probe: + def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None): + self.type = type + self.path = path + self.check_fn = check_fn + self.name = name or f"{type.value}-{id(self)}" + + async def execute(self) -> ProbeResult: + try: + result = self.check_fn() + if inspect.isawaitable(result): + result = await result + + if isinstance(result, ProbeResult): + return result + elif isinstance(result, bool): + return ProbeResult(result, "ok" if result else "failed") + else: + return ProbeResult(True, "ok") + except Exception as e: + return ProbeResult(False, str(e)) + +# ProbeGroup is a class that represents a group of probes that can be checked together. +class ProbeGroup: + def __init__(self, path: str): + self.path = path + self.probes: Dict[str, Probe] = {} + + def add_probe(self, probe: Probe): + self.probes[probe.name] = probe + + async def check_all(self) -> Tuple[bool, dict]: + results = {} + all_success = True + + for name, probe in self.probes.items(): + result = await probe.execute() + results[name] = result.to_dict() + if not result.success: + all_success = False + + return all_success, results + +# FrameworkAdapter is an abstract class that defines the interface for framework-specific probe adapters. 
+class FrameworkAdapter: + async def handle_request(self, group: ProbeGroup): + all_success, results = await group.check_all() + status_code = 200 if all_success else 503 + return {"status": "ok" if all_success else "failed", "payload": results, "timestamp": int(datetime.now(timezone.utc).timestamp())}, status_code + + def register_route(self, path: str, handler: Callable): + raise NotImplementedError + +# ProbeManager is a class that manages the registration of probes and their corresponding framework adapters. +class ProbeManager: + _default_paths = { + ProbeType.LIVENESS: "/_/livez", + ProbeType.READINESS: "/_/readyz", + ProbeType.STARTUP: "/_/healthz" + } + + def __init__(self): + self.groups: Dict[str, ProbeGroup] = {} + self.adapters: Dict[str, FrameworkAdapter] = {} + self._startup_complete = False + + def register_adapter(self, framework: str, adapter: FrameworkAdapter): + self.adapters[framework] = adapter + logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}") + + def register( + self, + type: ProbeType, + check_func: Optional[Callable] = None, + path: Optional[str] = None, + prefix: str = "", + name: Optional[str] = None, + frameworks: Optional[list] = None + ): + path = path or self._default_paths.get(type, "/_/healthz") + if prefix: + path = f"{prefix}{path}" + + if type == ProbeType.STARTUP and check_func is None: + check_func = self._default_startup_check + + probe = Probe(type, path, check_func or (lambda: True), name) + + if path not in self.groups: + self.groups[path] = ProbeGroup(path) + self.groups[path].add_probe(probe) + + for framework in (frameworks or ["default"]): + self._register_route(framework, path) + logging.info(f"Registered {type.value} probe route ({path}) for framework: {framework}") + + def _register_route(self, framework: str, path: str): + if framework not in self.adapters: + return + + adapter = self.adapters[framework] + group = self.groups[path] + + async def handler(): + return await 
adapter.handle_request(group) + + adapter.register_route(path, handler) + + def _default_startup_check(self) -> bool: + return self._startup_complete + + def mark_startup_complete(self): + self._startup_complete = True \ No newline at end of file diff --git a/apps/metrics/common/probes/adapters.py b/apps/metrics/common/probes/adapters.py new file mode 100644 index 0000000..2ecd38a --- /dev/null +++ b/apps/metrics/common/probes/adapters.py @@ -0,0 +1,15 @@ +from . import FrameworkAdapter +from fastapi.responses import JSONResponse +from typing import Callable + +# FastAPIAdapter is a class that implements the FrameworkAdapter interface for FastAPI. +class FastAPIAdapter(FrameworkAdapter): + def __init__(self, app): + self.app = app + + def register_route(self,path: str, handler: Callable): + async def wrapper(): + data, status_code = await handler() + return JSONResponse(content=data, status_code=status_code) + + self.app.add_api_route(path, wrapper, methods=["GET"]) diff --git a/apps/metrics/webapi/bootstrap/app_factory.py b/apps/metrics/webapi/bootstrap/app_factory.py index 4e9e805..feee790 100644 --- a/apps/metrics/webapi/bootstrap/app_factory.py +++ b/apps/metrics/webapi/bootstrap/app_factory.py @@ -1,7 +1,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from prometheus_fastapi_instrumentator import Instrumentator -from webapi.config.app_settings import site_settings +from common.config.app_settings import site_settings from loguru import logger import os diff --git a/apps/metrics/webapi/bootstrap/application.py b/apps/metrics/webapi/bootstrap/application.py new file mode 100644 index 0000000..66d4a65 --- /dev/null +++ b/apps/metrics/webapi/bootstrap/application.py @@ -0,0 +1,77 @@ +import logging + +from fastapi import FastAPI +from fastapi.openapi.utils import get_openapi + +from common.config.app_settings import app_settings +from webapi.providers import exception_handler, common, probes, metrics, router +from 
webapi.providers.logger import register_logger + + +def create_app() -> FastAPI: + logging.info("App initializing") + + app = FreeleapsMetricsApp() + + register_logger() + register(app, exception_handler) + register(app, router) + register(app, common) + + # Call the custom_openapi function to change the OpenAPI version + customize_openapi_security(app) + # Register probe APIs if enabled + if app_settings.PROBES_ENABLED: + register(app, probes) + + # Register metrics APIs if enabled + if app_settings.METRICS_ENABLED: + register(app, metrics) + return app + + +# This function overrides the OpenAPI schema version to 3.0.0 +def customize_openapi_security(app: FastAPI) -> None: + def custom_openapi(): + if app.openapi_schema: + return app.openapi_schema + + # Generate OpenAPI schema + openapi_schema = get_openapi( + title="FreeLeaps Metrics API", + version="3.1.0", + description="FreeLeaps Metrics API Documentation", + routes=app.routes, + ) + + # Ensure the components section exists in the OpenAPI schema + if "components" not in openapi_schema: + openapi_schema["components"] = {} + + # Add security scheme to components + openapi_schema["components"]["securitySchemes"] = { + "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"} + } + + # Add security requirement globally + openapi_schema["security"] = [{"bearerAuth": []}] + + app.openapi_schema = openapi_schema + return app.openapi_schema + + app.openapi = custom_openapi + + +def register(app, provider): + logging.info(provider.__name__ + " registering") + provider.register(app) + + +def boot(app, provider): + logging.info(provider.__name__ + " booting") + provider.boot(app) + + +class FreeleapsMetricsApp(FastAPI): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py index 10467a3..42ef947 100644 --- a/apps/metrics/webapi/main.py +++ b/apps/metrics/webapi/main.py @@ -1,10 +1,9 @@ -from 
webapi.bootstrap.app_factory import create_app -from webapi.config.app_settings import site_settings +from common.config.site_settings import site_settings from fastapi.responses import RedirectResponse import uvicorn -from typing import Any -from webapi.routes import registration_metrics +from webapi.bootstrap.application import create_app +from webapi.routes.metrics import registration_metrics app = create_app() @@ -24,13 +23,3 @@ if __name__ == "__main__": uvicorn.run( app="main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT ) - - -def get_context() -> Any: - # Define your context function. This is where you can set up authentication, database connections, etc. - return {} - - -def get_root_value() -> Any: - # Define your root value function. This is where you can set up the root value for GraphQL. - return {} diff --git a/apps/metrics/webapi/providers/__init__.py b/apps/metrics/webapi/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/providers/common.py b/apps/metrics/webapi/providers/common.py new file mode 100644 index 0000000..f76fa4a --- /dev/null +++ b/apps/metrics/webapi/providers/common.py @@ -0,0 +1,31 @@ +from fastapi.middleware.cors import CORSMiddleware +from common.config.site_settings import site_settings + + +def register(app): + app.debug = site_settings.DEBUG + app.title = site_settings.NAME + + add_global_middleware(app) + + # This hook ensures that a connection is opened to handle any queries + # generated by the request. + @app.on_event("startup") + async def startup(): + pass + + # This hook ensures that the connection is closed when we've finished + # processing the request. 
+ @app.on_event("shutdown") + async def shutdown(): + pass + + +def add_global_middleware(app): + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) diff --git a/apps/metrics/webapi/providers/exception_handler.py b/apps/metrics/webapi/providers/exception_handler.py new file mode 100644 index 0000000..21117a5 --- /dev/null +++ b/apps/metrics/webapi/providers/exception_handler.py @@ -0,0 +1,39 @@ +from fastapi import FastAPI, HTTPException +from fastapi.exceptions import RequestValidationError +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import ( + HTTP_400_BAD_REQUEST, + HTTP_401_UNAUTHORIZED, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_422_UNPROCESSABLE_ENTITY, + HTTP_500_INTERNAL_SERVER_ERROR, +) + + +async def custom_http_exception_handler(request: Request, exc: HTTPException): + return JSONResponse( + status_code=exc.status_code, + content={"error": exc.detail}, + ) + + + +async def validation_exception_handler(request: Request, exc: RequestValidationError): + return JSONResponse( + status_code=HTTP_400_BAD_REQUEST, + content={"error": str(exc)}, + ) + +async def exception_handler(request: Request, exc: Exception): + return JSONResponse( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + content={"error": str(exc)}, + ) + + +def register(app: FastAPI): + app.add_exception_handler(HTTPException, custom_http_exception_handler) + app.add_exception_handler(RequestValidationError, validation_exception_handler) + app.add_exception_handler(Exception, exception_handler) diff --git a/apps/metrics/webapi/providers/logger.py b/apps/metrics/webapi/providers/logger.py new file mode 100644 index 0000000..edfa9f5 --- /dev/null +++ b/apps/metrics/webapi/providers/logger.py @@ -0,0 +1,7 @@ +from common.log.base_logger import LoggerBase + + +def register_logger(): + print("๐Ÿ“ข Setting up logging interception...") + 
LoggerBase.configure_uvicorn_logging() + print("โœ… Logging interception complete. Logs are formatted and deduplicated!") diff --git a/apps/metrics/webapi/providers/metrics.py b/apps/metrics/webapi/providers/metrics.py new file mode 100644 index 0000000..593369d --- /dev/null +++ b/apps/metrics/webapi/providers/metrics.py @@ -0,0 +1,16 @@ +import logging +from prometheus_fastapi_instrumentator import Instrumentator +from common.config.app_settings import app_settings + +def register(app): + instrumentator = ( + Instrumentator().instrument( + app, + metric_namespace="freeleaps-auth", + metric_subsystem=app_settings.APP_NAME) + ) + + @app.on_event("startup") + async def startup(): + instrumentator.expose(app, endpoint="/api/_/metrics", should_gzip=True) + logging.info("Metrics endpoint exposed at /api/_/metrics") \ No newline at end of file diff --git a/apps/metrics/webapi/providers/probes.py b/apps/metrics/webapi/providers/probes.py new file mode 100644 index 0000000..058bdc1 --- /dev/null +++ b/apps/metrics/webapi/providers/probes.py @@ -0,0 +1,24 @@ +from common.probes import ProbeManager, ProbeType +from common.probes.adapters import FastAPIAdapter + +def register(app): + probes_manager = ProbeManager() + probes_manager.register_adapter("fastapi", FastAPIAdapter(app)) + + async def readiness_checker(): + return {"success": True, "message": "Ready"} + + probes_manager.register( + name="readiness", + prefix="/api", + type=ProbeType.READINESS, + check_func=readiness_checker, + frameworks=["fastapi"] + ) + + probes_manager.register(name="liveness", prefix="/api", type=ProbeType.LIVENESS, frameworks=["fastapi"]) + probes_manager.register(name="startup", prefix="/api", type=ProbeType.STARTUP, frameworks=["fastapi"]) + + @app.on_event("startup") + async def mark_startup_complete(): + probes_manager.mark_startup_complete() \ No newline at end of file diff --git a/apps/metrics/webapi/providers/router.py b/apps/metrics/webapi/providers/router.py new file mode 100644 index 
0000000..3ad11ae --- /dev/null +++ b/apps/metrics/webapi/providers/router.py @@ -0,0 +1,34 @@ +from webapi.routes import api_router + +from starlette import routing + + +def register(app): + app.include_router( + api_router, + prefix="/api", + tags=["api"], + dependencies=[], + responses={404: {"description": "no page found"}}, + ) + + if app.debug: + for route in app.routes: + if not isinstance(route, routing.WebSocketRoute): + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "methods": route.methods, + } + ) + else: + print( + { + "path": route.path, + "endpoint": route.endpoint, + "name": route.name, + "type": "web socket route", + } + ) diff --git a/apps/metrics/webapi/routes/__init__.py b/apps/metrics/webapi/routes/__init__.py index e69de29..3a2818a 100644 --- a/apps/metrics/webapi/routes/__init__.py +++ b/apps/metrics/webapi/routes/__init__.py @@ -0,0 +1,5 @@ +from fastapi import APIRouter +from webapi.routes.metrics import router +api_router = APIRouter() + +api_router.include_router(router, tags=["metrics"]) diff --git a/apps/metrics/webapi/routes/metrics/__init__.py b/apps/metrics/webapi/routes/metrics/__init__.py new file mode 100644 index 0000000..e7012cd --- /dev/null +++ b/apps/metrics/webapi/routes/metrics/__init__.py @@ -0,0 +1,5 @@ +from fastapi import APIRouter +from webapi.routes.metrics.registration_metrics import router + +api_router = APIRouter() +api_router.include_router(router,prefix="/metrics", tags=["metrics"]) diff --git a/apps/metrics/webapi/routes/registration_metrics.py b/apps/metrics/webapi/routes/metrics/registration_metrics.py similarity index 99% rename from apps/metrics/webapi/routes/registration_metrics.py rename to apps/metrics/webapi/routes/metrics/registration_metrics.py index 1c430de..ebab21a 100644 --- a/apps/metrics/webapi/routes/registration_metrics.py +++ b/apps/metrics/webapi/routes/metrics/registration_metrics.py @@ -5,7 +5,7 @@ from loguru import logger from 
backend.services.registration_analytics_service import RegistrationService from backend.models.user_registration_models import UserRegistrationResponse, UserRegistrationQuery -router = APIRouter(prefix="/api/metrics", tags=["registration"]) +router = APIRouter(tags=["registration"]) # Initialize service registration_service = RegistrationService() From d008c1a8bce84559b6405460005ed5283b0bd32b Mon Sep 17 00:00:00 2001 From: icecheng Date: Tue, 16 Sep 2025 17:04:53 +0800 Subject: [PATCH 06/15] feat: add support for Prometheus-related metrics --- .../external_service/prometheus_client.py | 119 ++++++++ .../services/prometheus_metrics_service.py | 263 ++++++++++++++++++ apps/metrics/common/config/app_settings.py | 4 +- apps/metrics/common/log/application_logger.py | 2 +- apps/metrics/local.env | 4 +- apps/metrics/webapi/bootstrap/app_factory.py | 69 ----- apps/metrics/webapi/providers/metrics.py | 2 +- .../metrics/webapi/routes/metrics/__init__.py | 8 +- .../routes/prometheus_metrics/__init__.py | 9 + .../prometheus_metrics/available_metrics.py | 31 +++ .../routes/prometheus_metrics/metric_info.py | 32 +++ .../prometheus_metrics/metrics_query.py | 83 ++++++ 12 files changed, 549 insertions(+), 77 deletions(-) create mode 100644 apps/metrics/backend/infra/external_service/prometheus_client.py create mode 100644 apps/metrics/backend/services/prometheus_metrics_service.py delete mode 100644 apps/metrics/webapi/bootstrap/app_factory.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/__init__.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/available_metrics.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/metric_info.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py diff --git a/apps/metrics/backend/infra/external_service/prometheus_client.py b/apps/metrics/backend/infra/external_service/prometheus_client.py new file mode 100644 index 0000000..db13c3a --- /dev/null +++ 
b/apps/metrics/backend/infra/external_service/prometheus_client.py @@ -0,0 +1,119 @@ +import httpx +from typing import Dict, Any, Optional, Union +from datetime import datetime +import json +from fastapi import HTTPException + +from common.config.app_settings import app_settings +from common.log.module_logger import ModuleLogger + + +class PrometheusClient: + """ + Async Prometheus client for querying metrics data using PromQL. + + This client provides methods to: + - Query data using PromQL expressions + - Get all available metrics + - Get labels for specific metrics + - Query metric series with label filters + """ + + def __init__(self, endpoint: Optional[str] = None): + """ + Initialize Prometheus client. + + Args: + endpoint: Prometheus server endpoint. If None, uses PROMETHEUS_ENDPOINT from settings. + """ + self.module_logger = ModuleLogger(__file__) + self.endpoint = endpoint or app_settings.PROMETHEUS_ENDPOINT + self.base_url = f"{self.endpoint.rstrip('/')}/api/v1" + + async def request(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """ + Make HTTP request to Prometheus API. 
+ + Args: + endpoint: API endpoint path + params: Query parameters + + Returns: + JSON response data + + Raises: + httpx.HTTPError: If request fails + ValueError: If response is not valid JSON + """ + url = f"{self.base_url}/{endpoint.lstrip('/')}" + + try: + await self.module_logger.log_info(f"Making request to Prometheus: {url} with params: {params}") + async with httpx.AsyncClient(timeout=30.0) as client: + response = await client.get(url, params=params) + response.raise_for_status() + data = response.json() + if data.get("status") != "success": + error_msg = data.get('error', 'Unknown error') + await self.module_logger.log_error(f"Prometheus API error: {error_msg}") + raise HTTPException(status_code=400, detail=f"Prometheus API error: {error_msg}") + + return data + + except httpx.HTTPError as e: + await self.module_logger.log_error(f"HTTP error querying Prometheus: {e}") + raise HTTPException(status_code=502, detail=f"Failed to connect to Prometheus: {str(e)}") + except json.JSONDecodeError as e: + await self.module_logger.log_error(f"Invalid JSON response from Prometheus: {e}") + raise HTTPException(status_code=400, detail=f"Invalid response from Prometheus: {str(e)}") + + async def query_range( + self, + query: str, + start: Union[str, datetime], + end: Union[str, datetime], + step: str = "15s" + ) -> Dict[str, Any]: + """ + Execute a PromQL range query. 
+ + Args: + query: PromQL query string + start: Start time (RFC3339 string or datetime) + end: End time (RFC3339 string or datetime) + step: Query resolution step width (e.g., "15s", "1m", "1h") + + Returns: + Range query result data + + Example: + result = await client.query_range( + "up{job='prometheus'}", + start=datetime.now() - timedelta(hours=1), + end=datetime.now(), + step="1m" + ) + """ + params = { + "query": query, + "step": step + } + + # Convert datetime to RFC3339 string if needed + if isinstance(start, datetime): + if start.tzinfo is None: + params["start"] = start.isoformat() + "Z" + else: + params["start"] = start.isoformat() + else: + params["start"] = start + + if isinstance(end, datetime): + if end.tzinfo is None: + params["end"] = end.isoformat() + "Z" + else: + params["end"] = end.isoformat() + else: + params["end"] = end + + return await self.request("query_range", params) diff --git a/apps/metrics/backend/services/prometheus_metrics_service.py b/apps/metrics/backend/services/prometheus_metrics_service.py new file mode 100644 index 0000000..e16a86e --- /dev/null +++ b/apps/metrics/backend/services/prometheus_metrics_service.py @@ -0,0 +1,263 @@ +from typing import Dict, List, Any, Optional, Union +from datetime import datetime, timedelta +from fastapi import HTTPException + +from common.log.module_logger import ModuleLogger +from ..infra.external_service.prometheus_client import PrometheusClient + + +class PrometheusMetricsService: + """ + Service class for querying Prometheus metrics with predefined PromQL queries. + + This service provides a high-level interface for querying metrics data + using predefined PromQL queries mapped to metric names. 
+ """ + + # Global dictionary mapping metric names to their corresponding PromQL queries + METRIC_PROMQL_MAP: Dict[str, str] = { + "freeleaps": { + # Just demo, No Usage + "cpu_usage": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + # Just demo, No Usage + "memory_usage": "100 - ((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100)", + # Just demo, No Usage + "disk_usage": "100 - ((node_filesystem_avail_bytes{mountpoint=\"/\"} / node_filesystem_size_bytes{mountpoint=\"/\"}) * 100)", + }, + "magicleaps": { + + } + } + + def __init__(self, prometheus_endpoint: Optional[str] = None): + """ + Initialize PrometheusMetricsService. + + Args: + prometheus_endpoint: Prometheus server endpoint. If None, uses default from settings. + """ + self.module_logger = ModuleLogger(__file__) + self.prometheus_client = PrometheusClient(prometheus_endpoint) + + def get_available_metrics(self, product_id: Optional[str] = None) -> List[str]: + """ + Get list of available metric names that have predefined PromQL queries. + + Args: + product_id: Optional product ID to filter metrics. If None, returns all metrics from all products. + + Returns: + List of available metric names + """ + if product_id: + if product_id in self.METRIC_PROMQL_MAP: + return list(self.METRIC_PROMQL_MAP[product_id].keys()) + else: + return [] + else: + # Return all metrics from all products + all_metrics = [] + for product_metrics in self.METRIC_PROMQL_MAP.values(): + all_metrics.extend(product_metrics.keys()) + return all_metrics + + def get_available_products(self) -> List[str]: + """ + Get list of available product IDs. 
+ + Returns: + List of available product IDs + """ + return list(self.METRIC_PROMQL_MAP.keys()) + + async def query_metric_by_time_range( + self, + product_id: str, + metric_name: str, + start_time: Union[str, datetime], + end_time: Union[str, datetime], + step: str = "1m" + ) -> List[Dict[str, Any]]: + """ + Query metric data for a specific time range. + + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric to query + start_time: Start time for the query (RFC3339 string or datetime) + end_time: End time for the query (RFC3339 string or datetime) + step: Query resolution step width (e.g., "1m", "5m", "1h") + + Returns: + List of dictionaries with 'date' and 'value' keys + + Raises: + ValueError: If product_id or metric_name is not found in the PromQL mapping + Exception: If Prometheus query fails + + Example: + result = await service.query_metric_by_time_range( + "freeleaps", + "cpu_usage", + start_time=datetime.now() - timedelta(hours=1), + end_time=datetime.now(), + step="5m" + ) + # Returns: [{"date": "2024-01-01T10:00:00Z", "value": 45.2}, ...] + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_PROMQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in PromQL mapping. Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_PROMQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' PromQL mapping. 
Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Parse datetime strings if they are strings + if isinstance(start_time, str): + start_dt = datetime.fromisoformat(start_time.replace('Z', '+00:00')) + else: + start_dt = start_time + + if isinstance(end_time, str): + end_dt = datetime.fromisoformat(end_time.replace('Z', '+00:00')) + else: + end_dt = end_time + + # Validate time range + if start_dt >= end_dt: + raise HTTPException( + status_code=400, + detail="Start time must be before end time" + ) + + # Check time range is not too large (max 7 days for detailed queries) + time_diff = end_dt - start_dt + if time_diff > timedelta(days=7): + raise HTTPException( + status_code=400, + detail="Time range cannot exceed 7 days for detailed queries" + ) + + # Get the PromQL query for the metric + promql_query = self.METRIC_PROMQL_MAP[product_id][metric_name] + + try: + await self.module_logger.log_info( + f"Querying metric '{metric_name}' from product '{product_id}' with PromQL: {promql_query}") + + # Execute the range query + result = await self.prometheus_client.query_range( + query=promql_query, + start=start_dt, + end=end_dt, + step=step + ) + + # Parse the result and format it + formatted_data = self._format_query_result(result, metric_name) + + await self.module_logger.log_info( + f"Successfully queried metric '{metric_name}' with {len(formatted_data)} data points") + return formatted_data + + except Exception as e: + await self.module_logger.log_error(f"Failed to query metric '{metric_name}': {e}") + raise + + def _format_query_result(self, prometheus_result: Dict[str, Any], metric_name: str) -> List[Dict[str, Any]]: + """ + Format Prometheus query result into the required format. 
+ + Args: + prometheus_result: Raw result from Prometheus API + metric_name: Name of the metric being queried + + Returns: + List of dictionaries with 'date' and 'value' keys + """ + formatted_data = [] + + # Extract data from Prometheus result + data = prometheus_result.get("data", {}) + result_type = data.get("resultType", "") + + if result_type == "matrix": + # Handle range query results (matrix) + for series in data.get("result", []): + metric_labels = series.get("metric", {}) + values = series.get("values", []) + + for timestamp, value in values: + # Convert Unix timestamp to ISO format + date_str = datetime.fromtimestamp(timestamp).isoformat() + "Z" + + formatted_data.append({ + "date": date_str, + "value": float(value) if value != "NaN" else None, + "metric": metric_name, + "labels": metric_labels + }) + + elif result_type == "vector": + # Handle instant query results (vector) + for series in data.get("result", []): + metric_labels = series.get("metric", {}) + timestamp = series.get("value", [None, None])[0] + value = series.get("value", [None, None])[1] + + if timestamp and value: + date_str = datetime.fromtimestamp(timestamp).isoformat() + "Z" + + formatted_data.append({ + "date": date_str, + "value": float(value) if value != "NaN" else None, + "metric": metric_name, + "labels": metric_labels + }) + + # Sort by date + formatted_data.sort(key=lambda x: x["date"]) + + return formatted_data + + async def get_metric_info(self, product_id: str, metric_name: str) -> Dict[str, Any]: + """ + Get information about a specific metric including its PromQL query. 
+ + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric + + Returns: + Dictionary containing metric information + + Raises: + ValueError: If product_id or metric_name is not found in the PromQL mapping + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_PROMQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in PromQL mapping. Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_PROMQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' PromQL mapping. Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + return { + "product_id": product_id, + "metric_name": metric_name, + "promql_query": self.METRIC_PROMQL_MAP[product_id][metric_name], + "description": f"PromQL query for {metric_name} metric in product {product_id}" + } diff --git a/apps/metrics/common/config/app_settings.py b/apps/metrics/common/config/app_settings.py index 3575b8d..cdda983 100644 --- a/apps/metrics/common/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -5,8 +5,8 @@ from typing import Optional class AppSettings(BaseSettings): # Log settings LOG_BASE_PATH: str = "./logs" - BACKEND_LOG_FILE_NAME: str = "metrics" - APPLICATION_ACTIVITY_LOG: str = "metrics-activity" + BACKEND_LOG_FILE_NAME: str = "freeleaps-metrics" + APPLICATION_ACTIVITY_LOG: str = "freeleaps-metrics-activity" # StarRocks database settings STARROCKS_HOST: str = "freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc" diff --git 
a/apps/metrics/common/log/application_logger.py b/apps/metrics/common/log/application_logger.py index 896c044..67ec321 100644 --- a/apps/metrics/common/log/application_logger.py +++ b/apps/metrics/common/log/application_logger.py @@ -1,5 +1,5 @@ from .base_logger import LoggerBase -from app.common.config.app_settings import app_settings +from common.config.app_settings import app_settings class ApplicationLogger(LoggerBase): def __init__(self, application_activities: dict[str, any] = {}) -> None: diff --git a/apps/metrics/local.env b/apps/metrics/local.env index 616515e..4b601db 100644 --- a/apps/metrics/local.env +++ b/apps/metrics/local.env @@ -14,4 +14,6 @@ STARROCKS_DATABASE=freeleaps # log settings LOG_BASE_PATH=./logs BACKEND_LOG_FILE_NAME=metrics -APPLICATION_ACTIVITY_LOG=metrics-activity \ No newline at end of file +APPLICATION_ACTIVITY_LOG=metrics-activity + +PROMETHEUS_ENDPOINT=http://localhost:9090 \ No newline at end of file diff --git a/apps/metrics/webapi/bootstrap/app_factory.py b/apps/metrics/webapi/bootstrap/app_factory.py deleted file mode 100644 index feee790..0000000 --- a/apps/metrics/webapi/bootstrap/app_factory.py +++ /dev/null @@ -1,69 +0,0 @@ -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from prometheus_fastapi_instrumentator import Instrumentator -from common.config.app_settings import site_settings -from loguru import logger -import os - - -def create_app() -> FastAPI: - """ - Create and configure the FastAPI application - """ - app = FastAPI( - title="Metrics Service API", - description="Metrics Service for Freeleaps Platform", - version="1.0.0", - docs_url="/docs", - redoc_url="/redoc" - ) - - # Add CORS middleware - app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], - ) - - # Setup logging - setup_logging() - - # Setup Prometheus metrics - Instrumentator().instrument(app).expose(app) - - # Include routers - # from 
webapi.routes import health, api - app.include_router(health.router, prefix="/health", tags=["health"]) - app.include_router(api.router, prefix="/api/metrics", tags=["metrics"]) - # Note: Registration router is included in main.py - - return app - - -def setup_logging(): - """ - Setup logging configuration - """ - # Create log directory if it doesn't exist - log_dir = site_settings.LOG_BASE_PATH - os.makedirs(log_dir, exist_ok=True) - - # Configure loguru - logger.add( - f"{log_dir}/{site_settings.BACKEND_LOG_FILE_NAME}.log", - rotation="1 day", - retention="30 days", - level="INFO", - format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}" - ) - - logger.add( - f"{log_dir}/{site_settings.APPLICATION_ACTIVITY_LOG}.log", - rotation="1 day", - retention="30 days", - level="INFO", - format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}", - filter=lambda record: record["level"].name == "INFO" - ) diff --git a/apps/metrics/webapi/providers/metrics.py b/apps/metrics/webapi/providers/metrics.py index 593369d..08811ba 100644 --- a/apps/metrics/webapi/providers/metrics.py +++ b/apps/metrics/webapi/providers/metrics.py @@ -6,7 +6,7 @@ def register(app): instrumentator = ( Instrumentator().instrument( app, - metric_namespace="freeleaps-auth", + metric_namespace="freeleaps-metrics", metric_subsystem=app_settings.APP_NAME) ) diff --git a/apps/metrics/webapi/routes/metrics/__init__.py b/apps/metrics/webapi/routes/metrics/__init__.py index e7012cd..5e05f09 100644 --- a/apps/metrics/webapi/routes/metrics/__init__.py +++ b/apps/metrics/webapi/routes/metrics/__init__.py @@ -1,5 +1,7 @@ from fastapi import APIRouter -from webapi.routes.metrics.registration_metrics import router +from webapi.routes.metrics.registration_metrics import router as registration_router +from webapi.routes.prometheus_metrics import api_router as prometheus_metrics_router -api_router = APIRouter()
-api_router.include_router(router,prefix="/metrics", tags=["metrics"]) +router = APIRouter() +router.include_router(registration_router, prefix="/metrics", tags=["registration-metrics"]) +router.include_router(prometheus_metrics_router, prefix="/metrics", tags=["prometheus-metrics"]) diff --git a/apps/metrics/webapi/routes/prometheus_metrics/__init__.py b/apps/metrics/webapi/routes/prometheus_metrics/__init__.py new file mode 100644 index 0000000..1c7aa93 --- /dev/null +++ b/apps/metrics/webapi/routes/prometheus_metrics/__init__.py @@ -0,0 +1,9 @@ +from fastapi import APIRouter +from .available_metrics import router as available_metrics_router +from .metrics_query import router as metrics_query_router +from .metric_info import router as metric_info_router + +api_router = APIRouter() +api_router.include_router(available_metrics_router, tags=["prometheus-metrics"]) +api_router.include_router(metrics_query_router, tags=["prometheus-metrics"]) +api_router.include_router(metric_info_router, tags=["prometheus-metrics"]) diff --git a/apps/metrics/webapi/routes/prometheus_metrics/available_metrics.py b/apps/metrics/webapi/routes/prometheus_metrics/available_metrics.py new file mode 100644 index 0000000..2cb66e6 --- /dev/null +++ b/apps/metrics/webapi/routes/prometheus_metrics/available_metrics.py @@ -0,0 +1,31 @@ +from fastapi import APIRouter + +from common.log.module_logger import ModuleLogger +from backend.services.prometheus_metrics_service import PrometheusMetricsService + +router = APIRouter() + +# Initialize service and logger +prometheus_service = PrometheusMetricsService() +module_logger = ModuleLogger(__file__) + + +@router.get("/prometheus/product/{product_id}/available-metrics") +async def get_available_metrics(product_id: str): + """ + Get list of available metrics for a specific product. + + Args: + product_id: Product ID to get metrics for (required). + + Returns a list of metric names that have predefined PromQL queries for the specified product. 
+ """ + await module_logger.log_info(f"Getting available metrics list for product_id: {product_id}") + metrics = prometheus_service.get_available_metrics(product_id) + + return { + "product_id": product_id, + "available_metrics": metrics, + "total_count": len(metrics), + "description": f"List of metrics with predefined PromQL queries for product '{product_id}'" + } diff --git a/apps/metrics/webapi/routes/prometheus_metrics/metric_info.py b/apps/metrics/webapi/routes/prometheus_metrics/metric_info.py new file mode 100644 index 0000000..4dbef60 --- /dev/null +++ b/apps/metrics/webapi/routes/prometheus_metrics/metric_info.py @@ -0,0 +1,32 @@ +from fastapi import APIRouter, HTTPException + +from common.log.module_logger import ModuleLogger +from backend.services.prometheus_metrics_service import PrometheusMetricsService + +router = APIRouter() + +# Initialize service and logger +prometheus_service = PrometheusMetricsService() +module_logger = ModuleLogger(__file__) + + +@router.get("/prometheus/product/{product_id}/metric/{metric_name}/info") +async def get_metric_info( + product_id: str, + metric_name: str +): + """ + Get information about a specific metric including its PromQL query. 
+ + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric to get information for + """ + await module_logger.log_info(f"Getting info for metric '{metric_name}' from product '{product_id}'") + + metric_info = await prometheus_service.get_metric_info(product_id, metric_name) + + return { + "metric_info": metric_info, + "description": f"Information about metric '{metric_name}' in product '{product_id}'" + } diff --git a/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py b/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py new file mode 100644 index 0000000..69ad4c8 --- /dev/null +++ b/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py @@ -0,0 +1,83 @@ +from fastapi import APIRouter +from typing import Optional, List, Dict, Any +from pydantic import BaseModel, Field + +from common.log.module_logger import ModuleLogger +from backend.services.prometheus_metrics_service import PrometheusMetricsService + + +class MetricDataPoint(BaseModel): + """Single data point in a time series.""" + date: str = Field(..., description="Timestamp in ISO format") + value: Optional[float] = Field(None, description="Metric value") + labels: Optional[Dict[str, str]] = Field(None, description="Metric labels") + + +class MetricTimeSeriesResponse(BaseModel): + """Response model for metric time series data.""" + metric_name: str = Field(..., description="Name of the queried metric") + data_points: List[MetricDataPoint] = Field(..., description="List of data points") + total_points: int = Field(..., description="Total number of data points") + time_range: Dict[str, str] = Field(..., description="Start and end time of the query") + step: str = Field("1h", description="Query resolution step") + + +class MetricQueryRequest(BaseModel): + """Request model for metric query.""" + product_id: str = Field(..., description="Product ID to identify which product's metrics to query") + metric_name: str = Field(..., 
description="Name of the metric to query") + start_time: str = Field(..., description="Start time in ISO format or RFC3339") + end_time: str = Field(..., description="End time in ISO format or RFC3339") + step: str = Field("1h", description="Query resolution step (e.g., 1m, 5m, 1h)") + + +router = APIRouter() + +# Initialize service and logger +prometheus_service = PrometheusMetricsService() +module_logger = ModuleLogger(__file__) + + +@router.post("/prometheus/metrics_query", response_model=MetricTimeSeriesResponse) +async def metrics_query( + request: MetricQueryRequest +): + """ + Query metrics time series data. + + Returns XY curve data (time series) for the specified metric within the given time range. + """ + await module_logger.log_info( + f"Querying metric '{request.metric_name}' from product '{request.product_id}' from {request.start_time} to {request.end_time}") + + # Query the metric data + data_points = await prometheus_service.query_metric_by_time_range( + product_id=request.product_id, + metric_name=request.metric_name, + start_time=request.start_time, + end_time=request.end_time, + step=request.step + ) + + # Format response + response = MetricTimeSeriesResponse( + metric_name=request.metric_name, + data_points=[ + MetricDataPoint( + date=point["date"], + value=point["value"], + labels=point["labels"] + ) + for point in data_points + ], + total_points=len(data_points), + time_range={ + "start": request.start_time, + "end": request.end_time + }, + step=request.step + ) + + await module_logger.log_info( + f"Successfully queried metric '{request.metric_name}' with {len(data_points)} data points") + return response From c963350fc94e75c71148d4c10754d6db90617856 Mon Sep 17 00:00:00 2001 From: icecheng Date: Tue, 16 Sep 2025 17:20:37 +0800 Subject: [PATCH 07/15] feat: add promql for latency and reliability --- apps/metrics/backend/services/prometheus_metrics_service.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git 
a/apps/metrics/backend/services/prometheus_metrics_service.py b/apps/metrics/backend/services/prometheus_metrics_service.py index e16a86e..7b6118f 100644 --- a/apps/metrics/backend/services/prometheus_metrics_service.py +++ b/apps/metrics/backend/services/prometheus_metrics_service.py @@ -23,6 +23,11 @@ class PrometheusMetricsService: "memory_usage": "100 - ((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100)", # Just demo, No Usage "disk_usage": "100 - ((node_filesystem_avail_bytes{mountpoint=\"/\"} / node_filesystem_size_bytes{mountpoint=\"/\"}) * 100)", + # Average response time for notification HTTP requests + "latency_ms": "1000*avg(freeleaps_notification_http_request_duration_seconds_sum{handler!=\"none\"} / freeleaps_notification_http_request_duration_seconds_count)", + # Error rate for 5xx HTTP status codes (stability metric) + "reliability": "1-sum(rate(freeleaps_notification_http_requests_total{status=\"5xx\"}[1m]))", + }, "magicleaps": { From 1baf9065dca4baf2efb39f423d5002a086c99881 Mon Sep 17 00:00:00 2001 From: icecheng Date: Tue, 16 Sep 2025 17:38:57 +0800 Subject: [PATCH 08/15] feat: add design.md --- apps/metrics/docs/design.md | 146 ++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 apps/metrics/docs/design.md diff --git a/apps/metrics/docs/design.md b/apps/metrics/docs/design.md new file mode 100644 index 0000000..796b5ae --- /dev/null +++ b/apps/metrics/docs/design.md @@ -0,0 +1,146 @@ +# 1.Override + +We support two ways to query metrics: +- Connect to StarRocks data warehouse and query metrics from it +- Query Prometheus directly and retrieve metrics from it + +# 2.Starrocks Metric + +We can implement StarRocks Metric queries similar to Prometheus Metric queries. The only difference is replacing PromQL with SQL and querying through StarRocks API. + +# 3.Prometheus Metric + +## 3.1.Metrics Config +Currently, metrics are configured in code. 
In the future, they will be configured through database or other methods. +Organization structure: Product ID -> Metric Name -> Metric Query Method (PromQL) +```json +{ + "freeleaps": { + // Just for demo + "cpu_usage": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + // Just for demo + "memory_usage": "100 - ((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100)", + // Just for demo + "disk_usage": "100 - ((node_filesystem_avail_bytes{mountpoint=\"/\"} / node_filesystem_size_bytes{mountpoint=\"/\"}) * 100)", + "latency_ms": "1000*avg(freeleaps_notification_http_request_duration_seconds_sum{handler!=\"none\"} / freeleaps_notification_http_request_duration_seconds_count)", + "reliability": "1-sum(rate(freeleaps_notification_http_requests_total{status=\"5xx\"}[1m]))" + }, + "magicleaps": {} +} +``` +If we want to add new metrics, theoretically we only need to add one configuration entry (provided that the metric exists in Prometheus and can be queried directly through PromQL without requiring any additional code processing) + +## 3.2.API Design + +### 3.2.1.Query Metrics by Product ID + +API: `/api/metrics/prometheus/product/{product_id}/available-metrics` + +Method: GET +Request: +``` +product_id=freeleaps +``` +Response: + +```json +{ + "product_id": "freeleaps", + "available_metrics": [ + "cpu_usage", + "memory_usage", + "disk_usage", + "latency_ms", + "reliability" + ], + "total_count": 5, + "description": "List of metrics with predefined PromQL queries for product 'freeleaps'" +} +``` + +### 3.2.2.Query Metric Info +API: `/api/metrics/prometheus/product/{product_id}/metric/{metric_name}/info` + +Method: GET +Request: +``` +product_id=freeleaps +metric_name=cpu_usage +``` +Response: + +```json +{ + "metric_info": { + "product_id": "freeleaps", + "metric_name": "cpu_usage", + "promql_query": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + "description": "PromQL query for 
cpu_usage metric in product freeleaps" + }, + "description": "Information about metric 'cpu_usage' in product 'freeleaps'" +} +``` + +### 3.2.3.Query Metric Data +API: `/api/metrics/prometheus/metrics_query` + +Method: GET +Request: +``` +{ + "product_id":"freeleaps", + "metric_name": "latency_ms", + "start_time": "2025-09-12T00:00:00Z", + "end_time": "2025-09-16T01:00:00Z", + "step":"1h" # Interval between data points in the query result +} +``` +Response: + +```json +{ + "metric_name": "latency_ms", + "data_points": [ + { + "date": "2025-09-12T08:00:00Z", + "value": 41.37141507698155, + "labels": {} # Optional: Additional labels for prometheus, Just for debugging + }, + { + "date": "2025-09-12T09:00:00Z", + "value": 41.371992733188385, + "labels": {} + }, + { + "date": "2025-09-12T10:00:00Z", + "value": 41.37792878125675, + "labels": {} + }, + { + "date": "2025-09-12T11:00:00Z", + "value": 41.37297490632533, + "labels": {} + }, + ... + { + "date": "2025-09-16T08:00:00Z", + "value": 40.72491916149973, + "labels": {} + }, + { + "date": "2025-09-16T09:00:00Z", + "value": 40.72186597550194, + "labels": {} + } + ], + "total_points": 98, + "time_range": { + "start": "2025-09-12T00:00:00Z", + "end": "2025-09-16T01:00:00Z" + }, + "step": "1h" +} +``` + +# 4.Universal Metrics +In the future, we can create an abstraction layer above StarRocks Metrics and Prometheus Metrics to unify metric queries from both data sources! 
\ No newline at end of file From 38ff0ae32af991568db4a49827e0a0dec9bdc294 Mon Sep 17 00:00:00 2001 From: icecheng Date: Thu, 18 Sep 2025 15:51:16 +0800 Subject: [PATCH 09/15] feat: update Dockerfile --- apps/metrics/Dockerfile | 2 +- apps/metrics/common/config/app_settings.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile index b090349..17e82e7 100644 --- a/apps/metrics/Dockerfile +++ b/apps/metrics/Dockerfile @@ -1,5 +1,5 @@ # download image here: https://docker.aityp.com/image/docker.io/python:3.12-slim -FROM swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/python:3.12-slim +FROM python:3.12-slim # Set working directory WORKDIR /app diff --git a/apps/metrics/common/config/app_settings.py b/apps/metrics/common/config/app_settings.py index cdda983..4ea8a84 100644 --- a/apps/metrics/common/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -23,7 +23,7 @@ class AppSettings(BaseSettings): class Config: - env_file = "local.env" + env_file = ".env" app_settings = AppSettings() From 3a05ec5001b94d0faa9357cdbe99cb60bd875432 Mon Sep 17 00:00:00 2001 From: weicao Date: Thu, 18 Sep 2025 17:19:27 +0800 Subject: [PATCH 10/15] metrics: restructure starrocks routes, move database client, align APIs, Docker updates --- apps/metrics/Dockerfile | 19 ++ apps/metrics/README.md | 161 +++++------ .../infra/external_service}/__init__.py | 0 .../external_service/prometheus_client.py | 119 ++++++++ .../starrocks_client.py} | 21 +- .../services/prometheus_metrics_service.py | 268 ++++++++++++++++++ .../registration_analytics_service.py | 4 +- apps/metrics/common/config/__init__.py | 0 .../{webapi => common}/config/app_settings.py | 17 +- apps/metrics/common/config/log_settings.py | 17 ++ apps/metrics/common/config/site_settings.py | 26 ++ apps/metrics/common/log/__init__.py | 0 apps/metrics/common/log/application_logger.py | 12 + apps/metrics/common/log/base_logger.py | 136 +++++++++ 
apps/metrics/common/log/json_sink.py | 84 ++++++ apps/metrics/common/log/module_logger.py | 46 +++ apps/metrics/common/probes/__init__.py | 140 +++++++++ apps/metrics/common/probes/adapters.py | 15 + apps/metrics/docs/design.md | 146 ++++++++++ apps/metrics/local.env | 4 +- apps/metrics/requirements.txt | 1 + apps/metrics/webapi/bootstrap/app_factory.py | 69 ----- apps/metrics/webapi/bootstrap/application.py | 77 +++++ apps/metrics/webapi/main.py | 19 +- apps/metrics/webapi/providers/__init__.py | 0 apps/metrics/webapi/providers/common.py | 31 ++ .../webapi/providers/exception_handler.py | 39 +++ apps/metrics/webapi/providers/logger.py | 7 + apps/metrics/webapi/providers/metrics.py | 16 ++ apps/metrics/webapi/providers/probes.py | 24 ++ apps/metrics/webapi/providers/router.py | 34 +++ apps/metrics/webapi/routes/__init__.py | 5 + .../metrics/webapi/routes/metrics/__init__.py | 7 + .../routes/prometheus_metrics/__init__.py | 9 + .../prometheus_metrics/available_metrics.py | 31 ++ .../routes/prometheus_metrics/metric_info.py | 32 +++ .../prometheus_metrics/metrics_query.py | 83 ++++++ .../webapi/routes/registration_metrics.py | 229 --------------- .../routes/starrocks_metrics/__init__.py | 9 + .../starrocks_metrics/available_metrics.py | 45 +++ .../routes/starrocks_metrics/metric_info.py | 53 ++++ .../routes/starrocks_metrics/metrics_query.py | 95 +++++++ 42 files changed, 1720 insertions(+), 430 deletions(-) rename apps/metrics/{webapi/config => backend/infra/external_service}/__init__.py (100%) create mode 100644 apps/metrics/backend/infra/external_service/prometheus_client.py rename apps/metrics/backend/infra/{database_client.py => external_service/starrocks_client.py} (89%) create mode 100644 apps/metrics/backend/services/prometheus_metrics_service.py create mode 100644 apps/metrics/common/config/__init__.py rename apps/metrics/{webapi => common}/config/app_settings.py (62%) create mode 100644 apps/metrics/common/config/log_settings.py create mode 100644 
apps/metrics/common/config/site_settings.py create mode 100644 apps/metrics/common/log/__init__.py create mode 100644 apps/metrics/common/log/application_logger.py create mode 100644 apps/metrics/common/log/base_logger.py create mode 100644 apps/metrics/common/log/json_sink.py create mode 100644 apps/metrics/common/log/module_logger.py create mode 100644 apps/metrics/common/probes/__init__.py create mode 100644 apps/metrics/common/probes/adapters.py create mode 100644 apps/metrics/docs/design.md delete mode 100644 apps/metrics/webapi/bootstrap/app_factory.py create mode 100644 apps/metrics/webapi/bootstrap/application.py create mode 100644 apps/metrics/webapi/providers/__init__.py create mode 100644 apps/metrics/webapi/providers/common.py create mode 100644 apps/metrics/webapi/providers/exception_handler.py create mode 100644 apps/metrics/webapi/providers/logger.py create mode 100644 apps/metrics/webapi/providers/metrics.py create mode 100644 apps/metrics/webapi/providers/probes.py create mode 100644 apps/metrics/webapi/providers/router.py create mode 100644 apps/metrics/webapi/routes/metrics/__init__.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/__init__.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/available_metrics.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/metric_info.py create mode 100644 apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py delete mode 100644 apps/metrics/webapi/routes/registration_metrics.py create mode 100644 apps/metrics/webapi/routes/starrocks_metrics/__init__.py create mode 100644 apps/metrics/webapi/routes/starrocks_metrics/available_metrics.py create mode 100644 apps/metrics/webapi/routes/starrocks_metrics/metric_info.py create mode 100644 apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py diff --git a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile index b090349..ee610fb 100644 --- a/apps/metrics/Dockerfile +++ b/apps/metrics/Dockerfile @@ -10,9 
+10,28 @@ COPY requirements.txt . # Install dependencies RUN pip install --no-cache-dir -r requirements.txt +# Copy environment file +COPY local.env . + # Copy application code COPY . . +ENV MONGODB_NAME = "freeleaps2" +ENV MONGODB_URI = "mongodb://freeleaps2-mongodb:27017" + +#app_settings +ENV GITEA_TOKEN = "" +ENV GITEA_URL = "" +ENV GITEA_DEPOT_ORGANIZATION = "" +ENV CODE_DEPOT_HTTP_PORT = "" +ENV CODE_DEPOT_SSH_PORT = "" +ENV CODE_DEPOT_DOMAIN_NAME = "" + +#log_settings +ENV LOG_BASE_PATH = "./logs" +ENV BACKEND_LOG_FILE_NAME = "freeleaps-metrics" +ENV APPLICATION_ACTIVITY_LOG = "freeleaps-metrics-activity" + # Expose port EXPOSE 8009 diff --git a/apps/metrics/README.md b/apps/metrics/README.md index 54d367d..d56822e 100644 --- a/apps/metrics/README.md +++ b/apps/metrics/README.md @@ -1,35 +1,28 @@ # ๐Ÿ“Š Metrics Service -> A lightweight FastAPI microservice for user registration analytics and statistics +> A lightweight FastAPI microservice for user registration analytics and metrics [![Python](https://img.shields.io/badge/Python-3.12+-blue.svg)](https://python.org) [![FastAPI](https://img.shields.io/badge/FastAPI-0.114+-green.svg)](https://fastapi.tiangolo.com) [![Docker](https://img.shields.io/badge/Docker-Ready-blue.svg)](https://docker.com) -The Metrics service provides real-time APIs for querying user registration data from StarRocks database, offering flexible analytics and insights into user growth patterns. +The Metrics service provides real-time APIs for querying user registration data (via StarRocks) and querying monitoring metrics (via Prometheus). 
## โœจ Features -### ๐Ÿ“Š User Registration Statistics APIs -- **Date Range Query** - Query registration data for specific date ranges -- **Recent N Days Query** - Get registration data for the last N days -- **Start Date + Days Query** - Query N days starting from a specified date -- **Statistics Summary** - Get comprehensive statistics and analytics -- **POST Method Support** - JSON request body support for complex queries +### ๐Ÿ“Š Registration Analytics (StarRocks) +- Date Range Query (start_date ~ end_date) +- Typed responses with Pydantic models -### ๐Ÿ—„๏ธ Database Integration -- **StarRocks Database Connection** - - Host: `freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc` - - Port: `9030` - - Database: `freeleaps` - - Table: `dws_daily_registered_users` +### ๐Ÿ“ˆ Prometheus Metrics +- Predefined PromQL metrics per product +- Time-range queries and metric info discovery ### ๐Ÿ”ง Technical Features -- **Data Models**: Pydantic validation for data integrity -- **Connection Management**: Automatic database connection and disconnection -- **Error Handling**: Comprehensive exception handling with user-friendly error messages -- **Logging**: Structured logging using Loguru -- **API Documentation**: Auto-generated Swagger/OpenAPI documentation +- Data Models: Pydantic v2 +- Async HTTP and robust error handling +- Structured logging +- Auto-generated Swagger/OpenAPI docs ## ๐Ÿ“ Project Structure @@ -37,31 +30,43 @@ The Metrics service provides real-time APIs for querying user registration data metrics/ โ”œโ”€โ”€ backend/ # Business logic layer โ”‚ โ”œโ”€โ”€ infra/ # Infrastructure components -โ”‚ โ”‚ โ””โ”€โ”€ database_client.py +โ”‚ โ”‚ โ””โ”€โ”€ external_service/ +โ”‚ โ”‚ โ”œโ”€โ”€ prometheus_client.py +โ”‚ โ”‚ โ””โ”€โ”€ starrocks_client.py โ”‚ โ”œโ”€โ”€ models/ # Data models โ”‚ โ”‚ โ””โ”€โ”€ user_registration_models.py โ”‚ โ””โ”€โ”€ services/ # Business services +โ”‚ โ”œโ”€โ”€ prometheus_metrics_service.py โ”‚ โ””โ”€โ”€ 
registration_analytics_service.py โ”œโ”€โ”€ webapi/ # API layer โ”‚ โ”œโ”€โ”€ routes/ # API endpoints -โ”‚ โ”‚ โ””โ”€โ”€ registration_metrics.py -โ”‚ โ”œโ”€โ”€ config/ # Configuration -โ”‚ โ”‚ โ””โ”€โ”€ app_settings.py -โ”‚ โ”œโ”€โ”€ bootstrap/ # App initialization -โ”‚ โ”‚ โ””โ”€โ”€ app_factory.py -โ”‚ โ””โ”€โ”€ main.py # FastAPI app entry point -โ”œโ”€โ”€ common/ # Shared utilities -โ”œโ”€โ”€ requirements.txt # Dependencies -โ”œโ”€โ”€ Dockerfile # Container config -โ”œโ”€โ”€ local.env # Environment variables -โ””โ”€โ”€ README.md # Documentation +โ”‚ โ”‚ โ”œโ”€โ”€ metrics/ # Aggregated routers (prefix: /api/metrics) +โ”‚ โ”‚ โ”œโ”€โ”€ prometheus_metrics/ +โ”‚ โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py +โ”‚ โ”‚ โ”‚ โ”œโ”€โ”€ available_metrics.py +โ”‚ โ”‚ โ”‚ โ”œโ”€โ”€ metric_info.py +โ”‚ โ”‚ โ”‚ โ””โ”€โ”€ metrics_query.py +โ”‚ โ”‚ โ””โ”€โ”€ starrocks_metrics/ +โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py +โ”‚ โ”‚ โ”œโ”€โ”€ available_metrics.py +โ”‚ โ”‚ โ”œโ”€โ”€ metric_info.py +โ”‚ โ”‚ โ””โ”€โ”€ metrics_query.py +โ”‚ โ”œโ”€โ”€ bootstrap/ +โ”‚ โ”‚ โ””โ”€โ”€ application.py +โ”‚ โ””โ”€โ”€ main.py +โ”œโ”€โ”€ common/ +โ”œโ”€โ”€ requirements.txt +โ”œโ”€โ”€ Dockerfile +โ”œโ”€โ”€ local.env +โ””โ”€โ”€ README.md ``` ## ๐Ÿš€ Quick Start ### Prerequisites - Python 3.12+ or Docker -- Access to StarRocks database +- Access to StarRocks database (for registration analytics) +- Access to Prometheus (for monitoring metrics) ### ๐Ÿ Python Setup @@ -79,8 +84,12 @@ python3 -m uvicorn webapi.main:app --host 0.0.0.0 --port 8009 --reload # 1. Build image docker build -t metrics:latest . -# 2. Run container +# 2. Run container (env from baked local.env) docker run --rm -p 8009:8009 metrics:latest + +# Alternatively: run with a custom env file +# (this overrides envs copied into the image) +docker run --rm -p 8009:8009 --env-file local.env metrics:latest ``` ### ๐Ÿ“– Access Documentation @@ -88,69 +97,36 @@ Visit `http://localhost:8009/docs` for interactive API documentation. 
## ๐Ÿ“Š API Endpoints -| Endpoint | Method | Description | -|----------|--------|-------------| -| `/api/metrics/daily-registered-users` | GET/POST | Query registration data by date range | -| `/api/metrics/recent-registered-users` | GET | Get recent N days data | -| `/api/metrics/registered-users-by-days` | GET | Query N days from start date | -| `/api/metrics/registration-summary` | GET | Get statistical summary | +All endpoints are exposed under the API base prefix `/api/metrics`. -### Example Requests +### StarRocks (Registration Analytics) +- POST `/api/metrics/starrocks/dru_query` โ€” Query daily registered users time series for a date range +- GET `/api/metrics/starrocks/product/{product_id}/available-metrics` โ€” List available StarRocks-backed metrics for the product +- GET `/api/metrics/starrocks/product/{product_id}/metric/{metric_name}/info` โ€” Get metric info for the product +Example request: ```bash -# Get last 7 days -curl "http://localhost:8009/api/metrics/recent-registered-users?days=7" - -# Get date range -curl "http://localhost:8009/api/metrics/daily-registered-users?start_date=2024-09-10&end_date=2024-09-20" - -# Get summary statistics -curl "http://localhost:8009/api/metrics/registration-summary?start_date=2024-09-10&end_date=2024-09-20" +curl -X POST "http://localhost:8009/api/metrics/starrocks/dru_query" \ + -H "Content-Type: application/json" \ + -d '{ + "product_id": "freeleaps", + "start_date": "2024-09-10", + "end_date": "2024-09-20" + }' ``` -### Parameters -- `start_date` / `end_date`: Date in `YYYY-MM-DD` format -- `days`: Number of days (max: 365) -- `product_id`: Product identifier (default: "freeleaps") - -## ๐Ÿ“ˆ Response Format - -### Standard Response -```json -{ - "dates": ["2024-09-10", "2024-09-11", "2024-09-12"], - "counts": [39, 38, 31], - "total_registrations": 108, - "query_period": "2024-09-10 to 2024-09-12" -} -``` - -### Summary Response -```json -{ - "total_registrations": 282, - "average_daily": 25.64, - "max_daily": 
39, - "min_daily": 8, - "days_with_registrations": 10, - "total_days": 11 -} -``` +### Prometheus +- POST `/api/metrics/prometheus/metrics_query` โ€” Query metric time series by product/metric +- GET `/api/metrics/prometheus/product/{product_id}/available-metrics` โ€” List available metrics for product +- GET `/api/metrics/prometheus/product/{product_id}/metric/{metric_name}/info` โ€” Get metric info ## ๐Ÿงช Testing -### Quick Test ```bash # Health check curl http://localhost:8009/ - -# Test recent registrations -curl "http://localhost:8009/api/metrics/recent-registered-users?days=7" ``` -### Interactive Testing -Visit `http://localhost:8009/docs` for the Swagger UI interface where you can test all endpoints directly. - ## โš™๏ธ Configuration ### Environment Variables @@ -170,9 +146,12 @@ STARROCKS_DATABASE=freeleaps LOG_BASE_PATH=./logs BACKEND_LOG_FILE_NAME=metrics APPLICATION_ACTIVITY_LOG=metrics-activity + +# Prometheus +PROMETHEUS_ENDPOINT=http://localhost:9090 ``` -> ๐Ÿ’ก **Tip**: Copy `local.env` to `.env` and modify as needed for your environment. +> Tip: Copy `local.env` to `.env` and modify as needed for your environment. 
### ๐Ÿณ Docker Deployment @@ -206,20 +185,16 @@ python -m uvicorn webapi.main:app --reload ``` ## ๐Ÿ“ API Documentation - -- **Swagger UI**: `http://localhost:8009/docs` -- **ReDoc**: `http://localhost:8009/redoc` -- **OpenAPI JSON**: `http://localhost:8009/openapi.json` +- Swagger UI: `http://localhost:8009/docs` +- ReDoc: `http://localhost:8009/redoc` +- OpenAPI JSON: `http://localhost:8009/openapi.json` ## โš ๏ธ Important Notes - -- Date format: `YYYY-MM-DD` -- Max query range: 365 days +- Date format: `YYYY-MM-DD` (single-digit month/day also accepted by API) - Default `product_id`: "freeleaps" -- Requires StarRocks database access +- Requires StarRocks database access and/or Prometheus endpoint ## ๐Ÿ› Troubleshooting - | Issue | Solution | |-------|----------| | Port in use | `docker stop $(docker ps -q --filter ancestor=metrics:latest)` | diff --git a/apps/metrics/webapi/config/__init__.py b/apps/metrics/backend/infra/external_service/__init__.py similarity index 100% rename from apps/metrics/webapi/config/__init__.py rename to apps/metrics/backend/infra/external_service/__init__.py diff --git a/apps/metrics/backend/infra/external_service/prometheus_client.py b/apps/metrics/backend/infra/external_service/prometheus_client.py new file mode 100644 index 0000000..db13c3a --- /dev/null +++ b/apps/metrics/backend/infra/external_service/prometheus_client.py @@ -0,0 +1,119 @@ +import httpx +from typing import Dict, Any, Optional, Union +from datetime import datetime +import json +from fastapi import HTTPException + +from common.config.app_settings import app_settings +from common.log.module_logger import ModuleLogger + + +class PrometheusClient: + """ + Async Prometheus client for querying metrics data using PromQL. 
+ + This client provides methods to: + - Query data using PromQL expressions + - Get all available metrics + - Get labels for specific metrics + - Query metric series with label filters + """ + + def __init__(self, endpoint: Optional[str] = None): + """ + Initialize Prometheus client. + + Args: + endpoint: Prometheus server endpoint. If None, uses PROMETHEUS_ENDPOINT from settings. + """ + self.module_logger = ModuleLogger(__file__) + self.endpoint = endpoint or app_settings.PROMETHEUS_ENDPOINT + self.base_url = f"{self.endpoint.rstrip('/')}/api/v1" + + async def request(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """ + Make HTTP request to Prometheus API. + + Args: + endpoint: API endpoint path + params: Query parameters + + Returns: + JSON response data + + Raises: + httpx.HTTPError: If request fails + ValueError: If response is not valid JSON + """ + url = f"{self.base_url}/{endpoint.lstrip('/')}" + + try: + await self.module_logger.log_info(f"Making request to Prometheus: {url} with params: {params}") + async with httpx.AsyncClient(timeout=30.0) as client: + response = await client.get(url, params=params) + response.raise_for_status() + data = response.json() + if data.get("status") != "success": + error_msg = data.get('error', 'Unknown error') + await self.module_logger.log_error(f"Prometheus API error: {error_msg}") + raise HTTPException(status_code=400, detail=f"Prometheus API error: {error_msg}") + + return data + + except httpx.HTTPError as e: + await self.module_logger.log_error(f"HTTP error querying Prometheus: {e}") + raise HTTPException(status_code=502, detail=f"Failed to connect to Prometheus: {str(e)}") + except json.JSONDecodeError as e: + await self.module_logger.log_error(f"Invalid JSON response from Prometheus: {e}") + raise HTTPException(status_code=400, detail=f"Invalid response from Prometheus: {str(e)}") + + async def query_range( + self, + query: str, + start: Union[str, datetime], + end: Union[str, 
datetime], + step: str = "15s" + ) -> Dict[str, Any]: + """ + Execute a PromQL range query. + + Args: + query: PromQL query string + start: Start time (RFC3339 string or datetime) + end: End time (RFC3339 string or datetime) + step: Query resolution step width (e.g., "15s", "1m", "1h") + + Returns: + Range query result data + + Example: + result = await client.query_range( + "up{job='prometheus'}", + start=datetime.now() - timedelta(hours=1), + end=datetime.now(), + step="1m" + ) + """ + params = { + "query": query, + "step": step + } + + # Convert datetime to RFC3339 string if needed + if isinstance(start, datetime): + if start.tzinfo is None: + params["start"] = start.isoformat() + "Z" + else: + params["start"] = start.isoformat() + else: + params["start"] = start + + if isinstance(end, datetime): + if end.tzinfo is None: + params["end"] = end.isoformat() + "Z" + else: + params["end"] = end.isoformat() + else: + params["end"] = end + + return await self.request("query_range", params) diff --git a/apps/metrics/backend/infra/database_client.py b/apps/metrics/backend/infra/external_service/starrocks_client.py similarity index 89% rename from apps/metrics/backend/infra/database_client.py rename to apps/metrics/backend/infra/external_service/starrocks_client.py index b55119d..7b639af 100644 --- a/apps/metrics/backend/infra/database_client.py +++ b/apps/metrics/backend/infra/external_service/starrocks_client.py @@ -2,18 +2,18 @@ import pymysql from typing import List, Dict, Any, Optional from datetime import date from loguru import logger -from webapi.config.app_settings import site_settings +from common.config.app_settings import app_settings class StarRocksClient: """StarRocks database client for querying user registration data""" def __init__(self): - self.host = site_settings.STARROCKS_HOST - self.port = site_settings.STARROCKS_PORT - self.user = site_settings.STARROCKS_USER - self.password = site_settings.STARROCKS_PASSWORD - self.database = 
site_settings.STARROCKS_DATABASE + self.host = app_settings.STARROCKS_HOST + self.port = app_settings.STARROCKS_PORT + self.user = app_settings.STARROCKS_USER + self.password = app_settings.STARROCKS_PASSWORD + self.database = app_settings.STARROCKS_DATABASE self.connection = None def connect(self) -> bool: @@ -88,12 +88,3 @@ class StarRocksClient: def __exit__(self, exc_type, exc_val, exc_tb): """Context manager exit""" self.disconnect() - - - - - - - - - diff --git a/apps/metrics/backend/services/prometheus_metrics_service.py b/apps/metrics/backend/services/prometheus_metrics_service.py new file mode 100644 index 0000000..7b6118f --- /dev/null +++ b/apps/metrics/backend/services/prometheus_metrics_service.py @@ -0,0 +1,268 @@ +from typing import Dict, List, Any, Optional, Union +from datetime import datetime, timedelta +from fastapi import HTTPException + +from common.log.module_logger import ModuleLogger +from ..infra.external_service.prometheus_client import PrometheusClient + + +class PrometheusMetricsService: + """ + Service class for querying Prometheus metrics with predefined PromQL queries. + + This service provides a high-level interface for querying metrics data + using predefined PromQL queries mapped to metric names. 
+ """ + + # Global dictionary mapping metric names to their corresponding PromQL queries + METRIC_PROMQL_MAP: Dict[str, str] = { + "freeleaps": { + # Just demo, No Usage + "cpu_usage": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + # Just demo, No Usage + "memory_usage": "100 - ((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100)", + # Just demo, No Usage + "disk_usage": "100 - ((node_filesystem_avail_bytes{mountpoint=\"/\"} / node_filesystem_size_bytes{mountpoint=\"/\"}) * 100)", + # Average response time for notification HTTP requests + "latency_ms": "1000*avg(freeleaps_notification_http_request_duration_seconds_sum{handler!=\"none\"} / freeleaps_notification_http_request_duration_seconds_count)", + # Error rate for 5xx HTTP status codes (stability metric) + "reliability": "1-sum(rate(freeleaps_notification_http_requests_total{status=\"5xx\"}[1m]))", + + }, + "magicleaps": { + + } + } + + def __init__(self, prometheus_endpoint: Optional[str] = None): + """ + Initialize PrometheusMetricsService. + + Args: + prometheus_endpoint: Prometheus server endpoint. If None, uses default from settings. + """ + self.module_logger = ModuleLogger(__file__) + self.prometheus_client = PrometheusClient(prometheus_endpoint) + + def get_available_metrics(self, product_id: Optional[str] = None) -> List[str]: + """ + Get list of available metric names that have predefined PromQL queries. + + Args: + product_id: Optional product ID to filter metrics. If None, returns all metrics from all products. 
+ + Returns: + List of available metric names + """ + if product_id: + if product_id in self.METRIC_PROMQL_MAP: + return list(self.METRIC_PROMQL_MAP[product_id].keys()) + else: + return [] + else: + # Return all metrics from all products + all_metrics = [] + for product_metrics in self.METRIC_PROMQL_MAP.values(): + all_metrics.extend(product_metrics.keys()) + return all_metrics + + def get_available_products(self) -> List[str]: + """ + Get list of available product IDs. + + Returns: + List of available product IDs + """ + return list(self.METRIC_PROMQL_MAP.keys()) + + async def query_metric_by_time_range( + self, + product_id: str, + metric_name: str, + start_time: Union[str, datetime], + end_time: Union[str, datetime], + step: str = "1m" + ) -> List[Dict[str, Any]]: + """ + Query metric data for a specific time range. + + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric to query + start_time: Start time for the query (RFC3339 string or datetime) + end_time: End time for the query (RFC3339 string or datetime) + step: Query resolution step width (e.g., "1m", "5m", "1h") + + Returns: + List of dictionaries with 'date' and 'value' keys + + Raises: + ValueError: If product_id or metric_name is not found in the PromQL mapping + Exception: If Prometheus query fails + + Example: + result = await service.query_metric_by_time_range( + "freeleaps", + "cpu_usage", + start_time=datetime.now() - timedelta(hours=1), + end_time=datetime.now(), + step="5m" + ) + # Returns: [{"date": "2024-01-01T10:00:00Z", "value": 45.2}, ...] + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_PROMQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in PromQL mapping. 
Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_PROMQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' PromQL mapping. Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Parse datetime strings if they are strings + if isinstance(start_time, str): + start_dt = datetime.fromisoformat(start_time.replace('Z', '+00:00')) + else: + start_dt = start_time + + if isinstance(end_time, str): + end_dt = datetime.fromisoformat(end_time.replace('Z', '+00:00')) + else: + end_dt = end_time + + # Validate time range + if start_dt >= end_dt: + raise HTTPException( + status_code=400, + detail="Start time must be before end time" + ) + + # Check time range is not too large (max 7 days for detailed queries) + time_diff = end_dt - start_dt + if time_diff > timedelta(days=7): + raise HTTPException( + status_code=400, + detail="Time range cannot exceed 7 days for detailed queries" + ) + + # Get the PromQL query for the metric + promql_query = self.METRIC_PROMQL_MAP[product_id][metric_name] + + try: + await self.module_logger.log_info( + f"Querying metric '{metric_name}' from product '{product_id}' with PromQL: {promql_query}") + + # Execute the range query + result = await self.prometheus_client.query_range( + query=promql_query, + start=start_dt, + end=end_dt, + step=step + ) + + # Parse the result and format it + formatted_data = self._format_query_result(result, metric_name) + + await self.module_logger.log_info( + f"Successfully queried metric '{metric_name}' with {len(formatted_data)} data points") + return formatted_data + + except Exception as e: + await 
self.module_logger.log_error(f"Failed to query metric '{metric_name}': {e}") + raise + + def _format_query_result(self, prometheus_result: Dict[str, Any], metric_name: str) -> List[Dict[str, Any]]: + """ + Format Prometheus query result into the required format. + + Args: + prometheus_result: Raw result from Prometheus API + metric_name: Name of the metric being queried + + Returns: + List of dictionaries with 'date' and 'value' keys + """ + formatted_data = [] + + # Extract data from Prometheus result + data = prometheus_result.get("data", {}) + result_type = data.get("resultType", "") + + if result_type == "matrix": + # Handle range query results (matrix) + for series in data.get("result", []): + metric_labels = series.get("metric", {}) + values = series.get("values", []) + + for timestamp, value in values: + # Convert Unix timestamp to ISO format + date_str = datetime.fromtimestamp(timestamp).isoformat() + "Z" + + formatted_data.append({ + "date": date_str, + "value": float(value) if value != "NaN" else None, + "metric": metric_name, + "labels": metric_labels + }) + + elif result_type == "vector": + # Handle instant query results (vector) + for series in data.get("result", []): + metric_labels = series.get("metric", {}) + timestamp = series.get("value", [None, None])[0] + value = series.get("value", [None, None])[1] + + if timestamp and value: + date_str = datetime.fromtimestamp(timestamp).isoformat() + "Z" + + formatted_data.append({ + "date": date_str, + "value": float(value) if value != "NaN" else None, + "metric": metric_name, + "labels": metric_labels + }) + + # Sort by date + formatted_data.sort(key=lambda x: x["date"]) + + return formatted_data + + async def get_metric_info(self, product_id: str, metric_name: str) -> Dict[str, Any]: + """ + Get information about a specific metric including its PromQL query. 
+ + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric + + Returns: + Dictionary containing metric information + + Raises: + ValueError: If product_id or metric_name is not found in the PromQL mapping + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_PROMQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in PromQL mapping. Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_PROMQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' PromQL mapping. Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + return { + "product_id": product_id, + "metric_name": metric_name, + "promql_query": self.METRIC_PROMQL_MAP[product_id][metric_name], + "description": f"PromQL query for {metric_name} metric in product {product_id}" + } diff --git a/apps/metrics/backend/services/registration_analytics_service.py b/apps/metrics/backend/services/registration_analytics_service.py index 1579286..eb843f0 100644 --- a/apps/metrics/backend/services/registration_analytics_service.py +++ b/apps/metrics/backend/services/registration_analytics_service.py @@ -1,7 +1,7 @@ from typing import List, Dict, Any from datetime import date, timedelta from loguru import logger -from backend.infra.database_client import StarRocksClient +from backend.infra.external_service.starrocks_client import StarRocksClient from backend.models.user_registration_models import UserRegistrationResponse, DailyRegisteredUsers @@ -33,7 +33,6 @@ class RegistrationService: 
raw_data = self.starrocks_client.get_daily_registered_users( start_date, end_date, product_id ) - # Convert to DailyRegisteredUsers objects daily_data = [ DailyRegisteredUsers( @@ -44,7 +43,6 @@ class RegistrationService: ) for row in raw_data ] - # Create date-to-count mapping data_dict = {str(item.date_id): item.registered_cnt for item in daily_data} diff --git a/apps/metrics/common/config/__init__.py b/apps/metrics/common/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/webapi/config/app_settings.py b/apps/metrics/common/config/app_settings.py similarity index 62% rename from apps/metrics/webapi/config/app_settings.py rename to apps/metrics/common/config/app_settings.py index fac31e6..4877ca6 100644 --- a/apps/metrics/webapi/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -2,7 +2,7 @@ from pydantic_settings import BaseSettings from typing import Optional -class SiteSettings(BaseSettings): +class AppSettings(BaseSettings): # Server settings SERVER_HOST: str = "0.0.0.0" SERVER_PORT: int = 8009 @@ -11,8 +11,8 @@ class SiteSettings(BaseSettings): # Log settings LOG_BASE_PATH: str = "./logs" - BACKEND_LOG_FILE_NAME: str = "metrics" - APPLICATION_ACTIVITY_LOG: str = "metrics-activity" + BACKEND_LOG_FILE_NAME: str = "freeleaps-metrics" + APPLICATION_ACTIVITY_LOG: str = "freeleaps-metrics-activity" # StarRocks database settings STARROCKS_HOST: str = "freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc" @@ -21,8 +21,15 @@ class SiteSettings(BaseSettings): STARROCKS_PASSWORD: str = "" STARROCKS_DATABASE: str = "freeleaps" + # Prometheus settings + PROMETHEUS_ENDPOINT: str = "http://localhost:9090" + + METRICS_ENABLED: bool = False + PROBES_ENABLED: bool = True + + class Config: - env_file = "local.env" + env_file = "local.env" -site_settings = SiteSettings() +app_settings = AppSettings() diff --git a/apps/metrics/common/config/log_settings.py b/apps/metrics/common/config/log_settings.py new 
file mode 100644 index 0000000..633c75a --- /dev/null +++ b/apps/metrics/common/config/log_settings.py @@ -0,0 +1,17 @@ +import os +from dataclasses import dataclass +from .app_settings import app_settings +from .site_settings import site_settings + +@dataclass +class LogSettings: + LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH + LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days") + LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00") # midnight + MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5)) + LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024)) # 10 MB + APP_NAME: str = site_settings.NAME + ENVIRONMENT: str = site_settings.ENV + + +log_settings = LogSettings() diff --git a/apps/metrics/common/config/site_settings.py b/apps/metrics/common/config/site_settings.py new file mode 100644 index 0000000..c415f9f --- /dev/null +++ b/apps/metrics/common/config/site_settings.py @@ -0,0 +1,26 @@ +import os + +from pydantic_settings import BaseSettings + + +# NOTE: The values fall backs to your environment variables when not set here +class SiteSettings(BaseSettings): + NAME: str = "FREELEAPS-METRICS" + DEBUG: bool = True + + ENV: str = "dev" + + SERVER_HOST: str = "localhost" + SERVER_PORT: int = 9000 + + URL: str = "http://localhost" + TIME_ZONE: str = "UTC" + + BASE_PATH: str = os.path.dirname(os.path.dirname((os.path.abspath(__file__)))) + + class Config: + env_file = ".devbase-webapi.env" + env_file_encoding = "utf-8" + + +site_settings = SiteSettings() diff --git a/apps/metrics/common/log/__init__.py b/apps/metrics/common/log/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/metrics/common/log/application_logger.py b/apps/metrics/common/log/application_logger.py new file mode 100644 index 0000000..67ec321 --- /dev/null +++ b/apps/metrics/common/log/application_logger.py @@ -0,0 +1,12 @@ +from .base_logger import LoggerBase +from common.config.app_settings import 
app_settings + +class ApplicationLogger(LoggerBase): + def __init__(self, application_activities: dict[str, any] = {}) -> None: + extra_fileds = {} + if application_activities: + extra_fileds.update(application_activities) + super().__init__( + logger_name=app_settings.APPLICATION_ACTIVITY_LOG, + extra_fileds=extra_fileds, + ) diff --git a/apps/metrics/common/log/base_logger.py b/apps/metrics/common/log/base_logger.py new file mode 100644 index 0000000..24f7bb0 --- /dev/null +++ b/apps/metrics/common/log/base_logger.py @@ -0,0 +1,136 @@ +from loguru import logger as guru_logger +from common.config.log_settings import log_settings +from typing import Dict, Any, Optional +import socket +import json +import threading +import os +import sys +import inspect +import logging + +from common.log.json_sink import JsonSink + +class LoggerBase: + binded_loggers = {} + logger_lock = threading.Lock() + + def __init__(self, logger_name: str, extra_fileds: dict[str, any]) -> None: + self.__logger_name = logger_name + self.extra_fileds = extra_fileds + with LoggerBase.logger_lock: + if self.__logger_name in LoggerBase.binded_loggers: + self.logger = LoggerBase.binded_loggers[self.__logger_name] + return + + log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log" + log_level = "INFO" + rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024) + + guru_logger.remove() + + file_sink = JsonSink( + log_file_path=log_filename, + rotation_size_bytes=rotation_bytes, + max_backup_files=log_settings.MAX_BACKUP_FILES + ) + guru_logger.add( + sink=file_sink, + level=log_level, + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + guru_logger.add( + sink=sys.stderr, + level=log_level, + format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}", + filter=lambda record: record["extra"].get("topic") == self.__logger_name, + ) + + host_name = socket.gethostname() + host_ip 
= socket.gethostbyname(host_name) + self.logger = guru_logger.bind( + topic=self.__logger_name, + host_ip=host_ip, + host_name=host_name, + app=log_settings.APP_NAME, + env=log_settings.ENVIRONMENT, + ) + with LoggerBase.logger_lock: + LoggerBase.binded_loggers[self.__logger_name] = self.logger + + def _get_log_context(self) -> dict: + frame = inspect.currentframe().f_back.f_back + filename = os.path.basename(frame.f_code.co_filename) + lineno = frame.f_lineno + return {"log_file": filename, "log_line": lineno} + + def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Dict[str, Any]: + props = {} if properties is None else properties.copy() + props_str = json.dumps(props, ensure_ascii=False) if props else "{}" + return props, props_str + + async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: dict[str, any], text: str = "") -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context) + local_logger.info(text) + + async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context) + local_logger.exception(text) + + async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, 
event="information", properties=props, properties_str=props_str, **context) + local_logger.info(text) + + async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context) + local_logger.warning(text) + + async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None: + props, props_str = self._prepare_properties(properties) + context = self._get_log_context() + local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="error", properties=props, properties_str=props_str, **context) + local_logger.error(text) + + @staticmethod + def configure_uvicorn_logging(): + print("๐Ÿ“ข Setting up uvicorn logging interception...") + + # Intercept logs from these loggers + intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"] + + class InterceptHandler(logging.Handler): + def emit(self, record): + level = ( + guru_logger.level(record.levelname).name + if guru_logger.level(record.levelname, None) + else record.levelno + ) + frame, depth = logging.currentframe(), 2 + while frame.f_code.co_filename == logging.__file__: + frame = frame.f_back + depth += 1 + + guru_logger.opt(depth=depth, exception=record.exc_info).log( + level, + f"[{record.name}] {record.getMessage()}", + ) + + # Replace default handlers + logging.root.handlers.clear() + logging.root.setLevel(logging.INFO) + logging.root.handlers = [InterceptHandler()] + + # Configure specific uvicorn loggers + for logger_name in intercept_loggers: + logging_logger = logging.getLogger(logger_name) + logging_logger.handlers.clear() # Remove default handlers + 
logging_logger.propagate = True # Ensure propagation through Loguru diff --git a/apps/metrics/common/log/json_sink.py b/apps/metrics/common/log/json_sink.py new file mode 100644 index 0000000..2379095 --- /dev/null +++ b/apps/metrics/common/log/json_sink.py @@ -0,0 +1,84 @@ +import json +import datetime +import traceback +from pathlib import Path + +class JsonSink: + def __init__( + self, + log_file_path: str, + rotation_size_bytes: int = 10 * 1024 * 1024, + max_backup_files: int = 5, + ): + self.log_file_path = Path(log_file_path) + self.rotation_size = rotation_size_bytes + self.max_backup_files = max_backup_files + self._open_log_file() + + def _open_log_file(self): + # ensure the parent directory exists + parent_dir = self.log_file_path.parent + if not parent_dir.exists(): + parent_dir.mkdir(parents=True, exist_ok=True) + self.log_file = self.log_file_path.open("a", encoding="utf-8") + + def _should_rotate(self) -> bool: + return self.log_file_path.exists() and self.log_file_path.stat().st_size >= self.rotation_size + + def _rotate(self): + self.log_file.close() + timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + rotated_path = self.log_file_path.with_name(f"{self.log_file_path.stem}_{timestamp}{self.log_file_path.suffix}") + self.log_file_path.rename(rotated_path) + self._cleanup_old_backups() + self._open_log_file() + + def _cleanup_old_backups(self): + parent = self.log_file_path.parent + stem = self.log_file_path.stem + suffix = self.log_file_path.suffix + + backup_files = sorted( + parent.glob(f"{stem}_*{suffix}"), + key=lambda p: p.stat().st_mtime, + reverse=True, + ) + + for old_file in backup_files[self.max_backup_files:]: + try: + old_file.unlink() + except Exception as e: + print(f"Failed to delete old backup {old_file}: {e}") + + def __call__(self, message): + record = message.record + if self._should_rotate(): + self._rotate() + + log_entry = { + "level": record["level"].name.lower(), + "timestamp": int(record["time"].timestamp() * 
1000), + "text": record["message"], + "fields": record["extra"].get("properties", {}), + "context": { + "app": record["extra"].get("app"), + "env": record["extra"].get("env"), + "log_file": record["extra"].get("log_file"), + "log_line": record["extra"].get("log_line"), + "topic": record["extra"].get("topic"), + "sender_id": record["extra"].get("sender_id"), + "receiver_id": record["extra"].get("receiver_id"), + "subject": record["extra"].get("subject"), + "event": record["extra"].get("event"), + "host_ip": record["extra"].get("host_ip"), + "host_name": record["extra"].get("host_name"), + }, + "stacktrace": None + } + + if record["exception"]: + exc_type, exc_value, exc_tb = record["exception"] + log_entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb) + + self.log_file.write(json.dumps(log_entry, ensure_ascii=False) + "\n") + self.log_file.flush() diff --git a/apps/metrics/common/log/module_logger.py b/apps/metrics/common/log/module_logger.py new file mode 100644 index 0000000..3e82f74 --- /dev/null +++ b/apps/metrics/common/log/module_logger.py @@ -0,0 +1,46 @@ +from .application_logger import ApplicationLogger + + +class ModuleLogger(ApplicationLogger): + def __init__(self, sender_id: str) -> None: + super().__init__() + self.event_sender_id = sender_id + self.event_receiver_id = "ModuleLogger" + self.event_subject = "module" + + async def log_exception(self, exception: Exception, text: str = "Exception", properties: dict[str, any] = None) -> None: + return await super().log_exception( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + exception=exception, + text=text, + properties=properties, + ) + + async def log_info(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_info( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_warning(self, text: str, 
data: dict[str, any] = None) -> None: + return await super().log_warning( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) + + async def log_error(self, text: str, data: dict[str, any] = None) -> None: + return await super().log_error( + sender_id=self.event_sender_id, + receiver_id=self.event_receiver_id, + subject=self.event_subject, + text=text, + properties=data, + ) \ No newline at end of file diff --git a/apps/metrics/common/probes/__init__.py b/apps/metrics/common/probes/__init__.py new file mode 100644 index 0000000..4071df8 --- /dev/null +++ b/apps/metrics/common/probes/__init__.py @@ -0,0 +1,140 @@ +import logging +from enum import Enum +from typing import Optional, Callable, Tuple, Dict +import inspect +from datetime import datetime, timezone + +# ProbeType is an Enum that defines the types of probes that can be registered. +class ProbeType(Enum): + LIVENESS = "liveness" + READINESS = "readiness" + STARTUP = "startup" + +# ProbeResult is a class that represents the result of a probe check. +class ProbeResult: + def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None): + self.success = success + self.message = message + self.data = data or {} + + def to_dict(self) -> dict: + return { + "success": self.success, + "message": self.message, + "data": self.data + } + +# Probe is a class that represents a probe that can be registered. 
+class Probe: + def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None): + self.type = type + self.path = path + self.check_fn = check_fn + self.name = name or f"{type.value}-{id(self)}" + + async def execute(self) -> ProbeResult: + try: + result = self.check_fn() + if inspect.isawaitable(result): + result = await result + + if isinstance(result, ProbeResult): + return result + elif isinstance(result, bool): + return ProbeResult(result, "ok" if result else "failed") + else: + return ProbeResult(True, "ok") + except Exception as e: + return ProbeResult(False, str(e)) + +# ProbeGroup is a class that represents a group of probes that can be checked together. +class ProbeGroup: + def __init__(self, path: str): + self.path = path + self.probes: Dict[str, Probe] = {} + + def add_probe(self, probe: Probe): + self.probes[probe.name] = probe + + async def check_all(self) -> Tuple[bool, dict]: + results = {} + all_success = True + + for name, probe in self.probes.items(): + result = await probe.execute() + results[name] = result.to_dict() + if not result.success: + all_success = False + + return all_success, results + +# FrameworkAdapter is an abstract class that defines the interface for framework-specific probe adapters. +class FrameworkAdapter: + async def handle_request(self, group: ProbeGroup): + all_success, results = await group.check_all() + status_code = 200 if all_success else 503 + return {"status": "ok" if all_success else "failed", "payload": results, "timestamp": int(datetime.now(timezone.utc).timestamp())}, status_code + + def register_route(self, path: str, handler: Callable): + raise NotImplementedError + +# ProbeManager is a class that manages the registration of probes and their corresponding framework adapters. 
+class ProbeManager: + _default_paths = { + ProbeType.LIVENESS: "/_/livez", + ProbeType.READINESS: "/_/readyz", + ProbeType.STARTUP: "/_/healthz" + } + + def __init__(self): + self.groups: Dict[str, ProbeGroup] = {} + self.adapters: Dict[str, FrameworkAdapter] = {} + self._startup_complete = False + + def register_adapter(self, framework: str, adapter: FrameworkAdapter): + self.adapters[framework] = adapter + logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}") + + def register( + self, + type: ProbeType, + check_func: Optional[Callable] = None, + path: Optional[str] = None, + prefix: str = "", + name: Optional[str] = None, + frameworks: Optional[list] = None + ): + path = path or self._default_paths.get(type, "/_/healthz") + if prefix: + path = f"{prefix}{path}" + + if type == ProbeType.STARTUP and check_func is None: + check_func = self._default_startup_check + + probe = Probe(type, path, check_func or (lambda: True), name) + + if path not in self.groups: + self.groups[path] = ProbeGroup(path) + self.groups[path].add_probe(probe) + + for framework in (frameworks or ["default"]): + self._register_route(framework, path) + logging.info(f"Registered {type.value} probe route ({path}) for framework: {framework}") + + def _register_route(self, framework: str, path: str): + if framework not in self.adapters: + return + + adapter = self.adapters[framework] + group = self.groups[path] + + async def handler(): + return await adapter.handle_request(group) + + adapter.register_route(path, handler) + + def _default_startup_check(self) -> bool: + return self._startup_complete + + def mark_startup_complete(self): + self._startup_complete = True \ No newline at end of file diff --git a/apps/metrics/common/probes/adapters.py b/apps/metrics/common/probes/adapters.py new file mode 100644 index 0000000..2ecd38a --- /dev/null +++ b/apps/metrics/common/probes/adapters.py @@ -0,0 +1,15 @@ +from . 
import FrameworkAdapter +from fastapi.responses import JSONResponse +from typing import Callable + +# FastAPIAdapter is a class that implements the FrameworkAdapter interface for FastAPI. +class FastAPIAdapter(FrameworkAdapter): + def __init__(self, app): + self.app = app + + def register_route(self,path: str, handler: Callable): + async def wrapper(): + data, status_code = await handler() + return JSONResponse(content=data, status_code=status_code) + + self.app.add_api_route(path, wrapper, methods=["GET"]) diff --git a/apps/metrics/docs/design.md b/apps/metrics/docs/design.md new file mode 100644 index 0000000..796b5ae --- /dev/null +++ b/apps/metrics/docs/design.md @@ -0,0 +1,146 @@ +# 1.Override + +We support two ways to query metrics: +- Connect to StarRocks data warehouse and query metrics from it +- Query Prometheus directly and retrieve metrics from it + +# 2.Starrocks Metric + +We can implement StarRocks Metric queries similar to Prometheus Metric queries. The only difference is replacing PromQL with SQL and querying through StarRocks API. + +# 3.Prometheus Metric + +## 3.1.Metrics Config +Currently, metrics are configured in code. In the future, they will be configured through database or other methods. 
+Organization structure: Product ID -> Metric Name -> Metric Query Method (PromQL) +```json +{ + "freeleaps": { + // Just for demo + "cpu_usage": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + // Just for demo + "memory_usage": "100 - ((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100)", + // Just for demo + "disk_usage": "100 - ((node_filesystem_avail_bytes{mountpoint=\"/\"} / node_filesystem_size_bytes{mountpoint=\"/\"}) * 100)", + "latency_ms": "1000*avg(freeleaps_notification_http_request_duration_seconds_sum{handler!=\"none\"} / freeleaps_notification_http_request_duration_seconds_count)", + "reliability": "1-sum(rate(freeleaps_notification_http_requests_total{status=\"5xx\"}[1m]))" + }, + "magicleaps": {} +} +``` +If we want to add new metrics, theoretically we only need to add one configuration entry (provided that the metric exists in Prometheus and can be queried directly through PromQL without requiring any additional code processing) + +## 3.2.API Design + +### 3.2.1.Query Metrics by Product ID + +API: `/api/metrics/prometheus/product/{product_id}/available-metrics` + +Method: GET +Request: +``` +product_id=freeleaps +``` +Response: + +```json +{ + "product_id": "freeleaps", + "available_metrics": [ + "cpu_usage", + "memory_usage", + "disk_usage", + "latency_ms", + "reliability" + ], + "total_count": 5, + "description": "List of metrics with predefined PromQL queries for product 'freeleaps'" +} +``` + +### 3.2.2.Query Metric Info +API: `/api/metrics/prometheus/product/{product_id}/metric/{metric_name}/info` + +Method: GET +Request: +``` +product_id=freeleaps +metric_name=cpu_usage +``` +Response: + +```json +{ + "metric_info": { + "product_id": "freeleaps", + "metric_name": "cpu_usage", + "promql_query": "100 - (avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)", + "description": "PromQL query for cpu_usage metric in product freeleaps" + }, + "description": "Information 
about metric 'cpu_usage' in product 'freeleaps'" +} +``` + +### 3.2.3.Query Metric Data +API: `/api/metrics/prometheus/metrics_query` + +Method: GET +Request: +``` +{ + "product_id":"freeleaps", + "metric_name": "latency_ms", + "start_time": "2025-09-12T00:00:00Z", + "end_time": "2025-09-16T01:00:00Z", + "step":"1h" # Interval between data points in the query result +} +``` +Response: + +```json +{ + "metric_name": "latency_ms", + "data_points": [ + { + "date": "2025-09-12T08:00:00Z", + "value": 41.37141507698155, + "labels": {} # Optional: Additional labels for prometheus, Just for debugging + }, + { + "date": "2025-09-12T09:00:00Z", + "value": 41.371992733188385, + "labels": {} + }, + { + "date": "2025-09-12T10:00:00Z", + "value": 41.37792878125675, + "labels": {} + }, + { + "date": "2025-09-12T11:00:00Z", + "value": 41.37297490632533, + "labels": {} + }, + ... + { + "date": "2025-09-16T08:00:00Z", + "value": 40.72491916149973, + "labels": {} + }, + { + "date": "2025-09-16T09:00:00Z", + "value": 40.72186597550194, + "labels": {} + } + ], + "total_points": 98, + "time_range": { + "start": "2025-09-12T00:00:00Z", + "end": "2025-09-16T01:00:00Z" + }, + "step": "1h" +} +``` + +# 4.Universal Metrics +In the future, we can create an abstraction layer above StarRocks Metrics and Prometheus Metrics to unify metric queries from both data sources! 
\ No newline at end of file diff --git a/apps/metrics/local.env b/apps/metrics/local.env index 616515e..4b601db 100644 --- a/apps/metrics/local.env +++ b/apps/metrics/local.env @@ -14,4 +14,6 @@ STARROCKS_DATABASE=freeleaps # log settings LOG_BASE_PATH=./logs BACKEND_LOG_FILE_NAME=metrics -APPLICATION_ACTIVITY_LOG=metrics-activity \ No newline at end of file +APPLICATION_ACTIVITY_LOG=metrics-activity + +PROMETHEUS_ENDPOINT=http://localhost:9090 \ No newline at end of file diff --git a/apps/metrics/requirements.txt b/apps/metrics/requirements.txt index 96d4bfd..1422c6b 100644 --- a/apps/metrics/requirements.txt +++ b/apps/metrics/requirements.txt @@ -14,3 +14,4 @@ pytest==8.4.1 pytest-asyncio==0.21.2 pymysql==1.1.0 sqlalchemy==2.0.23 +python-dotenv diff --git a/apps/metrics/webapi/bootstrap/app_factory.py b/apps/metrics/webapi/bootstrap/app_factory.py deleted file mode 100644 index 4e9e805..0000000 --- a/apps/metrics/webapi/bootstrap/app_factory.py +++ /dev/null @@ -1,69 +0,0 @@ -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from prometheus_fastapi_instrumentator import Instrumentator -from webapi.config.app_settings import site_settings -from loguru import logger -import os - - -def create_app() -> FastAPI: - """ - Create and configure the FastAPI application - """ - app = FastAPI( - title="Metrics Service API", - description="Metrics Service for Freeleaps Platform", - version="1.0.0", - docs_url="/docs", - redoc_url="/redoc" - ) - - # Add CORS middleware - app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], - ) - - # Setup logging - setup_logging() - - # Setup Prometheus metrics - Instrumentator().instrument(app).expose(app) - - # Include routers - # from webapi.routes import health, api - # app.include_router(health.router, prefix="/health", tags=["health"]) - # app.include_router(api.router, prefix="/api/metrics", tags=["metrics"]) - # Note: 
Registration router is included in main.py - - return app - - -def setup_logging(): - """ - Setup logging configuration - """ - # Create log directory if it doesn't exist - log_dir = site_settings.LOG_BASE_PATH - os.makedirs(log_dir, exist_ok=True) - - # Configure loguru - logger.add( - f"{log_dir}/{site_settings.BACKEND_LOG_FILE_NAME}.log", - rotation="1 day", - retention="30 days", - level="INFO", - format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}" - ) - - logger.add( - f"{log_dir}/{site_settings.APPLICATION_ACTIVITY_LOG}.log", - rotation="1 day", - retention="30 days", - level="INFO", - format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {name}:{function}:{line} | {message}", - filter=lambda record: record["level"].name == "INFO" - ) diff --git a/apps/metrics/webapi/bootstrap/application.py b/apps/metrics/webapi/bootstrap/application.py new file mode 100644 index 0000000..66d4a65 --- /dev/null +++ b/apps/metrics/webapi/bootstrap/application.py @@ -0,0 +1,77 @@ +import logging + +from fastapi import FastAPI +from fastapi.openapi.utils import get_openapi + +from common.config.app_settings import app_settings +from webapi.providers import exception_handler, common, probes, metrics, router +from webapi.providers.logger import register_logger + + +def create_app() -> FastAPI: + logging.info("App initializing") + + app = FreeleapsMetricsApp() + + register_logger() + register(app, exception_handler) + register(app, router) + register(app, common) + + # Call the custom_openapi function to change the OpenAPI version + customize_openapi_security(app) + # Register probe APIs if enabled + if app_settings.PROBES_ENABLED: + register(app, probes) + + # Register metrics APIs if enabled + if app_settings.METRICS_ENABLED: + register(app, metrics) + return app + + +# This function overrides the OpenAPI schema version to 3.0.0 +def customize_openapi_security(app: FastAPI) -> None: + def custom_openapi(): + if app.openapi_schema: + return 
app.openapi_schema + + # Generate OpenAPI schema + openapi_schema = get_openapi( + title="FreeLeaps Metrics API", + version="3.1.0", + description="FreeLeaps Metrics API Documentation", + routes=app.routes, + ) + + # Ensure the components section exists in the OpenAPI schema + if "components" not in openapi_schema: + openapi_schema["components"] = {} + + # Add security scheme to components + openapi_schema["components"]["securitySchemes"] = { + "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"} + } + + # Add security requirement globally + openapi_schema["security"] = [{"bearerAuth": []}] + + app.openapi_schema = openapi_schema + return app.openapi_schema + + app.openapi = custom_openapi + + +def register(app, provider): + logging.info(provider.__name__ + " registering") + provider.register(app) + + +def boot(app, provider): + logging.info(provider.__name__ + " booting") + provider.boot(app) + + +class FreeleapsMetricsApp(FastAPI): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py index 10467a3..39ec9aa 100644 --- a/apps/metrics/webapi/main.py +++ b/apps/metrics/webapi/main.py @@ -1,15 +1,14 @@ -from webapi.bootstrap.app_factory import create_app -from webapi.config.app_settings import site_settings +from common.config.site_settings import site_settings from fastapi.responses import RedirectResponse import uvicorn -from typing import Any -from webapi.routes import registration_metrics +from webapi.bootstrap.application import create_app +from webapi.routes.starrocks_metrics import metrics_query app = create_app() # Include routers -app.include_router(registration_metrics.router) +app.include_router(metrics_query.router) @app.get("/", status_code=301) @@ -24,13 +23,3 @@ if __name__ == "__main__": uvicorn.run( app="main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT ) - - -def get_context() -> Any: - # Define your context 
def register(app):
    """Apply site-wide configuration and install global middleware.

    Sets debug/title from site settings, wires the CORS middleware, and
    registers (currently empty) startup/shutdown hooks for future resource
    management.
    """
    app.debug = site_settings.DEBUG
    app.title = site_settings.NAME

    add_global_middleware(app)

    @app.on_event("startup")
    async def startup():
        # Placeholder: open shared resources (e.g. connection pools) here.
        pass

    @app.on_event("shutdown")
    async def shutdown():
        # Placeholder: release shared resources here.
        pass


def add_global_middleware(app):
    """Install the CORS middleware with a fully permissive policy.

    NOTE(review): allow_origins=["*"] combined with allow_credentials=True
    effectively reflects any Origin while allowing credentials — confirm this
    is intended before exposing the service publicly.
    """
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
def register(app):
    """Wire Prometheus instrumentation into the FastAPI app.

    Instruments all routes and, on startup, exposes the scrape endpoint.
    """
    # Fix: the namespace was misspelled ("freeleaps-mertics") and used a
    # hyphen, which is not a legal character in Prometheus metric names
    # (names must match [a-zA-Z_:][a-zA-Z0-9_:]*). Use an underscore.
    instrumentator = Instrumentator().instrument(
        app,
        metric_namespace="freeleaps_metrics",
        metric_subsystem=app_settings.APP_NAME,
    )

    @app.on_event("startup")
    async def startup():
        # Expose the scrape endpoint once the app has started.
        instrumentator.expose(app, endpoint="/api/_/metrics", should_gzip=True)
        logging.info("Metrics endpoint exposed at /api/_/metrics")
def register(app):
    """Mount the aggregated API router and, in debug mode, dump the route table."""
    app.include_router(
        api_router,
        prefix="/api",
        tags=["api"],
        dependencies=[],
        responses={404: {"description": "no page found"}},
    )

    # Only print the route table when running in debug mode.
    if not app.debug:
        return

    for route in app.routes:
        info = {
            "path": route.path,
            "endpoint": route.endpoint,
            "name": route.name,
        }
        if isinstance(route, routing.WebSocketRoute):
            info["type"] = "web socket route"
        else:
            info["methods"] = route.methods
        print(info)
@router.get("/prometheus/product/{product_id}/available-metrics")
async def get_available_metrics(product_id: str):
    """
    List the metrics that have predefined PromQL queries for a product.

    Args:
        product_id: Product ID to get metrics for (required).

    Returns a list of metric names that have predefined PromQL queries for the specified product.
    """
    await module_logger.log_info(f"Getting available metrics list for product_id: {product_id}")

    metric_names = prometheus_service.get_available_metrics(product_id)

    payload = {
        "product_id": product_id,
        "available_metrics": metric_names,
        "total_count": len(metric_names),
        "description": f"List of metrics with predefined PromQL queries for product '{product_id}'",
    }
    return payload
+ + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric to get information for + """ + await module_logger.log_info(f"Getting info for metric '{metric_name}' from product '{product_id}'") + + metric_info = await prometheus_service.get_metric_info(product_id, metric_name) + + return { + "metric_info": metric_info, + "description": f"Information about metric '{metric_name}' in product '{product_id}'" + } diff --git a/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py b/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py new file mode 100644 index 0000000..69ad4c8 --- /dev/null +++ b/apps/metrics/webapi/routes/prometheus_metrics/metrics_query.py @@ -0,0 +1,83 @@ +from fastapi import APIRouter +from typing import Optional, List, Dict, Any +from pydantic import BaseModel, Field + +from common.log.module_logger import ModuleLogger +from backend.services.prometheus_metrics_service import PrometheusMetricsService + + +class MetricDataPoint(BaseModel): + """Single data point in a time series.""" + date: str = Field(..., description="Timestamp in ISO format") + value: Optional[float] = Field(None, description="Metric value") + labels: Optional[Dict[str, str]] = Field(None, description="Metric labels") + + +class MetricTimeSeriesResponse(BaseModel): + """Response model for metric time series data.""" + metric_name: str = Field(..., description="Name of the queried metric") + data_points: List[MetricDataPoint] = Field(..., description="List of data points") + total_points: int = Field(..., description="Total number of data points") + time_range: Dict[str, str] = Field(..., description="Start and end time of the query") + step: str = Field("1h", description="Query resolution step") + + +class MetricQueryRequest(BaseModel): + """Request model for metric query.""" + product_id: str = Field(..., description="Product ID to identify which product's metrics to query") + metric_name: str = Field(..., 
@router.post("/prometheus/metrics_query", response_model=MetricTimeSeriesResponse)
async def metrics_query(
    request: MetricQueryRequest
):
    """
    Query metrics time series data.

    Returns XY curve data (time series) for the specified metric within the given time range.
    """
    await module_logger.log_info(
        f"Querying metric '{request.metric_name}' from product '{request.product_id}' from {request.start_time} to {request.end_time}")

    # Fetch the raw series from the backing Prometheus service.
    raw_points = await prometheus_service.query_metric_by_time_range(
        product_id=request.product_id,
        metric_name=request.metric_name,
        start_time=request.start_time,
        end_time=request.end_time,
        step=request.step,
    )

    # Wrap each raw point in its response model.
    series = []
    for point in raw_points:
        series.append(
            MetricDataPoint(
                date=point["date"],
                value=point["value"],
                labels=point["labels"],
            )
        )

    result = MetricTimeSeriesResponse(
        metric_name=request.metric_name,
        data_points=series,
        total_points=len(series),
        time_range={"start": request.start_time, "end": request.end_time},
        step=request.step,
    )

    await module_logger.log_info(
        f"Successfully queried metric '{request.metric_name}' with {len(raw_points)} data points")
    return result
timedelta -from typing import Optional -from loguru import logger -from backend.services.registration_analytics_service import RegistrationService -from backend.models.user_registration_models import UserRegistrationResponse, UserRegistrationQuery - -router = APIRouter(prefix="/api/metrics", tags=["registration"]) - -# Initialize service -registration_service = RegistrationService() - - -@router.get("/daily-registered-users", response_model=UserRegistrationResponse) -async def get_daily_registered_users( - start_date: date = Query(..., description="Start date in YYYY-MM-DD format"), - end_date: date = Query(..., description="End date in YYYY-MM-DD format"), - product_id: str = Query("freeleaps", description="Product identifier") -): - """ - Get daily registered users count for a date range - - Returns two lists: - - dates: List of dates in YYYY-MM-DD format - - counts: List of daily registration counts - - Example: - - GET /api/metrics/daily-registered-users?start_date=2024-01-01&end_date=2024-01-07 - """ - try: - # Validate date range - if start_date > end_date: - raise HTTPException( - status_code=400, - detail="Start date must be before or equal to end date" - ) - - # Check date range is not too large (max 1 year) - if (end_date - start_date).days > 365: - raise HTTPException( - status_code=400, - detail="Date range cannot exceed 365 days" - ) - - logger.info(f"Querying registration data from {start_date} to {end_date} for product {product_id}") - - # Get data from service - result = registration_service.get_daily_registered_users( - start_date, end_date, product_id - ) - - logger.info(f"Successfully retrieved data for {len(result.dates)} days") - return result - - except HTTPException: - raise - except Exception as e: - logger.error(f"Failed to get daily registered users: {e}") - raise HTTPException( - status_code=500, - detail=f"Internal server error: {str(e)}" - ) - - -@router.get("/registration-summary") -async def get_registration_summary( - start_date: 
date = Query(..., description="Start date in YYYY-MM-DD format"), - end_date: date = Query(..., description="End date in YYYY-MM-DD format"), - product_id: str = Query("freeleaps", description="Product identifier") -): - """ - Get summary statistics for user registrations in a date range - - Returns summary statistics including: - - total_registrations: Total number of registrations - - average_daily: Average daily registrations - - max_daily: Maximum daily registrations - - min_daily: Minimum daily registrations - - days_with_registrations: Number of days with registrations - - total_days: Total number of days in range - """ - try: - # Validate date range - if start_date > end_date: - raise HTTPException( - status_code=400, - detail="Start date must be before or equal to end date" - ) - - if (end_date - start_date).days > 365: - raise HTTPException( - status_code=400, - detail="Date range cannot exceed 365 days" - ) - - logger.info(f"Querying registration summary from {start_date} to {end_date} for product {product_id}") - - # Get summary from service - summary = registration_service.get_registration_summary( - start_date, end_date, product_id - ) - - return summary - - except HTTPException: - raise - except Exception as e: - logger.error(f"Failed to get registration summary: {e}") - raise HTTPException( - status_code=500, - detail=f"Internal server error: {str(e)}" - ) - - -@router.get("/recent-registered-users", response_model=UserRegistrationResponse) -async def get_recent_registered_users( - days: int = Query(7, ge=1, le=365, description="Number of recent days to query"), - product_id: str = Query("freeleaps", description="Product identifier") -): - """ - Get daily registered users count for recent N days - - Returns registration data for the last N days from today - - Example: - - GET /api/metrics/recent-registered-users?days=7 - - GET /api/metrics/recent-registered-users?days=30&product_id=freeleaps - """ - try: - # Calculate date range - end_date = 
date.today() - start_date = end_date - timedelta(days=days-1) - - logger.info(f"Querying recent {days} days registration data from {start_date} to {end_date} for product {product_id}") - - # Get data from service - result = registration_service.get_daily_registered_users( - start_date, end_date, product_id - ) - - logger.info(f"Successfully retrieved recent {days} days data, total registrations: {result.total_registrations}") - return result - - except Exception as e: - logger.error(f"Failed to get recent registered users: {e}") - raise HTTPException( - status_code=500, - detail=f"Internal server error: {str(e)}" - ) - - -@router.get("/registered-users-by-days", response_model=UserRegistrationResponse) -async def get_registered_users_by_days( - start_date: date = Query(..., description="Start date in YYYY-MM-DD format"), - days: int = Query(..., ge=1, le=365, description="Number of days from start date"), - product_id: str = Query("freeleaps", description="Product identifier") -): - """ - Get daily registered users count starting from a specific date for N days - - Returns registration data for N days starting from the specified start date - - Example: - - GET /api/metrics/registered-users-by-days?start_date=2024-01-01&days=7 - - GET /api/metrics/registered-users-by-days?start_date=2024-09-01&days=30&product_id=freeleaps - """ - try: - # Calculate end date - end_date = start_date + timedelta(days=days-1) - - logger.info(f"Querying registration data from {start_date} for {days} days (until {end_date}) for product {product_id}") - - # Get data from service - result = registration_service.get_daily_registered_users( - start_date, end_date, product_id - ) - - logger.info(f"Successfully retrieved {days} days data from {start_date}, total registrations: {result.total_registrations}") - return result - - except Exception as e: - logger.error(f"Failed to get registered users by days: {e}") - raise HTTPException( - status_code=500, - detail=f"Internal server error: 
{str(e)}" - ) - - -@router.post("/daily-registered-users", response_model=UserRegistrationResponse) -async def get_daily_registered_users_post( - query: UserRegistrationQuery -): - """ - Get daily registered users count for a date range (POST method) - - Same as GET method but accepts parameters in request body - """ - try: - # Validate date range - if query.start_date > query.end_date: - raise HTTPException( - status_code=400, - detail="Start date must be before or equal to end date" - ) - - if (query.end_date - query.start_date).days > 365: - raise HTTPException( - status_code=400, - detail="Date range cannot exceed 365 days" - ) - - logger.info(f"Querying registration data from {query.start_date} to {query.end_date} for product {query.product_id}") - - # Get data from service - result = registration_service.get_daily_registered_users( - query.start_date, query.end_date, query.product_id - ) - - logger.info(f"Successfully retrieved data for {len(result.dates)} days") - return result - - except HTTPException: - raise - except Exception as e: - logger.error(f"Failed to get daily registered users: {e}") - raise HTTPException( - status_code=500, - detail=f"Internal server error: {str(e)}" - ) diff --git a/apps/metrics/webapi/routes/starrocks_metrics/__init__.py b/apps/metrics/webapi/routes/starrocks_metrics/__init__.py new file mode 100644 index 0000000..2fbae20 --- /dev/null +++ b/apps/metrics/webapi/routes/starrocks_metrics/__init__.py @@ -0,0 +1,9 @@ +from fastapi import APIRouter +from .metrics_query import router as metrics_query_router +from .available_metrics import router as available_metrics_router +from .metric_info import router as metric_info_router + +api_router = APIRouter() +api_router.include_router(available_metrics_router, tags=["starrocks-metrics"]) +api_router.include_router(metrics_query_router, tags=["starrocks-metrics"]) +api_router.include_router(metric_info_router, tags=["starrocks-metrics"]) diff --git 
# Product -> supported StarRocks-backed metrics
SUPPORTED_STARROCKS_METRICS_MAP = {
    "freeleaps": [
        "daily_registered_users",
    ],
    "magicleaps": [
        "daily_registered_users",
    ],
}


@router.get("/starrocks/product/{product_id}/available-metrics")
async def get_available_metrics(product_id: str):
    """
    Get list of available StarRocks-backed metrics for a specific product.

    Args:
        product_id: Product ID to get metrics for (required).

    Returns a list of metric names available via StarRocks for the specified product.
    """
    await module_logger.log_info(
        f"Getting StarRocks available metrics list for product_id: {product_id}"
    )

    # Single dict lookup replaces the membership test + subscript pair.
    metric_names = SUPPORTED_STARROCKS_METRICS_MAP.get(product_id)
    if metric_names is None:
        raise HTTPException(status_code=404, detail=f"Unknown product_id: {product_id}")

    return {
        "product_id": product_id,
        "available_metrics": metric_names,
        "total_count": len(metric_names),
        "description": f"List of StarRocks-backed metrics for product '{product_id}'",
    }
+ """ + await module_logger.log_info( + f"Getting StarRocks metric info for metric '{metric_name}' from product '{product_id}'" + ) + + if product_id not in STARROCKS_METRIC_DESCRIPTIONS: + raise HTTPException(status_code=404, detail=f"Unknown product_id: {product_id}") + + product_metrics = STARROCKS_METRIC_DESCRIPTIONS[product_id] + if metric_name not in product_metrics: + raise HTTPException(status_code=404, detail=f"Unknown metric '{metric_name}' for product '{product_id}'") + + metric_info = { + "product_id": product_id, + "metric_name": metric_name, + "description": product_metrics[metric_name], + } + + return { + "metric_info": metric_info, + "description": f"Information about StarRocks metric '{metric_name}' in product '{product_id}'", + } diff --git a/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py b/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py new file mode 100644 index 0000000..976b9c2 --- /dev/null +++ b/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py @@ -0,0 +1,95 @@ +from fastapi import APIRouter +from typing import Optional, List, Dict, Any +from pydantic import BaseModel, Field +from datetime import date + +from common.log.module_logger import ModuleLogger +from backend.services.registration_analytics_service import RegistrationService + + +class RegistrationDataPoint(BaseModel): + """Single data point in registration time series.""" + date: str = Field(..., description="Date in YYYY-MM-DD format") + value: int = Field(..., description="Number of registered users") + product_id: str = Field(..., description="Product identifier") + + +class RegistrationTimeSeriesResponse(BaseModel): + """Response model for registration time series data.""" + metric_name: str = Field(..., description="Name of the queried metric") + data_points: List[RegistrationDataPoint] = Field(..., description="List of data points") + total_points: int = Field(..., description="Total number of data points") + time_range: Dict[str, str] = 
@router.post("/starrocks/dru_query", response_model=RegistrationTimeSeriesResponse)
async def metrics_query(
    request: RegistrationQueryRequest
):
    """
    Query registration time series data.

    Returns XY curve data (time series) for user registrations within the given date range.

    Raises:
        HTTPException: 400 when a date is malformed or start_date is after end_date.
    """
    # Local import: this module only imports APIRouter from fastapi.
    from fastapi import HTTPException

    await module_logger.log_info(
        f"Querying registration data for product '{request.product_id}' from {request.start_date} to {request.end_date}")

    # Parse dates - handle both YYYY-M-D and YYYY-MM-DD formats
    def parse_date(date_str: str) -> date:
        try:
            return date.fromisoformat(date_str)
        except ValueError:
            pass
        # Fall back to manually parsing unpadded YYYY-M-D values.
        parts = date_str.split('-')
        if len(parts) == 3:
            try:
                return date(int(parts[0]), int(parts[1]), int(parts[2]))
            except ValueError:
                pass
        # Fix: malformed input previously raised a bare ValueError that
        # bubbled up as an unhandled 500; report it as a client error.
        raise HTTPException(status_code=400, detail=f"Invalid date format: {date_str}")

    start_date = parse_date(request.start_date)
    end_date = parse_date(request.end_date)

    # Fix: restore the range validation that the replaced registration
    # endpoint (registration_metrics.py) performed before this refactor.
    if start_date > end_date:
        raise HTTPException(
            status_code=400,
            detail="Start date must be before or equal to end date",
        )

    # Query the registration data
    result = registration_service.get_daily_registered_users(
        start_date=start_date,
        end_date=end_date,
        product_id=request.product_id
    )

    # Format response
    response = RegistrationTimeSeriesResponse(
        metric_name="daily_registered_users",
        data_points=[
            RegistrationDataPoint(
                date=date_str,
                value=count,
                product_id=request.product_id
            )
            for date_str, count in zip(result.dates, result.counts)
        ],
        total_points=len(result.dates),
        time_range={
            "start": request.start_date,
            "end": request.end_date
        },
        total_registrations=result.total_registrations
    )

    await module_logger.log_info(
        f"Successfully queried registration data with {len(result.dates)} data points, total registrations: {result.total_registrations}")
    return response
-class RegistrationService: - """Service for handling user registration data queries""" - - def __init__(self): - self.starrocks_client = StarRocksClient() - - def get_daily_registered_users( - self, - start_date: date, - end_date: date, - product_id: str = "freeleaps" - ) -> UserRegistrationResponse: - """ - Get daily registered users count for a date range - - Args: - start_date: Start date for the query - end_date: End date for the query - product_id: Product identifier (default: freeleaps) - - Returns: - UserRegistrationResponse with dates and counts - """ - try: - # Query data from StarRocks - raw_data = self.starrocks_client.get_daily_registered_users( - start_date, end_date, product_id - ) - # Convert to DailyRegisteredUsers objects - daily_data = [ - DailyRegisteredUsers( - date_id=row['date_id'], - product_id=row['product_id'], - registered_cnt=row['registered_cnt'], - updated_at=row.get('updated_at') - ) - for row in raw_data - ] - # Create date-to-count mapping - data_dict = {str(item.date_id): item.registered_cnt for item in daily_data} - - # Generate complete date range - dates = [] - counts = [] - current_date = start_date - - while current_date <= end_date: - date_str = str(current_date) - dates.append(date_str) - counts.append(data_dict.get(date_str, 0)) - current_date += timedelta(days=1) - - # Calculate total registrations - total_registrations = sum(counts) - - logger.info( - f"Retrieved registration data for {len(dates)} days, " - f"total registrations: {total_registrations}" - ) - - return UserRegistrationResponse( - dates=dates, - counts=counts, - total_registrations=total_registrations, - query_period=f"{start_date} to {end_date}" - ) - - except Exception as e: - logger.error(f"Failed to get daily registered users: {e}") - raise e - - def get_registration_summary( - self, - start_date: date, - end_date: date, - product_id: str = "freeleaps" - ) -> Dict[str, Any]: - """ - Get summary statistics for user registrations - - Args: - start_date: 
Start date for the query - end_date: End date for the query - product_id: Product identifier - - Returns: - Dictionary with summary statistics - """ - try: - response = self.get_daily_registered_users(start_date, end_date, product_id) - - if not response.counts: - return { - "total_registrations": 0, - "average_daily": 0, - "max_daily": 0, - "min_daily": 0, - "days_with_registrations": 0, - "total_days": len(response.dates) - } - - counts = response.counts - non_zero_counts = [c for c in counts if c > 0] - - return { - "total_registrations": response.total_registrations, - "average_daily": round(sum(counts) / len(counts), 2), - "max_daily": max(counts), - "min_daily": min(counts), - "days_with_registrations": len(non_zero_counts), - "total_days": len(response.dates) - } - - except Exception as e: - logger.error(f"Failed to get registration summary: {e}") - raise e - - - - - - - - - diff --git a/apps/metrics/backend/services/starrocks_metrics_service.py b/apps/metrics/backend/services/starrocks_metrics_service.py new file mode 100644 index 0000000..329815b --- /dev/null +++ b/apps/metrics/backend/services/starrocks_metrics_service.py @@ -0,0 +1,268 @@ +from typing import Dict, List, Any, Optional, Union +from datetime import datetime, timedelta, date +from fastapi import HTTPException + +from common.log.module_logger import ModuleLogger +from ..infra.external_service.starrocks_client import StarRocksClient + + +class StarRocksMetricsService: + """ + Service class for querying StarRocks metrics with predefined SQL queries. + + This service provides a high-level interface for querying metrics data + using predefined SQL queries mapped to metric names. 
+ """ + + # Global dictionary mapping metric names to their corresponding SQL queries + METRIC_SQL_MAP: Dict[str, Dict[str, str]] = { + "freeleaps": { + "daily_registered_users": """ + SELECT + date_id, + product_id, + registered_cnt, + updated_at + FROM dws_daily_registered_users + WHERE date_id >= %s + AND date_id <= %s + AND product_id = %s + ORDER BY date_id ASC + """, + }, + "magicleaps": { + + } + } + + def __init__(self, starrocks_endpoint: Optional[str] = None): + """ + Initialize StarRocksMetricsService. + + Args: + starrocks_endpoint: StarRocks server endpoint. If None, uses default from settings. + """ + self.module_logger = ModuleLogger(__file__) + self.starrocks_client = StarRocksClient() + + def get_available_metrics(self, product_id: Optional[str] = None) -> List[str]: + """ + Get list of available metric names that have predefined SQL queries. + + Args: + product_id: Optional product ID to filter metrics. If None, returns all metrics from all products. + + Returns: + List of available metric names + """ + if product_id: + if product_id in self.METRIC_SQL_MAP: + return list(self.METRIC_SQL_MAP[product_id].keys()) + else: + return [] + else: + # Return all metrics from all products + all_metrics = [] + for product_metrics in self.METRIC_SQL_MAP.values(): + all_metrics.extend(product_metrics.keys()) + return all_metrics + + def get_available_products(self) -> List[str]: + """ + Get list of available product IDs. + + Returns: + List of available product IDs + """ + return list(self.METRIC_SQL_MAP.keys()) + + async def query_metric_by_time_range( + self, + product_id: str, + metric_name: str, + start_date: Union[str, date], + end_date: Union[str, date] + ) -> List[Dict[str, Any]]: + """ + Query metric data for a specific date range. 
+ + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric to query + start_date: Start date for the query (ISO string or date) + end_date: End date for the query (ISO string or date) + + Returns: + List of dictionaries with 'date' and 'value' keys + + Raises: + ValueError: If product_id or metric_name is not found in the SQL mapping + Exception: If StarRocks query fails + + Example: + result = await service.query_metric_by_time_range( + "freeleaps", + "daily_registered_users", + start_date=date.today() - timedelta(days=30), + end_date=date.today() + ) + # Returns: [{"date": "2024-01-01", "value": 45, "labels": {...}}, ...] + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_SQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in SQL mapping. Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_SQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' SQL mapping. Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Parse date strings if they are strings + if isinstance(start_date, str): + try: + start_dt = datetime.strptime(start_date, '%Y-%m-%d').date() + except ValueError: + raise HTTPException( + status_code=400, + detail="Invalid start_date format. Expected YYYY-MM-DD" + ) + else: + start_dt = start_date + + if isinstance(end_date, str): + try: + end_dt = datetime.strptime(end_date, '%Y-%m-%d').date() + except ValueError: + raise HTTPException( + status_code=400, + detail="Invalid end_date format. 
Expected YYYY-MM-DD" + ) + else: + end_dt = end_date + + # Validate date range + if start_dt >= end_dt: + raise HTTPException( + status_code=400, + detail="Start date must be before end date" + ) + + # Check date range is not too large (max 1 year) + time_diff = end_dt - start_dt + if time_diff > timedelta(days=365): + raise HTTPException( + status_code=400, + detail="Date range cannot exceed 1 year" + ) + + # Get the SQL query for the metric + sql_query = self.METRIC_SQL_MAP[product_id][metric_name] + + try: + await self.module_logger.log_info( + f"Querying metric '{metric_name}' from product '{product_id}' from {start_dt} to {end_dt}") + + # Execute the query + result = await self.starrocks_client.execute_query( + query=sql_query, + params=(start_dt, end_dt, product_id) + ) + + # Parse the result and format it + formatted_data = self._format_query_result(result, metric_name, product_id) + + await self.module_logger.log_info( + f"Successfully queried metric '{metric_name}' with {len(formatted_data)} data points") + return formatted_data + + except Exception as e: + await self.module_logger.log_error(f"Failed to query metric '{metric_name}': {e}") + raise + + def _format_query_result(self, starrocks_result: List[Dict[str, Any]], metric_name: str, product_id: str) -> List[Dict[str, Any]]: + """ + Format StarRocks query result into the required format.
+ + Args: + starrocks_result: Raw result from StarRocks query + metric_name: Name of the metric being queried + product_id: Product ID for the metric + + Returns: + List of dictionaries with 'date' and 'value' keys + """ + formatted_data = [] + + for row in starrocks_result: + # Format the date + date_value = row.get("date_id") + if date_value: + if isinstance(date_value, str): + date_str = date_value + else: + date_str = str(date_value) + else: + continue + + # Get the value + value = row.get("registered_cnt", 0) + if value is None: + value = 0 + + # Create labels dictionary + labels = { + "product_id": row.get("product_id", product_id), + "metric_type": metric_name + } + + formatted_data.append({ + "date": date_str, + "value": int(value) if value is not None else 0, + "metric": metric_name, + "labels": labels + }) + + # Sort by date + formatted_data.sort(key=lambda x: x["date"]) + + return formatted_data + + async def get_metric_info(self, product_id: str, metric_name: str) -> Dict[str, Any]: + """ + Get information about a specific metric including its SQL query. + + Args: + product_id: Product ID to identify which product's metrics to query + metric_name: Name of the metric + + Returns: + Dictionary containing metric information + + Raises: + ValueError: If product_id or metric_name is not found in the SQL mapping + """ + # Check if product_id exists in the mapping + if product_id not in self.METRIC_SQL_MAP: + available_products = ", ".join(self.get_available_products()) + error_msg = f"Product '{product_id}' not found in SQL mapping. Available products: {available_products}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + # Check if metric name exists in the product's mapping + if metric_name not in self.METRIC_SQL_MAP[product_id]: + available_metrics = ", ".join(self.get_available_metrics(product_id)) + error_msg = f"Metric '{metric_name}' not found in product '{product_id}' SQL mapping. 
Available metrics: {available_metrics}" + await self.module_logger.log_error(error_msg) + raise HTTPException(status_code=404, detail=error_msg) + + return { + "product_id": product_id, + "metric_name": metric_name, + "sql_query": self.METRIC_SQL_MAP[product_id][metric_name].strip(), + "description": "Daily registered users count from StarRocks table dws_daily_registered_users" + } \ No newline at end of file diff --git a/apps/metrics/common/config/app_settings.py b/apps/metrics/common/config/app_settings.py index 4ea8a84..927ea09 100644 --- a/apps/metrics/common/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -3,6 +3,12 @@ from typing import Optional class AppSettings(BaseSettings): + # Server settings + SERVER_HOST: str = "0.0.0.0" + SERVER_PORT: int = 8009 + SERVICE_API_ACCESS_HOST: str = "0.0.0.0" + SERVICE_API_ACCESS_PORT: int = 8009 + # Log settings LOG_BASE_PATH: str = "./logs" BACKEND_LOG_FILE_NAME: str = "freeleaps-metrics" diff --git a/apps/metrics/webapi/routes/__init__.py b/apps/metrics/webapi/routes/__init__.py index 3a2818a..242272f 100644 --- a/apps/metrics/webapi/routes/__init__.py +++ b/apps/metrics/webapi/routes/__init__.py @@ -1,5 +1,8 @@ from fastapi import APIRouter -from webapi.routes.metrics import router +from webapi.routes.starrocks_metrics import api_router as starrocks_metrics_router +from webapi.routes.prometheus_metrics import api_router as prometheus_metrics_router + api_router = APIRouter() -api_router.include_router(router, tags=["metrics"]) +api_router.include_router(starrocks_metrics_router, prefix="/starrocks", tags=["starrocks-metrics"]) +api_router.include_router(prometheus_metrics_router, prefix="/prometheus", tags=["prometheus-metrics"]) diff --git a/apps/metrics/webapi/routes/metrics/__init__.py b/apps/metrics/webapi/routes/metrics/__init__.py deleted file mode 100644 index 68613d6..0000000 --- a/apps/metrics/webapi/routes/metrics/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from fastapi import 
APIRouter -from webapi.routes.starrocks_metrics import api_router as starrocks_metrics_router -from webapi.routes.prometheus_metrics import api_router as prometheus_metrics_router - -router = APIRouter() -router.include_router(starrocks_metrics_router, prefix="/metrics", tags=["starrocks-metrics"]) -router.include_router(prometheus_metrics_router, prefix="/metrics", tags=["prometheus-metrics"]) diff --git a/apps/metrics/webapi/routes/metrics/registration_metrics.py b/apps/metrics/webapi/routes/metrics/registration_metrics.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/webapi/routes/starrocks_metrics/available_metrics.py b/apps/metrics/webapi/routes/starrocks_metrics/available_metrics.py index 68c4314..c0fd3fc 100644 --- a/apps/metrics/webapi/routes/starrocks_metrics/available_metrics.py +++ b/apps/metrics/webapi/routes/starrocks_metrics/available_metrics.py @@ -1,22 +1,14 @@ from fastapi import APIRouter, HTTPException from common.log.module_logger import ModuleLogger +from backend.services.starrocks_metrics_service import StarRocksMetricsService router = APIRouter() -# Initialize logger +# Initialize service and logger +starrocks_service = StarRocksMetricsService() module_logger = ModuleLogger(__file__) -# Product -> supported StarRocks-backed metrics -SUPPORTED_STARROCKS_METRICS_MAP = { - "freeleaps": [ - "daily_registered_users", - ], - "magicleaps": [ - "daily_registered_users", - ], -} - @router.get("/starrocks/product/{product_id}/available-metrics") async def get_available_metrics(product_id: str): @@ -32,10 +24,14 @@ async def get_available_metrics(product_id: str): f"Getting StarRocks available metrics list for product_id: {product_id}" ) - if product_id not in SUPPORTED_STARROCKS_METRICS_MAP: - raise HTTPException(status_code=404, detail=f"Unknown product_id: {product_id}") - - metrics = SUPPORTED_STARROCKS_METRICS_MAP[product_id] + metrics = starrocks_service.get_available_metrics(product_id) + + if not metrics: + 
available_products = ", ".join(starrocks_service.get_available_products()) + raise HTTPException( + status_code=404, + detail=f"Unknown product_id: {product_id}. Available products: {available_products}" + ) return { "product_id": product_id, diff --git a/apps/metrics/webapi/routes/starrocks_metrics/metric_info.py b/apps/metrics/webapi/routes/starrocks_metrics/metric_info.py index d376cd0..e94b19e 100644 --- a/apps/metrics/webapi/routes/starrocks_metrics/metric_info.py +++ b/apps/metrics/webapi/routes/starrocks_metrics/metric_info.py @@ -1,22 +1,14 @@ from fastapi import APIRouter, HTTPException from common.log.module_logger import ModuleLogger +from backend.services.starrocks_metrics_service import StarRocksMetricsService router = APIRouter() -# Initialize logger +# Initialize service and logger +starrocks_service = StarRocksMetricsService() module_logger = ModuleLogger(__file__) -# Product -> metric -> description -STARROCKS_METRIC_DESCRIPTIONS = { - "freeleaps": { - "daily_registered_users": "Daily registered users count from StarRocks table dws_daily_registered_users", - }, - "magicleaps": { - "daily_registered_users": "Daily registered users count from StarRocks table dws_daily_registered_users", - }, -} - @router.get("/starrocks/product/{product_id}/metric/{metric_name}/info") async def get_metric_info( @@ -34,18 +26,7 @@ async def get_metric_info( f"Getting StarRocks metric info for metric '{metric_name}' from product '{product_id}'" ) - if product_id not in STARROCKS_METRIC_DESCRIPTIONS: - raise HTTPException(status_code=404, detail=f"Unknown product_id: {product_id}") - - product_metrics = STARROCKS_METRIC_DESCRIPTIONS[product_id] - if metric_name not in product_metrics: - raise HTTPException(status_code=404, detail=f"Unknown metric '{metric_name}' for product '{product_id}'") - - metric_info = { - "product_id": product_id, - "metric_name": metric_name, - "description": product_metrics[metric_name], - } + metric_info = await 
starrocks_service.get_metric_info(product_id, metric_name) return { "metric_info": metric_info, diff --git a/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py b/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py index 976b9c2..77078f9 100644 --- a/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py +++ b/apps/metrics/webapi/routes/starrocks_metrics/metrics_query.py @@ -1,31 +1,31 @@ from fastapi import APIRouter -from typing import Optional, List, Dict, Any +from typing import Optional, List, Dict, Any, Union from pydantic import BaseModel, Field -from datetime import date +from datetime import date, datetime from common.log.module_logger import ModuleLogger -from backend.services.registration_analytics_service import RegistrationService +from backend.services.starrocks_metrics_service import StarRocksMetricsService -class RegistrationDataPoint(BaseModel): - """Single data point in registration time series.""" +class MetricDataPoint(BaseModel): + """Single data point in metric time series.""" date: str = Field(..., description="Date in YYYY-MM-DD format") - value: int = Field(..., description="Number of registered users") - product_id: str = Field(..., description="Product identifier") + value: Union[int, float] = Field(..., description="Metric value") + labels: Dict[str, Any] = Field(default_factory=dict, description="Metric labels") -class RegistrationTimeSeriesResponse(BaseModel): - """Response model for registration time series data.""" +class MetricTimeSeriesResponse(BaseModel): + """Response model for metric time series data.""" metric_name: str = Field(..., description="Name of the queried metric") - data_points: List[RegistrationDataPoint] = Field(..., description="List of data points") + data_points: List[MetricDataPoint] = Field(..., description="List of data points") total_points: int = Field(..., description="Total number of data points") time_range: Dict[str, str] = Field(..., description="Start and end date of the 
query") - total_registrations: int = Field(..., description="Total number of registrations in the period") -class RegistrationQueryRequest(BaseModel): - """Request model for registration query.""" - product_id: str = Field("freeleaps", description="Product ID to identify which product's data to query") +class MetricQueryRequest(BaseModel): + """Request model for metric query.""" + product_id: str = Field(..., description="Product ID to identify which product's data to query") + metric_name: str = Field(..., description="Name of the metric to query") start_date: str = Field(..., description="Start date in YYYY-MM-DD format") end_date: str = Field(..., description="End date in YYYY-MM-DD format") @@ -33,63 +33,48 @@ class RegistrationQueryRequest(BaseModel): router = APIRouter() # Initialize service and logger -registration_service = RegistrationService() +starrocks_service = StarRocksMetricsService() module_logger = ModuleLogger(__file__) -@router.post("/starrocks/dru_query", response_model=RegistrationTimeSeriesResponse) +@router.post("/starrocks/metrics_query", response_model=MetricTimeSeriesResponse) async def metrics_query( - request: RegistrationQueryRequest + request: MetricQueryRequest ): """ - Query registration time series data. + Query StarRocks metrics time series data. - Returns XY curve data (time series) for user registrations within the given date range. + Returns XY curve data (time series) for the specified metric within the given date range. 
""" await module_logger.log_info( - f"Querying registration data for product '{request.product_id}' from {request.start_date} to {request.end_date}") + f"Querying metric '{request.metric_name}' from product '{request.product_id}' from {request.start_date} to {request.end_date}") - # Parse dates - handle both YYYY-M-D and YYYY-MM-DD formats - def parse_date(date_str: str) -> date: - try: - return date.fromisoformat(date_str) - except ValueError: - # Try to parse YYYY-M-D format and convert to YYYY-MM-DD - parts = date_str.split('-') - if len(parts) == 3: - year, month, day = parts - return date(int(year), int(month), int(day)) - raise ValueError(f"Invalid date format: {date_str}") - - start_date = parse_date(request.start_date) - end_date = parse_date(request.end_date) - - # Query the registration data - result = registration_service.get_daily_registered_users( - start_date=start_date, - end_date=end_date, - product_id=request.product_id + # Query the metric data + data_points = await starrocks_service.query_metric_by_time_range( + product_id=request.product_id, + metric_name=request.metric_name, + start_date=request.start_date, + end_date=request.end_date ) # Format response - response = RegistrationTimeSeriesResponse( - metric_name="daily_registered_users", + response = MetricTimeSeriesResponse( + metric_name=request.metric_name, data_points=[ - RegistrationDataPoint( - date=date_str, - value=count, - product_id=request.product_id + MetricDataPoint( + date=point["date"], + value=point["value"], + labels=point["labels"] ) - for date_str, count in zip(result.dates, result.counts) + for point in data_points ], - total_points=len(result.dates), + total_points=len(data_points), time_range={ "start": request.start_date, "end": request.end_date - }, - total_registrations=result.total_registrations + } ) await module_logger.log_info( - f"Successfully queried registration data with {len(result.dates)} data points, total registrations: {result.total_registrations}") + 
f"Successfully queried metric '{request.metric_name}' with {len(data_points)} data points") return response From f902edd49dd8c7ef01f7132a6e491b4b0cc59898 Mon Sep 17 00:00:00 2001 From: weicao Date: Fri, 19 Sep 2025 10:38:18 +0800 Subject: [PATCH 12/15] fixed: <= to < --- .../services/starrocks_metrics_service.py | 2 +- apps/metrics/docs/design.md | 153 ++++++++++++++++++ 2 files changed, 154 insertions(+), 1 deletion(-) diff --git a/apps/metrics/backend/services/starrocks_metrics_service.py b/apps/metrics/backend/services/starrocks_metrics_service.py index 329815b..648dd74 100644 --- a/apps/metrics/backend/services/starrocks_metrics_service.py +++ b/apps/metrics/backend/services/starrocks_metrics_service.py @@ -25,7 +25,7 @@ class StarRocksMetricsService: updated_at FROM dws_daily_registered_users WHERE date_id >= %s - AND date_id <= %s + AND date_id < %s AND product_id = %s ORDER BY date_id ASC """, diff --git a/apps/metrics/docs/design.md b/apps/metrics/docs/design.md index 796b5ae..ea9fa46 100644 --- a/apps/metrics/docs/design.md +++ b/apps/metrics/docs/design.md @@ -8,6 +8,159 @@ We support two ways to query metrics: We can implement StarRocks Metric queries similar to Prometheus Metric queries. The only difference is replacing PromQL with SQL and querying through StarRocks API. +## 2.1.Metrics Config +Currently, metrics are configured in code through the `StarRocksMetricsService.METRIC_SQL_MAP` dictionary. In the future, they will be configured through database or other methods. 
+Organization structure: Product ID -> Metric Name -> SQL Query +```python +METRIC_SQL_MAP: Dict[str, Dict[str, str]] = { + "freeleaps": { + "daily_registered_users": """ + SELECT + date_id, + product_id, + registered_cnt, + updated_at + FROM dws_daily_registered_users + WHERE date_id >= %s + AND date_id <= %s + AND product_id = %s + ORDER BY date_id ASC + """, + }, + "magicleaps": { + # Future metrics can be added here + } +} +``` + +## 2.2.API Design + +### 2.2.1.Query Metrics by Product ID + +API: `/api/metrics/starrocks/product/{product_id}/available-metrics` + +Method: GET +Request: +``` +product_id=freeleaps +``` +Response: + +```json +{ + "product_id": "freeleaps", + "available_metrics": [ + "daily_registered_users" + ], + "total_count": 1, + "description": "List of StarRocks-backed metrics for product 'freeleaps'" +} +``` + +### 2.2.2.Query Metric Info +API: `/api/metrics/starrocks/product/{product_id}/metric/{metric_name}/info` + +Method: GET +Request: +``` +product_id=freeleaps +metric_name=daily_registered_users +``` +Response: + +```json +{ + "metric_info": { + "product_id": "freeleaps", + "metric_name": "daily_registered_users", + "sql_query": "SELECT date_id, product_id, registered_cnt, updated_at FROM dws_daily_registered_users WHERE date_id >= %s AND date_id <= %s AND product_id = %s ORDER BY date_id ASC", + "description": "Daily registered users count from StarRocks table dws_daily_registered_users" + }, + "description": "Information about StarRocks metric 'daily_registered_users' in product 'freeleaps'" +} +``` + +### 2.2.3.Query Metric Data +API: `/api/metrics/starrocks/metrics_query` + +Method: POST +Request: +```json +{ + "product_id": "freeleaps", + "metric_name": "daily_registered_users", + "start_date": "2024-09-10", + "end_date": "2024-09-20" +} +``` +Response: + +```json +{ + "metric_name": "daily_registered_users", + "data_points": [ + { + "date": "2024-09-10", + "value": 45, + "labels": { + "product_id": "freeleaps", + "metric_type": 
"daily_registered_users" + } + }, + { + "date": "2024-09-11", + "value": 52, + "labels": { + "product_id": "freeleaps", + "metric_type": "daily_registered_users" + } + }, + { + "date": "2024-09-12", + "value": 38, + "labels": { + "product_id": "freeleaps", + "metric_type": "daily_registered_users" + } + }, + ... + { + "date": "2024-09-19", + "value": 67, + "labels": { + "product_id": "freeleaps", + "metric_type": "daily_registered_users" + } + } + ], + "total_points": 10, + "time_range": { + "start": "2024-09-10", + "end": "2024-09-19" + } +} +``` + +## 2.3.Technical Implementation + +### 2.3.1.StarRocks Client +- Uses PyMySQL to connect to StarRocks database +- Supports parameterized queries for security +- Automatic connection management with context manager +- Error handling and logging + +### 2.3.2.Data Format +- Date format: `YYYY-MM-DD` +- Values are returned as integers or floats +- Labels include product_id and metric_type for debugging +- Results are sorted by date in ascending order + +### 2.3.3.Validation +- Date range validation (start_date < end_date) +- Maximum date range limit (1 year) +- Product ID and metric name validation against available mappings +- Input format validation for date strings + # 3.Prometheus Metric ## 3.1.Metrics Config From b7858c193e9b7564387b6fac980859a5c00db071 Mon Sep 17 00:00:00 2001 From: weicao Date: Fri, 19 Sep 2025 15:32:00 +0800 Subject: [PATCH 13/15] fixed some code standard --- apps/metrics/Dockerfile | 22 +++----- .../external_service/starrocks_client.py | 52 ++++--------------- apps/metrics/backend/models/__init__.py | 0 .../models/user_registration_models.py | 26 ---------- .../services/starrocks_metrics_service.py | 2 +- apps/metrics/common/config/app_settings.py | 2 +- apps/metrics/webapi/main.py | 8 +-- 7 files changed, 26 insertions(+), 86 deletions(-) delete mode 100644 apps/metrics/backend/models/__init__.py delete mode 100644 apps/metrics/backend/models/user_registration_models.py diff --git 
a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile index 4ca7eee..b27cb4e 100644 --- a/apps/metrics/Dockerfile +++ b/apps/metrics/Dockerfile @@ -16,21 +16,15 @@ COPY local.env . # Copy application code COPY . . -ENV MONGODB_NAME = "freeleaps2" -ENV MONGODB_URI = "mongodb://freeleaps2-mongodb:27017" +# StarRocks settings +ENV STARROCKS_HOST: str = "freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc" +ENV STARROCKS_PORT: int = 9030 +ENV STARROCKS_USER: str = "root" +ENV STARROCKS_PASSWORD: str = "" +ENV STARROCKS_DATABASE: str = "freeleaps" -#app_settings -ENV GITEA_TOKEN = "" -ENV GITEA_URL = "" -ENV GITEA_DEPOT_ORGANIZATION = "" -ENV CODE_DEPOT_HTTP_PORT = "" -ENV CODE_DEPOT_SSH_PORT = "" -ENV CODE_DEPOT_DOMAIN_NAME = "" - -#log_settings -ENV LOG_BASE_PATH = "./logs" -ENV BACKEND_LOG_FILE_NAME = "freeleaps-metrics" -ENV APPLICATION_ACTIVITY_LOG = "freeleaps-metrics-activity" +# Prometheus settings +ENV PROMETHEUS_ENDPOINT: str = "http://localhost:9090" # Expose port EXPOSE 8009 diff --git a/apps/metrics/backend/infra/external_service/starrocks_client.py b/apps/metrics/backend/infra/external_service/starrocks_client.py index 7b639af..d88de67 100644 --- a/apps/metrics/backend/infra/external_service/starrocks_client.py +++ b/apps/metrics/backend/infra/external_service/starrocks_client.py @@ -1,7 +1,7 @@ import pymysql from typing import List, Dict, Any, Optional from datetime import date -from loguru import logger +from common.log.module_logger import ModuleLogger from common.config.app_settings import app_settings @@ -15,8 +15,9 @@ class StarRocksClient: self.password = app_settings.STARROCKS_PASSWORD self.database = app_settings.STARROCKS_DATABASE self.connection = None + self.module_logger = ModuleLogger(__file__) - def connect(self) -> bool: + async def connect(self) -> bool: """Establish connection to StarRocks database""" try: self.connection = pymysql.connect( @@ -28,63 +29,32 @@ class StarRocksClient: charset='utf8mb4', autocommit=True ) - 
logger.info(f"Successfully connected to StarRocks at {self.host}:{self.port}") + await self.module_logger.log_info(f"Successfully connected to StarRocks at {self.host}:{self.port}") return True except Exception as e: - logger.error(f"Failed to connect to StarRocks: {e}") + await self.module_logger.log_error(f"Failed to connect to StarRocks: {e}") return False - def disconnect(self): + async def disconnect(self): """Close database connection""" if self.connection: self.connection.close() self.connection = None - logger.info("Disconnected from StarRocks") + await self.module_logger.log_info("Disconnected from StarRocks") - def execute_query(self, query: str, params: Optional[tuple] = None) -> List[Dict[str, Any]]: + async def execute_query(self, query: str, params: Optional[tuple] = None) -> List[Dict[str, Any]]: """Execute SQL query and return results""" if not self.connection: - if not self.connect(): + if not await self.connect(): raise Exception("Failed to connect to StarRocks database") try: with self.connection.cursor(pymysql.cursors.DictCursor) as cursor: cursor.execute(query, params) results = cursor.fetchall() - logger.info(f"Query executed successfully, returned {len(results)} rows") + await self.module_logger.log_info(f"Query executed successfully, returned {len(results)} rows") return results except Exception as e: - logger.error(f"Query execution failed: {e}") + await self.module_logger.log_error(f"Query execution failed: {e}") raise e - def get_daily_registered_users( - self, - start_date: date, - end_date: date, - product_id: str = "freeleaps" - ) -> List[Dict[str, Any]]: - """Query daily registered users from StarRocks""" - query = """ - SELECT - date_id, - product_id, - registered_cnt, - updated_at - FROM dws_daily_registered_users - WHERE date_id >= %s - AND date_id <= %s - AND product_id = %s - ORDER BY date_id ASC - """ - - params = (start_date, end_date, product_id) - return self.execute_query(query, params) - - def __enter__(self): - """Context 
manager entry""" - self.connect() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Context manager exit""" - self.disconnect() diff --git a/apps/metrics/backend/models/__init__.py b/apps/metrics/backend/models/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apps/metrics/backend/models/user_registration_models.py b/apps/metrics/backend/models/user_registration_models.py deleted file mode 100644 index 64bebcb..0000000 --- a/apps/metrics/backend/models/user_registration_models.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import BaseModel -from datetime import date, datetime -from typing import List, Optional - - -class DailyRegisteredUsers(BaseModel): - """Daily registered users data model""" - date_id: date - product_id: str = "freeleaps" - registered_cnt: int - updated_at: Optional[datetime] = None - - -class UserRegistrationQuery(BaseModel): - """Query parameters for user registration data""" - start_date: date - end_date: date - product_id: str = "freeleaps" - - -class UserRegistrationResponse(BaseModel): - """Response model for user registration data""" - dates: List[str] - counts: List[int] - total_registrations: int - query_period: str diff --git a/apps/metrics/backend/services/starrocks_metrics_service.py b/apps/metrics/backend/services/starrocks_metrics_service.py index 648dd74..1317336 100644 --- a/apps/metrics/backend/services/starrocks_metrics_service.py +++ b/apps/metrics/backend/services/starrocks_metrics_service.py @@ -264,5 +264,5 @@ class StarRocksMetricsService: "product_id": product_id, "metric_name": metric_name, "sql_query": self.METRIC_SQL_MAP[product_id][metric_name].strip(), - "description": "Daily registered users count from StarRocks table dws_daily_registered_users" + "description": f"{metric_name} count from StarRocks table dws_{metric_name}" } \ No newline at end of file diff --git a/apps/metrics/common/config/app_settings.py b/apps/metrics/common/config/app_settings.py index 
927ea09..4877ca6 100644 --- a/apps/metrics/common/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -29,7 +29,7 @@ class AppSettings(BaseSettings): class Config: - env_file = ".env" + env_file = "local.env" app_settings = AppSettings() diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py index 42ef947..203a244 100644 --- a/apps/metrics/webapi/main.py +++ b/apps/metrics/webapi/main.py @@ -3,12 +3,14 @@ from fastapi.responses import RedirectResponse import uvicorn from webapi.bootstrap.application import create_app -from webapi.routes.metrics import registration_metrics +from webapi.routes.starrocks_metrics import api_router as starrocks_metrics_router +from webapi.routes.prometheus_metrics import api_router as prometheus_metrics_router app = create_app() # Include routers -app.include_router(registration_metrics.router) +app.include_router(starrocks_metrics_router, prefix="/metrics", tags=["starrocks-metrics"]) +app.include_router(prometheus_metrics_router, prefix="/metrics", tags=["prometheus-metrics"]) @app.get("/", status_code=301) @@ -22,4 +24,4 @@ async def root(): if __name__ == "__main__": uvicorn.run( app="main:app", host=site_settings.SERVER_HOST, port=site_settings.SERVER_PORT - ) + ) \ No newline at end of file From 62533859ba7ed3c2793f0c6a0d750fafbf3fcc03 Mon Sep 17 00:00:00 2001 From: weicao Date: Fri, 19 Sep 2025 15:58:23 +0800 Subject: [PATCH 14/15] fixed: code standard and env file --- apps/metrics/local.env | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 apps/metrics/local.env diff --git a/apps/metrics/local.env b/apps/metrics/local.env deleted file mode 100644 index 4b601db..0000000 --- a/apps/metrics/local.env +++ /dev/null @@ -1,19 +0,0 @@ -# Local environment configuration for Metrics service -SERVER_HOST=0.0.0.0 -SERVER_PORT=8009 -SERVICE_API_ACCESS_PORT=8009 -SERVICE_API_ACCESS_HOST=0.0.0.0 - -# starrocks settings 
-STARROCKS_HOST=freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc -STARROCKS_PORT=9030 -STARROCKS_USER=root -STARROCKS_PASSWORD="" -STARROCKS_DATABASE=freeleaps - -# log settings -LOG_BASE_PATH=./logs -BACKEND_LOG_FILE_NAME=metrics -APPLICATION_ACTIVITY_LOG=metrics-activity - -PROMETHEUS_ENDPOINT=http://localhost:9090 \ No newline at end of file From 81628407df34c13aba36025f30036717c0ea3f0e Mon Sep 17 00:00:00 2001 From: weicao Date: Fri, 19 Sep 2025 16:03:29 +0800 Subject: [PATCH 15/15] fixed: code standard and env file --- apps/metrics/Dockerfile | 2 -- apps/metrics/common/config/app_settings.py | 2 +- apps/metrics/local.env | 19 +++++++++++++++++++ apps/metrics/webapi/main.py | 7 ------- 4 files changed, 20 insertions(+), 10 deletions(-) create mode 100644 apps/metrics/local.env diff --git a/apps/metrics/Dockerfile b/apps/metrics/Dockerfile index b27cb4e..2bac1b0 100644 --- a/apps/metrics/Dockerfile +++ b/apps/metrics/Dockerfile @@ -10,8 +10,6 @@ COPY requirements.txt . # Install dependencies RUN pip install --no-cache-dir -r requirements.txt -# Copy environment file -COPY local.env . # Copy application code COPY . . 
diff --git a/apps/metrics/common/config/app_settings.py b/apps/metrics/common/config/app_settings.py index 4877ca6..927ea09 100644 --- a/apps/metrics/common/config/app_settings.py +++ b/apps/metrics/common/config/app_settings.py @@ -29,7 +29,7 @@ class AppSettings(BaseSettings): class Config: - env_file = "local.env" + env_file = ".env" app_settings = AppSettings() diff --git a/apps/metrics/local.env b/apps/metrics/local.env new file mode 100644 index 0000000..4b601db --- /dev/null +++ b/apps/metrics/local.env @@ -0,0 +1,19 @@ +# Local environment configuration for Metrics service +SERVER_HOST=0.0.0.0 +SERVER_PORT=8009 +SERVICE_API_ACCESS_PORT=8009 +SERVICE_API_ACCESS_HOST=0.0.0.0 + +# starrocks settings +STARROCKS_HOST=freeleaps-starrocks-cluster-fe-service.freeleaps-data-platform.svc +STARROCKS_PORT=9030 +STARROCKS_USER=root +STARROCKS_PASSWORD="" +STARROCKS_DATABASE=freeleaps + +# log settings +LOG_BASE_PATH=./logs +BACKEND_LOG_FILE_NAME=metrics +APPLICATION_ACTIVITY_LOG=metrics-activity + +PROMETHEUS_ENDPOINT=http://localhost:9090 \ No newline at end of file diff --git a/apps/metrics/webapi/main.py b/apps/metrics/webapi/main.py index 203a244..93b3fa8 100644 --- a/apps/metrics/webapi/main.py +++ b/apps/metrics/webapi/main.py @@ -3,16 +3,9 @@ from fastapi.responses import RedirectResponse import uvicorn from webapi.bootstrap.application import create_app -from webapi.routes.starrocks_metrics import api_router as starrocks_metrics_router -from webapi.routes.prometheus_metrics import api_router as prometheus_metrics_router app = create_app() -# Include routers -app.include_router(starrocks_metrics_router, prefix="/metrics", tags=["starrocks-metrics"]) -app.include_router(prometheus_metrics_router, prefix="/metrics", tags=["prometheus-metrics"]) - - @app.get("/", status_code=301) async def root(): """