Update payment log config
parent a33869b7fe
commit be0725a955
@@ -5,10 +5,10 @@ import json

 class ApplicationLogger(LoggerBase):
     def __init__(self, application_activities: dict[str, any] = {}) -> None:
-        extra_fileds = {}
+        extra_fields = {}
         if application_activities:
-            extra_fileds.update(application_activities)
+            extra_fields.update(application_activities)
         super().__init__(
             logger_name=app_settings.APPLICATION_ACTIVITY_LOG,
-            extra_fileds=extra_fileds,
+            extra_fields=extra_fields,
         )
@@ -11,19 +11,15 @@ class LoggerBase:
     binded_loggers = {}
     logger_lock = threading.Lock()

-    def __init__(
-        self, logger_name: str, extra_fileds: dict[str, any]
-    ) -> None:
+    def __init__(self, logger_name: str, extra_fields: dict[str, any]) -> None:
         self.__logger_name = logger_name
-        self.extra_fileds = extra_fileds
+        self.extra_fields = extra_fields
         with LoggerBase.logger_lock:
             if self.__logger_name in LoggerBase.binded_loggers:
                 self.logger = LoggerBase.binded_loggers[self.__logger_name]
                 return

-            log_filename = (
-                log_settings.LOG_BASE_PATH + "/" + self.__logger_name + ".log"
-            )
+            log_filename = log_settings.LOG_BASE_PATH + "/" + self.__logger_name + ".log"
             log_retention = log_settings.LOG_RETENTION
             log_rotation = log_settings.LOG_ROTATION
             log_level = "INFO"
@@ -57,14 +53,14 @@ class LoggerBase:
         subject: str,
         event: str,
         properties: dict[str, any],
-        text: str = ""
+        text: str = "",
     ) -> None:
         local_logger = self.logger.bind(
             sender_id=sender_id,
             receiver_id=receiver_id,
             subject=subject,
             event=event,
-            properties=properties
+            properties=properties,
         )
         local_logger.info(text)
@@ -83,7 +79,7 @@ class LoggerBase:
             subject=subject,
             event="exception",
             properties=properties,
-            exception=exception
+            exception=exception,
         )
         local_logger.exception(text)
@@ -1,25 +0,0 @@
-from .base_logger import LoggerBase
-from common.config.app_settings import app_settings
-import json
-
-
-class BusinessMetricLogger(LoggerBase):
-    def __init__(self, business_metrics: dict[str, any] = {}) -> None:
-        extra_fileds = {}
-        if business_metrics:
-            extra_fileds.update(business_metrics)
-        super().__init__(
-            logger_name=app_settings.BUSINESS_METRIC_LOG,
-            extra_fileds=extra_fileds,
-        )
-
-
-    async def log_metrics(self, business_metrics: dict[str, any] = {}) -> None:
-        return await super().log_event(
-            sender_id="business_metric_manager",
-            receiver_id="business_metric_logger",
-            subject="metrics",
-            event="logging",
-            properties=business_metrics,
-            text="business metric logged"
-        )
@@ -6,9 +6,9 @@ import json

 class UserLogger(LoggerBase):
     def __init__(self, user_activities: dict[str, any] = {}) -> None:
-        extra_fileds = {}
+        extra_fields = {}
         if user_activities:
-            extra_fileds.update(user_activities)
+            extra_fields.update(user_activities)
         super().__init__(
-            logger_name=app_settings.USER_ACTIVITY_LOG, extra_fileds=extra_fileds
+            logger_name=app_settings.USER_ACTIVITY_LOG, extra_fields=extra_fields
         )
@@ -3,7 +3,7 @@ from fastapi import FastAPI
 from fastapi.openapi.utils import get_openapi

 from webapi.providers import common
-from webapi.providers import logger
+from webapi.providers.logger import register_logger
 from webapi.providers import router
 from webapi.providers import database
@@ -20,9 +20,9 @@ def create_app() -> FastAPI:

     app = FreeleapsApp()

+    register_logger()
     register(app, exception_handler)
     register(app, database)
-    register(app, logger)
     register(app, router)
     # register(app, scheduler)
     register(app, common)
@@ -1,65 +1,47 @@
 import logging
 import sys
-from loguru import logger
-from common.config.log_settings import log_settings
-from fastapi import FastAPI
+from loguru import logger as guru_logger


-def register(app: FastAPI):
-    level = log_settings.LOG_LEVEL
-    file_path = log_settings.LOG_PATH
-    retention = log_settings.LOG_RETENTION
-    rotation = log_settings.LOG_ROTATION
+def register_logger():
+    print("📢 Setting up logging interception...")

-    # intercept everything at the root logger
-    logging.root.handlers = [InterceptHandler()]
-    logging.root.setLevel(level)
+    # 🔴 **Ensure Uvicorn Logs Are Captured**
+    intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]

-    # remove every other logger's handlers
-    # and propagate to root logger
-    for name in logging.root.manager.loggerDict.keys():
-        logging.getLogger(name).handlers = []
-        logging.getLogger(name).propagate = True
-
-    # configure loguru
-    logger.add(sink=sys.stdout)
-    logger.add(sink=file_path, level=level, retention=retention, rotation=rotation)
-
-    # Disable noisy loggers
-    logger.disable("pika.adapters")
-    logger.disable("pika.connection")
-    logger.disable("pika.channel")
-    logger.disable("pika.callback")
-    logger.disable("pika.frame")
-    logger.disable("pika.spec")
-    logger.disable("aiormq.connection")
-    logger.disable("urllib3.connectionpool")
-
-    # Disable noisy MongoDB debug logs
-    logging.getLogger("pymongo").setLevel(logging.WARNING)
-    logging.getLogger("mongodb_migrations").setLevel(logging.WARNING)
-    logging.getLogger("asyncio").setLevel(logging.WARNING)
-    logging.getLogger("motor").setLevel(logging.WARNING)
-
-
-def boot(app: FastAPI):
-    pass
-
-
-class InterceptHandler(logging.Handler):
+    class InterceptHandler(logging.Handler):
         def emit(self, record):
-            # Get corresponding Loguru level if it exists
-            try:
-                level = logger.level(record.levelname).name
-            except ValueError:
-                level = record.levelno
-
-            # Find caller from where originated the logged message
+            level = (
+                guru_logger.level(record.levelname).name
+                if guru_logger.level(record.levelname, None)
+                else record.levelno
+            )
             frame, depth = logging.currentframe(), 2
             while frame.f_code.co_filename == logging.__file__:
                 frame = frame.f_back
                 depth += 1

-            logger.opt(depth=depth, exception=record.exc_info).log(
-                level, record.getMessage()
+            guru_logger.opt(depth=depth, exception=record.exc_info).log(
+                level,
+                f"[{record.name}] {record.getMessage()}",
             )
+
+    # 🔴 **Replace Existing Loggers with Interception**
+    logging.root.handlers.clear()
+    logging.root.setLevel(logging.INFO)
+    logging.root.handlers = [InterceptHandler()]
+
+    for logger_name in intercept_loggers:
+        logging_logger = logging.getLogger(logger_name)
+        logging_logger.handlers.clear()  # Remove Uvicorn default handlers
+        logging_logger.propagate = True  # ✅ Ensure they propagate through Loguru
+
+    # 🔴 **Redirect stdout/stderr to Loguru (Keep Green Timestamps)**
+    guru_logger.remove()
+    guru_logger.add(
+        sys.stdout,
+        level="INFO",
+        format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | {level} | {message}",
+    )
+
+    print("✅ Logging interception complete. Logs are formatted and deduplicated!")