Fix the duplicated logging issue
parent 669a5303cd
commit 20919277d2
@@ -5,10 +5,10 @@ import json

 class ApplicationLogger(LoggerBase):
     def __init__(self, application_activities: dict[str, any] = {}) -> None:
-        extra_fileds = {}
+        extra_fields = {}
         if application_activities:
-            extra_fileds.update(application_activities)
+            extra_fields.update(application_activities)
         super().__init__(
             logger_name=app_settings.APPLICATION_ACTIVITY_LOG,
-            extra_fileds=extra_fileds,
+            extra_fields=extra_fields,
         )
@@ -1,49 +1,54 @@
 from loguru import logger as guru_logger
 from common.config.log_settings import log_settings
-from typing import List
+from typing import Dict, Any
 import socket
-import json
-import logging
 import threading


 class LoggerBase:
-    binded_loggers = {}
+    binded_loggers = {}  # Stores logger instances
+    loguru_sinks_added = set()  # Tracks added log sinks
     logger_lock = threading.Lock()

-    def __init__(self, logger_name: str, extra_fileds: dict[str, any]) -> None:
+    def __init__(self, logger_name: str, extra_fields: Dict[str, Any] = None) -> None:
         self.__logger_name = logger_name
-        self.extra_fileds = extra_fileds
+        self.extra_fields = extra_fields or {}

         with LoggerBase.logger_lock:
+            # ✅ **If already created, reuse it** to prevent duplicates
             if self.__logger_name in LoggerBase.binded_loggers:
                 self.logger = LoggerBase.binded_loggers[self.__logger_name]
                 return

-        log_filename = log_settings.LOG_BASE_PATH + "/" + self.__logger_name + ".log"
-        log_retention = log_settings.LOG_RETENTION
-        log_rotation = log_settings.LOG_ROTATION
-        log_level = "INFO"
-        log_message_format = "{message}"
-
-        guru_logger.add(
-            sink=log_filename,
-            level=log_level,
-            retention=log_retention,
-            rotation=log_rotation,
-            format=log_message_format,
-            serialize=True,
-            filter=lambda record: "extra" in record
-            and "topic" in record["extra"]
-            and record["extra"]["topic"] == self.__logger_name,
-        )
-        host_name = socket.gethostname()
-        host_ip = socket.gethostbyname(host_name)
-        self.logger = guru_logger.bind(
-            topic=self.__logger_name,
-            host_ip=host_ip,
-            host_name=host_name,
-        )
-        with LoggerBase.logger_lock:
+            log_filename = f"{log_settings.LOG_BASE_PATH}/{self.__logger_name}.log"
+
+            # ✅ **Ensure Loguru sink is added only once**
+            if log_filename not in LoggerBase.loguru_sinks_added:
+                guru_logger.add(
+                    sink=log_filename,
+                    level="INFO",
+                    retention=log_settings.LOG_RETENTION,
+                    rotation=log_settings.LOG_ROTATION,
+                    format="{message}",
+                    serialize=True,
+                    filter=lambda record: "extra" in record
+                    and "topic" in record["extra"]
+                    and record["extra"]["topic"] == self.__logger_name,
+                )
+                LoggerBase.loguru_sinks_added.add(log_filename)  # ✅ Mark as added
+
+            host_name = socket.gethostname()
+            host_ip = socket.gethostbyname(host_name)
+
+            # ✅ Bind the logger with topic and extra fields
+            self.logger = guru_logger.bind(
+                topic=self.__logger_name,
+                host_ip=host_ip,
+                host_name=host_name,
+                **self.extra_fields,  # Include additional metadata
+            )
+
+            # ✅ Store reference to prevent duplicate instances
             LoggerBase.binded_loggers[self.__logger_name] = self.logger

     async def log_event(
@@ -52,7 +57,7 @@ class LoggerBase:
         receiver_id: str,
         subject: str,
         event: str,
-        properties: dict[str, any],
+        properties: Dict[str, Any],
         text: str = "",
     ) -> None:
         local_logger = self.logger.bind(
@@ -71,7 +76,7 @@ class LoggerBase:
         subject: str,
         exception: Exception,
         text: str = "",
-        properties: dict[str, any] = None,
+        properties: Dict[str, Any] = None,
     ) -> None:
         local_logger = self.logger.bind(
             sender_id=sender_id,
@@ -89,7 +94,7 @@ class LoggerBase:
         receiver_id: str,
         subject: str,
         text: str = "",
-        properties: dict[str, any] = None,
+        properties: Dict[str, Any] = None,
     ) -> None:
         local_logger = self.logger.bind(
             sender_id=sender_id,
@@ -106,7 +111,7 @@ class LoggerBase:
         receiver_id: str,
         subject: str,
         text: str = "",
-        properties: dict[str, any] = None,
+        properties: Dict[str, Any] = None,
     ) -> None:
         local_logger = self.logger.bind(
             sender_id=sender_id,
@@ -123,7 +128,7 @@ class LoggerBase:
         receiver_id: str,
         subject: str,
         text: str = "",
-        properties: dict[str, any] = None,
+        properties: Dict[str, Any] = None,
     ) -> None:
         local_logger = self.logger.bind(
             sender_id=sender_id,
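The fix hinges on the three class-level objects above: binded_loggers caches one bound logger per name, loguru_sinks_added guarantees each file sink is registered exactly once, and logger_lock serializes first-time setup so two threads cannot both pass the existence check. A minimal standalone sketch of the same pattern — the class name and /tmp path are illustrative, not from the repo:

import threading
from loguru import logger as guru_logger


class DedupLoggerRegistry:
    _bound = {}  # one bound logger per topic, reused on repeat lookups
    _sinks_added = set()  # sink paths that already have a Loguru sink
    _lock = threading.Lock()  # serializes first-time setup across threads

    @classmethod
    def get(cls, topic: str):
        with cls._lock:
            if topic in cls._bound:  # reuse: no second sink, no duplicate records
                return cls._bound[topic]
            sink_path = f"/tmp/{topic}.log"  # hypothetical; the real code uses LOG_BASE_PATH
            if sink_path not in cls._sinks_added:
                guru_logger.add(
                    sink_path,
                    serialize=True,
                    # Default arg pins the topic so each sink keeps only its own records.
                    filter=lambda record, topic=topic: record["extra"].get("topic") == topic,
                )
                cls._sinks_added.add(sink_path)
            cls._bound[topic] = guru_logger.bind(topic=topic)
            return cls._bound[topic]


# Repeated construction now yields the same object, so each record is written once:
assert DedupLoggerRegistry.get("user_activity") is DedupLoggerRegistry.get("user_activity")
DedupLoggerRegistry.get("user_activity").info("logged once, to one sink")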
@@ -5,15 +5,14 @@ import json

 class BusinessMetricLogger(LoggerBase):
     def __init__(self, business_metrics: dict[str, any] = {}) -> None:
-        extra_fileds = {}
+        extra_fields = {}
         if business_metrics:
-            extra_fileds.update(business_metrics)
+            extra_fields.update(business_metrics)
         super().__init__(
             logger_name=app_settings.BUSINESS_METRIC_LOG,
-            extra_fileds=extra_fileds,
+            extra_fields=extra_fields,
         )

-
     async def log_metrics(self, business_metrics: dict[str, any] = {}) -> None:
         return await super().log_event(
             sender_id="business_metric_manager",
@@ -21,5 +20,5 @@ class BusinessMetricLogger(LoggerBase):
             subject="metrics",
             event="logging",
             properties=business_metrics,
-            text="business metric logged"
+            text="business metric logged",
         )
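For context, log_metrics is a coroutine, so callers await it; a hypothetical call site (the service and metric names are invented, and the imports are assumed to resolve as in the diff):

import asyncio


async def main():
    metrics_logger = BusinessMetricLogger(business_metrics={"service": "billing"})
    await metrics_logger.log_metrics({"invoices_processed": 42})


asyncio.run(main())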
@@ -6,9 +6,9 @@ import json

 class UserLogger(LoggerBase):
     def __init__(self, user_activities: dict[str, any] = {}) -> None:
-        extra_fileds = {}
+        extra_fields = {}
         if user_activities:
-            extra_fileds.update(user_activities)
+            extra_fields.update(user_activities)
         super().__init__(
-            logger_name=app_settings.USER_ACTIVITY_LOG, extra_fileds=extra_fileds
+            logger_name=app_settings.USER_ACTIVITY_LOG, extra_fields=extra_fields
         )
@@ -11,10 +11,6 @@ async def cleanup_document_job():

     document_manager = DocumentManager()

-    # Monitor execution time
-    start_time = logging.getLogger().handlers[0].formatter.converter()
-    logging.info(f"Job started at {start_time}")
-
     await document_manager.cleanup_document()

     logging.info("Successfully completed cleanup document job.")
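The deleted timing lines reached into logging.getLogger().handlers[0].formatter.converter(), which fails once the root handler is an InterceptHandler (it carries no formatter). If job timing is ever wanted back, a sketch using the standard library's time.monotonic(), which depends on no handler — the wrapper name and parameter are illustrative:

import logging
import time


async def timed_cleanup(document_manager):
    # monotonic() is immune to wall-clock changes and to logging configuration.
    start = time.monotonic()
    logging.info("Cleanup document job started")
    await document_manager.cleanup_document()
    logging.info("Successfully completed cleanup document job in %.2fs", time.monotonic() - start)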
@@ -3,7 +3,8 @@ from fastapi import FastAPI
 from fastapi.openapi.utils import get_openapi

 from webapi.providers import common
-from webapi.providers import logger
+from webapi.providers.logger import register_logger
 from webapi.providers import router
 from webapi.providers import database
 from webapi.providers import scheduler
@@ -16,9 +17,9 @@ def create_app() -> FastAPI:

     app = FreeleapsApp()

+    register_logger()
     register(app, exception_handler)
     register(app, database)
-    register(app, logger)
     register(app, router)
     register(app, scheduler)
     register(app, common)
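Ordering matters here: register_logger() replaces the root handlers, so any record emitted before it runs is handled by whatever handlers the stdlib or Uvicorn installed. A sketch of the before/after difference, using the import this commit adds:

import logging

from webapi.providers.logger import register_logger  # import added by this commit

logging.warning("before register_logger(): formatted by the stdlib's default handler")
register_logger()  # swaps the root handlers for the InterceptHandler
logging.warning("after: routed through Loguru exactly once")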
@@ -1,60 +1,47 @@
 import logging
 import sys
-from loguru import logger
-from common.config.log_settings import log_settings
+from loguru import logger as guru_logger


-def register(app=None):
-    level = log_settings.LOG_LEVEL
-    file_path = log_settings.LOG_PATH
-    retention = log_settings.LOG_RETENTION
-    rotation = log_settings.LOG_ROTATION
+def register_logger():
+    print("📢 Setting up logging interception...")

-    # intercept everything at the root logger
+    # 🔴 **Ensure Uvicorn Logs Are Captured**
+    intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]
+
+    class InterceptHandler(logging.Handler):
+        def emit(self, record):
+            level = (
+                guru_logger.level(record.levelname).name
+                if guru_logger.level(record.levelname, None)
+                else record.levelno
+            )
+            frame, depth = logging.currentframe(), 2
+            while frame.f_code.co_filename == logging.__file__:
+                frame = frame.f_back
+                depth += 1
+
+            guru_logger.opt(depth=depth, exception=record.exc_info).log(
+                level,
+                f"[{record.name}] {record.getMessage()}",
+            )
+
+    # 🔴 **Replace Existing Loggers with Interception**
+    logging.root.handlers.clear()
+    logging.root.setLevel(logging.INFO)
     logging.root.handlers = [InterceptHandler()]
-    logging.root.setLevel(level)

-    # remove every other logger's handlers
-    # and propagate to root logger
-    for name in logging.root.manager.loggerDict.keys():
-        logging.getLogger(name).handlers = []
-        logging.getLogger(name).propagate = True
+    for logger_name in intercept_loggers:
+        logging_logger = logging.getLogger(logger_name)
+        logging_logger.handlers.clear()  # Remove Uvicorn default handlers
+        logging_logger.propagate = True  # ✅ Ensure they propagate through Loguru

-    # configure loguru
-    logger.add(
-        sink=sys.stdout
-    )
-    logger.add(
-        sink=file_path,
-        level=level,
-        retention=retention,
-        rotation=rotation
-    )
+    # 🔴 **Redirect stdout/stderr to Loguru (Keep Green Timestamps)**
+    guru_logger.remove()
+    guru_logger.add(
+        sys.stdout,
+        level="INFO",
+        format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | {level} | {message}",
+    )

-    logger.disable("pika.adapters")
-    logger.disable("pika.connection")
-    logger.disable("pika.channel")
-    logger.disable("pika.callback")
-    logger.disable("pika.frame")
-    logger.disable("pika.spec")
-    logger.disable("aiormq.connection")
-    logger.disable("urllib3.connectionpool")
-
-
-class InterceptHandler(logging.Handler):
-    def emit(self, record):
-        # Get corresponding Loguru level if it exists
-        try:
-            level = logger.level(record.levelname).name
-        except ValueError:
-            level = record.levelno
-
-        # Find caller from where originated the logged message
-        frame, depth = logging.currentframe(), 2
-        while frame.f_code.co_filename == logging.__file__:
-            frame = frame.f_back
-            depth += 1
-
-        logger.opt(depth=depth, exception=record.exc_info).log(
-            level, record.getMessage()
-        )
+    print("✅ Logging interception complete. Logs are formatted and deduplicated!")
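Taken together, the new provider is the standard Loguru interception recipe: one InterceptHandler on the root logger, named loggers stripped of their own handlers and left to propagate, and a single stdout sink. A self-contained sketch of the same mechanism; it reuses the try/except level lookup from the deleted module-level handler, which is equivalent to the inline conditional above:

import logging
import sys

from loguru import logger as guru_logger


class InterceptHandler(logging.Handler):
    def emit(self, record):
        # Map the stdlib level to a Loguru level name when one exists.
        try:
            level = guru_logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # Walk out of the logging module so Loguru reports the real call site.
        frame, depth = logging.currentframe(), 2
        while frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        guru_logger.opt(depth=depth, exception=record.exc_info).log(
            level, f"[{record.name}] {record.getMessage()}"
        )


guru_logger.remove()
guru_logger.add(sys.stdout, level="INFO", format="{time:HH:mm:ss} | {level} | {message}")
logging.root.handlers = [InterceptHandler()]
logging.root.setLevel(logging.INFO)

logging.getLogger("uvicorn.error").info("hello")  # emitted once, through Loguru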