Add local redis to cache content

parent bc49e1b60b
commit f8b60391b2
@@ -20,6 +20,8 @@ stripe==8.1.0
 strawberry-graphql==0.236.0
 websockets==12.0
 pika==1.3.2
+redis==5.2.1
+fastapi-cache==0.1.0
 aio-pika
 twilio
 openai
apps/content/start_fastapi.sh (0 lines changed; Normal file → Executable file)
@@ -3,6 +3,7 @@ from fastapi import FastAPI
 from fastapi.openapi.utils import get_openapi

 from webapi.providers import common
+from webapi.providers import cache  # Import the new cache provider
 from webapi.providers import logger
 from webapi.providers import router
 from webapi.providers import database
@@ -18,6 +19,7 @@ def create_app() -> FastAPI:

     register(app, exception_handler)
     register(app, database)
+    register(app, cache)
     register(app, logger)
     register(app, router)
     register(app, scheduler)
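The register helper that create_app() calls is not part of this diff; presumably it just hands the app to each provider module's own register(app) hook, which is the contract cache.py below follows. A hypothetical sketch, purely for illustration:

# Hypothetical sketch of the shared register helper used in create_app();
# the real implementation lives elsewhere in the repo and is not shown here.
from types import ModuleType

from fastapi import FastAPI


def register(app: FastAPI, provider: ModuleType) -> None:
    # Each provider module (database, cache, logger, router, ...) is expected
    # to expose register(app) and attach its own routes, events, or state.
    provider.register(app)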
@@ -49,11 +51,7 @@ def customize_openapi_security(app: FastAPI) -> None:

     # Add security scheme to components
     openapi_schema["components"]["securitySchemes"] = {
-        "bearerAuth": {
-            "type": "http",
-            "scheme": "bearer",
-            "bearerFormat": "JWT"
-        }
+        "bearerAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}
     }

     # Add security requirement globally
apps/content/webapi/providers/cache.py (new file, 66 lines)
@@ -0,0 +1,66 @@
import redis.asyncio as redis

from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend

# Declare redis_instance globally so the shutdown hook can close the same connection
redis_instance = None


def register(app):
    @app.on_event("startup")
    async def init_cache():
        global redis_instance  # rebind the module-level handle, not a local
        # Connect to Redis running in the local Docker container
        redis_instance = redis.Redis(
            host="localhost",  # Redis host; port 6379 is published by docker-compose
            port=6379,
            db=0,
            decode_responses=True,
        )

        # Test the Redis connection before wiring up the cache backend
        try:
            await redis_instance.ping()
            print("Connected to Redis Docker container!")
        except Exception as e:
            print(f"Failed to connect to Redis: {e}")
            raise

        # Initialize FastAPICache with the Redis backend
        FastAPICache.init(RedisBackend(redis_instance), prefix="fastapi-cache")

    @app.on_event("shutdown")
    async def close_redis():
        if redis_instance:
            await redis_instance.close()
            print("Redis connection closed gracefully.")
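The @cache decorator used in the routers below only takes effect once FastAPICache.init(...) has run, which is what this startup hook guarantees. A minimal, throwaway wiring sketch to exercise the provider in isolation (the /ping-cache route and 60-second TTL are made up; note these imports match the fastapi-cache2 distribution's fastapi_cache module, so the fastapi-cache==0.1.0 pin added above may be worth double-checking):

# Hypothetical throwaway app to exercise the cache provider locally.
from fastapi import FastAPI
from fastapi_cache.decorator import cache

from webapi.providers import cache as cache_provider

app = FastAPI()
cache_provider.register(app)  # installs the startup/shutdown hooks above


@app.get("/ping-cache/{name}")
@cache(expire=60)  # a second call within 60 seconds should be served from Redis
async def ping_cache(name: str):
    return {"hello": name}

Run it under uvicorn and hit the route twice; the second response should come from the Redis cache rather than re-executing the handler.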
@@ -3,18 +3,22 @@ from fastapi.encoders import jsonable_encoder
 from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 from backend.content.content_service import ContentService
+from fastapi_cache.decorator import cache  # Import the cache decorator

 router = APIRouter()


 @router.get(
     "/retrieve-content-as-media-data/{document_id}",
     operation_id="retrieve-content-as-media-data",
     summary="retrieve content as media data",
     description="retrieve content as media data which can be posted to web page.",
-    response_description="Media data"
+    response_description="Media data",
 )
-async def retrieve_content_as_media_data(
-    document_id : str
-):
+@cache(
+    expire=300,  # Cache the result for 5 minutes
+    key_builder=lambda func, *args, **kwargs: f"content-media:{kwargs.get('document_id', args[0] if len(args) > 0 else '')}",
+)
+async def retrieve_content_as_media_data(document_id: str):
     result = await ContentService().retrieve_content_as_media_data(document_id)
     return JSONResponse(content=jsonable_encoder(result))
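The custom key_builder exists so repeated requests for the same document map to the same Redis entry whether the argument arrives positionally or by keyword. Called directly, outside fastapi-cache (whose real key_builder invocation may pass additional arguments), the lambda produces keys like these, which then receive the provider's global "fastapi-cache" prefix:

# The same lambda as in the decorator above, evaluated in isolation just to
# show the key shape; this bypasses fastapi-cache's own key_builder plumbing.
key_builder = lambda func, *args, **kwargs: (
    f"content-media:{kwargs.get('document_id', args[0] if len(args) > 0 else '')}"
)

print(key_builder(None, document_id="abc123"))  # content-media:abc123
print(key_builder(None, "abc123"))              # content-media:abc123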
@@ -1,9 +1,9 @@
 from fastapi import APIRouter
 from fastapi.encoders import jsonable_encoder
 from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 from backend.content.content_service import ContentService
 from common.constants.region import UserRegion
+from fastapi_cache.decorator import cache  # Import the cache decorator

 router = APIRouter()
@@ -13,11 +13,12 @@ router = APIRouter()
     operation_id="retrieve-directories-for-folder",
     summary="retrieve directories for a folder",
     description="retrieve directories for a folder, such as testimony, legal, etc",
-    response_description="The list of directories under the folder"
+    response_description="The list of directories under the folder",
 )
-async def retrieve_directories_for_folder(
-    folder_name : str,
-    region: UserRegion
-):
+@cache(
+    expire=300,  # Cache for 300 seconds
+    key_builder=lambda func, *args, **kwargs: f"folder:{kwargs.get('folder_name', args[0] if len(args) > 0 else '')}:region:{kwargs.get('region', args[1] if len(args) > 1 else '')}",
+)
+async def retrieve_directories_for_folder(folder_name: str, region: UserRegion):
     result = await ContentService().retrieve_directories_for_folder(folder_name, region)
     return JSONResponse(content=jsonable_encoder(result))
@@ -31,6 +31,20 @@ services:
       - type: bind
         source: $DOCKER_BACKEND_LOG_HOME
         target: $LOG_BASE_PATH
+  redis:
+    image: redis:latest
+    container_name: redis
+    profiles: [ prod, alpha, dev ]
+    restart: always
+    ports:
+      - "6379:6379"  # Redis port
+    networks:
+      - devbox_freeleaps2-network
+    healthcheck:
+      test: [ "CMD", "redis-cli", "ping" ]
+      interval: 10s
+      timeout: 5s
+      retries: 5
 networks:
   devbox_freeleaps2-network:
     external: true
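The compose healthcheck runs redis-cli ping inside the container; a quick host-side equivalent over the published port, using the redis client already added to requirements (host and port assumed from the compose file above):

# Host-side equivalent of the compose healthcheck: connect to the published
# 6379 port and PING. Prints True when the container is up and healthy.
import asyncio

import redis.asyncio as redis


async def main() -> None:
    client = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
    print(await client.ping())
    await client.close()


asyncio.run(main())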