Mirror of https://github.com/seanmorley15/AdventureLog.git (synced 2025-08-09 23:25:18 +02:00)

feat: integrate memcached for caching in Wanderer services; update Docker, settings, and supervisord configurations

parent c502ae350e
commit 2459e7d736
5 changed files with 188 additions and 11 deletions
@@ -24,7 +24,7 @@ WORKDIR /code
 # Install system dependencies (Nginx included)
 RUN apt-get update \
-    && apt-get install -y git postgresql-client gdal-bin libgdal-dev nginx supervisor \
+    && apt-get install -y git postgresql-client gdal-bin libgdal-dev nginx memcached supervisor \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -3,7 +3,11 @@ import requests
 from datetime import datetime
 from datetime import timezone as dt_timezone
 from django.utils import timezone as django_timezone
+from django.core.cache import cache
+from django.conf import settings
 import logging
+import hashlib
+import json
 
 from .models import WandererIntegration
@@ -16,6 +20,18 @@ class IntegrationError(Exception):
 COOKIE_NAMES = ("pb_auth", "pb-auth")
 LOGIN_PATH = "/api/v1/auth/login"
 
+# Cache settings
+TRAIL_CACHE_TIMEOUT = getattr(settings, 'WANDERER_TRAIL_CACHE_TIMEOUT', 60 * 15)  # 15 minutes default
+TRAIL_CACHE_PREFIX = 'wanderer_trail'
+
+def _get_cache_key(integration_id: int, trail_id: str) -> str:
+    """Generate a consistent cache key for trail data."""
+    return f"{TRAIL_CACHE_PREFIX}:{integration_id}:{trail_id}"
+
+def _get_etag_cache_key(integration_id: int, trail_id: str) -> str:
+    """Generate cache key for ETags."""
+    return f"{TRAIL_CACHE_PREFIX}_etag:{integration_id}:{trail_id}"
+
 def login_to_wanderer(integration: WandererIntegration, password: str):
     """
     Authenticate with Wanderer and return the auth cookie and expiry.
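For orientation, a minimal sketch of how these key helpers pair with Django's cache API (assumes it runs inside the Django project where the helpers are importable; the integration ID, trail ID, and payload shown are placeholders):

    from django.core.cache import cache

    # _get_cache_key(42, "abc123") -> "wanderer_trail:42:abc123"
    key = _get_cache_key(42, "abc123")
    cache.set(key, {"name": "example trail"}, TRAIL_CACHE_TIMEOUT)  # expires after 15 minutes by default
    assert cache.get(key) == {"name": "example trail"}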
@@ -109,19 +125,169 @@ def make_wanderer_request(integration: WandererIntegration, endpoint: str, metho
     except requests.RequestException as exc:
         logger.error(f"Error making {method} request to {url}: {exc}")
         raise IntegrationError(f"Error communicating with Wanderer: {exc}")
 
-# function to fetch a specific trail by ID
-def fetch_trail_by_id(integration: WandererIntegration, trail_id: str, password_for_reauth: str = None):
+def fetch_trail_by_id(integration: WandererIntegration, trail_id: str, password_for_reauth: str = None, use_cache: bool = True):
     """
-    Fetch a specific trail by its ID from the Wanderer API.
+    Fetch a specific trail by its ID from the Wanderer API with intelligent caching.
 
     Args:
         integration: WandererIntegration instance
         trail_id: ID of the trail to fetch
         password_for_reauth: Password to use if re-authentication is needed
+        use_cache: Whether to use caching (default: True)
 
     Returns:
         dict: Trail data from the API
     """
-    response = make_wanderer_request(integration, f"/api/v1/trail/{trail_id}", password_for_reauth=password_for_reauth)
-    return response.json()
+    cache_key = _get_cache_key(integration.id, trail_id)
+    etag_cache_key = _get_etag_cache_key(integration.id, trail_id)
+
+    # Try to get from cache first
+    if use_cache:
+        cached_data = cache.get(cache_key)
+        if cached_data:
+            logger.debug(f"Trail {trail_id} found in cache")
+            return cached_data
+
+    # Prepare headers for conditional requests
+    headers = {}
+    if use_cache:
+        cached_etag = cache.get(etag_cache_key)
+        if cached_etag:
+            headers['If-None-Match'] = cached_etag
+
+    try:
+        response = make_wanderer_request(
+            integration,
+            f"/api/v1/trail/{trail_id}",
+            password_for_reauth=password_for_reauth,
+            headers=headers
+        )
+
+        # Handle 304 Not Modified
+        if response.status_code == 304:
+            logger.debug(f"Trail {trail_id} not modified, using cached version")
+            cached_data = cache.get(cache_key)
+            if cached_data:
+                return cached_data
+
+        trail_data = response.json()
+
+        # Cache the result
+        if use_cache:
+            cache.set(cache_key, trail_data, TRAIL_CACHE_TIMEOUT)
+
+            # Cache ETag if present
+            etag = response.headers.get('ETag')
+            if etag:
+                cache.set(etag_cache_key, etag, TRAIL_CACHE_TIMEOUT)
+
+            logger.debug(f"Trail {trail_id} cached for {TRAIL_CACHE_TIMEOUT} seconds")
+
+        return trail_data
+
+    except requests.RequestException as exc:
+        # If we have cached data and the request fails, return cached data as fallback
+        if use_cache:
+            cached_data = cache.get(cache_key)
+            if cached_data:
+                logger.debug(f"API request failed, returning cached trail {trail_id}: {exc}")
+                return cached_data
+        raise
+
+def fetch_multiple_trails_by_id(integration: WandererIntegration, trail_ids: list, password_for_reauth: str = None, use_cache: bool = True):
+    """
+    Fetch multiple trails efficiently with batch caching.
+
+    Args:
+        integration: WandererIntegration instance
+        trail_ids: List of trail IDs to fetch
+        password_for_reauth: Password to use if re-authentication is needed
+        use_cache: Whether to use caching (default: True)
+
+    Returns:
+        dict: Dictionary mapping trail_id to trail data
+    """
+    results = {}
+    uncached_ids = []
+
+    if use_cache:
+        # Get cache keys for all trails
+        cache_keys = {trail_id: _get_cache_key(integration.id, trail_id) for trail_id in trail_ids}
+
+        # Batch get from cache
+        cached_trails = cache.get_many(cache_keys.values())
+        key_to_id = {v: k for k, v in cache_keys.items()}
+
+        # Separate cached and uncached
+        for cache_key, trail_data in cached_trails.items():
+            trail_id = key_to_id[cache_key]
+            results[trail_id] = trail_data
+
+        uncached_ids = [tid for tid in trail_ids if tid not in results]
+        logger.debug(f"Found {len(results)} trails in cache, need to fetch {len(uncached_ids)}")
+    else:
+        uncached_ids = trail_ids
+
+    # Fetch uncached trails
+    for trail_id in uncached_ids:
+        try:
+            trail_data = fetch_trail_by_id(integration, trail_id, password_for_reauth, use_cache)
+            results[trail_id] = trail_data
+        except IntegrationError as e:
+            logger.error(f"Failed to fetch trail {trail_id}: {e}")
+            # Continue with other trails
+            continue
+
+    return results
+
+def invalidate_trail_cache(integration_id: int, trail_id: str = None):
+    """
+    Invalidate cached trail data.
+
+    Args:
+        integration_id: Integration ID
+        trail_id: Specific trail ID to invalidate, or None to clear all trails for this integration
+    """
+    if trail_id:
+        # Invalidate specific trail
+        cache_key = _get_cache_key(integration_id, trail_id)
+        etag_cache_key = _get_etag_cache_key(integration_id, trail_id)
+        cache.delete_many([cache_key, etag_cache_key])
+        logger.info(f"Invalidated cache for trail {trail_id}")
+    else:
+        # This would require a more complex implementation to find all keys
+        # For now, we'll just log it - you might want to use cache versioning instead
+        logger.debug("Cache invalidation for all trails not implemented - consider using cache versioning")
+
+def warm_trail_cache(integration: WandererIntegration, trail_ids: list, password_for_reauth: str = None):
+    """
+    Pre-warm the cache with trail data.
+
+    Args:
+        integration: WandererIntegration instance
+        trail_ids: List of trail IDs to pre-load
+        password_for_reauth: Password to use if re-authentication is needed
+    """
+    logger.info(f"Warming cache for {len(trail_ids)} trails")
+    fetch_multiple_trails_by_id(integration, trail_ids, password_for_reauth, use_cache=True)
+
+# Decorator for additional caching layers
+def cached_trail_method(timeout=TRAIL_CACHE_TIMEOUT):
+    """
+    Decorator to add method-level caching to any function that takes integration and trail_id.
+    """
+    def decorator(func):
+        def wrapper(integration, trail_id, *args, **kwargs):
+            # Create cache key based on function name and arguments
+            cache_key = f"{func.__name__}:{integration.id}:{trail_id}:{hashlib.md5(str(args).encode()).hexdigest()}"
+
+            cached_result = cache.get(cache_key)
+            if cached_result is not None:
+                return cached_result
+
+            result = func(integration, trail_id, *args, **kwargs)
+            cache.set(cache_key, result, timeout)
+            return result
+        return wrapper
+    return decorator
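How a caller might exercise the new functions end to end (a sketch only; it assumes `integration` is a saved WandererIntegration and the trail IDs are placeholders, not real Wanderer data):

    trail_ids = ["t1", "t2", "t3"]  # placeholder trail IDs

    # Pre-load the cache; subsequent single fetches are served from memcached
    warm_trail_cache(integration, trail_ids)
    trail = fetch_trail_by_id(integration, "t1")                    # cache hit
    fresh = fetch_trail_by_id(integration, "t1", use_cache=False)   # forces an API round trip

    # Drop the cached copy (and its ETag) after the trail changes upstream
    invalidate_trail_cache(integration.id, "t1")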
@@ -88,7 +88,9 @@ ACCOUNT_EMAIL_VERIFICATION = 'none'
 
 CACHES = {
     'default': {
-        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
+        'LOCATION': '127.0.0.1:11211',
+        'TIMEOUT': 60 * 60 * 24,  # Optional: 1 day cache
     }
 }
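With the default backend switched from local memory to memcached, a quick smoke test can confirm the wiring (a sketch, intended for `python manage.py shell` with memcached listening on 127.0.0.1:11211; key and value are placeholders):

    from django.core.cache import cache

    cache.set("healthcheck", "ok", 30)   # stored in memcached via PyMemcacheCache
    print(cache.get("healthcheck"))      # -> "ok" if the connection works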
@@ -25,4 +25,5 @@ publicsuffix2==2.20191221
 geopy==2.4.1
 psutil==6.1.1
 geojson==3.2.0
 gpxpy==1.6.2
+pymemcache==4.0.0
@@ -12,5 +12,13 @@ command=/code/entrypoint.sh
 autorestart=true
 stdout_logfile=/dev/stdout
 stderr_logfile=/dev/stderr
-stdout_logfile_maxbytes = 0
-stderr_logfile_maxbytes = 0
+stdout_logfile_maxbytes=0
+stderr_logfile_maxbytes=0
+
+[program:memcached]
+command=memcached -u nobody -m 64 -p 11211
+autorestart=true
+stdout_logfile=/dev/stdout
+stderr_logfile=/dev/stderr
+stdout_logfile_maxbytes=0
+stderr_logfile_maxbytes=0
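Supervisord now runs memcached (capped at 64 MB via -m 64) alongside the backend in the same container, so the daemon can also be probed directly with the pymemcache client added to requirements. A minimal sketch, assuming it is executed inside the running container:

    from pymemcache.client.base import Client

    client = Client(("127.0.0.1", 11211))
    client.set("ping", b"pong")
    print(client.get("ping"))  # -> b'pong' when the memcached program is up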