diff --git a/.env.example b/.env.example
index a755c7f..817d128 100644
--- a/.env.example
+++ b/.env.example
@@ -43,10 +43,13 @@ SERVER_STATELESS=false
 
 # Logging Configuration
 LOG_LEVEL=INFO
-
-LOGZIO_ENABLED=false
-LOGZIO_TOKEN=
-LOGZIO_URL=https://listener.logz.io:8071
+DD_HOST=datadog
+DD_PORT=8126
+DD_STATS_HOST=datadog
+DD_STATS_PORT=8125
+DD_ENABLED=0
+DD_API_KEY=
+DD_SITE=
 
 # Utility Configuration
 PHPMYADMIN_PORT=8080
diff --git a/.gitignore b/.gitignore
index c3385b6..e081872 100644
--- a/.gitignore
+++ b/.gitignore
@@ -138,3 +138,6 @@ mysql_data/
 meilisearch_data/
 redis_data/
 rgdps_data/
+
+# Datadog
+logs.log
\ No newline at end of file
diff --git a/Makefile b/Makefile
index af64029..4c70471 100644
--- a/Makefile
+++ b/Makefile
@@ -7,6 +7,7 @@ run:
 		redis \
 		mysql \
 		meilisearch \
+		datadog \
 		realistikgdps
 
 run-bg:
@@ -14,6 +15,7 @@ run-bg:
 		redis \
 		mysql \
 		meilisearch \
+		datadog \
 		realistikgdps
 
 stop:
@@ -28,6 +30,9 @@ shell:
 pma:
 	docker-compose up phpmyadmin
 
+datadog:
+	docker-compose up datadog
+
 converter:
 	APP_COMPONENT=converter docker-compose up \
 		redis \
diff --git a/README.md b/README.md
index fa55235..5c39fcb 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,6 @@ It is written in asynchronous, modern Python and is meant as a replacement for o
 - MeiliSearch, allowing for typo tolerance
 - S3 support, allowing for flexible storage solutions
 - Proper ratelimiting
-- Logz.io logging support
 - Flexible command framework
 
 ## How to set up?
diff --git a/datadog/conf.d/realistikgdps.yaml b/datadog/conf.d/realistikgdps.yaml
new file mode 100644
index 0000000..516cd26
--- /dev/null
+++ b/datadog/conf.d/realistikgdps.yaml
@@ -0,0 +1,4 @@
+logs:
+  - type: file
+    path: /data/logs.log
+    service: "realistikgdps"
\ No newline at end of file
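The agent-side config above tails /data/logs.log (the repository root is mounted at /data by the compose service below) and tags every record with the realistikgdps service. What lands in that file is produced by the json formatter defined in logging.yaml further down; a minimal sketch of the mechanism — not part of the diff — assuming python-json-logger's stock JsonFormatter behaviour:

    import logging

    from pythonjsonlogger import jsonlogger

    # Mirrors the "file" handler + "json" formatter pairing from logging.yaml.
    handler = logging.FileHandler("logs.log")
    handler.setFormatter(
        jsonlogger.JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s"),
    )

    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    # Fields passed via `extra` are merged into the JSON object, which is what
    # makes the structured `extra={...}` calls throughout this diff searchable
    # once the agent ships the file to Datadog.
    logger.info("Connected to the MySQL database.", extra={"host": "mysql"})
    # -> {"asctime": "...", "name": "demo", "levelname": "INFO",
    #     "message": "Connected to the MySQL database.", "host": "mysql"}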
diff --git a/docker-compose.yml b/docker-compose.yml
index ce15596..d5e8280 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -33,6 +33,7 @@ services:
       - mysql
       - redis
       - meilisearch
+      - datadog
     restart: always
     environment:
       - APP_PORT=${APP_PORT}
@@ -65,10 +66,16 @@ services:
       - SERVER_GD_URL=${SERVER_GD_URL}
       - SERVER_STATELESS=${SERVER_STATELESS}
 
-      - LOG_LEVEL=${LOG_LEVEL}
-      - LOGZIO_ENABLED=${LOGZIO_ENABLED}
-      - LOGZIO_TOKEN=${LOGZIO_TOKEN}
-      - LOGZIO_URL=${LOGZIO_URL}
+      - DD_ENABLED=${DD_ENABLED}
+      - DD_HOST=${DD_HOST}
+      - DD_PORT=${DD_PORT}
+      - DD_STATS_HOST=${DD_STATS_HOST}
+      - DD_STATS_PORT=${DD_STATS_PORT}
+      - DD_API_KEY=${DD_API_KEY}
+      - DD_SITE=${DD_SITE}
+      - DD_LOGS_INJECTION=true
+      - DD_SERVICE=realistikgdps
+      - DD_LOGS_ENABLED=true
 
       # Internal docker specific variables
       - INTERNAL_RGDPS_DIRECTORY=/data # NOTE: Ensure this matches the volume mount below.
@@ -98,3 +105,22 @@ services:
       - UPLOAD_LIMIT=500M
     depends_on:
       - mysql
+
+  datadog:
+    image: gcr.io/datadoghq/agent:7
+    ports:
+      - "8125:8125/udp"
+      - "8126:8126"
+    environment:
+      - DD_API_KEY=${DD_API_KEY}
+      - DD_SITE=${DD_SITE}
+      - DD_APM_ENABLED=true
+      - DD_LOGS_ENABLED=true
+      - DD_TRACE_ENABLED=true
+      - DD_DOGSTATSD_NON_LOCAL_TRAFFIC=true
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - /proc/:/host/proc/:ro
+      - /sys/fs/cgroup/:/host/sys/fs/cgroup:ro
+      - .:/data
+      - ./datadog/conf.d:/etc/datadog-agent/conf.d
diff --git a/logging.yaml b/logging.yaml
new file mode 100644
index 0000000..bdbe40a
--- /dev/null
+++ b/logging.yaml
@@ -0,0 +1,28 @@
+version: 1
+disable_existing_loggers: true
+loggers:
+  httpx:
+    level: WARNING
+    handlers: [console, file]
+    propagate: no
+  httpcore:
+    level: WARNING
+    handlers: [console, file]
+    propagate: no
+handlers:
+  console:
+    class: logging.StreamHandler
+    formatter: plaintext
+    stream: ext://sys.stdout
+  file:
+    class: logging.FileHandler
+    formatter: json
+    filename: logs.log
+formatters:
+  plaintext:
+    format: '%(asctime)s %(name)s %(levelname)s %(message)s'
+  json:
+    class: pythonjsonlogger.jsonlogger.JsonFormatter
+    format: '%(asctime)s %(name)s %(levelname)s %(message)s'
+root:
+  handlers: [console, file]
\ No newline at end of file
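The datadog service above publishes 8126 for APM traces and 8125/udp for DogStatsD, and DD_DOGSTATSD_NON_LOCAL_TRAFFIC=true lets other containers send metrics to it. Nothing in this diff emits custom metrics yet, but the datadog package pinned in the requirements below would make that a few lines; a hypothetical sketch (the metric names are illustrative only):

    from datadog import initialize, statsd

    # DD_STATS_HOST / DD_STATS_PORT from settings.py resolve to the compose
    # service above; "datadog" and 8125 are its defaults.
    initialize(statsd_host="datadog", statsd_port=8125)

    # DogStatsD is fire-and-forget UDP, so this stays safe even if the
    # agent container is momentarily unavailable.
    statsd.increment("rgdps.levels.uploaded")
    statsd.histogram("rgdps.request.duration_ms", 42.0)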
diff --git a/requirements/main.txt b/requirements/main.txt
index bb66ae7..f5dab25 100644
--- a/requirements/main.txt
+++ b/requirements/main.txt
@@ -2,11 +2,12 @@ aiobotocore == 2.9.0
 bcrypt == 4.1.2
 cryptography
 databases[asyncmy] == 0.8.0
+datadog == 0.49.1
+ddtrace == 2.7.6
 email-validator == 2.0.0
 fastapi == 0.108.0
 fastapi-limiter == 0.1.5
 httpx == 0.26.0
-logzio-python-handler == 4.1.0
 meilisearch-python-sdk == 2.0.1
 orjson == 3.9.15
 python-dotenv == 1.0.1
@@ -17,3 +18,5 @@ uvicorn == 0.19.0
 uvloop == 0.19.0; sys_platform != "win32"
 winloop == 0.1.0; sys_platform == "win32"
 xor-cipher == 3.0.1
+python-json-logger == 2.0.7
+PyYAML == 6.0.1
\ No newline at end of file
diff --git a/rgdps/api/__init__.py b/rgdps/api/__init__.py
index 204b734..82d3704 100644
--- a/rgdps/api/__init__.py
+++ b/rgdps/api/__init__.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import urllib.parse
+import logging
 import uuid
 
 from databases import DatabaseURL
@@ -32,14 +33,7 @@
 
 
 def init_logging() -> None:
-    if settings.LOGZIO_ENABLED:
-        logger.init_logzio_logging(
-            settings.LOGZIO_TOKEN,
-            settings.LOG_LEVEL,
-            settings.LOGZIO_URL,
-        )
-    else:
-        logger.init_basic_logging(settings.LOG_LEVEL)
+    logger.init_basic_logging(settings.LOG_LEVEL)
 
 
 def init_events(app: FastAPI) -> None:
@@ -48,7 +42,7 @@ async def on_validation_error(
         request: Request,
         e: RequestValidationError,
     ) -> Response:
-        logger.exception(
+        logging.exception(
             f"A validation error has occured while parsing a request.",
             extra={
                 "url": str(request.url),
@@ -84,7 +78,7 @@ def init_mysql(app: FastAPI) -> None:
     @app.on_event("startup")
     async def on_startup() -> None:
         await app.state.mysql.connect()
-        logger.info(
+        logging.info(
             "Connected to the MySQL database.",
             extra={
                 "host": settings.SQL_HOST,
@@ -118,7 +112,7 @@ async def on_startup() -> None:
             prefix="rgdps:ratelimit",
         )
 
-        logger.info(
+        logging.info(
             "Connected to the Redis database.",
             extra={
                 "host": settings.REDIS_HOST,
@@ -141,7 +135,7 @@ def init_meili(app: FastAPI) -> None:
     @app.on_event("startup")
     async def startup() -> None:
         await app.state.meili.health()
-        logger.info(
+        logging.info(
             "Connected to the MeiliSearch database.",
             extra={
                 "host": settings.MEILI_HOST,
@@ -163,7 +157,7 @@ def init_s3_storage(app: FastAPI) -> None:
     @app.on_event("startup")
     async def startup() -> None:
         app.state.storage = await app.state.storage.connect()
-        logger.info(
+        logging.info(
             "Connected to S3 storage.",
             extra={
                 "bucket": settings.S3_BUCKET,
@@ -183,7 +177,7 @@ def init_local_storage(app: FastAPI) -> None:
 
     @app.on_event("startup")
    async def startup() -> None:
-        logger.info("Connected to the local storage.")
+        logging.info("Connected to the local storage.")
 
 
 def init_gd(app: FastAPI) -> None:
@@ -191,7 +185,7 @@ def init_gd(app: FastAPI) -> None:
         settings.SERVER_GD_URL,
     )
 
-    logger.info(
+    logging.info(
         "Initialised the main Geometry Dash client.",
         extra={
             "server_url": settings.SERVER_GD_URL,
@@ -203,7 +197,7 @@ def init_cache_stateful(app: FastAPI) -> None:
     app.state.user_cache = SimpleAsyncMemoryCache()
     app.state.password_cache = SimpleAsyncMemoryCache()
 
-    logger.info("Initialised stateful caching.")
+    logging.info("Initialised stateful caching.")
 
 
 def init_cache_stateless(app: FastAPI) -> None:
@@ -218,7 +212,7 @@ def init_cache_stateless(app: FastAPI) -> None:
         serialise=lambda x: x.encode(),
     )
 
-    logger.info("Initialised stateless caching.")
+    logging.info("Initialised stateless caching.")
 
 
 def init_routers(app: FastAPI) -> None:
@@ -230,7 +224,7 @@ def init_routers(app: FastAPI) -> None:
 def init_middlewares(app: FastAPI) -> None:
     @app.middleware("http")
     async def mysql_transaction(request: Request, call_next):
-        logger.debug(
+        logging.debug(
             "Opened a new MySQL transaction for request.",
             extra={
                 "uuid": request.state.uuid,
@@ -250,7 +244,7 @@ async def enforce_user_agent(
         # GD sends an empty User-Agent header.
         user_agent = request.headers.get("User-Agent")
         if user_agent != "":
-            logger.info(
+            logging.info(
                 "Client request stopped due to invalid User-Agent header.",
                 extra={
                     "url": str(request.url),
@@ -270,7 +264,7 @@ async def exception_logging(
         try:
             return await call_next(request)
         except Exception as e:
-            logger.exception(
+            logging.exception(
                 f"An exception has occured while processing a request!",
                 extra={
                     "url": str(request.url),
@@ -288,6 +282,7 @@ async def assign_uuid(request: Request, call_next):
 
 def init_api() -> FastAPI:
     init_logging()
+
     app = FastAPI(
         title="RealistikGDPS",
         openapi_url=None,
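The same mechanical swap repeats across the modules below: the rgdps.logger wrapper functions are replaced with direct stdlib logging.* calls, which all route through the root logger configured by logging.yaml. A per-module named logger would be the conventional stdlib alternative, and would populate the name field of the JSON records with the module path; a hypothetical variant:

    import logging

    # Records would carry name="rgdps.api.gd.levels" instead of "root".
    logger = logging.getLogger(__name__)

    logger.info(
        "Successfully fetched level.",
        extra={"level_id": 1},
    )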
diff --git a/rgdps/api/commands/framework.py b/rgdps/api/commands/framework.py
index 904334c..66f76d4 100644
--- a/rgdps/api/commands/framework.py
+++ b/rgdps/api/commands/framework.py
@@ -14,7 +14,7 @@
 from typing import get_origin
 from typing import get_type_hints
 
-from rgdps import logger
+import logging
 from rgdps import repositories
 from rgdps.common.context import Context
 from rgdps.constants.errors import ServiceError
@@ -107,7 +107,7 @@ async def _resolve_from_type[T](ctx: CommandContext, value: str, cast: type[T])
     elif issubclass(cast, Enum):
         return cast(value)
 
-    logger.error(
+    logging.error(
         "Command parser tried to parse an unsupported type!",
         extra={
             "value": value,
@@ -323,7 +323,7 @@ def merge(self, router: CommandRouter) -> None:
 
         for key, value in router._routes.items():
             if key in self._routes:
-                logger.warning(
+                logging.warning(
                     "Command router merge has overwritten an existing command!",
                     extra={
                         "command": key,
@@ -390,7 +390,7 @@ async def entrypoint(
         if level_id is not None:
             level = await repositories.level.from_id(base_ctx, level_id)
             if level is None:
-                logger.error(
+                logging.error(
                     "Failed to resolve the command level!",
                     extra={"level_id": level_id},
                 )
@@ -400,7 +400,7 @@ async def entrypoint(
         if target_user_id is not None:
             target_user = await repositories.user.from_id(base_ctx, target_user_id)
             if target_user is None:
-                logger.error(
+                logging.error(
                     "Failed to resolve the command target user!",
                     extra={"target_user_id": target_user_id},
                 )
@@ -511,7 +511,7 @@ def decorator(func: CommandConditional) -> CommandConditional:
 
 # Command specific event handlers
 async def _event_on_exception(ctx: CommandContext, exception: Exception) -> str:
-    logger.exception(
+    logging.exception(
         "An exception has occurred while executing command!",
         extra={
             "command_name": ctx.layer.name,
@@ -599,7 +599,7 @@ async def execute(self, ctx: CommandContext) -> str:
 
         try:
             result = await self.handle(ctx, *params)
-            logger.info(
+            logging.info(
                 "Successfully executed command!",
                 extra={
                     "command_name": self.name,
@@ -610,7 +610,7 @@ async def execute(self, ctx: CommandContext) -> str:
         except CommandException as e:
             return await self._event_interruption(ctx, e)
         except Exception as e:
-            logger.exception(
+            logging.exception(
                 "Failed to run command handler!",
                 extra={
                     "command_name": self.name,
diff --git a/rgdps/api/dependencies.py b/rgdps/api/dependencies.py
index 17632de..aa94e92 100644
--- a/rgdps/api/dependencies.py
+++ b/rgdps/api/dependencies.py
@@ -1,5 +1,7 @@
 from __future__ import annotations
 
+import logging
+
 from collections.abc import Awaitable
 from collections.abc import Callable
 
@@ -7,7 +9,6 @@
 from fastapi import Form
 from fastapi.exceptions import HTTPException
 
-from rgdps import logger
 from rgdps import usecases
 from rgdps.api.context import HTTPContext
 from rgdps.constants.errors import ServiceError
@@ -35,7 +36,7 @@ async def wrapper(
         )
 
         if isinstance(user, ServiceError):
-            logger.debug(
+            logging.debug(
                 "Authentication failed for user.",
                 extra={
                     "user_id": user_id,
@@ -50,7 +51,7 @@ async def wrapper(
         if required_privileges is not None and not (
             user.privileges & required_privileges == required_privileges
         ):
-            logger.debug(
+            logging.debug(
                 "Authentication failed for user due to insufficient privileges.",
                 extra={
                     "user_id": user_id,
@@ -85,7 +86,7 @@ async def wrapper(
         )
 
         if isinstance(user, ServiceError):
-            logger.debug(
+            logging.debug(
                 "Authentication failed for user.",
                 extra={
                     "username": username,
@@ -100,7 +101,7 @@ async def wrapper(
         if required_privileges is not None and not (
             user.privileges & required_privileges == required_privileges
         ):
-            logger.debug(
+            logging.debug(
                 "Authentication failed for user due to insufficient privileges.",
                 extra={
                     "username": username,
diff --git a/rgdps/api/gd/leaderboards.py b/rgdps/api/gd/leaderboards.py
index 9bc4751..1c67dd2 100644
--- a/rgdps/api/gd/leaderboards.py
+++ b/rgdps/api/gd/leaderboards.py
@@ -3,7 +3,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
 from rgdps.common import gd_obj
@@ -20,7 +20,7 @@ async def leaderboard_get(
     leaderboard = await leaderboards.get(ctx, leaderboard_type)
 
     if isinstance(leaderboard, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to load the leaderboard.",
             extra={
                 "leaderboard_type": leaderboard_type.value,
@@ -29,7 +29,7 @@ async def leaderboard_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully fetched the leaderboard.",
         extra={
             "leaderboard_type": leaderboard_type.value,
diff --git a/rgdps/api/gd/level_comments.py b/rgdps/api/gd/level_comments.py
index 449bf70..ff9f529 100644
--- a/rgdps/api/gd/level_comments.py
+++ b/rgdps/api/gd/level_comments.py
@@ -3,7 +3,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
@@ -52,7 +52,7 @@ async def create_comment_post(
     )
 
     if isinstance(comment, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to post level comment.",
             extra={
                 "user_id": user.id,
@@ -64,7 +64,7 @@ async def create_comment_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully posted level comment.",
         extra={
             "comment_id": comment.id,
@@ -89,7 +89,7 @@ async def level_comments_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to load level comments.",
             extra={
                 "level_id": level_id,
@@ -111,7 +111,7 @@ async def level_comments_get(
     )
     response += "#" + gd_obj.create_pagination_info(result.total, page, page_size)
 
-    logger.info(
+    logging.info(
         "Successfully loaded level comments.",
         extra={
             "level_id": level_id,
@@ -137,7 +137,7 @@ async def comment_history_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to load level comment history.",
             extra={
                 "user_id": user_id,
@@ -163,7 +163,7 @@ async def comment_history_get(
     )
     response += "#" + gd_obj.create_pagination_info(result.total, page, page_size)
 
-    logger.info(
+    logging.info(
         "Successfully loaded level comment history.",
         extra={
             "user_id": user_id,
@@ -189,7 +189,7 @@ async def level_comment_delete(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to delete level comment.",
             extra={
                 "user_id": user.id,
@@ -199,7 +199,7 @@ async def level_comment_delete(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully deleted level comment.",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/levels.py b/rgdps/api/gd/levels.py
index 1c2504f..7ee523d 100644
--- a/rgdps/api/gd/levels.py
+++ b/rgdps/api/gd/levels.py
@@ -5,7 +5,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
 from rgdps.api.dependencies import authenticate_dependency
@@ -34,7 +34,7 @@ async def song_info_get(
 ):
     song = await songs.get(ctx, song_id)
     if isinstance(song, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to fetch song.",
             extra={
                 "song_id": song_id,
@@ -43,7 +43,7 @@ async def song_info_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully fetched song.",
         extra={
             "song_id": song_id,
@@ -108,7 +108,7 @@ async def level_post(
     )
 
     if isinstance(level, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to upload level.",
             extra={
                 "user_id": user.id,
@@ -118,7 +118,7 @@ async def level_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully uploaded/updated level.",
         extra={
             "user_id": user.id,
@@ -180,7 +180,7 @@ async def levels_get(
     )
 
     if isinstance(level_res, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to search levels.",
             extra={
                 "query": query,
@@ -200,7 +200,7 @@ async def levels_get(
     )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully searched levels.",
         extra={
             "query": query,
@@ -248,7 +248,7 @@ async def level_get(
     )
 
     if isinstance(level_res, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to fetch level.",
             extra={
                 "level_id": level_id,
@@ -257,7 +257,7 @@ async def level_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully fetched level.",
         extra={
             "level_id": level_res.level.id,
@@ -303,7 +303,7 @@ async def suggest_level_stars(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to suggest stars.",
             extra={
                 "user_id": user.id,
@@ -315,7 +315,7 @@ async def suggest_level_stars(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully suggested stars.",
         extra={
             "user_id": user.id,
@@ -344,7 +344,7 @@ async def level_desc_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to update description.",
             extra={
                 "user_id": user.id,
@@ -355,7 +355,7 @@ async def level_desc_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully updated description.",
         extra={
             "user_id": user.id,
@@ -381,7 +381,7 @@ async def level_delete_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to delete level.",
             extra={
                 "user_id": user.id,
@@ -391,7 +391,7 @@ async def level_delete_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully deleted level.",
         extra={
             "user_id": user.id,
@@ -415,7 +415,7 @@ async def daily_level_info_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to fetch current level.",
             extra={
                 "query_type": query_type.value,
@@ -424,7 +424,7 @@ async def daily_level_info_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully fetched current level.",
         extra={
             "query_type": query_type.value,
@@ -451,7 +451,7 @@ async def demon_difficulty_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to set demon difficulty.",
             extra={
                 "user_id": user.id,
@@ -462,7 +462,7 @@ async def demon_difficulty_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully set demon difficulty.",
         extra={
             "user_id": user.id,
@@ -480,7 +480,7 @@ async def custom_content_cdn_get(
     result = await songs.get_custom_content_url(ctx)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to serve custom content CDN url.",
             extra={
                 "error": result.value,
@@ -488,6 +488,6 @@ async def custom_content_cdn_get(
         )
         return responses.fail()
 
-    logger.info("Successfully served custom content CDN url.")
+    logging.info("Successfully served custom content CDN url.")
 
     return result
diff --git a/rgdps/api/gd/messages.py b/rgdps/api/gd/messages.py
index 1c07880..d75cf3c 100644
--- a/rgdps/api/gd/messages.py
+++ b/rgdps/api/gd/messages.py
@@ -1,7 +1,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
@@ -48,7 +48,7 @@ async def message_post(
     )
 
     if isinstance(message, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to send message.",
             extra={
                 "sender_user_id": user.id,
@@ -58,7 +58,7 @@ async def message_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully sent a message.",
         extra={
             "message_id": message.id,
@@ -94,7 +94,7 @@ async def messages_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view message list.",
             extra={
                 "user_id": user.id,
@@ -123,7 +123,7 @@ async def messages_get(
         if not is_sender_user_id and message.message.seen_ts is None:
             await messages.mark_message_as_seen(ctx, user.id, message.message.id)
 
-    logger.info(
+    logging.info(
         "Successfully viewed the messages list.",
         extra={
             "user_id": user.id,
@@ -144,7 +144,7 @@ async def message_get(
     result = await messages.get(ctx, user.id, message_id=message_id)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view message.",
             extra={
                 "user_id": user.id,
@@ -157,7 +157,7 @@ async def message_get(
     if result.message.seen_ts is None:
         await messages.mark_message_as_seen(ctx, user.id, result.message.id)
 
-    logger.info(
+    logging.info(
         "Successfully viewed message.",
         extra={
             "user_id": user.id,
@@ -191,7 +191,7 @@ async def message_delete(
     for message in messages_list:
         await messages.delete_by_user(ctx, user.id, message_id=message)
 
-    logger.info(
+    logging.info(
         "Successfully deleted message(s).",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/rewards.py b/rgdps/api/gd/rewards.py
index 9ef7148..27ebcb8 100644
--- a/rgdps/api/gd/rewards.py
+++ b/rgdps/api/gd/rewards.py
@@ -3,7 +3,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
 from rgdps.api.dependencies import authenticate_dependency
@@ -30,7 +30,7 @@ async def daily_chest_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to fetch daily chest.",
             extra={
                 "user_id": user.id,
@@ -54,7 +54,7 @@ async def daily_chest_get(
     encrypted_result = gd_obj.encrypt_chest_response(result)
     security_hash = gd_obj.create_chest_security_str(encrypted_result.response)
 
-    logger.info(
+    logging.info(
         "Successfully fetched daily chest.",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/save_data.py b/rgdps/api/gd/save_data.py
index cf64e02..adea0ef 100644
--- a/rgdps/api/gd/save_data.py
+++ b/rgdps/api/gd/save_data.py
@@ -4,7 +4,7 @@
 from fastapi import Form
 from fastapi import Request
 
-from rgdps import logger
+import logging
 from rgdps import settings
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
@@ -23,7 +23,7 @@ async def save_data_get(
     data = await save_data.get(ctx, user.id)
 
     if isinstance(data, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to fetch save data.",
             extra={
                 "user_id": user.id,
@@ -32,7 +32,7 @@ async def save_data_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully fetched save data.",
         extra={
             "user_id": user.id,
@@ -57,7 +57,7 @@ async def save_data_post(
     )
 
     if isinstance(res, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to write save data.",
             extra={
                 "user_id": user.id,
@@ -66,7 +66,7 @@ async def save_data_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully wrote save data.",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/user_comments.py b/rgdps/api/gd/user_comments.py
index c30abd2..f5b6892 100644
--- a/rgdps/api/gd/user_comments.py
+++ b/rgdps/api/gd/user_comments.py
@@ -3,7 +3,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
@@ -33,7 +33,7 @@ async def user_comments_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view user comments.",
             extra={
                 "error": result.value,
@@ -49,7 +49,7 @@ async def user_comments_get(
     )
     response += "#" + gd_obj.create_pagination_info(result.total, page, PAGE_SIZE)
 
-    logger.info(
+    logging.info(
         "Successfully viewed user comments.",
         extra={
             "target_id": target_id,
@@ -87,7 +87,7 @@ async def user_comments_post(
     result = await user_comments.create(ctx, user.id, content)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to post user comment.",
             extra={
                 "error": result.value,
@@ -96,7 +96,7 @@ async def user_comments_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully posted a user comment.",
         extra={
             "user_id": user.id,
@@ -133,7 +133,7 @@ async def like_target_post(
         raise NotImplementedError
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             f"Failed to like/dislike target.",
             extra={
                 "user_id": user.id,
@@ -145,7 +145,7 @@ async def like_target_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully liked/disliked target.",
         extra={
             "like_id": result.id,
@@ -162,7 +162,7 @@ async def user_comment_delete(
     result = await user_comments.delete(ctx, user.id, comment_id)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to delete user comment.",
             extra={
                 "user_id": user.id,
@@ -172,7 +172,7 @@ async def user_comment_delete(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully deleted comment.",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/user_relationships.py b/rgdps/api/gd/user_relationships.py
index 320311b..d988e3d 100644
--- a/rgdps/api/gd/user_relationships.py
+++ b/rgdps/api/gd/user_relationships.py
@@ -3,7 +3,7 @@
 from fastapi import Depends
 from fastapi import Form
 
-from rgdps import logger
+import logging
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
 from rgdps.api.dependencies import authenticate_dependency
@@ -33,7 +33,7 @@ async def friend_requests_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view friend request list.",
             extra={
                 "user_id": user.id,
@@ -57,7 +57,7 @@ async def friend_requests_get(
     )
     response += "#" + gd_obj.create_pagination_info(result.total, page, PAGE_SIZE)
 
-    logger.info(
+    logging.info(
         "Successfully viewed friend requests list.",
         extra={
             "user_id": user.id,
@@ -81,7 +81,7 @@ async def friend_request_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to send a friend request.",
             extra={
                 "user_id": user.id,
@@ -91,7 +91,7 @@ async def friend_request_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully sent a friend request.",
         extra={
             "user_id": user.id,
@@ -114,7 +114,7 @@ async def friend_request_read(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to mark friend request as seen.",
             extra={
                 "user_id": user.id,
@@ -124,7 +124,7 @@ async def friend_request_read(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully marked friend request as seen.",
         extra={
             "user_id": user.id,
@@ -154,7 +154,7 @@ async def friend_requests_delete(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to delete friend requests.",
             extra={
                 "user_id": user.id,
@@ -166,7 +166,7 @@ async def friend_requests_delete(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully deleted friend requests.",
         extra={
             "user_id": user.id,
@@ -192,7 +192,7 @@ async def friend_request_accept(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to accept friend request.",
             extra={
                 "user_id": user.id,
@@ -203,7 +203,7 @@ async def friend_request_accept(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully accepted friend request.",
         extra={
             "user_id": user.id,
@@ -226,7 +226,7 @@ async def user_relationships_get(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view relationships list.",
             extra={
                 "user_id": user.id,
@@ -251,7 +251,7 @@ async def user_relationships_get(
     # Mark them as seen.
     await user_relationships.mark_all_as_seen(ctx, user.id, relationship_type)
 
-    logger.info(
+    logging.info(
         "Successfully viewed relationship list.",
         extra={
             "user_id": user.id,
@@ -275,7 +275,7 @@ async def friend_remove_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to remove friend.",
             extra={
                 "user_id": user.id,
@@ -285,7 +285,7 @@ async def friend_remove_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully removed friend.",
         extra={
             "user_id": user.id,
@@ -309,7 +309,7 @@ async def block_user_post(
     )  # TODO: Temp.
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             f"Failed remove friend.",
         )
         return responses.fail()
@@ -322,7 +322,7 @@ async def block_user_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to block user.",
             extra={
                 "user_id": user.id,
@@ -332,7 +332,7 @@ async def block_user_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully blocked user.",
         extra={
             "user_id": user.id,
@@ -356,7 +356,7 @@ async def unblock_user_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to unblock user.",
             extra={
                 "user_id": user.id,
@@ -366,7 +366,7 @@ async def unblock_user_post(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully unblocked user.",
         extra={
             "user_id": user.id,
diff --git a/rgdps/api/gd/users.py b/rgdps/api/gd/users.py
index af3710c..4f220ef 100644
--- a/rgdps/api/gd/users.py
+++ b/rgdps/api/gd/users.py
@@ -4,7 +4,7 @@
 from fastapi import Form
 from pydantic import EmailStr
 
-from rgdps import logger
+import logging
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
 from rgdps.api.dependencies import authenticate_dependency
@@ -38,7 +38,7 @@ async def register_post(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "User registration failed.",
             extra={
                 "username": username,
@@ -54,7 +54,7 @@ async def register_post(
         case _:
             return responses.fail()
 
-    logger.info(
+    logging.info(
         "User registration success.",
         extra={
             "user_id": result.id,
@@ -74,7 +74,7 @@ async def login_post(
 ):
     result = await user_credentials.authenticate_from_gjp2_name(ctx, username, gjp2)
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "User login failed",
             extra={
                 "username": username,
@@ -94,7 +94,7 @@ async def login_post(
         case _:
             return responses.fail()
 
-    logger.info(
+    logging.info(
         "User login successful!",
         extra={
             "user_id": result.id,
@@ -113,7 +113,7 @@ async def user_info_get(
     target = await users.get(ctx, user.id, target_id, is_own)
 
     if isinstance(target, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to view a profile.",
             extra={
                 "error": target.value,
@@ -129,7 +129,7 @@ async def user_info_get(
         and (not target.user.privileges & UserPrivileges.USER_PROFILE_PUBLIC)
         and (not user.privileges & UserPrivileges.USER_VIEW_PRIVATE_PROFILE)
     ):
-        logger.info(
+        logging.info(
             "Tried to view a profile with insufficient privileges.",
             extra={
                 "user_id": user.id,
@@ -138,7 +138,7 @@ async def user_info_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully viewed a profile.",
         extra={
             "user_id": user.id,
@@ -218,7 +218,7 @@ async def user_info_update(
     )
 
     if isinstance(res, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to update the profile.",
             # XXX: Maybe add the stats here.
             extra={
@@ -228,7 +228,7 @@ async def user_info_update(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully updated profile.",
         extra={
             "user_id": user.id,
@@ -266,7 +266,7 @@ async def user_settings_update(
     )
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to update user settings.",
             extra={
                 "error": result.value,
@@ -281,7 +281,7 @@ async def user_settings_update(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully updated user settings.",
         extra={
             "user_id": user.id,
@@ -303,7 +303,7 @@ async def request_status_get(
     result = await users.request_status(ctx, user.id)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to get user request status.",
             extra={
                 "error": result.value,
@@ -312,7 +312,7 @@ async def request_status_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully got user request status.",
         extra={
             "user_id": user.id,
@@ -334,7 +334,7 @@ async def users_get(
     result = await users.search(ctx, page, PAGE_SIZE, query)
 
     if isinstance(result, ServiceError):
-        logger.info(
+        logging.info(
             "Failed to search users.",
             extra={
                 "query": query,
@@ -343,7 +343,7 @@ async def users_get(
         )
         return responses.fail()
 
-    logger.info(
+    logging.info(
         "Successfully searched users.",
         extra={
             "query": query,
diff --git a/rgdps/api/pubsub.py b/rgdps/api/pubsub.py
index b2a588c..48bfc52 100644
--- a/rgdps/api/pubsub.py
+++ b/rgdps/api/pubsub.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from rgdps import logger
+import logging
 from rgdps.common.context import Context
 from rgdps.services.pubsub import RedisPubsubRouter
 from rgdps.usecases import leaderboards
@@ -15,7 +15,7 @@
 
 @router.register("rgdps:ping")
 async def ping_handler(ctx: Context, data: bytes) -> None:
-    logger.debug(
+    logging.debug(
         "Redis received a ping.",
         extra={
             "data": data,
@@ -25,23 +25,23 @@ async def ping_handler(ctx: Context, data: bytes) -> None:
 
 @router.register("rgdps:levels:sync_meili")
 async def level_sync_meili_handler(ctx: Context, _) -> None:
-    logger.debug("Redis received a level sync request.")
+    logging.debug("Redis received a level sync request.")
     await levels.synchronise_search(ctx)
 
 
 @router.register("rgdps:users:sync_meili")
 async def user_sync_meili_handler(ctx: Context, _) -> None:
-    logger.debug("Redis received a user sync request.")
+    logging.debug("Redis received a user sync request.")
     await users.synchronise_search(ctx)
 
 
 @router.register("rgdps:leaderboards:sync_stars")
 async def leaderboard_sync_stars_handler(ctx: Context, _) -> None:
-    logger.debug("Redis received a leaderboard sync request.")
+    logging.debug("Redis received a leaderboard sync request.")
     await leaderboards.synchronise_top_stars(ctx)
 
 
 @router.register("rgdps:leaderboards:sync_creators")
 async def leaderboard_sync_creators_handler(ctx: Context, _) -> None:
-    logger.debug("Redis received a leaderboard sync request.")
+    logging.debug("Redis received a leaderboard sync request.")
     await leaderboards.synchronise_top_creators(ctx)
diff --git a/rgdps/logger.py b/rgdps/logger.py
index ec143d0..c35562b 100644
--- a/rgdps/logger.py
+++ b/rgdps/logger.py
@@ -3,83 +3,29 @@
 import logging.config
 import sys
 import threading
+import yaml
 from collections.abc import Callable
 from types import TracebackType
 from typing import Any
 from typing import Optional
 
-# TODO: Look into more customisability.
-_LOGGING_CONFIG = {
-    "version": 1,
-    "disable_existing_loggers": False,
-    "formatters": {
-        "logzioFormat": {
-            "format": '{"additional_field": "value"}',
-            "validate": False,
-        },
-    },
-    "handlers": {
-        "logzio": {
-            "class": "logzio.handler.LogzioHandler",
-            "level": "DEBUG",
-            "formatter": "logzioFormat",
-            "token": "",
-            "logzio_type": "python",
-            "logs_drain_timeout": 5,
-            "url": "",
-        },
-    },
-    "loggers": {
-        "rgdps": {
-            "level": "DEBUG",
-            "handlers": ["logzio"],
-            "propagate": True,
-        },
-    },
-}
-
-
-LOGGER = logging.getLogger("rgdps")
+def configure_logging(log_level: str | int) -> None:
+    with open("logging.yaml") as f:
+        config = yaml.safe_load(f.read())
 
+    # dynamically map levels for each handler
+    for handler in config["handlers"].values():
+        handler["level"] = log_level
 
-def init_basic_logging(log_level: str | int) -> None:
-    logging.basicConfig(level=log_level)
-    hook_exception_handlers()
+    config["root"]["level"] = log_level
+    logging.config.dictConfig(config)
 
 
-def init_logzio_logging(logzio_token: str, log_level: str, logzio_url: str) -> None:
-    _LOGGING_CONFIG["handlers"]["logzio"]["token"] = logzio_token
-    _LOGGING_CONFIG["loggers"]["rgdps"]["level"] = log_level
-    _LOGGING_CONFIG["handlers"]["logzio"]["url"] = logzio_url
-
-    logging.config.dictConfig(_LOGGING_CONFIG)
+def init_basic_logging(log_level: str | int) -> None:
+    configure_logging(log_level)
     hook_exception_handlers()
 
 
-def debug(*args, **kwargs) -> None:
-    return LOGGER.debug(*args, **kwargs)
-
-
-def info(*args, **kwargs) -> None:
-    return LOGGER.info(*args, **kwargs)
-
-
-def warning(*args, **kwargs) -> None:
-    return LOGGER.warning(*args, **kwargs)
-
-
-def error(*args, **kwargs) -> None:
-    return LOGGER.error(*args, **kwargs)
-
-
-def critical(*args, **kwargs) -> None:
-    return LOGGER.critical(*args, **kwargs)
-
-
-def exception(*args, **kwargs) -> None:
-    return LOGGER.exception(*args, **kwargs)
-
-
 # Hooking the exception handler to log uncaught exceptions.
 # https://gist.github.com/cmyui/201f3d687d289f24a3357c9ff3302206
 # NOTE: Decouple if needed.
@@ -100,7 +46,7 @@ def internal_exception_handler(
     exc_value: BaseException,
     exc_traceback: TracebackType | None,
 ) -> Any:
-    LOGGER.exception(
+    logging.exception(
         "An unhandled exception occurred!",
         exc_info=(exc_type, exc_value, exc_traceback),
     )
@@ -110,13 +56,13 @@ def internal_thread_exception_handler(
     args: threading.ExceptHookArgs,
 ) -> Any:
     if args.exc_value is not None:
-        LOGGER.exception(
+        logging.exception(
             "An unhandled exception occurred!",
             exc_info=(args.exc_type, args.exc_value, args.exc_traceback),
             extra={"thread_vars": vars(args.thread)},
         )
     else:
-        LOGGER.warning(
+        logging.warning(
             "A thread exception hook was called without an exception value!",
             extra={
                 "exc_type": args.exc_type,
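configure_logging above reads logging.yaml once and then forces every handler, as well as the root logger, to the LOG_LEVEL taken from the environment, so no level needs to be hardcoded in the YAML. A quick sanity check of the resulting pipeline (a hypothetical snippet, not part of the diff): httpx/httpcore stay pinned to WARNING by their dedicated logger entries, while application records follow LOG_LEVEL and land both on stdout (plaintext) and in logs.log (JSON):

    import logging

    from rgdps.logger import init_basic_logging

    init_basic_logging("DEBUG")

    # Dropped: the httpx logger entry in logging.yaml is pinned to WARNING.
    logging.getLogger("httpx").info("request sent")

    # Emitted twice: plaintext on stdout, JSON appended to logs.log for
    # the Datadog agent to tail.
    logging.debug("root logger follows LOG_LEVEL")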
diff --git a/rgdps/main.py b/rgdps/main.py
index 9492644..f27f840 100644
--- a/rgdps/main.py
+++ b/rgdps/main.py
@@ -1,6 +1,20 @@
 #!/usr/bin/env python3.12
 from __future__ import annotations
 
+from rgdps import settings
+import ddtrace
+
+if settings.DD_ENABLED:
+    ddtrace.tracer.configure(
+        https=False,
+        hostname=settings.DD_HOST,
+        port=settings.DD_PORT,
+        dogstatsd_url=f"udp://{settings.DD_STATS_HOST}:{settings.DD_STATS_PORT}",
+    )
+
+    # TODO: bump ddtrace when fastapi and starlette patches work again
+    ddtrace.patch_all(fastapi=False, starlette=False)
+
 import sys
 
 from rgdps.api import init_api
@@ -14,5 +28,4 @@
 
     winloop.install()
 
-
 asgi_app = init_api()
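With the tracer pointed at the agent and patch_all() applied (FastAPI and Starlette excluded until the TODO above is resolved), supported libraries are traced automatically; spans can also be opened by hand. A hypothetical sketch, with illustrative span and tag names:

    import ddtrace

    # Manual span around a unit of work; names here are examples only.
    with ddtrace.tracer.trace("level.upload", service="realistikgdps") as span:
        span.set_tag("level_id", 12345)
        ...  # the traced work

Because docker-compose.yml sets DD_LOGS_INJECTION=true, ddtrace can additionally stamp trace identifiers onto log records, letting Datadog correlate the JSON logs shipped from logs.log with these traces.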
diff --git a/rgdps/repositories/song.py b/rgdps/repositories/song.py
index 2fbc558..ef12023 100644
--- a/rgdps/repositories/song.py
+++ b/rgdps/repositories/song.py
@@ -3,7 +3,7 @@
 import urllib.parse
 from datetime import timedelta
 
-from rgdps import logger
+import logging
 from rgdps.common import modelling
 from rgdps.common.context import Context
 from rgdps.constants.songs import SongSource
@@ -178,7 +178,7 @@ async def get_cdn_url(ctx: Context) -> str | None:
     if cached is not None:
         return cached.decode()
 
-    logger.debug("CDN URL cache miss. Querying the servers.")
+    logging.debug("CDN URL cache miss. Querying the servers.")
 
     queried_url = await ctx.gd.get_cdn_url()
diff --git a/rgdps/services/boomlings.py b/rgdps/services/boomlings.py
index f44d84c..719eee2 100644
--- a/rgdps/services/boomlings.py
+++ b/rgdps/services/boomlings.py
@@ -5,7 +5,7 @@
 
 import httpx
 
-from rgdps import logger
+import logging
 from rgdps.common import gd_obj
 
 
@@ -120,7 +120,7 @@ async def __make_post_request(
     ) -> GDStatus[str]:
         request_url = self.server_url + endpoint
 
-        logger.debug(
+        logging.debug(
             "Making a POST request to the Geometry Dash servers.",
             extra={
                 "endpoint": endpoint,
@@ -134,7 +134,7 @@ async def __make_post_request(
 
         content = response.content.decode().strip()
 
-        logger.debug(
+        logging.debug(
             "POST request to Geometry Dash server succeeded.",
             extra={
                 "endpoint": endpoint,
@@ -152,7 +152,7 @@ async def __make_get_request(self, endpoint: str) -> GDStatus[str]:
 
         request_url = self.server_url + endpoint
 
-        logger.debug(
+        logging.debug(
             "Making a GET request to the Geometry Dash servers.",
             extra={
                 "endpoint": endpoint,
@@ -165,7 +165,7 @@ async def __make_get_request(self, endpoint: str) -> GDStatus[str]:
 
         content = response.content.decode().strip()
 
-        logger.debug(
+        logging.debug(
             "GET request to Geometry Dash server succeeded.",
             extra={
                 "endpoint": endpoint,
@@ -194,7 +194,7 @@ async def get_song(self, song_id: int) -> GDStatus[IntKeyResponse]:
 
         if isinstance(song_info, GDRequestStatus):
             if song_info.is_severe_error:
-                logger.warning(
+                logging.warning(
                     "Fetching song from the official servers failed with error.",
                     extra={
                         "song_id": song_id,
@@ -223,7 +223,7 @@ async def get_cdn_url(self) -> GDStatus[str]:
 
         if isinstance(song_info, GDRequestStatus):
             if song_info.is_severe_error:
-                logger.warning(
+                logging.warning(
                     "Fetching the CDN from the official servers failed with error.",
                     extra={
                         "error": song_info.value,
diff --git a/rgdps/services/pubsub.py b/rgdps/services/pubsub.py
index ae5627f..04099e7 100644
--- a/rgdps/services/pubsub.py
+++ b/rgdps/services/pubsub.py
@@ -7,7 +7,7 @@
 
 from redis.asyncio import Redis
 
-from rgdps import logger
+import logging
 from rgdps.common.context import Context
 
 RedisHandler = Callable[[Context, bytes], Awaitable[None]]
@@ -22,7 +22,7 @@ async def _listen_router(
     async with redis.pubsub() as pubsub:
         for channel in redis_handlers:
             await pubsub.subscribe(channel)
-            logger.debug(
+            logging.debug(
                 "Subscribed to Redis a channel.",
                 extra={
                     "channel": channel.decode(),
@@ -41,7 +41,7 @@ async def _listen_router(
                 handler = redis_handlers[message["channel"]]
                 await handler(ctx, message["data"])
             except Exception:
-                logger.exception(
+                logging.exception(
                     "Error while handling Redis message.",
                     extra={
                         "channel": message["channel"].decode(),
@@ -94,7 +94,7 @@ def decorator(handler: RedisHandler) -> RedisHandler:
     def merge(self, other: RedisPubsubRouter) -> None:
         for channel, handler in other.route_map().items():
             if channel in self._routes:
-                logger.warning(
+                logging.warning(
                     "Overwritten route when merging Redis routers!",
                     extra={
                         "channel": channel.decode(),
diff --git a/rgdps/services/storage.py b/rgdps/services/storage.py
index 8b1c48d..42b4435 100644
--- a/rgdps/services/storage.py
+++ b/rgdps/services/storage.py
@@ -8,7 +8,7 @@
 from aiobotocore.config import AioConfig
 from aiobotocore.session import get_session
 
-from rgdps import logger
+import logging
 
 
 class AbstractStorage(ABC):
@@ -100,7 +100,7 @@ async def __save(self, key: str, data: bytes) -> None:
                 return
             except Exception as e:
                 sleep_time = i * 2
-                logger.warning(
+                logging.warning(
                     "Failed to save to S3. Retrying...",
                     extra={
                         "key": key,
@@ -110,7 +110,7 @@ async def __save(self, key: str, data: bytes) -> None:
                 )
                 await asyncio.sleep(sleep_time)
 
-        logger.error(
+        logging.error(
             "Failed to save to S3 after retries.",
             extra={
                 "key": key,
diff --git a/rgdps/settings.py b/rgdps/settings.py
index 5d2b384..8df73a7 100644
--- a/rgdps/settings.py
+++ b/rgdps/settings.py
@@ -50,6 +50,10 @@ def read_boolean(value: str) -> bool:
 
 LOG_LEVEL = os.environ["LOG_LEVEL"]
 
-LOGZIO_ENABLED = read_boolean(os.environ["LOGZIO_ENABLED"])
-LOGZIO_TOKEN = os.environ["LOGZIO_TOKEN"]
-LOGZIO_URL = os.environ["LOGZIO_URL"]
+DD_ENABLED = read_boolean(os.environ["DD_ENABLED"])
+DD_HOST = os.environ["DD_HOST"]
+DD_PORT = int(os.environ["DD_PORT"])
+DD_STATS_HOST = os.environ["DD_STATS_HOST"]
+DD_STATS_PORT = int(os.environ["DD_STATS_PORT"])
+DD_API_KEY = os.environ["DD_API_KEY"]
+DD_SITE = os.environ["DD_SITE"]
diff --git a/rgdps/usecases/user_credentials.py b/rgdps/usecases/user_credentials.py
index 08afb77..b81eb3f 100644
--- a/rgdps/usecases/user_credentials.py
+++ b/rgdps/usecases/user_credentials.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from rgdps import logger
+import logging
 from rgdps import repositories
 from rgdps.common import hashes
 from rgdps.common.context import Context
@@ -26,7 +26,7 @@ async def authenticate_plain(
     creds = await repositories.user_credential.from_user_id(ctx, user_id)
 
     if creds is None:
-        logger.warning(
+        logging.warning(
             "User has no credentials attached to them.",
             extra={
                 "user_id": user_id,
@@ -56,7 +56,7 @@ async def authenticate_plain(
             hashed_pw,
         )
 
-        logger.info(
+        logging.info(
             "Migrated user credentials to latest version.",
             extra={
                 "user_id": user_id,
diff --git a/rgdps/utilities/gmdps_converter.py b/rgdps/utilities/gmdps_converter.py
index 771354a..5b5be35 100755
--- a/rgdps/utilities/gmdps_converter.py
+++ b/rgdps/utilities/gmdps_converter.py
@@ -18,7 +18,7 @@
 from meilisearch_python_sdk import AsyncClient as MeiliClient
 from redis.asyncio import Redis
 
-from rgdps import logger
+import logging
 from rgdps import repositories
 from rgdps import settings
 from rgdps.common import gd_obj
@@ -190,7 +190,7 @@ async def convert_songs(ctx: ConverterContext) -> None:
         try:
             size = float(song["size"])
         except ValueError:
-            logger.warning(
+            logging.warning(
                 "Converted song has an invalid file size!",
                 extra={
                     "song_id": song["ID"],
@@ -209,7 +209,7 @@ async def convert_songs(ctx: ConverterContext) -> None:
             author = author[:32]
 
         if len(download_url) > 256:
-            logger.warning(
+            logging.warning(
                 "Skipping song due to download URL being too long.",
                 extra={
                     "song_id": song["ID"],
@@ -239,7 +239,7 @@ async def convert_user_comments(ctx: ConverterContext) -> None:
     for comment in old_comments:
         account_id = ctx.user_id_map.get(comment["userID"])
         if account_id is None:
-            logger.warning(
+            logging.warning(
                 "Failed to find account ID for a userID when converting user comments.",
                 extra={
                     "user_id": comment["userID"],
@@ -255,7 +255,7 @@ async def convert_user_comments(ctx: ConverterContext) -> None:
         try:
             content = hashes.decode_base64(comment["comment"])[:256]
         except Exception:
-            logger.warning(
+            logging.warning(
                 "User comment had invalid base64 content. Skipped.",
                 extra={
                     "comment_id": comment["commentID"],
@@ -281,7 +281,7 @@ async def convert_level_comments(ctx: ConverterContext) -> None:
     for comment in old_comments:
         account_id = ctx.user_id_map.get(comment["userID"])
         if account_id is None:
-            logger.warning(
+            logging.warning(
                 "Failed to find account ID for a userID when converting level comments.",
                 extra={
                     "user_id": comment["userID"],
@@ -298,7 +298,7 @@ async def convert_level_comments(ctx: ConverterContext) -> None:
         try:
             content = hashes.decode_base64(comment["comment"])[:256]
         except Exception:
-            logger.warning(
+            logging.warning(
                 "User comment had invalid base64 content. Skipped.",
                 extra={
                     "comment_id": comment["commentID"],
@@ -396,7 +396,7 @@ async def convert_users(ctx: ConverterContext) -> None:
                 value=user["password"],
             )
         except Exception:
-            logger.exception(
+            logging.exception(
                 "Failed to convert user!",
                 extra={
                     "user_id": user_id,
@@ -572,75 +572,75 @@ async def convert_messages(ctx: ConverterContext) -> None:
 
 
 async def main() -> int:
-    logger.info("Starting the GMDPS -> RealistikGDPS converter.")
+    logging.info("Starting the GMDPS -> RealistikGDPS converter.")
     ctx = await get_context()
-    logger.info("Successfully connected!")
+    logging.info("Successfully connected!")
 
     try:
         if not await repositories.song.get_count(ctx):
-            logger.info("Converting songs...")
+            logging.info("Converting songs...")
             await convert_songs(ctx)
         else:
-            logger.info("Skipping song conversion, songs already exist.")
+            logging.info("Skipping song conversion, songs already exist.")
 
         if not await repositories.user.get_count(ctx):
-            logger.info("Converting users...")
+            logging.info("Converting users...")
             await convert_users(ctx)
         else:
-            logger.info("Skipping user conversion, users already exist.")
+            logging.info("Skipping user conversion, users already exist.")
 
         if not await repositories.level.get_count(ctx):
-            logger.info("Converting levels...")
+            logging.info("Converting levels...")
             await convert_levels(ctx)
         else:
-            logger.info("Skipping level conversion, levels already exist.")
+            logging.info("Skipping level conversion, levels already exist.")
 
         if not await repositories.user_comment.get_count(ctx):
-            logger.info("Converting user comments...")
+            logging.info("Converting user comments...")
             await convert_user_comments(ctx)
         else:
-            logger.info(
+            logging.info(
                 "Skipping user comment conversion, user comments already exist.",
             )
 
         if not await repositories.level_comment.get_count(ctx):
-            logger.info("Converting level comments...")
+            logging.info("Converting level comments...")
             await convert_level_comments(ctx)
         else:
-            logger.info(
+            logging.info(
                 "Skipping level comment conversion, level comments already exist.",
             )
 
         if not await repositories.friend_requests.get_count(ctx):
-            logger.info("Converting friend requests...")
+            logging.info("Converting friend requests...")
             await convert_friend_requests(ctx)
         else:
-            logger.info(
+            logging.info(
                 "Skipping friend requests conversion, friend requests already exist.",
             )
 
         if not await repositories.user_relationship.get_count(ctx):
-            logger.info("Converting user relationships...")
+            logging.info("Converting user relationships...")
             await convert_user_relationships(ctx)
         else:
-            logger.info(
+            logging.info(
                 "Skipping user relationships conversion, user relationships already exist.",
             )
 
         if not await repositories.message.get_count(ctx):
-            logger.info("Converting messages...")
+            logging.info("Converting messages...")
             await convert_messages(ctx)
         else:
-            logger.info(
+            logging.info(
                 "Skipping messages conversion, messages already exist.",
             )
 
     except Exception:
-        logger.exception(
+        logging.exception(
             "Failed to convert data!",
         )
 
-    logger.info("Migration complete!")
+    logging.info("Migration complete!")
 
     # TODO: Look into a better approach to stop docker
     # from restarting the container.
     while True:
diff --git a/scripts/run_api.sh b/scripts/run_api.sh
index a1d663b..e6454f9 100755
--- a/scripts/run_api.sh
+++ b/scripts/run_api.sh
@@ -2,7 +2,9 @@
 set -euo pipefail
 
 echo "Starting server..."
+
 exec uvicorn rgdps.main:asgi_app \
     --host $APP_HOST \
     --port $APP_PORT \
-    --reload
+    --no-access-log \
+    --reload
\ No newline at end of file
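The --no-access-log flag stops uvicorn from duplicating requests that the middleware in rgdps/api/__init__.py already logs with structured extras. An alternative (hypothetical, not part of this diff) would be to tame uvicorn's access logger from logging.yaml itself, mirroring the existing httpx/httpcore entries:

    import logging.config

    import yaml

    with open("logging.yaml") as f:
        config = yaml.safe_load(f.read())

    # Route uvicorn's access records through the same handlers, but only
    # at WARNING and above, instead of disabling them on the command line.
    config["loggers"]["uvicorn.access"] = {
        "level": "WARNING",
        "handlers": ["console", "file"],
        "propagate": False,
    }
    logging.config.dictConfig(config)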