Replace Logz.io with Datadog #74

Draft · wants to merge 5 commits into base: stable
11 changes: 7 additions & 4 deletions .env.example
@@ -43,10 +43,13 @@ SERVER_STATELESS=false

 # Logging Configuration
 LOG_LEVEL=INFO
-
-LOGZIO_ENABLED=false
-LOGZIO_TOKEN=
-LOGZIO_URL=https://listener.logz.io:8071
+DD_HOST=datadog
+DD_PORT=8126
+DD_STATS_HOST=datadog
+DD_STATS_PORT=8125
+DD_ENABLED=0
+DD_API_KEY=
+DD_SITE=

 # Utility Configuration
 PHPMYADMIN_PORT=8080
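
For context on how these variables pair up: DD_HOST/DD_PORT point at the agent's APM (trace) endpoint, while DD_STATS_HOST/DD_STATS_PORT point at its DogStatsD endpoint. A minimal sketch, assuming the app consumes them via the datadog package added in this PR's requirements; the wiring itself is not shown in the diff, and the metric name is illustrative:

import os

from datadog import initialize, statsd  # datadog == 0.49.1, added below

# Assumed wiring: the PR does not show where these variables are read.
if os.environ.get("DD_ENABLED") == "1":
    initialize(
        statsd_host=os.environ.get("DD_STATS_HOST", "datadog"),
        statsd_port=int(os.environ.get("DD_STATS_PORT", "8125")),
    )
    statsd.increment("rgdps.example_metric")  # illustrative metric name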
3 changes: 3 additions & 0 deletions .gitignore
@@ -138,3 +138,6 @@ mysql_data/
 meilisearch_data/
 redis_data/
 rgdps_data/
+
+# Datadog
+logs.log
5 changes: 5 additions & 0 deletions Makefile
@@ -7,13 +7,15 @@ run:
 		redis \
 		mysql \
 		meilisearch \
+		datadog \
[Review comment]
Member Author: Do we need to run the agent if we have it disabled?
tsunyoku (Collaborator), Apr 17, 2024: No, but it's a PITA to conditionally run in this way. My honest suggestion would be that the DD agent should be left up to the person running the server, since the provided configuration is more than enough, but I imagine you don't want this.

 		realistikgdps

 run-bg:
 	docker-compose up -d \
 		redis \
 		mysql \
 		meilisearch \
+		datadog \
[Review comment]
Member Author: As above.

 		realistikgdps

 stop:
@@ -28,6 +30,9 @@ shell:
 pma:
 	docker-compose up phpmyadmin

+datadog:
+	docker-compose up datadog
+
 converter:
 	APP_COMPONENT=converter docker-compose up \
 		redis \
1 change: 0 additions & 1 deletion README.md
@@ -12,7 +12,6 @@ It is written in asynchronous, modern Python and is meant as a replacement for o
 - MeiliSearch, allowing for typo tolerance
 - S3 support, allowing for flexible storage solutions
 - Proper ratelimiting
-- Logz.io logging support
 - Flexible command framework

 ## How to set up?
4 changes: 4 additions & 0 deletions datadog/conf.d/realistikgdps.yaml
@@ -0,0 +1,4 @@
+logs:
+  - type: file
+    path: /data/logs.log
+    service: "realistikgdps"
34 changes: 30 additions & 4 deletions docker-compose.yml
@@ -33,6 +33,7 @@ services:
       - mysql
       - redis
       - meilisearch
+      - datadog
     restart: always
     environment:
       - APP_PORT=${APP_PORT}
@@ -65,10 +66,16 @@
       - SERVER_GD_URL=${SERVER_GD_URL}
       - SERVER_STATELESS=${SERVER_STATELESS}

-      - LOG_LEVEL=${LOG_LEVEL}
-      - LOGZIO_ENABLED=${LOGZIO_ENABLED}
-      - LOGZIO_TOKEN=${LOGZIO_TOKEN}
-      - LOGZIO_URL=${LOGZIO_URL}
+      - DD_ENABLED=${DD_ENABLED}
+      - DD_HOST=${DD_HOST}
+      - DD_PORT=${DD_PORT}
+      - DD_STATS_HOST=${DD_STATS_HOST}
+      - DD_STATS_PORT=${DD_STATS_PORT}
+      - DD_API_KEY=${DD_API_KEY}
+      - DD_SITE=${DD_SITE}
+      - DD_LOGS_INJECTION=true
+      - DD_SERVICE=realistikgdps
+      - DD_LOGS_ENABLED=true

       # Internal docker specific variables
       - INTERNAL_RGDPS_DIRECTORY=/data # NOTE: Ensure this matches the volume mount below.
@@ -98,3 +105,22 @@
       - UPLOAD_LIMIT=500M
     depends_on:
       - mysql
+
+  datadog:
+    image: gcr.io/datadoghq/agent:7
+    ports:
+      - "8125:8125/udp"
+      - "8126:8126"
+    environment:
+      - DD_API_KEY=${DD_API_KEY}
+      - DD_SITE=${DD_SITE}
+      - DD_APM_ENABLED=true
+      - DD_LOGS_ENABLED=true
+      - DD_TRACE_ENABLED=true
+      - DD_DOGSTATSD_NON_LOCAL_TRAFFIC=true
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - /proc/:/host/proc/:ro
+      - /sys/fs/cgroup/:/host/sys/fs/cgroup:ro
+      - .:/data
+      - ./datadog/conf.d:/etc/datadog-agent/conf.d
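
The agent service ties the rest of the PR together: the .:/data mount lets it tail logs.log (per datadog/conf.d/realistikgdps.yaml above), the conf.d mount supplies that configuration, port 8126 receives APM traces, and 8125/udp receives DogStatsD metrics. A minimal sketch of pointing ddtrace at the agent using DD_HOST/DD_PORT; this wiring is an assumption, since ddtrace can also configure itself from the DD_AGENT_HOST/DD_TRACE_AGENT_PORT environment variables:

import os

from ddtrace import tracer  # ddtrace == 2.7.6, added in requirements

# Assumed wiring, not shown in the diff.
if os.environ.get("DD_ENABLED") == "1":
    tracer.configure(
        hostname=os.environ.get("DD_HOST", "datadog"),  # APM endpoint
        port=int(os.environ.get("DD_PORT", "8126")),
    )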
28 changes: 28 additions & 0 deletions logging.yaml
@@ -0,0 +1,28 @@
+version: 1
+disable_existing_loggers: true
+loggers:
+  httpx:
+    level: WARNING
+    handlers: [console, file]
+    propagate: no
+  httpcore:
+    level: WARNING
+    handlers: [console, file]
+    propagate: no
+handlers:
+  console:
+    class: logging.StreamHandler
+    formatter: plaintext
+    stream: ext://sys.stdout
+  file:
+    class: logging.FileHandler
+    formatter: json
+    filename: logs.log
+formatters:
+  plaintext:
+    format: '%(asctime)s %(name)s %(levelname)s %(message)s'
+  json:
+    class: pythonjsonlogger.jsonlogger.JsonFormatter
+    format: '%(asctime)s %(name)s %(levelname)s %(message)s'
+root:
+  handlers: [console, file]
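
The file handler writes JSON records to logs.log in the working directory, which is exactly the file the agent tails through the .:/data mount. A small self-contained demonstration of what the json formatter emits, assuming standard python-json-logger behaviour; the "demo" logger name and the extra field are illustrative:

import logging

from pythonjsonlogger import jsonlogger  # python-json-logger == 2.0.7

handler = logging.StreamHandler()
handler.setFormatter(
    jsonlogger.JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s"),
)
demo = logging.getLogger("demo")
demo.addHandler(handler)
demo.warning("Connected to the Redis database.", extra={"host": "redis"})
# One JSON object per record; extra fields are merged in, e.g.:
# {"asctime": "...", "name": "demo", "levelname": "WARNING",
#  "message": "Connected to the Redis database.", "host": "redis"}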
5 changes: 4 additions & 1 deletion requirements/main.txt
@@ -2,11 +2,12 @@ aiobotocore == 2.9.0
 bcrypt == 4.1.2
 cryptography
 databases[asyncmy] == 0.8.0
+datadog == 0.49.1
+ddtrace == 2.7.6
 email-validator == 2.0.0
 fastapi == 0.108.0
 fastapi-limiter == 0.1.5
 httpx == 0.26.0
-logzio-python-handler == 4.1.0
 meilisearch-python-sdk == 2.0.1
 orjson == 3.9.15
 python-dotenv == 1.0.1
@@ -17,3 +18,5 @@ uvicorn == 0.19.0
 uvloop == 0.19.0; sys_platform != "win32"
 winloop == 0.1.0; sys_platform == "win32"
 xor-cipher == 3.0.1
+python-json-logger == 2.0.7
+PyYAML == 6.0.1
35 changes: 15 additions & 20 deletions rgdps/api/__init__.py
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import urllib.parse
+import logging
 import uuid

 from databases import DatabaseURL
@@ -32,14 +33,7 @@


 def init_logging() -> None:
-    if settings.LOGZIO_ENABLED:
-        logger.init_logzio_logging(
-            settings.LOGZIO_TOKEN,
-            settings.LOG_LEVEL,
-            settings.LOGZIO_URL,
-        )
-    else:
-        logger.init_basic_logging(settings.LOG_LEVEL)
+    logger.init_basic_logging(settings.LOG_LEVEL)


 def init_events(app: FastAPI) -> None:
@@ -48,7 +42,7 @@ async def on_validation_error(
         request: Request,
         e: RequestValidationError,
     ) -> Response:
-        logger.exception(
+        logging.exception(
            f"A validation error has occured while parsing a request.",
            extra={
                "url": str(request.url),
@@ -84,7 +78,7 @@ def init_mysql(app: FastAPI) -> None:
     @app.on_event("startup")
     async def on_startup() -> None:
         await app.state.mysql.connect()
-        logger.info(
+        logging.info(
             "Connected to the MySQL database.",
             extra={
                 "host": settings.SQL_HOST,
@@ -118,7 +112,7 @@ async def on_startup() -> None:
             prefix="rgdps:ratelimit",
         )

-        logger.info(
+        logging.info(
             "Connected to the Redis database.",
             extra={
                 "host": settings.REDIS_HOST,
@@ -141,7 +135,7 @@ def init_meili(app: FastAPI) -> None:
     @app.on_event("startup")
     async def startup() -> None:
         await app.state.meili.health()
-        logger.info(
+        logging.info(
             "Connected to the MeiliSearch database.",
             extra={
                 "host": settings.MEILI_HOST,
@@ -163,7 +157,7 @@ def init_s3_storage(app: FastAPI) -> None:
     @app.on_event("startup")
     async def startup() -> None:
         app.state.storage = await app.state.storage.connect()
-        logger.info(
+        logging.info(
             "Connected to S3 storage.",
             extra={
                 "bucket": settings.S3_BUCKET,
@@ -183,15 +177,15 @@ def init_local_storage(app: FastAPI) -> None:

     @app.on_event("startup")
     async def startup() -> None:
-        logger.info("Connected to the local storage.")
+        logging.info("Connected to the local storage.")


 def init_gd(app: FastAPI) -> None:
     app.state.gd = GeometryDashClient(
         settings.SERVER_GD_URL,
     )

-    logger.info(
+    logging.info(
         "Initialised the main Geometry Dash client.",
         extra={
             "server_url": settings.SERVER_GD_URL,
@@ -203,7 +197,7 @@ def init_cache_stateful(app: FastAPI) -> None:
     app.state.user_cache = SimpleAsyncMemoryCache()
     app.state.password_cache = SimpleAsyncMemoryCache()

-    logger.info("Initialised stateful caching.")
+    logging.info("Initialised stateful caching.")


 def init_cache_stateless(app: FastAPI) -> None:
@@ -218,7 +212,7 @@ def init_cache_stateless(app: FastAPI) -> None:
         serialise=lambda x: x.encode(),
     )

-    logger.info("Initialised stateless caching.")
+    logging.info("Initialised stateless caching.")


 def init_routers(app: FastAPI) -> None:
@@ -230,7 +224,7 @@ def init_routers(app: FastAPI) -> None:
 def init_middlewares(app: FastAPI) -> None:
     @app.middleware("http")
     async def mysql_transaction(request: Request, call_next):
-        logger.debug(
+        logging.debug(
             "Opened a new MySQL transaction for request.",
             extra={
                 "uuid": request.state.uuid,
@@ -250,7 +244,7 @@ async def enforce_user_agent(
        # GD sends an empty User-Agent header.
        user_agent = request.headers.get("User-Agent")
        if user_agent != "":
-            logger.info(
+            logging.info(
                "Client request stopped due to invalid User-Agent header.",
                extra={
                    "url": str(request.url),
@@ -270,7 +264,7 @@ async def exception_logging(
        try:
            return await call_next(request)
        except Exception as e:
-            logger.exception(
+            logging.exception(
                f"An exception has occured while processing a request!",
                extra={
                    "url": str(request.url),
@@ -288,6 +282,7 @@ async def assign_uuid(request: Request, call_next):

 def init_api() -> FastAPI:
     init_logging()
+
     app = FastAPI(
         title="RealistikGDPS",
         openapi_url=None,
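
With the Logz.io branch removed, init_logging reduces to a single call into the project's logger module. The body of init_basic_logging is not part of this diff; a hypothetical sketch of how it could load logging.yaml, which would account for the PyYAML and python-json-logger additions:

import logging
import logging.config

import yaml  # PyYAML == 6.0.1

# Hypothetical: rgdps/logger.py is not shown in this PR, so this body
# is an assumption about how logging.yaml might be applied.
def init_basic_logging(log_level: str | int) -> None:
    with open("logging.yaml") as f:
        logging.config.dictConfig(yaml.safe_load(f))
    logging.getLogger().setLevel(log_level)  # LOG_LEVEL still applies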
16 changes: 8 additions & 8 deletions rgdps/api/commands/framework.py
@@ -14,7 +14,7 @@
 from typing import get_origin
 from typing import get_type_hints

-from rgdps import logger
+import logging
 from rgdps import repositories
 from rgdps.common.context import Context
 from rgdps.constants.errors import ServiceError
@@ -107,7 +107,7 @@ async def _resolve_from_type[T](ctx: CommandContext, value: str, cast: type[T])
     elif issubclass(cast, Enum):
         return cast(value)

-    logger.error(
+    logging.error(
         "Command parser tried to parse an unsupported type!",
         extra={
             "value": value,
@@ -323,7 +323,7 @@ def merge(self, router: CommandRouter) -> None:

         for key, value in router._routes.items():
             if key in self._routes:
-                logger.warning(
+                logging.warning(
                     "Command router merge has overwritten an existing command!",
                     extra={
                         "command": key,
@@ -390,7 +390,7 @@ async def entrypoint(
         if level_id is not None:
             level = await repositories.level.from_id(base_ctx, level_id)
             if level is None:
-                logger.error(
+                logging.error(
                     "Failed to resolve the command level!",
                     extra={"level_id": level_id},
                 )
@@ -400,7 +400,7 @@ async def entrypoint(
         if target_user_id is not None:
             target_user = await repositories.user.from_id(base_ctx, target_user_id)
             if target_user is None:
-                logger.error(
+                logging.error(
                     "Failed to resolve the command target user!",
                     extra={"target_user_id": target_user_id},
                 )
@@ -511,7 +511,7 @@ def decorator(func: CommandConditional) -> CommandConditional:

 # Command specific event handlers
 async def _event_on_exception(ctx: CommandContext, exception: Exception) -> str:
-    logger.exception(
+    logging.exception(
         "An exception has occurred while executing command!",
         extra={
             "command_name": ctx.layer.name,
@@ -599,7 +599,7 @@ async def execute(self, ctx: CommandContext) -> str:

         try:
             result = await self.handle(ctx, *params)
-            logger.info(
+            logging.info(
                 "Successfully executed command!",
                 extra={
                     "command_name": self.name,
@@ -610,7 +610,7 @@ async def execute(self, ctx: CommandContext) -> str:
         except CommandException as e:
             return await self._event_interruption(ctx, e)
         except Exception as e:
-            logger.exception(
+            logging.exception(
                 "Failed to run command handler!",
                 extra={
                     "command_name": self.name,