diff --git a/.env.example b/.env.example
index a755c7f..60eea78 100644
--- a/.env.example
+++ b/.env.example
@@ -38,8 +38,6 @@ RGDPS_DIRECTORY=./rgdps_data
 SERVER_NAME=RealistikGDPS
 SERVER_COMMAND_PREFIX=/
 SERVER_GD_URL=https://www.boomlings.com/database
-# XXX: If you don't know what is that, leave it as false
-SERVER_STATELESS=false
 
 # Logging Configuration
 LOG_LEVEL=INFO
diff --git a/README.md b/README.md
index fa55235..558b5d5 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,11 @@
-# RealistikGDPS
-The Python-based backend for RealistikGDPS, made as an expandable solution for a GDPS of any size.
+# Ognisko
+The Python-based backend for handling communication with the Geometry Dash client. Part of the RealistikGDPS stack.
 
 For support and a public running instance, please visit [our Discord!](https://discord.gg/uNTPGPn3D5)
 
 ## What is this?
 This is a modern Python implementation of the Geometry Dash server protocol meant to power my Geometry Dash Private server.
-It is written in asynchronous, modern Python and is meant as a replacement for our current [PHP based infrastructure](https://github.com/Cvolton/GMDprivateServer).
+It is written in asynchronous, modern Python and is meant as a replacement for our current [PHP-based backend](https://github.com/Cvolton/GMDprivateServer).
 
 ## Interesting Features
 - Fully Dockerised, allowing for easy setup
diff --git a/docker-compose.yml b/docker-compose.yml
index ce15596..3338a47 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -13,6 +13,11 @@ services:
     volumes:
       - ${MYSQL_DIRECTORY}:/var/lib/mysql
     restart: always
+    healthcheck:
+      test: "/usr/bin/mysql --user=$$MYSQL_USER --password=$$MYSQL_PASSWORD --execute \"SELECT 1;\""
+      interval: 2s
+      timeout: 20s
+      retries: 10
 
   redis:
     logging:
@@ -24,15 +29,23 @@ services:
       - "${REDIS_PORT}:${REDIS_PORT}"
     volumes:
       - ${REDIS_DIRECTORY}:/data
+    healthcheck:
+      test: "redis-cli -p ${REDIS_PORT} ping"
+      interval: 2s
+      timeout: 20s
+      retries: 10
 
   realistikgdps:
     image: realistikgdps:latest
    ports:
       - "${APP_PORT}:${APP_PORT}"
     depends_on:
-      - mysql
-      - redis
-      - meilisearch
+      mysql:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      meilisearch:
+        condition: service_healthy
     restart: always
     environment:
       - APP_PORT=${APP_PORT}
@@ -63,7 +76,6 @@ services:
       - SERVER_NAME=${SERVER_NAME}
      - SERVER_COMMAND_PREFIX=${SERVER_COMMAND_PREFIX}
       - SERVER_GD_URL=${SERVER_GD_URL}
-      - SERVER_STATELESS=${SERVER_STATELESS}
 
       - LOG_LEVEL=${LOG_LEVEL}
       - LOGZIO_ENABLED=${LOGZIO_ENABLED}
@@ -77,6 +89,7 @@ services:
       - .:/app
       - ${RGDPS_DIRECTORY}:/data # <- INTERNAL_RGDPS_DIRECTORY
 
+
   meilisearch:
     image: getmeili/meilisearch:v1.3.1
     restart: always
diff --git a/requirements/main.txt b/requirements/main.txt
index bb66ae7..3c621af 100644
--- a/requirements/main.txt
+++ b/requirements/main.txt
@@ -1,19 +1,19 @@
 aiobotocore == 2.9.0
-bcrypt == 4.1.2
+bcrypt == 4.2.0
 cryptography
-databases[asyncmy] == 0.8.0
+databases[asyncmy] == 0.9.0
 email-validator == 2.0.0
-fastapi == 0.108.0
-fastapi-limiter == 0.1.5
-httpx == 0.26.0
+fastapi == 0.112.0
+fastapi-limiter == 0.1.6
+httpx == 0.27.0
 logzio-python-handler == 4.1.0
-meilisearch-python-sdk == 2.0.1
-orjson == 3.9.15
+meilisearch-python-sdk == 3.1.0
+orjson == 3.10.7
 python-dotenv == 1.0.1
 python-multipart
-redis == 4.6.0
+redis == 5.0.8
 types-aiobotocore[s3] == 2.5.2
-uvicorn == 0.19.0
+uvicorn == 0.30.6
 uvloop == 0.19.0; sys_platform != "win32"
-winloop == 0.1.0; sys_platform == "win32"
-xor-cipher == 3.0.1
+winloop == 0.1.6; sys_platform == "win32"
+xor-cipher == 5.0.0
diff --git a/rgdps/__init__.py b/rgdps/__init__.py
index f90905b..f45253f 100644
--- a/rgdps/__init__.py
+++ b/rgdps/__init__.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from . import adapters
 from . import api
 from . import common
 from . import constants
@@ -7,4 +8,3 @@
 from . import repositories
 from . import services
 from . import settings
-from . import usecases
diff --git a/rgdps/adapters/__init__.py b/rgdps/adapters/__init__.py
new file mode 100644
index 0000000..9ff02ce
--- /dev/null
+++ b/rgdps/adapters/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from .boomlings import GeometryDashClient
+from .meilisearch import MeiliSearchClient
+from .mysql import AbstractMySQLService
+from .mysql import MySQLService
+from .redis import RedisClient
+from .redis import RedisPubsubRouter
+from .storage import AbstractStorage
+from .storage import LocalStorage
+from .storage import S3Storage
diff --git a/rgdps/services/boomlings.py b/rgdps/adapters/boomlings.py
similarity index 85%
rename from rgdps/services/boomlings.py
rename to rgdps/adapters/boomlings.py
index f44d84c..8fd49d1 100644
--- a/rgdps/services/boomlings.py
+++ b/rgdps/adapters/boomlings.py
@@ -2,8 +2,10 @@
 
 from enum import Enum
 from typing import Any
+from urllib.parse import unquote
 
 import httpx
+from pydantic import BaseModel
 
 from rgdps import logger
 from rgdps.common import gd_obj
@@ -96,6 +98,29 @@ def _is_response_valid(http_code: int, response: str) -> GDRequestStatus:
 
 type IntKeyResponse = dict[int, str]
 
 
+class BoomlingsSong(BaseModel):
+    """A model representing a song response from the official servers."""
+
+    id: int
+    name: str
+    author_id: int
+    author: str
+    author_youtube: str | None
+    size: float
+    download_url: str
+
+
+class BoomlingURL:
+    """A class wrapping a URL retrieved from the Geometry Dash server.
+    Used for typing convenience."""
+
+    def __init__(self, url: str) -> None:
+        self._url = url
+
+    def url(self) -> str:
+        return self._url
+
+
 class GeometryDashClient:
     """A client for interacting with the Geometry Dash servers."""
@@ -179,7 +204,7 @@ async def __make_get_request(self, endpoint: str) -> GDStatus[str]:
 
         return content
 
-    async def get_song(self, song_id: int) -> GDStatus[IntKeyResponse]:
+    async def song_from_id(self, song_id: int) -> GDStatus[BoomlingsSong]:
         """Queries the official servers for a song with a given id.
-        Parses the response into a dictionary."""
+        Parses the response into a `BoomlingsSong` model."""
@@ -211,9 +236,18 @@ async def song_from_id(self, song_id: int) -> GDStatus[BoomlingsSong]:
             value_cast=str,
         )
 
-        return song_parsed
+        # Creating model.
+        return BoomlingsSong(
+            id=int(song_parsed[1]),
+            name=song_parsed[2],
+            author_id=int(song_parsed[3]),
+            author=song_parsed[4],
+            author_youtube=song_parsed[7] or None,
+            size=float(song_parsed[5]),
+            download_url=unquote(song_parsed[10]),
+        )
 
-    async def get_cdn_url(self) -> GDStatus[str]:
+    async def fetch_cdn_id(self) -> GDStatus[BoomlingURL]:
         """Queries the official servers for the URL for the official
         Geometry Dash song and SFX library."""
@@ -231,4 +265,4 @@ async def fetch_cdn_id(self) -> GDStatus[BoomlingURL]:
         )
 
         # No parsing required here.
-        return song_info
+        return BoomlingURL(song_info)
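For orientation on the new model: the indices used in `song_from_id` map the official server's int-keyed song response onto typed fields (1 = song ID, 2 = name, 3 = author ID, 4 = author, 5 = size, 7 = author channel, 10 = percent-encoded download URL). A minimal, self-contained sketch of that final mapping step, using a fabricated response dictionary in place of the module's real `gd_obj` parsing:

```python
from urllib.parse import unquote

from pydantic import BaseModel


class BoomlingsSong(BaseModel):
    id: int
    name: str
    author_id: int
    author: str
    author_youtube: str | None
    size: float
    download_url: str


# Fabricated int-keyed response, standing in for the parsed server reply.
song_parsed: dict[int, str] = {
    1: "722056",
    2: "Xenogenesis",
    3: "554",
    4: "TheFatRat",
    5: "8.21",
    7: "",  # Empty when the author has no linked channel.
    10: "https%3A%2F%2Faudio.ngfiles.com%2F722000%2F722056.mp3",
}

song = BoomlingsSong(
    id=int(song_parsed[1]),
    name=song_parsed[2],
    author_id=int(song_parsed[3]),
    author=song_parsed[4],
    author_youtube=song_parsed[7] or None,  # Falsy empty string becomes None.
    size=float(song_parsed[5]),
    download_url=unquote(song_parsed[10]),  # The URL arrives percent-encoded.
)
print(song.download_url)  # https://audio.ngfiles.com/722000/722056.mp3
```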
diff --git a/rgdps/adapters/meilisearch.py b/rgdps/adapters/meilisearch.py
new file mode 100644
index 0000000..4178180
--- /dev/null
+++ b/rgdps/adapters/meilisearch.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from meilisearch_python_sdk import AsyncClient
+
+DEFAULT_TIMEOUT = 10
+
+
+class MeiliSearchClient(AsyncClient):
+    """An asynchronous MeiliSearch client."""
+
+    @staticmethod
+    def from_host(
+        host: str,
+        port: int,
+        api_key: str | None = None,
+        *,
+        timeout: int = DEFAULT_TIMEOUT,
+    ) -> MeiliSearchClient:
+        return MeiliSearchClient(
+            f"http://{host}:{port}",
+            api_key,
+            timeout=timeout,
+        )
diff --git a/rgdps/services/mysql.py b/rgdps/adapters/mysql.py
similarity index 100%
rename from rgdps/services/mysql.py
rename to rgdps/adapters/mysql.py
diff --git a/rgdps/adapters/redis.py b/rgdps/adapters/redis.py
new file mode 100644
index 0000000..6aaacc3
--- /dev/null
+++ b/rgdps/adapters/redis.py
@@ -0,0 +1,132 @@
+from __future__ import annotations
+
+import asyncio
+import logging
+from collections.abc import Awaitable
+from collections.abc import Callable
+from collections.abc import Coroutine
+from queue import Queue
+from typing import Self
+
+from redis.asyncio import Redis
+
+type PubSubHandler = Callable[[str], Coroutine[None, None, None]]
+
+
+class RedisClient(Redis):
+    """A thin wrapper around the asynchronous Redis client."""
+
+    def __init__(
+        self,
+        host: str,
+        port: int,
+        database: int = 0,
+        password: str | None = None,
+    ) -> None:
+        super().__init__(
+            host=host,
+            port=port,
+            db=database,
+            password=password,
+            decode_responses=True,
+        )
+
+        self._pubsub_router = RedisPubsubRouter()
+        self._tasks: Queue[Awaitable[None]] = Queue(100)
+        self._pubsub_listen_lock = asyncio.Lock()
+
+    async def initialise(self) -> Self:
+        if not self._pubsub_router.empty:
+            self._pubsub_task = self.__create_pubsub_task()
+
+        return await self.initialize()
+
+    def register(
+        self,
+        channel: str,
+    ) -> Callable[[PubSubHandler], PubSubHandler]:
+        """Registers a pubsub handler."""
+        return self._pubsub_router.register(channel)
+
+    def include_router(self, router: RedisPubsubRouter) -> None:
+        self._pubsub_router.merge(router)
+
+    async def __listen_pubsub(
+        self,
+    ) -> None:
+        async with (
+            self._pubsub_listen_lock,
+            self.pubsub() as pubsub,
+        ):
+            for channel in self._pubsub_router.route_map():
+                await pubsub.subscribe(channel)
+
+            while True:
+                message = await pubsub.get_message()
+                if message is not None:
+                    if message.get("type") != "message":
+                        continue
+
+                    handler = self._pubsub_router._get_handler(message["channel"])
+                    assert handler is not None
+
+                    # NOTE: Asyncio tasks can get GC'd lmfao.
+                    if self._tasks.full():
+                        self._tasks.get()
+
+                    self._tasks.put(asyncio.create_task(handler(message["data"])))
+
+                # NOTE: This is a hack to prevent the event loop from blocking.
+                await asyncio.sleep(0.1)
+
+    def __create_pubsub_task(self) -> asyncio.Task:
+        return asyncio.create_task(self.__listen_pubsub())
+
+
+class RedisPubsubRouter:
+    """A router for Redis subscriptions."""
+
+    __slots__ = (
+        "_routes",
+        "_prefix",
+    )
+
+    def __init__(
+        self,
+        *,
+        prefix: str = "",
+    ) -> None:
+        self._routes: dict[str, PubSubHandler] = {}
+        self._prefix = prefix
+
+    @property
+    def empty(self) -> bool:
+        return not self._routes
+
+    def register(
+        self,
+        channel: str,
+    ) -> Callable[[PubSubHandler], PubSubHandler]:
+        def decorator(handler: PubSubHandler) -> PubSubHandler:
+            channel_name = self._prefix + channel
+            self._routes[channel_name] = handler
+            return handler
+
+        return decorator
+
+    def merge(self, other: Self) -> None:
+        for channel, handler in other.route_map().items():
+            if channel in self._routes:
+                logging.warning(
+                    "Overwritten route when merging Redis routers!",
+                    extra={
+                        "channel": channel,
+                    },
+                )
+            self._routes[channel] = handler
+
+    def route_map(self) -> dict[str, PubSubHandler]:
+        return self._routes
+
+    def _get_handler(self, channel: str) -> PubSubHandler | None:
+        return self._routes.get(channel)
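The adapter pair is designed so modules declare handlers on a `RedisPubsubRouter` and the application merges them into the client before start-up (routers merged after `initialise` would never be subscribed). A minimal usage sketch, assuming a reachable local Redis on the default port; the channel and handler are illustrative:

```python
import asyncio

from rgdps.adapters import RedisClient
from rgdps.adapters import RedisPubsubRouter

router = RedisPubsubRouter(prefix="rgdps:")


@router.register("ping")  # Handles messages on the "rgdps:ping" channel.
async def ping_handler(data: str) -> None:
    print(f"pong: {data}")


async def main() -> None:
    redis = RedisClient(host="localhost", port=6379)
    redis.include_router(router)  # Must happen before `initialise`,
    await redis.initialise()      # which only starts the listener if routes exist.

    await redis.publish("rgdps:ping", "hello")
    await asyncio.sleep(0.5)  # Give the background listener a beat to dispatch.


asyncio.run(main())
```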
diff --git a/rgdps/services/storage.py b/rgdps/adapters/storage.py
similarity index 100%
rename from rgdps/services/storage.py
rename to rgdps/adapters/storage.py
diff --git a/rgdps/api/__init__.py b/rgdps/api/__init__.py
index 204b734..b0a1b17 100644
--- a/rgdps/api/__init__.py
+++ b/rgdps/api/__init__.py
@@ -11,20 +11,20 @@
 from fastapi.responses import JSONResponse
 from fastapi.responses import Response
 from fastapi_limiter import FastAPILimiter
-from meilisearch_python_sdk import AsyncClient as MeiliClient
 from redis.asyncio import Redis
 from starlette.middleware.base import RequestResponseEndpoint
 
 from rgdps import logger
 from rgdps import settings
+from rgdps.adapters import MeiliSearchClient
+from rgdps.adapters.boomlings import GeometryDashClient
+from rgdps.adapters.mysql import MySQLService
+from rgdps.adapters.redis import RedisClient
+from rgdps.adapters.storage import LocalStorage
+from rgdps.adapters.storage import S3Storage
 from rgdps.common.cache.memory import SimpleAsyncMemoryCache
 from rgdps.common.cache.redis import SimpleRedisCache
 from rgdps.constants.responses import GenericResponse
-from rgdps.services.boomlings import GeometryDashClient
-from rgdps.services.mysql import MySQLService
-from rgdps.services.pubsub import listen_pubsubs
-from rgdps.services.storage import LocalStorage
-from rgdps.services.storage import S3Storage
 
 from . import context
 from . import gd
@@ -98,19 +98,20 @@ async def on_shutdown() -> None:
 
 
 def init_redis(app: FastAPI) -> None:
-    app.state.redis = Redis.from_url(
-        f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/{settings.REDIS_DB}",
+    app.state.redis = RedisClient(
+        settings.REDIS_HOST,
+        settings.REDIS_PORT,
+        settings.REDIS_DB,
     )
 
     @app.on_event("startup")
     async def on_startup() -> None:
-        await app.state.redis.initialize()
+        # TODO: Fix.
         shared_ctx = context.PubsubContext(app)
-        await listen_pubsubs(
-            shared_ctx,
-            app.state.redis,
-            pubsub.router,
-        )
+        pubsub.inject_context(shared_ctx)
+        app.state.redis.include_router(pubsub.router)
+
+        await app.state.redis.initialise()
 
         # TODO: Custom ratelimit callback that returns `-1`.
         await FastAPILimiter.init(
@@ -132,8 +133,9 @@ async def on_shutdown() -> None:
 
 
 def init_meili(app: FastAPI) -> None:
-    app.state.meili = MeiliClient(
-        f"http://{settings.MEILI_HOST}:{settings.MEILI_PORT}",
+    app.state.meili = MeiliSearchClient.from_host(
+        settings.MEILI_HOST,
+        settings.MEILI_PORT,
         settings.MEILI_KEY,
         timeout=10,
     )
@@ -199,32 +201,16 @@ def init_gd(app: FastAPI) -> None:
     )
 
 
-def init_cache_stateful(app: FastAPI) -> None:
-    app.state.user_cache = SimpleAsyncMemoryCache()
+def init_cache(app: FastAPI) -> None:
     app.state.password_cache = SimpleAsyncMemoryCache()
 
-    logger.info("Initialised stateful caching.")
+    logger.info("Initialised stateful password caching.")
 
 
-def init_cache_stateless(app: FastAPI) -> None:
-    app.state.user_cache = SimpleRedisCache(
-        redis=app.state.redis,
-        key_prefix="rgdps:cache:user",
-    )
-    app.state.password_cache = SimpleRedisCache(
-        redis=app.state.redis,
-        key_prefix="rgdps:cache:password",
-        deserialise=lambda x: x.decode(),
-        serialise=lambda x: x.encode(),
-    )
-
-    logger.info("Initialised stateless caching.")
-
-
-def init_routers(app: FastAPI) -> None:
+def init_gd_routers(app: FastAPI) -> None:
     import rgdps.api
 
-    app.include_router(rgdps.api.gd.router)
+    app.include_router(rgdps.api.gd.routes.router)
@@ -306,11 +292,8 @@ def init_api() -> FastAPI:
     else:
         init_local_storage(app)
 
-    if settings.SERVER_STATELESS:
-        init_cache_stateless(app)
-    else:
-        init_cache_stateful(app)
+    init_cache(app)
 
-    init_routers(app)
+    init_gd_routers(app)
 
     return app
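With `init_api` now the single composition root, serving the backend is a short entry point. A sketch, assuming settings from `.env.example` have been loaded into the environment (the host and port here are illustrative):

```python
import uvicorn

from rgdps.api import init_api

# Builds the FastAPI app with MySQL, Redis, Meilisearch, storage,
# caching, and the GD routes wired in.
app = init_api()

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8080)
```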
diff --git a/rgdps/api/commands/framework.py b/rgdps/api/commands/framework.py
index 904334c..2fa4035 100644
--- a/rgdps/api/commands/framework.py
+++ b/rgdps/api/commands/framework.py
@@ -27,11 +27,11 @@
     from meilisearch_python_sdk import AsyncClient as MeiliClient
     from redis.asyncio import Redis
 
+    from rgdps.adapters.boomlings import GeometryDashClient
+    from rgdps.adapters.mysql import AbstractMySQLService
+    from rgdps.adapters.storage import AbstractStorage
     from rgdps.common.cache.base import AbstractAsyncCache
     from rgdps.models.user import User
-    from rgdps.services.boomlings import GeometryDashClient
-    from rgdps.services.mysql import AbstractMySQLService
-    from rgdps.services.storage import AbstractStorage
 
 
 # Private parsing functions.
@@ -237,10 +237,6 @@ def meili(self) -> MeiliClient:
     def storage(self) -> AbstractStorage:
         return self._base_context.storage
 
-    @property
-    def user_cache(self) -> AbstractAsyncCache[User]:
-        return self._base_context.user_cache
-
     @property
     def password_cache(self) -> AbstractAsyncCache[str]:
         return self._base_context.password_cache
diff --git a/rgdps/api/commands/levels.py b/rgdps/api/commands/levels.py
index 75b1bd4..6e3912c 100644
--- a/rgdps/api/commands/levels.py
+++ b/rgdps/api/commands/levels.py
@@ -7,7 +7,7 @@
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.level import Level
 from rgdps.models.user import User
-from rgdps.usecases import levels
+from rgdps.services import levels
 
 router = CommandRouter("levels_root")
diff --git a/rgdps/api/commands/schedule.py b/rgdps/api/commands/schedule.py
index aa51014..d63eb8d 100644
--- a/rgdps/api/commands/schedule.py
+++ b/rgdps/api/commands/schedule.py
@@ -5,7 +5,7 @@
 from rgdps.api.commands.framework import unwrap_service
 from rgdps.constants.level_schedules import LevelScheduleType
 from rgdps.constants.users import UserPrivileges
-from rgdps.usecases import level_schedules
+from rgdps.services import level_schedules
 
 router = CommandRouter("schedule_root")
diff --git a/rgdps/api/commands/sync.py b/rgdps/api/commands/sync.py
index 679544b..4fb9e98 100644
--- a/rgdps/api/commands/sync.py
+++ b/rgdps/api/commands/sync.py
@@ -5,9 +5,9 @@
 from rgdps.api.commands.framework import CommandContext
 from rgdps.api.commands.framework import CommandRouter
 from rgdps.constants.users import UserPrivileges
-from rgdps.usecases import leaderboards
-from rgdps.usecases import levels
-from rgdps.usecases import users
+from rgdps.services import leaderboards
+from rgdps.services import levels
+from rgdps.services import users
 
-router = CommandRouter("sunc_root")
+router = CommandRouter("sync_root")
diff --git a/rgdps/api/commands/users.py b/rgdps/api/commands/users.py
index 7fcb1c6..7c37a8d 100644
--- a/rgdps/api/commands/users.py
+++ b/rgdps/api/commands/users.py
@@ -6,7 +6,7 @@
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.rgb import RGB
 from rgdps.models.user import User
-from rgdps.usecases import users
+from rgdps.services import users
 
 router = CommandRouter("users_root")
diff --git a/rgdps/api/context.py b/rgdps/api/context.py
index c689b6a..aae8e26 100644
--- a/rgdps/api/context.py
+++ b/rgdps/api/context.py
@@ -1,5 +1,6 @@
 # from __future__ import annotations # This causes a pydantic issue. Yikes.
-from typing import TYPE_CHECKING
+
+from typing import override
 
 from fastapi import FastAPI
 from fastapi import Request
@@ -7,45 +8,44 @@
 from redis.asyncio import Redis
 from types_aiobotocore_s3 import S3Client
 
+from rgdps.adapters.boomlings import GeometryDashClient
+from rgdps.adapters.mysql import AbstractMySQLService
+from rgdps.adapters.storage import AbstractStorage
 from rgdps.common.cache.base import AbstractAsyncCache
 from rgdps.common.context import Context
-from rgdps.services.boomlings import GeometryDashClient
-from rgdps.services.mysql import AbstractMySQLService
-from rgdps.services.storage import AbstractStorage
-
-if TYPE_CHECKING:
-    from rgdps.models.user import User
 
 
 class HTTPContext(Context):
     def __init__(self, request: Request) -> None:
         self.request = request
 
+    @override
     @property
     def mysql(self) -> AbstractMySQLService:
         # NOTE: This is a per-request transaction.
         return self.request.state.mysql
 
+    @override
     @property
     def redis(self) -> Redis:
         return self.request.app.state.redis
 
+    @override
     @property
     def meili(self) -> MeiliClient:
         return self.request.app.state.meili
 
+    @override
     @property
     def storage(self) -> AbstractStorage:
         return self.request.app.state.storage
 
-    @property
-    def user_cache(self) -> "AbstractAsyncCache[User]":
-        return self.request.app.state.user_cache
-
+    @override
     @property
     def password_cache(self) -> AbstractAsyncCache[str]:
         return self.request.app.state.password_cache
 
+    @override
     @property
     def gd(self) -> GeometryDashClient:
         return self.request.app.state.gd
@@ -58,34 +58,37 @@ class PubsubContext(Context):
     def __init__(self, app: FastAPI) -> None:
         self.state = app.state
 
+    @override
     @property
     def mysql(self) -> AbstractMySQLService:
         return self.state.mysql
 
+    @override
     @property
     def redis(self) -> Redis:
         return self.state.redis
 
+    @override
     @property
     def meili(self) -> MeiliClient:
         return self.state.meili
 
+    @override
     @property
     def s3(self) -> S3Client | None:
         return self.state.s3
 
-    @property
-    def user_cache(self) -> "AbstractAsyncCache[User]":
-        return self.state.user_cache
-
+    @override
     @property
     def password_cache(self) -> AbstractAsyncCache[str]:
         return self.state.password_cache
 
+    @override
     @property
     def storage(self) -> AbstractStorage:
         return self.state.storage
 
+    @override
     @property
     def gd(self) -> GeometryDashClient:
         return self.state.gd
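The newly adopted `typing.override` decorator (Python 3.12+) is a static-analysis marker only: it has no runtime effect, but lets type checkers catch overrides that silently stop matching the base class during rename-heavy refactors like this one. A toy illustration, independent of the codebase:

```python
from typing import override


class Base:
    @property
    def redis(self) -> str:
        return "base"


class Child(Base):
    @override
    @property
    def redis(self) -> str:  # Fine: genuinely overrides Base.redis.
        return "child"

    @override
    @property
    def readis(self) -> str:  # Type checkers flag this typo: no base member to override.
        return "oops"
```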
diff --git a/rgdps/api/gd/__init__.py b/rgdps/api/gd/__init__.py
index 90df286..1cc4f93 100644
--- a/rgdps/api/gd/__init__.py
+++ b/rgdps/api/gd/__init__.py
@@ -1,321 +1,5 @@
 from __future__ import annotations
 
-from fastapi import APIRouter
-from fastapi import Depends
-from fastapi.responses import PlainTextResponse
-from fastapi_limiter.depends import RateLimiter
-
-from rgdps import settings
-
-from . import leaderboards
-from . import level_comments
-from . import levels
-from . import messages
-from . import misc
-from . import rewards
-from . import save_data
-from . import user_comments
-from . import user_relationships
-from . import users
-
-router = APIRouter(
-    prefix=settings.APP_URL_PREFIX,
-    default_response_class=PlainTextResponse,
-)
-
-router.add_api_route(
-    "/accounts/registerGJAccount.php",
-    users.register_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=10, minutes=10)),
-    ],
-)
-
-router.add_api_route(
-    "/",
-    misc.main_get,
-)
-
-router.add_api_route(
-    "/getGJUserInfo20.php",
-    users.user_info_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/accounts/loginGJAccount.php",
-    users.login_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/updateGJUserScore22.php",
-    users.user_info_update,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJFriendRequests20.php",
-    user_relationships.friend_requests_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/uploadFriendRequest20.php",
-    user_relationships.friend_request_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=1, seconds=30)),
-    ],
-)
-
-router.add_api_route(
-    "/readGJFriendRequest20.php",
-    user_relationships.friend_request_read,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/deleteGJFriendRequests20.php",
-    user_relationships.friend_requests_delete,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/acceptGJFriendRequest20.php",
-    user_relationships.friend_request_accept,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJUserList20.php",
-    user_relationships.user_relationships_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/removeGJFriend20.php",
-    user_relationships.friend_remove_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/blockGJUser20.php",
-    user_relationships.block_user_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=1, seconds=30)),
-    ],
-)
-
-router.add_api_route(
-    "/unblockGJUser20.php",
-    user_relationships.unblock_user_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/uploadGJAccComment20.php",
-    user_comments.user_comments_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=4, minutes=1)),
-    ],
-)
-
-router.add_api_route(
-    "/getGJAccountComments20.php",
-    user_comments.user_comments_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/updateGJAccSettings20.php",
-    users.user_settings_update,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJSongInfo.php",
-    levels.song_info_get,
-    methods=["POST"],
-)
-
-# Geometry Dash forces these 2 to be prefixed with /database
-router.add_api_route(
-    "/database/accounts/syncGJAccountNew.php",
-    save_data.save_data_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/database/accounts/backupGJAccountNew.php",
-    save_data.save_data_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=1, minutes=5)),
-    ],
-)
-
-router.add_api_route(
-    "/getAccountURL.php",
-    save_data.get_save_endpoint,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/uploadGJLevel21.php",
-    levels.level_post,
-    methods=["POST"],
-    # TODO: Tweak based on average user behaviour. May be way too high.
-    dependencies=[
-        Depends(RateLimiter(times=3, minutes=10)),
-    ],
-)
-
-router.add_api_route(
-    "/getGJLevels21.php",
-    levels.levels_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/downloadGJLevel22.php",
-    levels.level_get,
-    methods=["POST"],
-    # TODO: Tweak based on average user behaviour. May be too low.
-    dependencies=[
-        Depends(RateLimiter(times=100, minutes=10)),
-    ],
-)
-
-router.add_api_route(
-    "/getGJScores20.php",
-    leaderboards.leaderboard_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/likeGJItem211.php",
-    user_comments.like_target_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=50, minutes=10)),
-    ],
-)
-
-router.add_api_route(
-    "/deleteGJAccComment20.php",
-    user_comments.user_comment_delete,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/uploadGJComment21.php",
-    level_comments.create_comment_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=15, minutes=1)),
-    ],
-)
-
-router.add_api_route(
-    "/requestUserAccess.php",
-    users.request_status_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJComments21.php",
-    level_comments.level_comments_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJMessages20.php",
-    messages.messages_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/uploadGJMessage20.php",
-    messages.message_post,
-    methods=["POST"],
-    dependencies=[
-        Depends(RateLimiter(times=5, minutes=5)),
-    ],
-)
-
-router.add_api_route(
-    "/deleteGJMessages20.php",
-    messages.message_delete,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/downloadGJMessage20.php",
-    messages.message_get,
-    methods=["POST"],
-)
-
-
-router.add_api_route(
-    "/suggestGJStars20.php",
-    levels.suggest_level_stars,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJCommentHistory.php",
-    level_comments.comment_history_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/deleteGJComment20.php",
-    level_comments.level_comment_delete,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJRewards.php",
-    rewards.daily_chest_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJUsers20.php",
-    users.users_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/updateGJDesc20.php",
-    levels.level_desc_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/deleteGJLevelUser20.php",
-    levels.level_delete_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getGJDailyLevel.php",
-    levels.daily_level_info_get,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/rateGJDemon21.php",
-    levels.demon_difficulty_post,
-    methods=["POST"],
-)
-
-router.add_api_route(
-    "/getCustomContentURL.php",
-    levels.custom_content_cdn_get,
-    methods=["POST"],
-)
+from . import dependencies
+from . import responses
+from . import routes
diff --git a/rgdps/api/dependencies.py b/rgdps/api/gd/dependencies.py
similarity index 95%
rename from rgdps/api/dependencies.py
rename to rgdps/api/gd/dependencies.py
index 17632de..213e570 100644
--- a/rgdps/api/dependencies.py
+++ b/rgdps/api/gd/dependencies.py
@@ -8,7 +8,7 @@
 from fastapi.exceptions import HTTPException
 
 from rgdps import logger
-from rgdps import usecases
+from rgdps import services
 from rgdps.api.context import HTTPContext
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.responses import GenericResponse
@@ -28,7 +28,7 @@ async def wrapper(
-        # A gjp2 is a hash thats always 40 characters long.
+        # A gjp2 is a hash that's always 40 characters long.
         gjp: str = Form(..., alias=password_alias, min_length=40, max_length=40),
     ) -> User:
-        user = await usecases.user_credentials.authenticate_from_gjp2(
+        user = await services.user_credentials.authenticate_from_gjp2(
             ctx,
             user_id,
             gjp,
@@ -78,7 +78,7 @@ async def wrapper(
         username: str = Form(..., alias=username_alias),
         password_plain: str = Form(..., alias=password_alias),
     ) -> User:
-        user = await usecases.user_credentials.authenticate_from_name_plain(
+        user = await services.user_credentials.authenticate_from_name_plain(
             ctx,
             username,
             password_plain,
diff --git a/rgdps/api/gd/responses.py b/rgdps/api/gd/responses.py
new file mode 100644
index 0000000..e6034f4
--- /dev/null
+++ b/rgdps/api/gd/responses.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from typing import NoReturn
+
+from fastapi import HTTPException
+from fastapi.responses import PlainTextResponse
+
+from rgdps.constants.responses import GenericResponse
+from rgdps.services import ErrorOr
+from rgdps.services import ServiceError
+from rgdps.utilities.typing import SupportsStr
+
+_SERVICE_ERROR_CODE_MAP = {
+    ServiceError.USER_USERNAME_EXISTS: -2,
+}
+"""A map linking a `ServiceError` to its corresponding GD error code."""
+
+
+def _resolve_error_from_service_error(service_error: ServiceError) -> int:
+    return _SERVICE_ERROR_CODE_MAP.get(service_error, -1)
+
+
+def interrupt_with_error(error: SupportsStr) -> NoReturn:
+    """Interrupts the HTTP execution with the given error code."""
+
+    raise HTTPException(
+        status_code=200,
+        detail=str(error),
+    )
+
+
+def unwrap[T](value: ErrorOr[T]) -> T:
+    """Unwraps a service response, returning a successful value unchanged.
+    Otherwise, interrupts HTTP execution with the mapped error code."""
+
+    if isinstance(value, ServiceError):
+        interrupt_with_error(_resolve_error_from_service_error(value))
+
+    return value
+
+
+def success() -> PlainTextResponse:
+    return PlainTextResponse(str(GenericResponse.SUCCESS))
+
+
+def fail() -> PlainTextResponse:
+    return PlainTextResponse(str(GenericResponse.FAIL))
+
+
+def code(code: SupportsStr) -> PlainTextResponse:
+    return PlainTextResponse(str(code))
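To make the intended flow concrete: handlers pass service results through `unwrap`, which converts a `ServiceError` into an interrupting response carrying the mapped GD code, and render success bodies via the plain-text helpers. A sketch of the call pattern (`songs.from_id` is a hypothetical stand-in for a real service call):

```python
from fastapi import Depends
from fastapi.responses import PlainTextResponse

from rgdps.api.context import HTTPContext
from rgdps.api.gd import responses
from rgdps.services import songs


async def song_info_get(
    ctx: HTTPContext = Depends(),
    song_id: int = 0,
) -> PlainTextResponse:
    # `unwrap` either returns the success value or raises an HTTPException
    # carrying the mapped GD code (-2 for USER_USERNAME_EXISTS, -1 otherwise).
    song = responses.unwrap(await songs.from_id(ctx, song_id))  # hypothetical service
    return responses.code(song.id)
```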
diff --git a/rgdps/api/gd/routes/__init__.py b/rgdps/api/gd/routes/__init__.py
new file mode 100644
index 0000000..90df286
--- /dev/null
+++ b/rgdps/api/gd/routes/__init__.py
@@ -0,0 +1,321 @@
+from __future__ import annotations
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi.responses import PlainTextResponse
+from fastapi_limiter.depends import RateLimiter
+
+from rgdps import settings
+
+from . import leaderboards
+from . import level_comments
+from . import levels
+from . import messages
+from . import misc
+from . import rewards
+from . import save_data
+from . import user_comments
+from . import user_relationships
+from . import users
+
+router = APIRouter(
+    prefix=settings.APP_URL_PREFIX,
+    default_response_class=PlainTextResponse,
+)
+
+router.add_api_route(
+    "/accounts/registerGJAccount.php",
+    users.register_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=10, minutes=10)),
+    ],
+)
+
+router.add_api_route(
+    "/",
+    misc.main_get,
+)
+
+router.add_api_route(
+    "/getGJUserInfo20.php",
+    users.user_info_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/accounts/loginGJAccount.php",
+    users.login_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/updateGJUserScore22.php",
+    users.user_info_update,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJFriendRequests20.php",
+    user_relationships.friend_requests_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/uploadFriendRequest20.php",
+    user_relationships.friend_request_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=1, seconds=30)),
+    ],
+)
+
+router.add_api_route(
+    "/readGJFriendRequest20.php",
+    user_relationships.friend_request_read,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/deleteGJFriendRequests20.php",
+    user_relationships.friend_requests_delete,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/acceptGJFriendRequest20.php",
+    user_relationships.friend_request_accept,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJUserList20.php",
+    user_relationships.user_relationships_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/removeGJFriend20.php",
+    user_relationships.friend_remove_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/blockGJUser20.php",
+    user_relationships.block_user_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=1, seconds=30)),
+    ],
+)
+
+router.add_api_route(
+    "/unblockGJUser20.php",
+    user_relationships.unblock_user_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/uploadGJAccComment20.php",
+    user_comments.user_comments_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=4, minutes=1)),
+    ],
+)
+
+router.add_api_route(
+    "/getGJAccountComments20.php",
+    user_comments.user_comments_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/updateGJAccSettings20.php",
+    users.user_settings_update,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJSongInfo.php",
+    levels.song_info_get,
+    methods=["POST"],
+)
+
+# Geometry Dash forces these 2 to be prefixed with /database
+router.add_api_route(
+    "/database/accounts/syncGJAccountNew.php",
+    save_data.save_data_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/database/accounts/backupGJAccountNew.php",
+    save_data.save_data_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=1, minutes=5)),
+    ],
+)
+
+router.add_api_route(
+    "/getAccountURL.php",
+    save_data.get_save_endpoint,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/uploadGJLevel21.php",
+    levels.level_post,
+    methods=["POST"],
+    # TODO: Tweak based on average user behaviour. May be way too high.
+    dependencies=[
+        Depends(RateLimiter(times=3, minutes=10)),
+    ],
+)
+
+router.add_api_route(
+    "/getGJLevels21.php",
+    levels.levels_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/downloadGJLevel22.php",
+    levels.level_get,
+    methods=["POST"],
+    # TODO: Tweak based on average user behaviour. May be too low.
+    dependencies=[
+        Depends(RateLimiter(times=100, minutes=10)),
+    ],
+)
+
+router.add_api_route(
+    "/getGJScores20.php",
+    leaderboards.leaderboard_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/likeGJItem211.php",
+    user_comments.like_target_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=50, minutes=10)),
+    ],
+)
+
+router.add_api_route(
+    "/deleteGJAccComment20.php",
+    user_comments.user_comment_delete,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/uploadGJComment21.php",
+    level_comments.create_comment_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=15, minutes=1)),
+    ],
+)
+
+router.add_api_route(
+    "/requestUserAccess.php",
+    users.request_status_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJComments21.php",
+    level_comments.level_comments_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJMessages20.php",
+    messages.messages_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/uploadGJMessage20.php",
+    messages.message_post,
+    methods=["POST"],
+    dependencies=[
+        Depends(RateLimiter(times=5, minutes=5)),
+    ],
+)
+
+router.add_api_route(
+    "/deleteGJMessages20.php",
+    messages.message_delete,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/downloadGJMessage20.php",
+    messages.message_get,
+    methods=["POST"],
+)
+
+
+router.add_api_route(
+    "/suggestGJStars20.php",
+    levels.suggest_level_stars,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJCommentHistory.php",
+    level_comments.comment_history_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/deleteGJComment20.php",
+    level_comments.level_comment_delete,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJRewards.php",
+    rewards.daily_chest_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJUsers20.php",
+    users.users_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/updateGJDesc20.php",
+    levels.level_desc_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/deleteGJLevelUser20.php",
+    levels.level_delete_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getGJDailyLevel.php",
+    levels.daily_level_info_get,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/rateGJDemon21.php",
+    levels.demon_difficulty_post,
+    methods=["POST"],
+)
+
+router.add_api_route(
+    "/getCustomContentURL.php",
+    levels.custom_content_cdn_get,
+    methods=["POST"],
+)
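Any future endpoint follows the same registration pattern; a sketch of appending one more route to this module (the path, handler, and limits are hypothetical):

```python
from fastapi import Depends
from fastapi_limiter.depends import RateLimiter

# `levels.level_history_get` is a made-up handler standing in for any new
# endpoint; `router` and `levels` refer to the module scope above.
router.add_api_route(
    "/getGJLevelHistory.php",
    levels.level_history_get,
    methods=["POST"],
    dependencies=[
        Depends(RateLimiter(times=10, minutes=1)),
    ],
)
```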
diff --git a/rgdps/api/gd/leaderboards.py b/rgdps/api/gd/routes/leaderboards.py
similarity index 96%
rename from rgdps/api/gd/leaderboards.py
rename to rgdps/api/gd/routes/leaderboards.py
index 9bc4751..d2179d8 100644
--- a/rgdps/api/gd/leaderboards.py
+++ b/rgdps/api/gd/routes/leaderboards.py
@@ -9,7 +9,7 @@
 from rgdps.common import gd_obj
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.leaderboards import LeaderboardType
-from rgdps.usecases import leaderboards
+from rgdps.services import leaderboards
 
 
 async def leaderboard_get(
diff --git a/rgdps/api/gd/level_comments.py b/rgdps/api/gd/routes/level_comments.py
similarity index 97%
rename from rgdps/api/gd/level_comments.py
rename to rgdps/api/gd/routes/level_comments.py
index 449bf70..dc06d4b 100644
--- a/rgdps/api/gd/level_comments.py
+++ b/rgdps/api/gd/routes/level_comments.py
@@ -7,14 +7,14 @@
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import Base64String
 from rgdps.common import gd_obj
-from rgdps.common.validators import Base64String
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.level_comments import LevelCommentSorting
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.user import User
-from rgdps.usecases import level_comments
+from rgdps.services import level_comments
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/gd/levels.py b/rgdps/api/gd/routes/levels.py
similarity index 97%
rename from rgdps/api/gd/levels.py
rename to rgdps/api/gd/routes/levels.py
index 1c2504f..5227627 100644
--- a/rgdps/api/gd/levels.py
+++ b/rgdps/api/gd/routes/levels.py
@@ -8,22 +8,22 @@
 from rgdps import logger
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import Base64String
+from rgdps.api.validators import CommaSeparatedIntList
+from rgdps.api.validators import TextBoxString
 from rgdps.common import gd_obj
-from rgdps.common.validators import Base64String
-from rgdps.common.validators import CommaSeparatedIntList
-from rgdps.common.validators import TextBoxString
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.level_schedules import LevelScheduleType
 from rgdps.constants.levels import LevelDemonRating
+from rgdps.constants.levels import LevelFeature
 from rgdps.constants.levels import LevelLength
 from rgdps.constants.levels import LevelSearchType
-from rgdps.constants.levels import LevelFeature
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.user import User
-from rgdps.usecases import level_schedules
-from rgdps.usecases import levels
-from rgdps.usecases import songs
+from rgdps.services import level_schedules
+from rgdps.services import levels
+from rgdps.services import songs
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/gd/messages.py b/rgdps/api/gd/routes/messages.py
similarity index 96%
rename from rgdps/api/gd/messages.py
rename to rgdps/api/gd/routes/messages.py
index 1c07880..15d77a8 100644
--- a/rgdps/api/gd/messages.py
+++ b/rgdps/api/gd/routes/messages.py
@@ -5,15 +5,15 @@
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import Base64String
+from rgdps.api.validators import MessageContentString
 from rgdps.common import gd_obj
-from rgdps.common.validators import Base64String
-from rgdps.common.validators import MessageContentString
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.message import MessageDirection
 from rgdps.models.user import User
-from rgdps.usecases import messages
+from rgdps.services import messages
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/gd/misc.py b/rgdps/api/gd/routes/misc.py
similarity index 100%
rename from rgdps/api/gd/misc.py
rename to rgdps/api/gd/routes/misc.py
diff --git a/rgdps/api/gd/rewards.py b/rgdps/api/gd/routes/rewards.py
similarity index 94%
rename from rgdps/api/gd/rewards.py
rename to rgdps/api/gd/routes/rewards.py
index 9ef7148..822f77f 100644
--- a/rgdps/api/gd/rewards.py
+++ b/rgdps/api/gd/routes/rewards.py
@@ -6,12 +6,12 @@
 from rgdps import logger
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
 from rgdps.common import gd_obj
 from rgdps.constants.daily_chests import DailyChestView
 from rgdps.constants.errors import ServiceError
 from rgdps.models.user import User
-from rgdps.usecases import daily_chests
+from rgdps.services import daily_chests
 
 
 async def daily_chest_get(
diff --git a/rgdps/api/gd/save_data.py b/rgdps/api/gd/routes/save_data.py
similarity index 93%
rename from rgdps/api/gd/save_data.py
rename to rgdps/api/gd/routes/save_data.py
index cf64e02..54fa64f 100644
--- a/rgdps/api/gd/save_data.py
+++ b/rgdps/api/gd/routes/save_data.py
@@ -8,11 +8,11 @@
 from rgdps import settings
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
-from rgdps.common.validators import GameSaveData
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import GameSaveData
 from rgdps.constants.errors import ServiceError
 from rgdps.models.user import User
-from rgdps.usecases import save_data
+from rgdps.services import save_data
 
 
 async def save_data_get(
diff --git a/rgdps/api/gd/user_comments.py b/rgdps/api/gd/routes/user_comments.py
similarity index 96%
rename from rgdps/api/gd/user_comments.py
rename to rgdps/api/gd/routes/user_comments.py
index c30abd2..3c920d7 100644
--- a/rgdps/api/gd/user_comments.py
+++ b/rgdps/api/gd/routes/user_comments.py
@@ -7,15 +7,15 @@
 from rgdps.api import commands
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import Base64String
 from rgdps.common import gd_obj
-from rgdps.common.validators import Base64String
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.likes import LikeType
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.user import User
-from rgdps.usecases import likes
-from rgdps.usecases import user_comments
+from rgdps.services import likes
+from rgdps.services import user_comments
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/gd/user_relationships.py b/rgdps/api/gd/routes/user_relationships.py
similarity index 98%
rename from rgdps/api/gd/user_relationships.py
rename to rgdps/api/gd/routes/user_relationships.py
index 320311b..3bcfa13 100644
--- a/rgdps/api/gd/user_relationships.py
+++ b/rgdps/api/gd/routes/user_relationships.py
@@ -6,14 +6,14 @@
 from rgdps import logger
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import Base64String
 from rgdps.common import gd_obj
-from rgdps.common.validators import Base64String
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.users import UserRelationshipType
 from rgdps.models.user import User
-from rgdps.usecases import friend_requests
-from rgdps.usecases import user_relationships
+from rgdps.services import friend_requests
+from rgdps.services import user_relationships
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/gd/users.py b/rgdps/api/gd/routes/users.py
similarity index 97%
rename from rgdps/api/gd/users.py
rename to rgdps/api/gd/routes/users.py
index af3710c..ff81cc8 100644
--- a/rgdps/api/gd/users.py
+++ b/rgdps/api/gd/routes/users.py
@@ -7,10 +7,10 @@
 from rgdps import logger
 from rgdps.api import responses
 from rgdps.api.context import HTTPContext
-from rgdps.api.dependencies import authenticate_dependency
+from rgdps.api.gd.dependencies import authenticate_dependency
+from rgdps.api.validators import SocialMediaString
+from rgdps.api.validators import TextBoxString
 from rgdps.common import gd_obj
-from rgdps.common.validators import SocialMediaString
-from rgdps.common.validators import TextBoxString
 from rgdps.constants.errors import ServiceError
 from rgdps.constants.responses import LoginResponse
 from rgdps.constants.responses import RegisterResponse
@@ -18,8 +18,8 @@
 from rgdps.constants.users import UserPrivilegeLevel
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.user import User
-from rgdps.usecases import user_credentials
-from rgdps.usecases import users
+from rgdps.services import user_credentials
+from rgdps.services import users
 
 PAGE_SIZE = 10
diff --git a/rgdps/api/pubsub.py b/rgdps/api/pubsub.py
index b2a588c..f77b761 100644
--- a/rgdps/api/pubsub.py
+++ b/rgdps/api/pubsub.py
@@ -1,20 +1,33 @@
 from __future__ import annotations
 
 from rgdps import logger
-from rgdps.common.context import Context
-from rgdps.services.pubsub import RedisPubsubRouter
-from rgdps.usecases import leaderboards
-from rgdps.usecases import levels
-from rgdps.usecases import users
+from rgdps.adapters import RedisPubsubRouter
+from rgdps.resources import Context
+from rgdps.services import leaderboards
+from rgdps.services import levels
+from rgdps.services import users
 
 router = RedisPubsubRouter()
 
+# XXX: This is really hacky.
+redis_context: Context
+
+
+def inject_context(ctx: Context) -> None:
+    global redis_context
+    redis_context = ctx
+
+
+def context() -> Context:
+    return redis_context
+
 
 # TODO: Look into creating unique UUIDs for each pubsub message,
 # for easier identification in logging.
 @router.register("rgdps:ping")
-async def ping_handler(ctx: Context, data: bytes) -> None:
+async def ping_handler(data: str) -> None:
     logger.debug(
         "Redis received a ping.",
         extra={
@@ -24,24 +37,28 @@
 
 
 @router.register("rgdps:levels:sync_meili")
-async def level_sync_meili_handler(ctx: Context, _) -> None:
+async def level_sync_meili_handler(_) -> None:
+    ctx = context()
     logger.debug("Redis received a level sync request.")
     await levels.synchronise_search(ctx)
 
 
 @router.register("rgdps:users:sync_meili")
-async def user_sync_meili_handler(ctx: Context, _) -> None:
+async def user_sync_meili_handler(_) -> None:
+    ctx = context()
     logger.debug("Redis received a user sync request.")
     await users.synchronise_search(ctx)
 
 
 @router.register("rgdps:leaderboards:sync_stars")
-async def leaderboard_sync_stars_handler(ctx: Context, _) -> None:
+async def leaderboard_sync_stars_handler(_) -> None:
+    ctx = context()
     logger.debug("Redis received a leaderboard sync request.")
     await leaderboards.synchronise_top_stars(ctx)
 
 
 @router.register("rgdps:leaderboards:sync_creators")
-async def leaderboard_sync_creators_handler(ctx: Context, _) -> None:
+async def leaderboard_sync_creators_handler(_) -> None:
+    ctx = context()
     logger.debug("Redis received a leaderboard sync request.")
     await leaderboards.synchronise_top_creators(ctx)
diff --git a/rgdps/api/responses.py b/rgdps/api/responses.py
deleted file mode 100644
index b208437..0000000
--- a/rgdps/api/responses.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from __future__ import annotations
-
-from fastapi.responses import PlainTextResponse
-
-from rgdps.common.typing import SupportsStr
-from rgdps.constants.responses import GenericResponse
-
-
-def success() -> PlainTextResponse:
-    return PlainTextResponse(str(GenericResponse.SUCCESS))
-
-
-def fail() -> PlainTextResponse:
-    return PlainTextResponse(str(GenericResponse.FAIL))
-
-
-def code(code: SupportsStr) -> PlainTextResponse:
-    return PlainTextResponse(str(code))
diff --git a/rgdps/common/validators.py b/rgdps/api/validators.py
similarity index 100%
rename from rgdps/common/validators.py
rename to rgdps/api/validators.py
diff --git a/rgdps/common/__init__.py b/rgdps/common/__init__.py
index fa76301..b41ac23 100644
--- a/rgdps/common/__init__.py
+++ b/rgdps/common/__init__.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from . import cache
+from . import colour
 from . import context
 from . import data_utils
 from . import gd_logic
@@ -10,4 +11,3 @@
 from . import modelling
 from . import time
 from . import typing
-from . import validators
diff --git a/rgdps/common/cache/__init__.py b/rgdps/common/cache/__init__.py
deleted file mode 100644
index b5bd129..0000000
--- a/rgdps/common/cache/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from __future__ import annotations
-
-from . import base
-from . import memory
-from . import redis
diff --git a/rgdps/common/context.py b/rgdps/common/context.py
deleted file mode 100644
index 847f092..0000000
--- a/rgdps/common/context.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from __future__ import annotations
-
-from abc import ABC
-from abc import abstractmethod
-from typing import TYPE_CHECKING
-
-from meilisearch_python_sdk import AsyncClient as MeiliClient
-from redis.asyncio import Redis
-
-if TYPE_CHECKING:
-    from rgdps.common.cache.base import AbstractAsyncCache
-    from rgdps.models.user import User
-    from rgdps.services.boomlings import GeometryDashClient
-    from rgdps.services.mysql import AbstractMySQLService
-    from rgdps.services.storage import AbstractStorage
-
-
-class Context(ABC):
-    @property
-    @abstractmethod
-    def mysql(self) -> AbstractMySQLService: ...
-
-    @property
-    @abstractmethod
-    def redis(self) -> Redis: ...
-
-    @property
-    @abstractmethod
-    def meili(self) -> MeiliClient: ...
-
-    @property
-    @abstractmethod
-    def storage(self) -> AbstractStorage: ...
-
-    @property
-    @abstractmethod
-    def user_cache(self) -> AbstractAsyncCache[User]: ...
-
-    @property
-    @abstractmethod
-    def password_cache(self) -> AbstractAsyncCache[str]: ...
-
-    @property
-    @abstractmethod
-    def gd(self) -> GeometryDashClient: ...
diff --git a/rgdps/common/gd_obj.py b/rgdps/common/gd_obj.py
index 829bb76..afccd87 100644
--- a/rgdps/common/gd_obj.py
+++ b/rgdps/common/gd_obj.py
@@ -10,7 +10,6 @@
 from rgdps.constants.daily_chests import DailyChestType
 from rgdps.constants.friends import FriendStatus
 from rgdps.constants.levels import LevelDifficulty
-from rgdps.constants.levels import LevelSearchFlag
 from rgdps.constants.users import UserPrivileges
 from rgdps.models.daily_chest import DailyChest
 from rgdps.models.friend_request import FriendRequest
diff --git a/rgdps/common/hashes.py b/rgdps/common/hashes.py
deleted file mode 100644
index 261fc1f..0000000
--- a/rgdps/common/hashes.py
+++ /dev/null
@@ -1,117 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import base64
-import hashlib
-import random
-import string
-
-import bcrypt
-import xor_cipher
-
-from rgdps.constants.xor import XorKeys
-
-
-def _compare_bcrypt(hashed: str, plain: str) -> bool:
-    return bcrypt.checkpw(plain.encode(), hashed.encode())
-
-
-def hash_bcrypt(plain: str) -> str:
-    return bcrypt.hashpw(plain.encode(), bcrypt.gensalt()).decode()
-
-
-async def compare_bcrypt(hashed: str, plain: str) -> bool:
-    return await asyncio.to_thread(_compare_bcrypt, hashed, plain)
-
-
-async def hash_bcrypt_async(plain: str) -> str:
-    """Hashes a plaintext password using bcrypt, running the hashing in an
-    asynchronous thread.
-
-    Args:
-        plain (str): The plaintext password to hash.
-
-    Returns:
-        str: The bcrypt hash of the password.
-    """
-
-    return await asyncio.to_thread(hash_bcrypt, plain)
-
-
-def hash_md5(plain: str) -> str:
-    return hashlib.md5(plain.encode()).hexdigest()
-
-
-def hash_sha1(plain: str) -> str:
-    return hashlib.sha1(plain.encode()).hexdigest()
-
-
-def hash_level_password(password: int) -> str:
-    if not password:
-        return "0"
-
-    xor_password = xor_cipher.cyclic_xor_unsafe(
-        data=str(password).encode(),
-        key=XorKeys.LEVEL_PASSWORD,
-    )
-
-    return base64.urlsafe_b64encode(xor_password).decode()
-
-
-def encrypt_chests(response: str) -> str:
-    return base64.urlsafe_b64encode(
-        xor_cipher.cyclic_xor_unsafe(
-            data=response.encode(),
-            key=XorKeys.CHESTS,
-        ),
-    ).decode()
-
-
-def encode_base64(data: str) -> str:
-    return base64.urlsafe_b64encode(data.encode()).decode()
-
-
-def decode_base64(data: str) -> str:
-    return base64.urlsafe_b64decode(data.encode()).decode()
-
-
-CHARSET = string.ascii_letters + string.digits
-
-
-def random_string(length: int) -> str:
-    return "".join(random.choice(CHARSET) for _ in range(length))
-
-
-def decrypt_chest_check(check_string: str) -> str:
-    valid_check = check_string[5:]
-    de_b64 = decode_base64(valid_check)
-
-    return xor_cipher.cyclic_xor_unsafe(
-        data=de_b64.encode(),
-        key=XorKeys.CHESTS,
-    ).decode()
-
-
-def encrypt_message_content(content: str) -> str:
-    return base64.urlsafe_b64encode(
-        xor_cipher.cyclic_xor_unsafe(
-            data=content.encode(),
-            key=XorKeys.MESSAGE,
-        ),
-    ).decode()
-
-
-def decrypt_message_content(content: str) -> str:
-    de_b64 = decode_base64(content)
-
-    return xor_cipher.cyclic_xor_unsafe(
-        data=de_b64.encode(),
-        key=XorKeys.MESSAGE,
-    ).decode()
-
-
-GJP2_PEPPER = "mI29fmAnxgTs"
-
-
-def hash_gjp2(plain: str) -> str:
-    return hashlib.sha1((plain + GJP2_PEPPER).encode()).hexdigest()
-class DailyChestShardType(IntEnum): - FIRE = 0 - ICE = 1 - POISON = 2 - SHADOW = 3 - LAVA = 4 - - -class DailyChestRewardType(IntEnum): - MANA = 0 - DIAMONDS = 1 - FIRE_SHARD = 2 - ICE_SHARD = 3 - POISON_SHARD = 4 - SHADOW_SHARD = 5 - LAVA_SHARD = 6 - DEMON_KEY = 7 diff --git a/rgdps/constants/levels.py b/rgdps/constants/levels.py deleted file mode 100644 index a829153..0000000 --- a/rgdps/constants/levels.py +++ /dev/null @@ -1,166 +0,0 @@ -from __future__ import annotations - -from enum import Enum -from enum import IntEnum -from enum import IntFlag - - -class LevelSearchFlag(IntFlag): - NONE = 0 - EPIC = 1 << 0 - AWARDED = 1 << 1 - MAGIC = 1 << 2 - LEGENDARY = 1 << 3 - MYTHICAL = 1 << 4 - - def as_feature(self) -> LevelFeature: - if self & LevelSearchFlag.MYTHICAL: - return LevelFeature.MYTHICAL - - if self & LevelSearchFlag.LEGENDARY: - return LevelFeature.LEGENDARY - - if self & LevelSearchFlag.EPIC: - return LevelFeature.EPIC - - return LevelFeature.NONE - - -class LevelFeature(IntEnum): - NONE = 0 - FEATURE = 1 - EPIC = 2 - LEGENDARY = 3 - MYTHICAL = 4 - - def as_search_flag(self) -> LevelSearchFlag: - return _LEVEL_FEATURE_MAP[self] - - -_LEVEL_FEATURE_MAP = { - LevelFeature.NONE: LevelSearchFlag.NONE, - LevelFeature.FEATURE: LevelSearchFlag.NONE, - LevelFeature.EPIC: LevelSearchFlag.EPIC, - LevelFeature.LEGENDARY: LevelSearchFlag.EPIC | LevelSearchFlag.LEGENDARY, - LevelFeature.MYTHICAL: LevelSearchFlag.EPIC | LevelSearchFlag.LEGENDARY | LevelSearchFlag.MYTHICAL, -} - - -class LevelDifficulty(IntEnum): - NA = 0 - EASY = 10 - NORMAL = 20 - HARD = 30 - HARDER = 40 - INSANE = 50 - - @staticmethod - def from_stars(stars: int) -> LevelDifficulty: - return _DIFFICULTY_STAR_MAP.get( - stars, - LevelDifficulty.NA, - ) - - -_DIFFICULTY_STAR_MAP = { - 2: LevelDifficulty.EASY, - 3: LevelDifficulty.NORMAL, - 4: LevelDifficulty.HARD, - 5: LevelDifficulty.HARD, - 6: LevelDifficulty.HARDER, - 7: LevelDifficulty.HARDER, - 8: LevelDifficulty.INSANE, - 9: LevelDifficulty.INSANE, -} - - -class LevelDifficultyName(Enum): - """A string equivalent of `LevelDifficulty` enum used for validation.""" - - NA = "na" - EASY = "easy" - NORMAL = "normal" - HARD = "hard" - HARDER = "harder" - INSANE = "insane" - - def as_difficulty(self) -> LevelDifficulty: - return _NAME_DIFFICULTY_MAP[self] - - -_NAME_DIFFICULTY_MAP = { - LevelDifficultyName.NA: LevelDifficulty.NA, - LevelDifficultyName.EASY: LevelDifficulty.EASY, - LevelDifficultyName.NORMAL: LevelDifficulty.NORMAL, - LevelDifficultyName.HARD: LevelDifficulty.HARD, - LevelDifficultyName.HARDER: LevelDifficulty.HARDER, - LevelDifficultyName.INSANE: LevelDifficulty.INSANE, -} - - -class LevelLength(IntEnum): - TINY = 0 - SHORT = 1 - MEDIUM = 2 - LONG = 3 - XL = 4 - PLATFORMER = 5 - - -class LevelDemonDifficulty(IntEnum): - HARD = 0 - EASY = 3 - MEDIUM = 4 - INSANE = 5 - EXTREME = 6 - - -class LevelDemonRating(IntEnum): - """Demon difficulty rating used by the client to send demon ratings - (but not receive).""" - - EASY = 1 - MEDIUM = 2 - HARD = 3 - INSANE = 4 - EXTREME = 5 - - def as_difficulty(self) -> LevelDemonDifficulty: - return _RATING_DIFFICULTY_MAP[self] - - -_RATING_DIFFICULTY_MAP = { - LevelDemonRating.EASY: LevelDemonDifficulty.EASY, - LevelDemonRating.MEDIUM: LevelDemonDifficulty.MEDIUM, - LevelDemonRating.HARD: LevelDemonDifficulty.HARD, - LevelDemonRating.INSANE: LevelDemonDifficulty.INSANE, - LevelDemonRating.EXTREME: LevelDemonDifficulty.EXTREME, -} - - -# Ideas: -# Listed only for friends -class LevelPublicity(IntEnum): - PUBLIC = 0 - # 
Levels only accessible through direct ID. - GLOBAL_UNLISTED = 1 - FRIENDS_UNLISTED = 2 - - -class LevelSearchType(IntEnum): - SEARCH_QUERY = 0 - MOST_DOWNLOADED = 1 - MOST_LIKED = 2 - TRENDING = 3 - RECENT = 4 - USER_LEVELS = 5 - FEATURED = 6 - MAGIC = 7 - MODERATOR_SENT = 8 - LEVEL_LIST = 9 - AWARDED = 11 - FOLLOWED = 12 - FRIENDS = 13 - EPIC = 16 - DAILY = 21 - WEEKLY = 22 diff --git a/rgdps/constants/likes.py b/rgdps/constants/likes.py deleted file mode 100644 index 879ab54..0000000 --- a/rgdps/constants/likes.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum - - -class LikeType(IntEnum): - LEVEL = 1 - COMMENT = 2 - USER_COMMENT = 3 diff --git a/rgdps/constants/songs.py b/rgdps/constants/songs.py deleted file mode 100644 index 2d220f9..0000000 --- a/rgdps/constants/songs.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum - - -class SongSource(IntEnum): - BOOMLINGS = 0 - NEWGROUNDS = 1 - CUSTOM = 2 diff --git a/rgdps/constants/users.py b/rgdps/constants/users.py deleted file mode 100644 index 3989bcf..0000000 --- a/rgdps/constants/users.py +++ /dev/null @@ -1,121 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum -from enum import IntFlag - -from rgdps.common.mixins import IntEnumStringMixin - - -# 128-bit integer -class UserPrivileges(IntFlag): - USER_AUTHENTICATE = 1 << 0 - USER_PROFILE_PUBLIC = 1 << 1 - USER_STAR_LEADERBOARD_PUBLIC = 1 << 2 - USER_CREATOR_LEADERBOARD_PUBLIC = 1 << 3 - USER_DISPLAY_ELDER_BADGE = 1 << 4 - USER_DISPLAY_MOD_BADGE = 1 << 5 - USER_REQUEST_ELDER = 1 << 6 - USER_REQUEST_MODERATOR = 1 << 7 - USER_CREATE_USER_COMMENTS = 1 << 8 - USER_MODIFY_PRIVILEGES = 1 << 9 - USER_CHANGE_CREDENTIALS_OWN = 1 << 10 - USER_CHANGE_CREDENTIALS_OTHER = 1 << 11 - - LEVEL_UPLOAD = 1 << 12 - LEVEL_UPDATE = 1 << 13 - LEVEL_DELETE_OWN = 1 << 14 - LEVEL_DELETE_OTHER = 1 << 15 - LEVEL_RATE_STARS = 1 << 16 - LEVEL_ENQUEUE_DAILY = 1 << 17 - LEVEL_ENQUEUE_WEEKLY = 1 << 18 - LEVEL_MODIFY_VISIBILITY = 1 << 19 - LEVEL_RENAME_OTHER = 1 << 20 - LEVEL_MARK_MAGIC = 1 << 21 - LEVEL_MARK_AWARDED = 1 << 22 - - COMMENTS_POST = 1 << 23 - COMMENTS_DELETE_OWN = 1 << 24 - COMMENTS_DELETE_OTHER = 1 << 25 - COMMANDS_TRIGGER = 1 << 26 - COMMENTS_BYPASS_SPAM_FILTER = 1 << 27 - - MESSAGES_SEND = 1 << 28 - MESSAGES_DELETE_OWN = 1 << 29 - - FRIEND_REQUESTS_SEND = 1 << 30 - FRIEND_REQUESTS_ACCEPT = 1 << 31 - FRIEND_REQUESTS_DELETE_OWN = 1 << 32 - - MAP_PACK_CREATE = 1 << 33 - - GAUNTLET_CREATE = 1 << 34 - - SERVER_RESYNC_SEARCH = 1 << 35 - SERVER_STOP = 1 << 36 - - USER_VIEW_PRIVATE_PROFILE = 1 << 37 - COMMENTS_LIKE = 1 << 38 - - LEVEL_CHANGE_DESCRIPTION_OTHER = 1 << 39 - - SERVER_RESYNC_LEADERBOARDS = 1 << 40 - - LEVEL_MOVE_USER = 1 << 41 - - def as_bytes(self) -> bytes: - return self.to_bytes(16, "little", signed=False) - - @staticmethod - def from_bytes(b: bytes) -> UserPrivileges: - return UserPrivileges(int.from_bytes(b, "little", signed=False)) - - -class UserPrivacySetting(IntEnum): - PUBLIC = 0 - FRIENDS = 1 - PRIVATE = 2 - - -class UserRelationshipType(IntEnum): - FRIEND = 0 - BLOCKED = 1 - - -class UserPrivilegeLevel(IntEnumStringMixin, IntEnum): - """Enum for determining whether a user should be displayed as a - moderator, elder moderator, or neither. 
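Because `UserPrivileges` has outgrown 64 bits (the deleted enum is headed by a `# 128-bit integer` note and tops out at `1 << 41` with room to grow), it is persisted as a 16-byte little-endian blob via `as_bytes`/`from_bytes` rather than a BIGINT column. A round-trip sketch with just two of the real flags:

from enum import IntFlag


class UserPrivileges(IntFlag):
    USER_AUTHENTICATE = 1 << 0
    LEVEL_MOVE_USER = 1 << 41  # the full enum already exceeds 64 bits


privs = UserPrivileges.USER_AUTHENTICATE | UserPrivileges.LEVEL_MOVE_USER
packed = int(privs).to_bytes(16, "little", signed=False)

assert len(packed) == 16
assert UserPrivileges(int.from_bytes(packed, "little", signed=False)) == privs
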
- """ - - NONE = 0 - MODERATOR = 1 - ELDER_MODERATOR = 2 - - -STAR_PRIVILEGES = ( - UserPrivileges.USER_STAR_LEADERBOARD_PUBLIC | UserPrivileges.USER_PROFILE_PUBLIC -) - -CREATOR_PRIVILEGES = ( - UserPrivileges.USER_CREATOR_LEADERBOARD_PUBLIC | UserPrivileges.USER_PROFILE_PUBLIC -) - -DEFAULT_PRIVILEGES = ( - UserPrivileges.USER_AUTHENTICATE - | UserPrivileges.USER_PROFILE_PUBLIC - | UserPrivileges.USER_STAR_LEADERBOARD_PUBLIC - | UserPrivileges.USER_CREATOR_LEADERBOARD_PUBLIC - | UserPrivileges.USER_CREATE_USER_COMMENTS - | UserPrivileges.USER_CHANGE_CREDENTIALS_OWN - | UserPrivileges.LEVEL_UPLOAD - | UserPrivileges.LEVEL_UPDATE - | UserPrivileges.LEVEL_DELETE_OWN - | UserPrivileges.COMMENTS_POST - | UserPrivileges.COMMENTS_DELETE_OWN - | UserPrivileges.COMMANDS_TRIGGER - | UserPrivileges.MESSAGES_SEND - | UserPrivileges.MESSAGES_DELETE_OWN - | UserPrivileges.FRIEND_REQUESTS_SEND - | UserPrivileges.FRIEND_REQUESTS_ACCEPT - | UserPrivileges.FRIEND_REQUESTS_DELETE_OWN - | UserPrivileges.COMMENTS_LIKE -) diff --git a/rgdps/constants/xor.py b/rgdps/constants/xor.py deleted file mode 100644 index 42850ac..0000000 --- a/rgdps/constants/xor.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import annotations - - -class XorKeys: - LEVEL_PASSWORD = b"26364" - MESSAGE = b"14251" - QUESTS = b"19847" - CHESTS = b"59182" diff --git a/rgdps/helpers/__init__.py b/rgdps/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/rgdps/common/gd_logic.py b/rgdps/helpers/chest.py similarity index 64% rename from rgdps/common/gd_logic.py rename to rgdps/helpers/chest.py index fa38049..b242cbe 100644 --- a/rgdps/common/gd_logic.py +++ b/rgdps/helpers/chest.py @@ -1,35 +1,13 @@ from __future__ import annotations +import base64 import random from typing import NamedTuple -from rgdps.constants.daily_chests import DailyChestRewardType -from rgdps.constants.levels import LevelSearchFlag -from rgdps.models.level import Level +import xor_cipher - -def calculate_creator_points(level: Level) -> int: - creator_points = 0 - - # One for a rated level - if level.stars > 0: - creator_points += 1 - - # One for a featured level - if level.feature_order > 0: - creator_points += 1 - - # One for being rated epic - if level.search_flags & LevelSearchFlag.EPIC: - creator_points += 1 - - if level.search_flags & LevelSearchFlag.LEGENDARY: - creator_points += 1 - - if level.search_flags & LevelSearchFlag.MYTHICAL: - creator_points += 1 - - return creator_points +from rgdps.resources import DailyChestRewardType +from rgdps.utilities import cryptography class ChestReward(NamedTuple): @@ -48,17 +26,6 @@ class ChestReward(NamedTuple): ] SMALL_CHEST_DIAMONDS = [1, 2, 3, 4] - -def get_small_chest() -> list[ChestReward]: - mana = random.choice(SMALL_CHEST_MANA) - diamonds = random.choice(SMALL_CHEST_DIAMONDS) - - return [ - ChestReward(DailyChestRewardType.MANA, mana), - ChestReward(DailyChestRewardType.DIAMONDS, diamonds), - ] - - LARGE_CHEST_MANA = [ 100, 150, @@ -79,7 +46,17 @@ def get_small_chest() -> list[ChestReward]: LOW_DIAMONDS_ROLL = [4, 5] -def get_large_chest() -> list[ChestReward]: +def generate_small_chest() -> list[ChestReward]: + mana = random.choice(SMALL_CHEST_MANA) + diamonds = random.choice(SMALL_CHEST_DIAMONDS) + + return [ + ChestReward(DailyChestRewardType.MANA, mana), + ChestReward(DailyChestRewardType.DIAMONDS, diamonds), + ] + + +def generate_large_chest() -> list[ChestReward]: rewards = [ChestReward(DailyChestRewardType.MANA, random.choice(LARGE_CHEST_MANA))] diamonds = 
random.choice(LOW_DIAMONDS_ROLL) @@ -93,3 +70,25 @@ def get_large_chest() -> list[ChestReward]: rewards.append(ChestReward(random.choice(POSSIBLE_SHARDS), 1)) return rewards + + +CHEST_XOR_KEY = b"59182" + + +def encrypt_chests(response: str) -> str: + return base64.urlsafe_b64encode( + xor_cipher.cyclic_xor( + data=response.encode(), + key=CHEST_XOR_KEY, + ), + ).decode() + + +def decrypt_chest_check(check_string: str) -> str: + valid_check = check_string[5:] + de_b64 = cryptography.decode_base64(valid_check) + + return xor_cipher.cyclic_xor( + data=de_b64.encode(), + key=CHEST_XOR_KEY, + ).decode() diff --git a/rgdps/helpers/credential.py b/rgdps/helpers/credential.py new file mode 100644 index 0000000..b84ae6a --- /dev/null +++ b/rgdps/helpers/credential.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +import hashlib + +GJP2_PEPPER = "mI29fmAnxgTs" + + +def hash_gjp2(plain: str) -> str: + return hashlib.sha1((plain + GJP2_PEPPER).encode()).hexdigest() diff --git a/rgdps/helpers/level.py b/rgdps/helpers/level.py new file mode 100644 index 0000000..db7ab5c --- /dev/null +++ b/rgdps/helpers/level.py @@ -0,0 +1,50 @@ +from __future__ import annotations + +import base64 + +import xor_cipher + +from rgdps.constants.levels import LevelSearchFlag + + +def calculate_creator_points( + stars: int, + feature_order: int, + search_flags: LevelSearchFlag, +) -> int: + creator_points = 0 + + # One for a rated level + if stars > 0: + creator_points += 1 + + # One for a featured level + if feature_order > 0: + creator_points += 1 + + # One for being rated epic + if search_flags & LevelSearchFlag.EPIC: + creator_points += 1 + + if search_flags & LevelSearchFlag.LEGENDARY: + creator_points += 1 + + if search_flags & LevelSearchFlag.MYTHICAL: + creator_points += 1 + + return creator_points + + +LEVEL_PASSWORD_XOR_KEY = b"26364" + + +def hash_level_password(password: int) -> str: + if not password: + return "0" + + xor_password = xor_cipher.cyclic_xor( + data=str(password).encode(), + key=LEVEL_PASSWORD_XOR_KEY, + ) + + return base64.urlsafe_b64encode(xor_password).decode() diff --git a/rgdps/helpers/message.py b/rgdps/helpers/message.py new file mode 100644 index 0000000..6c10ae9 --- /dev/null +++ b/rgdps/helpers/message.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +import base64 + +import xor_cipher + +from rgdps.utilities import cryptography + +MESSAGE_XOR_KEY = b"14251" + + +def encrypt_message_content(content: str) -> str: + return base64.urlsafe_b64encode( + xor_cipher.cyclic_xor( + data=content.encode(), + key=MESSAGE_XOR_KEY, + ), + ).decode() + + +def decrypt_message_content(content: str) -> str: + de_b64 = cryptography.decode_base64(content) + + return xor_cipher.cyclic_xor( + data=de_b64.encode(), + key=MESSAGE_XOR_KEY, + ).decode() diff --git a/rgdps/models/daily_chest.py b/rgdps/models/daily_chest.py deleted file mode 100644 index 0497701..0000000 --- a/rgdps/models/daily_chest.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any - -from rgdps.constants.daily_chests import DailyChestType - - -@dataclass -class DailyChest: - id: int - user_id: int - type: DailyChestType - mana: int - diamonds: int - fire_shards: int - ice_shards: int - poison_shards: int - shadow_shards: int - lava_shards: int - demon_keys: int - claimed_ts: datetime - - @staticmethod - def from_mapping(mapping: Mapping[str, Any]) -> DailyChest: - return 
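The new helper modules introduced just above (chest, message, level password) all share one scheme: cyclic XOR with a fixed per-purpose key, then URL-safe base64. Since XOR is its own inverse, encrypting and decrypting are the same operation; `decrypt_chest_check` additionally drops the first five characters, which appear to be random padding the client prepends. A round trip with the chest key (the sample payload is made up):

import base64

import xor_cipher

CHEST_XOR_KEY = b"59182"


def encrypt_chests(response: str) -> str:
    return base64.urlsafe_b64encode(
        xor_cipher.cyclic_xor(data=response.encode(), key=CHEST_XOR_KEY),
    ).decode()


def decrypt_chests(encoded: str) -> str:
    return xor_cipher.cyclic_xor(
        data=base64.urlsafe_b64decode(encoded.encode()),
        key=CHEST_XOR_KEY,
    ).decode()


# Cyclic XOR is self-inverse, so the helpers round-trip cleanly.
assert decrypt_chests(encrypt_chests("1:100:2:4")) == "1:100:2:4"
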
DailyChest( - id=mapping["id"], - user_id=mapping["user_id"], - type=DailyChestType(mapping["type"]), - mana=mapping["mana"], - diamonds=mapping["diamonds"], - fire_shards=mapping["fire_shards"], - ice_shards=mapping["ice_shards"], - poison_shards=mapping["poison_shards"], - shadow_shards=mapping["shadow_shards"], - lava_shards=mapping["lava_shards"], - demon_keys=mapping["demon_keys"], - claimed_ts=mapping["claimed_ts"], - ) - - def as_dict(self, *, include_id: bool = True) -> dict[str, Any]: - mapping = { - "user_id": self.user_id, - "type": self.type.value, - "mana": self.mana, - "diamonds": self.diamonds, - "fire_shards": self.fire_shards, - "ice_shards": self.ice_shards, - "poison_shards": self.poison_shards, - "shadow_shards": self.shadow_shards, - "lava_shards": self.lava_shards, - "demon_keys": self.demon_keys, - "claimed_ts": self.claimed_ts, - } - - if include_id: - mapping["id"] = self.id or None - - return mapping diff --git a/rgdps/models/level.py b/rgdps/models/level.py deleted file mode 100644 index bbe5ed9..0000000 --- a/rgdps/models/level.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any - -from rgdps.constants.levels import LevelDemonDifficulty -from rgdps.constants.levels import LevelDifficulty -from rgdps.constants.levels import LevelLength -from rgdps.constants.levels import LevelPublicity -from rgdps.constants.levels import LevelSearchFlag - - -@dataclass -class Level: - id: int - name: str - user_id: int - description: str - custom_song_id: int | None - official_song_id: int | None - version: int - length: LevelLength - two_player: bool - publicity: LevelPublicity - render_str: str # Officially called extra string - game_version: int - binary_version: int - upload_ts: datetime - update_ts: datetime - original_id: int | None - - # Statistics - downloads: int - likes: int - stars: int - difficulty: LevelDifficulty - demon_difficulty: LevelDemonDifficulty | None - coins: int - coins_verified: bool - requested_stars: int - feature_order: int - search_flags: LevelSearchFlag - low_detail_mode: bool - object_count: int - building_time: int - update_locked: bool - song_ids: list[int] - sfx_ids: list[int] - deleted: bool - - # verification_replay: str - - @property - def is_demon(self) -> bool: - return self.stars == 10 - - @property - def is_auto(self) -> bool: - return self.stars == 1 - - @staticmethod - def from_mapping(level_dict: Mapping[str, Any]) -> Level: - demon_difficulty = None - if level_dict["demon_difficulty"] is not None: - demon_difficulty = LevelDemonDifficulty(level_dict["demon_difficulty"]) - - return Level( - id=level_dict["id"], - name=level_dict["name"], - user_id=level_dict["user_id"], - description=level_dict["description"], - custom_song_id=level_dict["custom_song_id"], - official_song_id=level_dict["official_song_id"], - version=level_dict["version"], - length=LevelLength(level_dict["length"]), - two_player=bool(level_dict["two_player"]), - publicity=LevelPublicity(level_dict["publicity"]), - render_str=level_dict["render_str"], - game_version=level_dict["game_version"], - binary_version=level_dict["binary_version"], - upload_ts=level_dict["upload_ts"], - update_ts=level_dict["update_ts"], - original_id=level_dict["original_id"], - downloads=level_dict["downloads"], - likes=level_dict["likes"], - stars=level_dict["stars"], - difficulty=LevelDifficulty(level_dict["difficulty"]), - 
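A worked example for `calculate_creator_points` from the new `helpers/level.py` above: one point each for being star-rated, being featured, and each of the epic/legendary/mythical bits. The enum below is trimmed to the relevant members:

from enum import IntFlag


class LevelSearchFlag(IntFlag):
    NONE = 0
    EPIC = 1 << 0
    LEGENDARY = 1 << 3
    MYTHICAL = 1 << 4


def calculate_creator_points(
    stars: int,
    feature_order: int,
    flags: LevelSearchFlag,
) -> int:
    points = 0
    if stars > 0:  # rated
        points += 1
    if feature_order > 0:  # featured
        points += 1
    for tier in (LevelSearchFlag.EPIC, LevelSearchFlag.LEGENDARY, LevelSearchFlag.MYTHICAL):
        if flags & tier:
            points += 1
    return points


# A rated, featured, legendary level: legendary implies the epic bit,
# so it earns 1 + 1 + 1 + 1 = 4 creator points.
assert calculate_creator_points(8, 12, LevelSearchFlag.EPIC | LevelSearchFlag.LEGENDARY) == 4
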
demon_difficulty=demon_difficulty, - coins=level_dict["coins"], - coins_verified=bool(level_dict["coins_verified"]), - requested_stars=level_dict["requested_stars"], - feature_order=level_dict["feature_order"], - search_flags=LevelSearchFlag(level_dict["search_flags"]), - low_detail_mode=bool(level_dict["low_detail_mode"]), - object_count=level_dict["object_count"], - building_time=level_dict["building_time"], - update_locked=bool(level_dict["update_locked"]), - deleted=bool(level_dict["deleted"]), - song_ids=level_dict["song_ids"], - sfx_ids=level_dict["sfx_ids"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "name": self.name, - "user_id": self.user_id, - "description": self.description, - "custom_song_id": self.custom_song_id, - "official_song_id": self.official_song_id, - "version": self.version, - "length": self.length.value, - "two_player": self.two_player, - "publicity": self.publicity.value, - "render_str": self.render_str, - "game_version": self.game_version, - "binary_version": self.binary_version, - "upload_ts": self.upload_ts, - "update_ts": self.update_ts, - "original_id": self.original_id, - "downloads": self.downloads, - "likes": self.likes, - "stars": self.stars, - "difficulty": self.difficulty.value, - "demon_difficulty": ( - self.demon_difficulty.value - if self.demon_difficulty is not None - else None - ), - "coins": self.coins, - "coins_verified": self.coins_verified, - "requested_stars": self.requested_stars, - "feature_order": self.feature_order, - "search_flags": self.search_flags.value, - "low_detail_mode": self.low_detail_mode, - "object_count": self.object_count, - "building_time": self.building_time, - "update_locked": self.update_locked, - "deleted": self.deleted, - "song_ids": self.song_ids, - "sfx_ids": self.sfx_ids, - } - - if include_id: - res["id"] = self.id or None - - return res - - # Dunder methods - def __hash__(self) -> int: - return self.id diff --git a/rgdps/models/like.py b/rgdps/models/like.py deleted file mode 100644 index c139c16..0000000 --- a/rgdps/models/like.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from typing import Any - -from rgdps.constants.likes import LikeType - - -@dataclass -class Like: - id: int - target_type: LikeType - target_id: int - user_id: int - value: int - - @staticmethod - def from_mapping(like_dict: Mapping[str, Any]) -> Like: - return Like( - id=like_dict["id"], - target_type=LikeType(like_dict["target_type"]), - target_id=like_dict["target_id"], - user_id=like_dict["user_id"], - value=like_dict["value"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res: dict[str, Any] = { - "target_type": self.target_type.value, - "target_id": self.target_id, - "user_id": self.user_id, - "value": self.value, - } - - if include_id: - res["id"] = self.id or None - - return res - - # Dunder methods - def __hash__(self) -> int: - return self.id diff --git a/rgdps/models/message.py b/rgdps/models/message.py deleted file mode 100644 index f7889c0..0000000 --- a/rgdps/models/message.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from enum import Enum -from typing import Any - - -class MessageDirection(str, Enum): - # NOTE: message direction is relative to the user who is - # making the request. 
- SENT = "sent" - RECEIVED = "received" - - -@dataclass -class Message: - id: int - sender_user_id: int - recipient_user_id: int - subject: str - content: str - post_ts: datetime - seen_ts: datetime | None - - @staticmethod - def from_mapping(message_dict: Mapping[str, Any]) -> Message: - return Message( - id=message_dict["id"], - sender_user_id=message_dict["sender_user_id"], - recipient_user_id=message_dict["recipient_user_id"], - subject=message_dict["subject"], - content=message_dict["content"], - post_ts=message_dict["post_ts"], - seen_ts=message_dict["seen_ts"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "sender_user_id": self.sender_user_id, - "recipient_user_id": self.recipient_user_id, - "subject": self.subject, - "content": self.content, - "post_ts": self.post_ts, - "seen_ts": self.seen_ts, - } - - if include_id: - res["id"] = self.id - - return res diff --git a/rgdps/models/rgb.py b/rgdps/models/rgb.py deleted file mode 100644 index 80fed97..0000000 --- a/rgdps/models/rgb.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - - -class RGB: - def __init__(self, r: int, g: int, b: int) -> None: - self.r = r - self.g = g - self.b = b - - @staticmethod - def from_str(text: str) -> RGB | None: - text = text.lower() - if text in COLOUR_PRESETS: - return COLOUR_PRESETS[text] - - text_split = text.replace(", ", ",").strip(" ").split(",", maxsplit=3) - if len(text_split) != 3: - return None - - r, g, b = text_split - if not r.isdigit() or not g.isdigit() or b.isdigit(): - return None - - return RGB(r=int(r), g=int(g), b=int(b)) - - def __str__(self) -> str: - return f"{self.r},{self.g},{self.b}" - - -COLOUR_PRESETS: dict[str, RGB] = { - "red": RGB(235, 64, 52), - "blue": RGB(66, 135, 245), - "yellow": RGB(252, 186, 3), - "green": RGB(50, 168, 82), - "blush": RGB(255, 204, 229), - "cream": RGB(255, 229, 204), - "turquoise": RGB(204, 229, 255), - "mint": RGB(204, 255, 235), - "gold": RGB(255, 236, 153), - "black": RGB(0, 0, 0), -} diff --git a/rgdps/models/song.py b/rgdps/models/song.py deleted file mode 100644 index 4a068e7..0000000 --- a/rgdps/models/song.py +++ /dev/null @@ -1,58 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from typing import Any - -from rgdps.constants.songs import SongSource - - -@dataclass -class Song: - id: int - name: str - author_id: int - author: str - author_youtube: str | None - size: float - download_url: str - source: SongSource - blocked: bool - - def __str__(self) -> str: - return f"{self.author} - {self.name} ({self.id})" - - @staticmethod - def from_mapping(song_dict: Mapping[str, Any]) -> Song: - return Song( - id=song_dict["id"], - name=song_dict["name"], - author_id=song_dict["author_id"], - author=song_dict["author"], - author_youtube=song_dict["author_youtube"], - size=song_dict["size"], - download_url=song_dict["download_url"], - source=SongSource(song_dict["source"]), - blocked=bool(song_dict["blocked"]), - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "name": self.name, - "author_id": self.author_id, - "author": self.author, - "author_youtube": self.author_youtube, - "size": self.size, - "download_url": self.download_url, - "source": self.source.value, - "blocked": self.blocked, - } - - if include_id: - res["id"] = self.id or None - - return res - - # Dunder methods - def __hash__(self) -> int: - return self.id diff --git a/rgdps/models/user.py b/rgdps/models/user.py deleted file mode 100644 
index 3d49ed6..0000000 --- a/rgdps/models/user.py +++ /dev/null @@ -1,143 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any - -from rgdps.constants.users import UserPrivacySetting -from rgdps.constants.users import UserPrivileges - - -@dataclass -class User: - id: int - username: str - email: str - privileges: UserPrivileges - - message_privacy: UserPrivacySetting - friend_privacy: UserPrivacySetting - comment_privacy: UserPrivacySetting - - youtube_name: str | None - twitter_name: str | None - twitch_name: str | None - - register_ts: datetime - - # Stats - stars: int - demons: int - moons: int - primary_colour: int - secondary_colour: int - glow_colour: int - display_type: int - icon: int - ship: int - ball: int - ufo: int - wave: int - robot: int - spider: int - swing_copter: int - jetpack: int - explosion: int - glow: bool - creator_points: int - coins: int - user_coins: int - diamonds: int - comment_colour: str - - @staticmethod - def from_mapping(user_dict: Mapping[str, Any]) -> User: - return User( - id=user_dict["id"], - username=user_dict["username"], - email=user_dict["email"], - # TODO: look into avoiding using bytes in mappings - privileges=UserPrivileges.from_bytes(user_dict["privileges"]), - message_privacy=UserPrivacySetting(user_dict["message_privacy"]), - friend_privacy=UserPrivacySetting(user_dict["friend_privacy"]), - comment_privacy=UserPrivacySetting(user_dict["comment_privacy"]), - youtube_name=user_dict["youtube_name"], - twitter_name=user_dict["twitter_name"], - twitch_name=user_dict["twitch_name"], - register_ts=user_dict["register_ts"], - stars=user_dict["stars"], - demons=user_dict["demons"], - primary_colour=user_dict["primary_colour"], - secondary_colour=user_dict["secondary_colour"], - display_type=user_dict["display_type"], - icon=user_dict["icon"], - ship=user_dict["ship"], - ball=user_dict["ball"], - ufo=user_dict["ufo"], - wave=user_dict["wave"], - robot=user_dict["robot"], - spider=user_dict["spider"], - explosion=user_dict["explosion"], - glow=bool(user_dict["glow"]), - creator_points=user_dict["creator_points"], - coins=user_dict["coins"], - user_coins=user_dict["user_coins"], - diamonds=user_dict["diamonds"], - comment_colour=user_dict["comment_colour"], - # 2.2 stats - moons=user_dict["moons"], - swing_copter=user_dict["swing_copter"], - jetpack=user_dict["jetpack"], - glow_colour=user_dict["glow_colour"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "username": self.username, - "email": self.email, - "privileges": self.privileges.as_bytes(), - "message_privacy": self.message_privacy.value, - "friend_privacy": self.friend_privacy.value, - "comment_privacy": self.comment_privacy.value, - "twitter_name": self.twitter_name, - "youtube_name": self.youtube_name, - "twitch_name": self.twitch_name, - "register_ts": self.register_ts, - "stars": self.stars, - "demons": self.demons, - "primary_colour": self.primary_colour, - "secondary_colour": self.secondary_colour, - "display_type": self.display_type, - "icon": self.icon, - "ship": self.ship, - "ball": self.ball, - "ufo": self.ufo, - "wave": self.wave, - "robot": self.robot, - "spider": self.spider, - "explosion": self.explosion, - "glow": self.glow, - "creator_points": self.creator_points, - "coins": self.coins, - "user_coins": self.user_coins, - "diamonds": self.diamonds, - "comment_colour": self.comment_colour, - # 2.2 stats - "moons": self.moons, - 
"swing_copter": self.swing_copter, - "jetpack": self.jetpack, - "glow_colour": self.glow_colour, - } - - if include_id: - res["id"] = self.id or None - - return res - - # Dunder methods - def __str__(self) -> str: - return f"{self.username} ({self.id})" - - def __hash__(self) -> int: - return self.id diff --git a/rgdps/models/user_comment.py b/rgdps/models/user_comment.py deleted file mode 100644 index 77ad68e..0000000 --- a/rgdps/models/user_comment.py +++ /dev/null @@ -1,45 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any - - -@dataclass -class UserComment: - id: int - user_id: int - content: str - likes: int - post_ts: datetime - deleted: bool - - @staticmethod - def from_mapping(comment_dict: Mapping[str, Any]) -> UserComment: - return UserComment( - id=comment_dict["id"], - user_id=comment_dict["user_id"], - content=comment_dict["content"], - likes=comment_dict["likes"], - post_ts=comment_dict["post_ts"], - deleted=bool(comment_dict["deleted"]), - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "user_id": self.user_id, - "content": self.content, - "likes": self.likes, - "post_ts": self.post_ts, - "deleted": self.deleted, - } - - if include_id: - res["id"] = self.id or None - - return res - - # Dunder methods - def __hash__(self) -> int: - return self.id diff --git a/rgdps/models/user_credential.py b/rgdps/models/user_credential.py deleted file mode 100644 index a083879..0000000 --- a/rgdps/models/user_credential.py +++ /dev/null @@ -1,36 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from typing import Any - -from rgdps.constants.user_credentials import CredentialVersion - - -@dataclass -class UserCredential: - id: int - user_id: int - version: CredentialVersion - value: str - - @staticmethod - def from_mapping(credential_dict: Mapping[str, Any]) -> UserCredential: - return UserCredential( - id=credential_dict["id"], - user_id=credential_dict["user_id"], - version=CredentialVersion(credential_dict["version"]), - value=credential_dict["value"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "user_id": self.user_id, - "version": self.version.value, - "value": self.value, - } - - if include_id: - res["id"] = self.id or None - - return res diff --git a/rgdps/models/user_relationship.py b/rgdps/models/user_relationship.py deleted file mode 100644 index 060660f..0000000 --- a/rgdps/models/user_relationship.py +++ /dev/null @@ -1,45 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any - -from rgdps.constants.users import UserRelationshipType - - -@dataclass -class UserRelationship: - id: int - relationship_type: UserRelationshipType - user_id: int - target_user_id: int - post_ts: datetime - seen_ts: datetime | None - - @staticmethod - def from_mapping(mapping: Mapping[str, Any]) -> UserRelationship: - return UserRelationship( - id=mapping["id"], - relationship_type=UserRelationshipType( - mapping["relationship_type"], - ), - user_id=mapping["user_id"], - target_user_id=mapping["target_user_id"], - post_ts=mapping["post_ts"], - seen_ts=mapping["seen_ts"], - ) - - def as_dict(self, *, include_id: bool) -> dict[str, Any]: - res = { - "relationship_type": self.relationship_type.value, - "user_id": self.user_id, - 
"target_user_id": self.target_user_id, - "post_ts": self.post_ts, - "seen_ts": self.seen_ts, - } - - if include_id: - res["id"] = self.id - - return res diff --git a/rgdps/repositories/__init__.py b/rgdps/repositories/__init__.py deleted file mode 100644 index f129138..0000000 --- a/rgdps/repositories/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from . import daily_chest -from . import friend_requests -from . import leaderboard -from . import level -from . import level_comment -from . import level_data -from . import level_schedule -from . import like -from . import message -from . import save_data -from . import song -from . import user -from . import user_comment -from . import user_credential -from . import user_relationship diff --git a/rgdps/repositories/daily_chest.py b/rgdps/repositories/daily_chest.py deleted file mode 100644 index 0692b0c..0000000 --- a/rgdps/repositories/daily_chest.py +++ /dev/null @@ -1,122 +0,0 @@ -# NOTE: These serve more as logs than anything else. Aside from the latest -# claimed timestamp, logging these has no impact on the game. Therefore, -# editing these is not necessary. -from __future__ import annotations - -from datetime import datetime - -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.constants.daily_chests import DailyChestType -from rgdps.models.daily_chest import DailyChest - -ALL_FIELDS = modelling.get_model_fields(DailyChest) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_id( - ctx: Context, - chest_id: int, -) -> DailyChest | None: - chest_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM daily_chests WHERE id = :chest_id", - {"chest_id": chest_id}, - ) - - if chest_db is None: - return None - - return DailyChest.from_mapping(chest_db) - - -async def from_user_id_and_type_latest( - ctx: Context, - user_id: int, - chest_type: DailyChestType, -) -> DailyChest | None: - chest_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM daily_chests WHERE user_id = :user_id AND type = :chest_type " - "ORDER BY claimed_ts DESC LIMIT 1", - {"user_id": user_id, "chest_type": chest_type.value}, - ) - - if chest_db is None: - return None - - return DailyChest.from_mapping(chest_db) - - -async def create( - ctx: Context, - user_id: int, - chest_type: DailyChestType, - mana: int = 0, - diamonds: int = 0, - fire_shards: int = 0, - ice_shards: int = 0, - poison_shards: int = 0, - shadow_shards: int = 0, - lava_shards: int = 0, - demon_keys: int = 0, - claimed_ts: datetime | None = None, -) -> DailyChest: - if claimed_ts is None: - claimed_ts = datetime.now() - - chest = DailyChest( - id=0, - user_id=user_id, - type=chest_type, - mana=mana, - diamonds=diamonds, - fire_shards=fire_shards, - ice_shards=ice_shards, - poison_shards=poison_shards, - shadow_shards=shadow_shards, - lava_shards=lava_shards, - demon_keys=demon_keys, - claimed_ts=claimed_ts, - ) - - chest.id = await ctx.mysql.execute( - f"INSERT INTO daily_chests ({_CUSTOMISABLE_FIELDS_COMMA}) " - f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})", - chest.as_dict(include_id=False), - ) - - return chest - - -async def sum_reward_mana( - ctx: Context, - user_id: int, -) 
-> int: - return int( - await ctx.mysql.fetch_val( - "SELECT SUM(mana) FROM daily_chests WHERE user_id = :user_id", - {"user_id": user_id}, - ) - or 0, - ) - - -async def count_of_type( - ctx: Context, - user_id: int, - chest_type: DailyChestType, -) -> int: - return ( - await ctx.mysql.fetch_val( - "SELECT COUNT(*) FROM daily_chests WHERE user_id = :user_id AND type = :chest_type", - {"user_id": user_id, "chest_type": chest_type.value}, - ) - or 0 - ) diff --git a/rgdps/repositories/leaderboard.py b/rgdps/repositories/leaderboard.py deleted file mode 100644 index 4d60d7a..0000000 --- a/rgdps/repositories/leaderboard.py +++ /dev/null @@ -1,95 +0,0 @@ -from __future__ import annotations - -from rgdps.common.context import Context - - -async def get_star_rank(ctx: Context, user_id: int) -> int: - redis_rank = await ctx.redis.zrevrank( - "rgdps:leaderboards:stars", - user_id, - ) - - if redis_rank is None: - return 0 - - return redis_rank + 1 - - -async def set_star_count(ctx: Context, user_id: int, stars: int) -> None: - if stars <= 0: - await ctx.redis.zrem( - "rgdps:leaderboards:stars", - user_id, - ) - return - await ctx.redis.zadd( - "rgdps:leaderboards:stars", - {str(user_id): stars}, # is str necessary? - ) - - -async def get_top_stars_paginated( - ctx: Context, - page: int, - page_size: int, -) -> list[int]: - top_stars = await ctx.redis.zrevrange( - "rgdps:leaderboards:stars", - page * page_size, - (page + 1) * page_size, - ) - return [int(top_star) for top_star in top_stars] - - -async def remove_star_count(ctx: Context, user_id: int) -> None: - await ctx.redis.zrem( - "rgdps:leaderboards:stars", - user_id, - ) - - -async def get_creator_rank(ctx: Context, user_id: int) -> int: - redis_rank = await ctx.redis.zrevrank( - "rgdps:leaderboards:creators", - user_id, - ) - - if redis_rank is None: - return 0 - - return redis_rank + 1 - - -async def set_creator_count(ctx: Context, user_id: int, points: int) -> None: - if points <= 0: - await ctx.redis.zrem( - "rgdps:leaderboards:creators", - user_id, - ) - return - - await ctx.redis.zadd( - "rgdps:leaderboards:creators", - {str(user_id): points}, - ) - - -async def get_top_creators_paginated( - ctx: Context, - page: int, - page_size: int, -) -> list[int]: - top_creators = await ctx.redis.zrevrange( - "rgdps:leaderboards:creators", - page * page_size, - (page + 1) * page_size, - ) - - return [int(top_creator) for top_creator in top_creators] - - -async def remove_creator_count(ctx: Context, user_id: int) -> None: - await ctx.redis.zrem( - "rgdps:leaderboards:creators", - user_id, - ) diff --git a/rgdps/repositories/level.py b/rgdps/repositories/level.py deleted file mode 100644 index 51b1840..0000000 --- a/rgdps/repositories/level.py +++ /dev/null @@ -1,552 +0,0 @@ -from __future__ import annotations - -from collections.abc import AsyncGenerator -from datetime import datetime -from typing import Any -from typing import NamedTuple -from typing import NotRequired -from typing import TypedDict -from typing import Unpack - -import orjson - -from rgdps.common import data_utils -from rgdps.common import modelling -from rgdps.common import time as time_utils -from rgdps.common.context import Context -from rgdps.constants.levels import LevelDemonDifficulty -from rgdps.constants.levels import LevelDifficulty -from rgdps.constants.levels import LevelLength -from rgdps.constants.levels import LevelPublicity -from rgdps.constants.levels import LevelSearchFlag -from rgdps.constants.levels import LevelSearchType -from rgdps.models.level import Level - 
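The removed leaderboard repository above is a thin wrapper over Redis sorted sets: `ZADD` records a score, `ZREVRANK` (zero-based) reads a rank back. A runnable sketch with `redis.asyncio`; note that `ZREVRANGE` treats its stop index as inclusive, so the removed pagination (`page * page_size` through `(page + 1) * page_size`) actually returns `page_size + 1` rows per page:

import asyncio

import redis.asyncio as redis


async def main() -> None:
    r = redis.Redis()
    await r.zadd("rgdps:leaderboards:stars", {"31": 250, "54": 120, "90": 300})

    # zrevrank is zero-based; the repository mapped "absent" to rank 0
    # and otherwise returned rank + 1.
    rank = await r.zrevrank("rgdps:leaderboards:stars", "31")
    print(0 if rank is None else rank + 1)  # 2

    # Inclusive stop index: 0..9 yields up to ten entries, not nine.
    top = await r.zrevrange("rgdps:leaderboards:stars", 0, 9)
    print([int(user_id) for user_id in top])  # [90, 31, 54]


asyncio.run(main())
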
-ALL_FIELDS = modelling.get_model_fields(Level) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) - - -async def from_id( - ctx: Context, - level_id: int, - include_deleted: bool = False, -) -> Level | None: - condition = "" - if not include_deleted: - condition = " AND NOT deleted" - - level_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM levels WHERE id = :id" + condition, - { - "id": level_id, - }, - ) - - if level_db is None: - return None - - return Level.from_mapping(_from_mysql_dict(dict(level_db))) # type: ignore - - -async def create( - ctx: Context, - name: str, - user_id: int, - description: str = "", - custom_song_id: int | None = None, - official_song_id: int | None = 1, - version: int = 1, - length: LevelLength = LevelLength.TINY, - two_player: bool = False, - publicity: LevelPublicity = LevelPublicity.PUBLIC, - render_str: str = "", - game_version: int = 22, - binary_version: int = 34, - upload_ts: datetime | None = None, - update_ts: datetime | None = None, - original_id: int | None = None, - downloads: int = 0, - likes: int = 0, - stars: int = 0, - difficulty: LevelDifficulty = LevelDifficulty.NA, - demon_difficulty: LevelDemonDifficulty | None = None, - coins: int = 0, - coins_verified: bool = False, - requested_stars: int = 0, - feature_order: int = 0, - search_flags: LevelSearchFlag = LevelSearchFlag.NONE, - low_detail_mode: bool = False, - object_count: int = 0, - building_time: int = 0, - update_locked: bool = False, - song_ids: list[int] | None = None, - sfx_ids: list[int] | None = None, - deleted: bool = False, - level_id: int = 0, -) -> Level: - if upload_ts is None: - upload_ts = datetime.now() - if update_ts is None: - update_ts = datetime.now() - - if sfx_ids is None: - sfx_ids = [] - if song_ids is None: - song_ids = [] - - level = Level( - id=level_id, - name=name, - user_id=user_id, - description=description, - custom_song_id=custom_song_id, - official_song_id=official_song_id, - version=version, - length=length, - two_player=two_player, - publicity=publicity, - render_str=render_str, - game_version=game_version, - binary_version=binary_version, - upload_ts=upload_ts, - update_ts=update_ts, - original_id=original_id, - downloads=downloads, - likes=likes, - stars=stars, - difficulty=difficulty, - demon_difficulty=demon_difficulty, - coins=coins, - coins_verified=coins_verified, - requested_stars=requested_stars, - feature_order=feature_order, - search_flags=search_flags, - low_detail_mode=low_detail_mode, - object_count=object_count, - building_time=building_time, - update_locked=update_locked, - deleted=deleted, - song_ids=song_ids, - sfx_ids=sfx_ids, - ) - - level.id = await create_sql(ctx, level) - await create_meili(ctx, level) - return level - - -async def create_sql(ctx: Context, level: Level) -> int: - return await ctx.mysql.execute( - f"INSERT INTO levels ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})", - _make_mysql_dict(level.as_dict(include_id=True)), - ) - - -def _make_meili_dict(level_dict: dict[str, Any]) -> dict[str, Any]: - level_dict = level_dict.copy() - if "upload_ts" in level_dict: - level_dict["upload_ts"] = time_utils.into_unix_ts(level_dict["upload_ts"]) - - if "update_ts" in level_dict: - level_dict["update_ts"] = time_utils.into_unix_ts(level_dict["update_ts"]) - - # Split up bitwise enums 
as meili does not support bitwise operations. - if "search_flags" in level_dict: - level_dict["epic"] = bool(level_dict["search_flags"] & LevelSearchFlag.EPIC) - level_dict["magic"] = bool(level_dict["search_flags"] & LevelSearchFlag.MAGIC) - level_dict["awarded"] = bool( - level_dict["search_flags"] & LevelSearchFlag.AWARDED, - ) - level_dict["legendary"] = bool(level_dict["search_flags"] & LevelSearchFlag.LEGENDARY) - level_dict["mythical"] = bool(level_dict["search_flags"] & LevelSearchFlag.MYTHICAL) - - return level_dict - - -def _from_meili_dict(level_dict: dict[str, Any]) -> dict[str, Any]: - level_dict = level_dict.copy() - # Meili returns unix timestamps, so we need to convert them back to datetime. - level_dict["upload_ts"] = time_utils.from_unix_ts(level_dict["upload_ts"]) - level_dict["update_ts"] = time_utils.from_unix_ts(level_dict["update_ts"]) - - search_flags = LevelSearchFlag.NONE - - if level_dict["epic"]: - search_flags |= LevelSearchFlag.EPIC - - if level_dict["magic"]: - search_flags |= LevelSearchFlag.MAGIC - - if level_dict["awarded"]: - search_flags |= LevelSearchFlag.AWARDED - - if level_dict["legendary"]: - search_flags |= LevelSearchFlag.LEGENDARY - - if level_dict["mythical"]: - search_flags |= LevelSearchFlag.MYTHICAL - - level_dict["search_flags"] = search_flags - - del level_dict["epic"] - del level_dict["magic"] - del level_dict["awarded"] - del level_dict["legendary"] - del level_dict["mythical"] - - # FIXME: Temporary migration measure. - if "song_ids" not in level_dict: - level_dict["song_ids"] = [level_dict["custom_song_id"]] - level_dict["sfx_ids"] = [] - - return level_dict - - -# These are required due to Databases not working well with `JSON` field types. -def _make_mysql_dict(level_dict: dict[str, Any]) -> dict[str, Any]: - level_dict = level_dict.copy() - - level_dict["song_ids"] = orjson.dumps(level_dict["song_ids"]).decode() - level_dict["sfx_ids"] = orjson.dumps(level_dict["sfx_ids"]).decode() - - return level_dict - - -def _from_mysql_dict(level_dict: dict[str, Any]) -> dict[str, Any]: - level_dict = level_dict.copy() - - level_dict["song_ids"] = orjson.loads(level_dict["song_ids"]) - level_dict["sfx_ids"] = orjson.loads(level_dict["sfx_ids"]) - - return level_dict - - -async def create_meili(ctx: Context, level: Level) -> None: - level_dict = _make_meili_dict(level.as_dict(include_id=True)) - - index = ctx.meili.index("levels") - await index.add_documents([level_dict]) - - -async def multiple_create_meili(ctx: Context, levels: list[Level]) -> None: - level_dicts = [_make_meili_dict(level.as_dict(include_id=True)) for level in levels] - - index = ctx.meili.index("levels") - await index.add_documents(level_dicts) - - -class _LevelUpdatePartial(TypedDict): - name: NotRequired[str] - user_id: NotRequired[int] - description: NotRequired[str] - custom_song_id: NotRequired[int | None] - official_song_id: NotRequired[int | None] - version: NotRequired[int] - length: NotRequired[LevelLength] - two_player: NotRequired[bool] - publicity: NotRequired[LevelPublicity] - render_str: NotRequired[str] - game_version: NotRequired[int] - binary_version: NotRequired[int] - upload_ts: NotRequired[datetime] - update_ts: NotRequired[datetime] - original_id: NotRequired[int | None] - downloads: NotRequired[int] - likes: NotRequired[int] - stars: NotRequired[int] - difficulty: NotRequired[LevelDifficulty] - demon_difficulty: NotRequired[LevelDemonDifficulty | None] - coins: NotRequired[int] - coins_verified: NotRequired[bool] - requested_stars: NotRequired[int] - 
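As the comment in `_make_meili_dict` says, MeiliSearch filter expressions cannot perform bitwise tests, so the `search_flags` bitmask is exploded into one boolean field per bit when indexing and reassembled by `_from_meili_dict` when reading. A self-contained round trip of that transformation:

from enum import IntFlag


class LevelSearchFlag(IntFlag):
    NONE = 0
    EPIC = 1 << 0
    AWARDED = 1 << 1
    MAGIC = 1 << 2
    LEGENDARY = 1 << 3
    MYTHICAL = 1 << 4


SPLIT_FLAGS = (
    LevelSearchFlag.EPIC,
    LevelSearchFlag.AWARDED,
    LevelSearchFlag.MAGIC,
    LevelSearchFlag.LEGENDARY,
    LevelSearchFlag.MYTHICAL,
)


def to_document(flags: LevelSearchFlag) -> dict[str, bool]:
    # One filterable boolean per bit, e.g. {"epic": True, "magic": False, ...}.
    return {flag.name.lower(): bool(flags & flag) for flag in SPLIT_FLAGS}


def from_document(doc: dict[str, bool]) -> LevelSearchFlag:
    flags = LevelSearchFlag.NONE
    for flag in SPLIT_FLAGS:
        if doc[flag.name.lower()]:
            flags |= flag
    return flags


original = LevelSearchFlag.EPIC | LevelSearchFlag.MYTHICAL
assert from_document(to_document(original)) == original
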
feature_order: NotRequired[int] - search_flags: NotRequired[LevelSearchFlag] - low_detail_mode: NotRequired[bool] - object_count: NotRequired[int] - building_time: NotRequired[int] - update_locked: NotRequired[bool] - song_ids: NotRequired[list[int]] - sfx_ids: NotRequired[list[int]] - deleted: NotRequired[bool] - - -async def update_sql_partial( - ctx: Context, - level_id: int, - **kwargs: Unpack[_LevelUpdatePartial], -) -> Level | None: - changed_fields = modelling.unpack_enum_types(kwargs) - - await ctx.mysql.execute( - modelling.update_from_partial_dict("levels", level_id, changed_fields), - changed_fields, - ) - - return await from_id(ctx, level_id, include_deleted=True) - - -async def update_meili_partial( - ctx: Context, - level_id: int, - **kwargs: Unpack[_LevelUpdatePartial], -) -> None: - changed_fields = modelling.unpack_enum_types(kwargs) - # Meili primary key - changed_fields["id"] = level_id - changed_fields = _make_meili_dict(changed_fields) - - index = ctx.meili.index("levels") - await index.update_documents([changed_fields]) - - -async def update_partial( - ctx: Context, - level_id: int, - **kwargs: Unpack[_LevelUpdatePartial], -) -> Level | None: - level = await update_sql_partial( - ctx, - level_id=level_id, - **kwargs, - ) - - if level is None: - return None - - await update_meili_partial( - ctx, - level_id=level_id, - **kwargs, - ) - - return level - - -async def delete_meili(ctx: Context, level_id: int) -> None: - index = ctx.meili.index("levels") - await index.delete_documents([str(level_id)]) - - -class LevelSearchResults(NamedTuple): - results: list[Level] - total: int - - -async def search( - ctx: Context, - page: int, - page_size: int, - query: str | None = None, - search_type: LevelSearchType | None = None, - level_lengths: list[LevelLength] | None = None, - completed_levels: list[int] | None = None, - featured: bool = False, - original: bool = False, - two_player: bool = False, - unrated: bool = False, - rated: bool = False, - song_id: int | None = None, - custom_song_id: int | None = None, - followed_list: list[int] | None = None, -) -> LevelSearchResults: - # Create the filters. - filters = [] - sort = [] - - match search_type: - case LevelSearchType.MOST_DOWNLOADED: - sort.append("downloads:desc") - - case LevelSearchType.MOST_LIKED: - sort.append("likes:desc") - - # TODO: Trending - case LevelSearchType.RECENT: - sort.append("upload_ts:desc") - - case LevelSearchType.USER_LEVELS: - filters.append(f"user_id = {query}") - sort.append("upload_ts:desc") - - case LevelSearchType.FEATURED: - filters.append("feature_order > 0") - sort.append("feature_order:desc") - - case LevelSearchType.MAGIC: - filters.append("magic = true") - - case LevelSearchType.AWARDED: - filters.append("awarded = true") - - case LevelSearchType.FOLLOWED if followed_list is not None: - filters.append(f"user_id IN {followed_list}") - - case LevelSearchType.FRIENDS: - raise NotImplementedError("Friends not implemented yet.") - - case LevelSearchType.EPIC: - filters.append("epic = true") - sort.append("feature_order:desc") - - case LevelSearchType.DAILY: - raise NotImplementedError("Daily not implemented yet.") - - case LevelSearchType.WEEKLY: - raise NotImplementedError("Weekly not implemented yet.") - - # Optional filters. 
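The `update_*_partial` functions above rely on `TypedDict` plus `Unpack` so callers can pass any subset of columns with full type checking. `modelling.update_from_partial_dict` itself is not shown in this diff; the sketch below assumes it renders a SET clause from exactly the supplied keys:

from typing import NotRequired, TypedDict, Unpack


class _LevelUpdatePartial(TypedDict):
    stars: NotRequired[int]
    feature_order: NotRequired[int]


def update_from_partial_dict(table: str, row_id: int, changed: dict) -> str:
    # Assumed helper behaviour: SET only the columns that were supplied.
    assignments = ", ".join(f"{field} = :{field}" for field in changed)
    return f"UPDATE {table} SET {assignments} WHERE id = {row_id}"


def build_update(level_id: int, **kwargs: Unpack[_LevelUpdatePartial]) -> str:
    return update_from_partial_dict("levels", level_id, kwargs)


print(build_update(91, stars=10, feature_order=3))
# UPDATE levels SET stars = :stars, feature_order = :feature_order WHERE id = 91
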
-    if level_lengths is not None:
-        # FIXME: Type ignore
-        length_ints = data_utils.enum_int_list(level_lengths)  # type: ignore
-        filters.append(f"length IN {length_ints}")
-
-    if featured:
-        filters.append("feature_order > 0")
-
-    if original:
-        filters.append("original_id IS NULL")
-
-    if two_player:
-        filters.append("two_player = true")
-
-    if unrated:
-        filters.append("stars = 0")
-
-    if rated:
-        filters.append("stars > 0")
-
-    if song_id is not None:
-        filters.append(f"official_song_id = {song_id}")
-
-    if custom_song_id is not None:
-        filters.append(f"custom_song_id = {custom_song_id}")
-
-    if completed_levels is not None:
-        filters.append(f"id NOT IN {completed_levels}")
-
-    # TODO: More unlisted logic, such as friends
-    filters.append(f"publicity = {LevelPublicity.PUBLIC.value}")
-
-    offset = page * page_size
-    index = ctx.meili.index("levels")
-    results_db = await index.search(
-        query,
-        offset=offset,
-        limit=page_size,
-        filter=filters,
-        sort=sort,
-    )
-
-    if (not results_db.hits) or (not results_db.estimated_total_hits):
-        return LevelSearchResults([], 0)
-
-    results = [
-        Level.from_mapping(_from_meili_dict(result)) for result in results_db.hits
-    ]
-    return LevelSearchResults(results, results_db.estimated_total_hits)
-
-
-async def all(
-    ctx: Context,
-    include_deleted: bool = False,
-) -> AsyncGenerator[Level, None]:
-    async for level_db in ctx.mysql.iterate(
-        f"SELECT {_ALL_FIELDS_COMMA} FROM levels WHERE deleted IN :deleted",
-        {
-            "deleted": (0, 1) if include_deleted else (0,),
-        },
-    ):
-        yield Level.from_mapping(_from_mysql_dict(dict(level_db)))
-
-
-async def get_count(ctx: Context) -> int:
-    return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM levels")
-
-
-async def nuke_meili(ctx: Context) -> None:
-    await ctx.meili.index("levels").delete_all_documents()
-
-
-async def from_name_and_user_id(
-    ctx: Context,
-    level_name: str,
-    user_id: int,
-    include_deleted: bool = False,
-) -> Level | None:
-    result_id = await ctx.mysql.fetch_val(
-        "SELECT id FROM levels WHERE name LIKE :name AND user_id = :user_id AND deleted = :deleted",
-        {
-            "name": level_name,
-            "user_id": user_id,
-            "deleted": (0, 1) if include_deleted else (0,),
-        },
-    )
-
-    if result_id is None:
-        return None
-
-    return await from_id(ctx, result_id, include_deleted)
-
-
-async def from_name(
-    ctx: Context,
-    level_name: str,
-    include_deleted: bool = False,
-) -> Level | None:
-    condition = ""
-    if not include_deleted:
-        condition = " AND NOT deleted"
-
-    result_id = await ctx.mysql.fetch_val(
-        "SELECT id FROM levels WHERE name LIKE :name" + condition,
-        {
-            "name": level_name,
-        },
-    )
-
-    if result_id is None:
-        return None
-
-    return await from_id(ctx, result_id, include_deleted)
-
-
-# A function used primarily by recommendation algorithms. Returns a list of level ids
-# ordered by how well received they are, as assessed by a simple formula.
-async def get_well_received(
-    ctx: Context,
-    minimum_stars: int,
-    minimum_length: LevelLength,
-    maximum_stars: int = 0,
-    maximum_demon_rating: LevelDemonDifficulty = LevelDemonDifficulty.EXTREME,
-    excluded_level_ids: list[int] = [],  # Never mutated, so a shared default is safe.
-    limit: int = 100,
-) -> list[int]:
-    # BOTCH! Avoids a SQL syntax error: an empty tuple would render as "IN ()".
-    if not excluded_level_ids:
-        excluded_level_ids = [0]
-
-    # The formula in the ORDER BY clause is tuned to de-emphasise raw download
-    # counts while keeping a
- values = await ctx.mysql.fetch_all( - "SELECT id FROM levels WHERE stars >= :minimum_stars AND stars <= :maximum_stars " - "AND demon_difficulty <= :maximum_demon_rating AND length >= :minimum_length " - "AND id NOT IN :excluded_level_ids AND deleted = 0 ORDER BY (SQRT(downloads) / likes) DESC " - "LIMIT :limit", - { - "minimum_stars": minimum_stars, - "maximum_stars": maximum_stars, - "maximum_demon_rating": maximum_demon_rating.value, - "minimum_length": minimum_length.value, - "excluded_level_ids": tuple(excluded_level_ids), - "limit": limit, - }, - ) - - return [x["id"] for x in values] diff --git a/rgdps/repositories/level_data.py b/rgdps/repositories/level_data.py deleted file mode 100644 index 84f4cb9..0000000 --- a/rgdps/repositories/level_data.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import annotations - -from rgdps.common.context import Context - - -async def from_level_id( - ctx: Context, - level_id: int, -) -> str | None: - res = await ctx.storage.load(f"levels/{level_id}") - - if res is not None: - return res.decode() - - return None - - -async def create( - ctx: Context, - level_id: int, - data: str, -) -> None: - return await ctx.storage.save(f"levels/{level_id}", data.encode()) diff --git a/rgdps/repositories/like.py b/rgdps/repositories/like.py deleted file mode 100644 index 1b98a41..0000000 --- a/rgdps/repositories/like.py +++ /dev/null @@ -1,108 +0,0 @@ -from __future__ import annotations - -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.constants.likes import LikeType -from rgdps.models.like import Like - -ALL_FIELDS = modelling.get_model_fields(Like) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_id(ctx: Context, id: int) -> Like | None: - like_db = await ctx.mysql.fetch_one( - f"SELECT {_CUSTOMISABLE_FIELDS_COMMA} FROM user_likes WHERE id = :id", - { - "id": id, - }, - ) - - if like_db is None: - return None - - return Like.from_mapping(like_db) - - -async def create( - ctx: Context, - target_type: LikeType, - target_id: int, - user_id: int, - value: int, - like_id: int = 0, -) -> Like: - like = Like( - id=like_id, - target_type=target_type, - target_id=target_id, - user_id=user_id, - value=value, - ) - like.id = await ctx.mysql.execute( - f"INSERT INTO user_likes ({_ALL_FIELDS_COMMA}) VALUES " - f"({_ALL_FIELDS_COLON})", - like.as_dict(include_id=True), - ) - - return like - - -async def exists_by_target_and_user( - ctx: Context, - target_type: LikeType, - target_id: int, - user_id: int, -) -> bool: - return ( - await ctx.mysql.fetch_one( - "SELECT id FROM user_likes WHERE target_type = :target_type AND target_id = :target_id AND user_id = :user_id", - { - "target_type": target_type.value, - "target_id": target_id, - "user_id": user_id, - }, - ) - is not None - ) - - -async def sum_by_target( - ctx: Context, - target_type: LikeType, - target_id: int, -) -> int: - like_db = await ctx.mysql.fetch_val( - "SELECT SUM(value) AS sum FROM user_likes WHERE target_type = :target_type " - "AND target_id = :target_id", - { - "target_type": target_type.value, - "target_id": target_id, - }, - ) - - if like_db is None: - return 0 - - return int(like_db) - - -async def update_value( - ctx: 
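The `level_data` repository removed above (and `save_data` further down) are the two blob stores: instead of MySQL they go through `ctx.storage`, the `AbstractStorage` interface this diff relocates into `rgdps.adapters`, with local-disk and S3 implementations. A minimal local-disk sketch of the contract they depend on; the async methods do synchronous I/O here purely for brevity:

from pathlib import Path


class LocalStorage:
    """Toy stand-in for the adapters' LocalStorage: bytes keyed by path."""

    def __init__(self, base: Path) -> None:
        self._base = base

    async def save(self, key: str, data: bytes) -> None:
        path = self._base / key
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_bytes(data)

    async def load(self, key: str) -> bytes | None:
        path = self._base / key
        return path.read_bytes() if path.exists() else None


# Usage mirrors the removed repositories:
#   await storage.save(f"levels/{level_id}", data.encode())
#   raw = await storage.load(f"saves/{user_id}")
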
Context, - like_id: int, - value: int, -) -> None: - await ctx.mysql.execute( - "UPDATE likes SET value = :value WHERE id = :id", - { - "id": like_id, - "value": value, - }, - ) diff --git a/rgdps/repositories/message.py b/rgdps/repositories/message.py deleted file mode 100644 index 89d55b4..0000000 --- a/rgdps/repositories/message.py +++ /dev/null @@ -1,189 +0,0 @@ -from __future__ import annotations - -from datetime import datetime -from typing import NotRequired -from typing import TypedDict -from typing import Unpack - -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.models.message import Message - -ALL_FIELDS = modelling.get_model_fields(Message) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_id( - ctx: Context, - message_id: int, - include_deleted: bool = False, -) -> Message | None: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - message_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM messages WHERE id = :message_id {condition}", - {"message_id": message_id}, - ) - - if not message_db: - return None - - return Message.from_mapping(message_db) - - -async def from_recipient_user_id( - ctx: Context, - recipient_user_id: int, - page: int, - page_size: int, - include_deleted: bool = False, -) -> list[Message]: - condition = "" - if not include_deleted: - condition = "AND deleted = 0 AND recipient_deleted = 0" - - messages_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM messages WHERE recipient_user_id = :recipient_user_id {condition} " - "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset", - { - "recipient_user_id": recipient_user_id, - "limit": page_size, - "offset": page * page_size, - }, - ) - - return [Message.from_mapping(message_db) for message_db in messages_db] - - -async def from_sender_user_id( - ctx: Context, - sender_user_id: int, - page: int, - page_size: int, - include_deleted: bool = False, -) -> list[Message]: - condition = "" - if not include_deleted: - condition = "AND deleted = 0 AND sender_deleted = 0" - - messages_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM messages WHERE sender_user_id = :sender_user_id {condition} " - "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset", - { - "sender_user_id": sender_user_id, - "limit": page_size, - "offset": page * page_size, - }, - ) - - return [Message.from_mapping(message_db) for message_db in messages_db] - - -async def from_recipient_user_id_count( - ctx: Context, - recipient_user_id: int, - is_new: bool = False, - include_deleted: bool = False, -) -> int: - condition = "" - if not include_deleted: - condition = "AND deleted = 0 AND recipient_deleted = 0" - - if is_new: - condition += " AND seen_ts IS NULL" - - messages_count = await ctx.mysql.fetch_val( - f"SELECT COUNT(*) FROM messages WHERE recipient_user_id = :recipient_user_id {condition}", - { - "recipient_user_id": recipient_user_id, - }, - ) - - return messages_count - - -async def from_sender_user_id_count( - ctx: Context, - sender_user_id: int, - is_new: bool = False, - include_deleted: bool = False, -) -> int: - condition = "" - if not include_deleted: - condition = "AND deleted = 0 AND 
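The removed `update_value` at the top of this hunk writes to a `likes` table, while every other query in the module (`from_id`, `create`, `sum_by_target`) targets `user_likes`; as written, the UPDATE would fail against the schema the rest of the file implies. A version consistent with its neighbours:

# Consistent with the rest of the removed repository: the table is
# `user_likes`, not `likes`.
async def update_value(ctx, like_id: int, value: int) -> None:
    await ctx.mysql.execute(
        "UPDATE user_likes SET value = :value WHERE id = :id",
        {"id": like_id, "value": value},
    )
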
sender_deleted = 0" - - if is_new: - condition += " AND seen_ts IS NULL" - - messages_count = await ctx.mysql.fetch_val( - f"SELECT COUNT(*) FROM messages WHERE sender_user_id = :sender_user_id {condition}", - { - "sender_user_id": sender_user_id, - }, - ) - - return messages_count - - -async def create( - ctx: Context, - sender_user_id: int, - recipient_user_id: int, - subject: str, - content: str, - post_ts: datetime = datetime.now(), - seen_ts: None | datetime = None, -) -> Message: - message = Message( - id=0, - sender_user_id=sender_user_id, - recipient_user_id=recipient_user_id, - subject=subject, - content=content, - post_ts=post_ts, - seen_ts=seen_ts, - ) - - message.id = await ctx.mysql.execute( - f"INSERT INTO messages ({_CUSTOMISABLE_FIELDS_COMMA}) " - f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})", - message.as_dict(include_id=False), - ) - - return message - - -class _MessageUpdatePartial(TypedDict): - seen_ts: NotRequired[datetime] - sender_deleted: NotRequired[bool] - recipient_deleted: NotRequired[bool] - deleted: NotRequired[bool] - - -async def update_partial( - ctx: Context, - message_id: int, - **kwargs: Unpack[_MessageUpdatePartial], -) -> Message | None: - changed_fields = modelling.unpack_enum_types(kwargs) - - await ctx.mysql.execute( - modelling.update_from_partial_dict("messages", message_id, changed_fields), - changed_fields, - ) - - return await from_id(ctx, message_id, include_deleted=True) - - -async def get_count(ctx: Context) -> int: - return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM messages") diff --git a/rgdps/repositories/save_data.py b/rgdps/repositories/save_data.py deleted file mode 100644 index 8c92c29..0000000 --- a/rgdps/repositories/save_data.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import annotations - -from rgdps.common.context import Context - - -async def from_user_id( - ctx: Context, - user_id: int, -) -> str | None: - res = await ctx.storage.load(f"saves/{user_id}") - - if res is not None: - return res.decode() - - return None - - -async def create( - ctx: Context, - user_id: int, - data: str, -) -> None: - await ctx.storage.save(f"saves/{user_id}", data.encode()) diff --git a/rgdps/repositories/song.py b/rgdps/repositories/song.py deleted file mode 100644 index 2fbc558..0000000 --- a/rgdps/repositories/song.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -import urllib.parse -from datetime import timedelta - -from rgdps import logger -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.constants.songs import SongSource -from rgdps.models.song import Song -from rgdps.services.boomlings import GDRequestStatus - -ALL_FIELDS = modelling.get_model_fields(Song) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_db( - ctx: Context, - song_id: int, - allow_blocked: bool = False, -) -> Song | None: - song_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM songs WHERE id = :song_id " - "AND blocked IN :blocked", - { - "song_id": song_id, - "blocked": (0, 1) if allow_blocked else (0,), - }, - ) - - if song_db is None: - return None - - return Song.from_mapping(song_db) - - -async def multiple_from_db( - ctx: Context, - song_ids: 
list[int], - allow_blocked: bool = False, -) -> list[Song]: - if not song_ids: - return [] - - songs_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM songs WHERE id IN :song_ids " - "AND blocked IN :blocked", - { - "song_ids": tuple(song_ids), - "blocked": (0, 1) if allow_blocked else (0,), - }, - ) - - return [Song.from_mapping(song_db) for song_db in songs_db] - - -async def _create_sql(ctx: Context, song: Song) -> int: - return await ctx.mysql.execute( - f"INSERT INTO songs ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})", - song.as_dict(include_id=True), - ) - - -async def create( - ctx: Context, - name: str, - author_id: int, - author: str, - download_url: str, - author_youtube: str | None = None, - size: float = 0.0, - source: SongSource = SongSource.CUSTOM, - blocked: bool = False, - song_id: int = 0, -) -> Song: - - song = Song( - id=song_id, - name=name, - author_id=author_id, - author=author, - author_youtube=author_youtube, - size=size, - download_url=download_url, - source=source, - blocked=blocked, - ) - - song.id = await _create_sql(ctx, song) - - return song - - -async def from_boomlings(ctx: Context, song_id: int) -> Song | None: - song_data = await ctx.gd.get_song(song_id) - - if isinstance(song_data, GDRequestStatus): - return None - - # TODO: maybe make a gd_obj.load_song - return Song( - id=int(song_data[1]), - name=song_data[2], - author_id=int(song_data[3]), - author=song_data[4], - author_youtube=song_data[7] or None, - size=float(song_data[5]), - download_url=urllib.parse.unquote(song_data[10]), - source=SongSource.BOOMLINGS, - blocked=False, - ) - - -async def from_id( - ctx: Context, - song_id: int, - allow_blocked: bool = False, -) -> Song | None: - # TODO: Implement song LRU Caching - song_db = await from_db(ctx, song_id, allow_blocked) - if song_db is not None: - return song_db - - song_boomlings = await from_boomlings(ctx, song_id) - if song_boomlings is not None: - await _create_sql(ctx, song_boomlings) - return song_boomlings - - return None - - -async def multiple_from_id( - ctx: Context, - song_ids: list[int], - allow_blocked: bool = False, -) -> list[Song]: - if not song_ids: - return [] - - songs: list[Song] = [] - - db_songs = await multiple_from_db(ctx, song_ids, allow_blocked) - songs.extend(db_songs) - - db_song_ids = [db_song.id for db_song in db_songs] - unsaved_song_ids = [song_id for song_id in song_ids if song_id not in db_song_ids] - for unsaved_song_id in unsaved_song_ids: - song_boomlings = await from_boomlings(ctx, unsaved_song_id) - if song_boomlings is not None: - await _create_sql(ctx, song_boomlings) - songs.append(song_boomlings) - - # since we fetch from cache first and db for the rest - # songs may not be in the same order they were provided in - songs.sort(key=lambda song: song_ids.index(song.id)) - - return songs - - -async def get_count(ctx: Context) -> int: - return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM songs") - - -CDN_URL_CACHE_KEY = "rgdps:cache:cdn_url" - - -async def get_cdn_url(ctx: Context) -> str | None: - cached = await ctx.redis.get(CDN_URL_CACHE_KEY) - - if cached is not None: - return cached.decode() - - logger.debug("CDN URL cache miss. 
Querying the servers.") - - queried_url = await ctx.gd.get_cdn_url() - - if isinstance(queried_url, GDRequestStatus): - return None - - await ctx.redis.set( - CDN_URL_CACHE_KEY, - queried_url, - ex=timedelta(minutes=20), - ) - - return queried_url diff --git a/rgdps/repositories/user.py b/rgdps/repositories/user.py deleted file mode 100644 index d1a8be7..0000000 --- a/rgdps/repositories/user.py +++ /dev/null @@ -1,379 +0,0 @@ -from __future__ import annotations - -from collections.abc import AsyncGenerator -from datetime import datetime -from typing import Any -from typing import NamedTuple -from typing import NotRequired -from typing import TypedDict -from typing import Unpack - -from rgdps.common import modelling -from rgdps.common import time as time_utils -from rgdps.common.context import Context -from rgdps.constants.users import DEFAULT_PRIVILEGES -from rgdps.constants.users import UserPrivacySetting -from rgdps.constants.users import UserPrivileges -from rgdps.models.user import User - -ALL_FIELDS = modelling.get_model_fields(User) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_db(ctx: Context, user_id: int) -> User | None: - user_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM users WHERE id = :id", - {"id": user_id}, - ) - - if user_db is None: - return None - - return User.from_mapping(user_db) - - -async def multiple_from_db(ctx: Context, user_ids: list[int]) -> list[User]: - if not user_ids: - return [] - - users_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM users WHERE id IN :ids", - {"ids": tuple(user_ids)}, - ) - - return [User.from_mapping(user_db) for user_db in users_db] - - -async def create( - ctx: Context, - username: str, - email: str, - privileges: UserPrivileges = DEFAULT_PRIVILEGES, - message_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC, - friend_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC, - comment_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC, - youtube_name: str | None = None, - twitter_name: str | None = None, - twitch_name: str | None = None, - register_ts: datetime | None = None, - stars: int = 0, - demons: int = 0, - moons: int = 0, - primary_colour: int = 0, - # NOTE: secondary_colour is 4 by default in the game - secondary_colour: int = 4, - glow_colour: int = 0, - display_type: int = 0, - icon: int = 0, - ship: int = 0, - ball: int = 0, - ufo: int = 0, - wave: int = 0, - robot: int = 0, - spider: int = 0, - swing_copter: int = 0, - jetpack: int = 0, - explosion: int = 0, - glow: bool = False, - creator_points: int = 0, - coins: int = 0, - user_coins: int = 0, - diamonds: int = 0, - user_id: int = 0, - comment_colour: str = "0,0,0", -) -> User: - if register_ts is None: - register_ts = datetime.now() - - user = User( - id=user_id, - username=username, - email=email, - privileges=privileges, - message_privacy=message_privacy, - friend_privacy=friend_privacy, - comment_privacy=comment_privacy, - youtube_name=youtube_name, - twitter_name=twitter_name, - twitch_name=twitch_name, - register_ts=register_ts, - stars=stars, - demons=demons, - moons=moons, - primary_colour=primary_colour, - secondary_colour=secondary_colour, - glow_colour=glow_colour, 
- display_type=display_type, - icon=icon, - ship=ship, - ball=ball, - ufo=ufo, - wave=wave, - robot=robot, - spider=spider, - swing_copter=swing_copter, - jetpack=jetpack, - explosion=explosion, - glow=glow, - creator_points=creator_points, - coins=coins, - user_coins=user_coins, - diamonds=diamonds, - comment_colour=comment_colour, - ) - - user.id = await create_sql(ctx, user) - await create_meili(ctx, user) - - return user - - -def _make_meili_dict(user_dict: dict[str, Any]) -> dict[str, Any]: - user_dict = user_dict.copy() - - if "privileges" in user_dict: - user_dict["privileges"] = int.from_bytes( - user_dict["privileges"], - byteorder="little", - signed=False, - ) - user_dict["is_public"] = ( - user_dict["privileges"] & UserPrivileges.USER_PROFILE_PUBLIC > 0 - ) - - if "register_ts" in user_dict: - user_dict["register_ts"] = time_utils.into_unix_ts(user_dict["register_ts"]) - - return user_dict - - -def _from_meili_dict(user_dict: dict[str, Any]) -> dict[str, Any]: - user_dict = user_dict.copy() - - user_dict["privileges"] = UserPrivileges(int(user_dict["privileges"])).as_bytes() - - user_dict["register_ts"] = time_utils.from_unix_ts(user_dict["register_ts"]) - - del user_dict["is_public"] - - return user_dict - - -async def create_sql(ctx: Context, user: User) -> int: - return await ctx.mysql.execute( - f"INSERT INTO users ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})", - user.as_dict(include_id=True), - ) - - -async def create_meili(ctx: Context, user: User) -> None: - user_dict = _make_meili_dict(user.as_dict(include_id=True)) - - index = ctx.meili.index("users") - await index.add_documents([user_dict]) - - -class _UserUpdatePartial(TypedDict): - username: NotRequired[str] - email: NotRequired[str] - privileges: NotRequired[UserPrivileges] - message_privacy: NotRequired[UserPrivacySetting] - friend_privacy: NotRequired[UserPrivacySetting] - comment_privacy: NotRequired[UserPrivacySetting] - youtube_name: NotRequired[str | None] - twitter_name: NotRequired[str | None] - twitch_name: NotRequired[str | None] - stars: NotRequired[int] - demons: NotRequired[int] - moons: NotRequired[int] - primary_colour: NotRequired[int] - secondary_colour: NotRequired[int] - glow_colour: NotRequired[int] - display_type: NotRequired[int] - icon: NotRequired[int] - ship: NotRequired[int] - ball: NotRequired[int] - ufo: NotRequired[int] - wave: NotRequired[int] - robot: NotRequired[int] - spider: NotRequired[int] - swing_copter: NotRequired[int] - jetpack: NotRequired[int] - explosion: NotRequired[int] - glow: NotRequired[bool] - creator_points: NotRequired[int] - coins: NotRequired[int] - user_coins: NotRequired[int] - diamonds: NotRequired[int] - comment_colour: NotRequired[str] - - -async def update_sql_partial( - ctx: Context, - user_id: int, - **kwargs: Unpack[_UserUpdatePartial], -) -> User | None: - changed_fields = modelling.unpack_enum_types(kwargs) - - await ctx.mysql.execute( - modelling.update_from_partial_dict("users", user_id, changed_fields), - changed_fields, - ) - - return await from_id(ctx, user_id) - - -async def update_meili_partial( - ctx: Context, - user_id: int, - **kwargs: Unpack[_UserUpdatePartial], -) -> None: - changed_data = modelling.unpack_enum_types(kwargs) - changed_data["id"] = user_id - changed_data = _make_meili_dict(changed_data) - - index = ctx.meili.index("users") - await index.update_documents([changed_data]) - - -async def update_partial( - ctx: Context, - user_id: int, - **kwargs: Unpack[_UserUpdatePartial], -) -> User | None: - user = await 
update_sql_partial(ctx, user_id, **kwargs) - - if user is None: - return None - - await update_meili_partial(ctx, user_id, **kwargs) - - await drop_cache(ctx, user_id) - - return user - - -async def drop_cache(ctx: Context, user_id: int) -> None: - await ctx.user_cache.delete(user_id) - - -async def multiple_from_id(ctx: Context, user_ids: list[int]) -> list[User]: - if not user_ids: - return [] - - users: list[User] = [] - uncached_ids = [] - - for user_id in user_ids: - cache_user = await ctx.user_cache.get(user_id) - if cache_user is not None: - users.append(cache_user) - else: - uncached_ids.append(user_id) - - db_users = await multiple_from_db(ctx, uncached_ids) - users.extend(db_users) - - # since we fetch from cache first and db for the rest - # users may not be in the same order they were provided in - users.sort(key=lambda user: user_ids.index(user.id)) - - return users - - -async def from_id(ctx: Context, user_id: int) -> User | None: - cache_user = await ctx.user_cache.get(user_id) - if cache_user is not None: - return cache_user - - user = await from_db(ctx, user_id) - if user is not None: - await ctx.user_cache.set(user_id, user) - - return user - - -async def check_email_exists(ctx: Context, email: str) -> bool: - return await ctx.mysql.fetch_val( - "SELECT EXISTS(SELECT 1 FROM users WHERE email = :email)", - { - "email": email, - }, - ) - - -async def check_username_exists(ctx: Context, username: str) -> bool: - return await ctx.mysql.fetch_val( - "SELECT EXISTS(SELECT 1 FROM users WHERE username = :username)", - { - "username": username, - }, - ) - - -async def from_name(ctx: Context, username: str) -> User | None: - user_id = await ctx.mysql.fetch_val( - "SELECT id FROM users WHERE username = :username", - { - "username": username, - }, - ) - - if user_id is None: - return None - - return await from_id(ctx, user_id) - - -async def get_count(ctx: Context) -> int: - return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM users") - - -async def all(ctx: Context) -> AsyncGenerator[User, None]: - async for db_user in ctx.mysql.iterate( - f"SELECT {_ALL_FIELDS_COMMA} FROM users", - ): - yield User.from_mapping(db_user) - - -class UserSearchResults(NamedTuple): - results: list[User] - total: int - - -async def search( - ctx: Context, - page: int, - page_size: int, - query: str, - include_hidden: bool = False, -) -> UserSearchResults: - index = ctx.meili.index("users") - - filters = [] - if not include_hidden: - filters.append("is_public = true") - - results_db = await index.search( - query, - offset=page * page_size, - limit=page_size, - filter=filters, - ) - - if (not results_db.hits) or (not results_db.estimated_total_hits): - return UserSearchResults([], 0) - - results = [ - User.from_mapping(_from_meili_dict(result)) for result in results_db.hits - ] - - return UserSearchResults(results, results_db.estimated_total_hits) diff --git a/rgdps/repositories/user_comment.py b/rgdps/repositories/user_comment.py deleted file mode 100644 index bb0247b..0000000 --- a/rgdps/repositories/user_comment.py +++ /dev/null @@ -1,153 +0,0 @@ -from __future__ import annotations - -from datetime import datetime -from typing import NotRequired -from typing import TypedDict -from typing import Unpack - -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.models.user_comment import UserComment - -ALL_FIELDS = modelling.get_model_fields(UserComment) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = 
modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_id( - ctx: Context, - comment_id: int, - include_deleted: bool = False, -) -> UserComment | None: - condition = "" - if not include_deleted: - condition = " AND NOT deleted" - comment_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_comments WHERE id = :id" + condition, - { - "id": comment_id, - }, - ) - - if comment_db is None: - return None - - return UserComment.from_mapping(comment_db) - - -async def from_user_id( - ctx: Context, - user_id: int, - include_deleted: bool = False, -) -> list[UserComment]: - condition = "" - if not include_deleted: - condition = " AND NOT deleted" - comments_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_comments WHERE user_id = :user_id" - + condition, - {"user_id": user_id}, - ) - - return [UserComment.from_mapping(comment_db) for comment_db in comments_db] - - -async def from_user_id_paginated( - ctx: Context, - user_id: int, - page: int, - page_size: int, - include_deleted: bool = False, -) -> list[UserComment]: - condition = "" - if not include_deleted: - condition = "AND NOT deleted" - - comments_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_comments WHERE user_id = :user_id {condition} " - "ORDER BY id DESC LIMIT :limit OFFSET :offset", - { - "user_id": user_id, - "limit": page_size, - "offset": page * page_size, - }, - ) - - return [UserComment.from_mapping(comment_db) for comment_db in comments_db] - - -async def get_user_comment_count( - ctx: Context, - user_id: int, - include_deleted: bool = False, -) -> int: - return await ctx.mysql.fetch_val( - ( - "SELECT COUNT(*) FROM user_comments WHERE user_id = :user_id " - "AND deleted = 0" - if not include_deleted - else "" - ), - {"user_id": user_id}, - ) - - -async def create( - ctx: Context, - user_id: int, - content: str, - likes: int = 0, - post_ts: datetime | None = None, - deleted: bool = False, - comment_id: int = 0, -) -> UserComment: - comment = UserComment( - id=comment_id, - user_id=user_id, - content=content, - likes=likes, - post_ts=post_ts or datetime.now(), - deleted=deleted, - ) - - comment.id = await ctx.mysql.execute( - f"INSERT INTO user_comments ({_ALL_FIELDS_COMMA}) " - f"VALUES ({_ALL_FIELDS_COLON})", - comment.as_dict(include_id=True), - ) - - return comment - - -class _UserCommentUpdatePartial(TypedDict): - user_id: NotRequired[int] - content: NotRequired[str] - likes: NotRequired[int] - post_ts: NotRequired[datetime] - deleted: NotRequired[bool] - - -async def update_partial( - ctx: Context, - comment_id: int, - **kwargs: Unpack[_UserCommentUpdatePartial], -) -> UserComment | None: - changed_fields = modelling.unpack_enum_types(kwargs) - - await ctx.mysql.execute( - modelling.update_from_partial_dict("user_comments", comment_id, changed_fields), - changed_fields, - ) - - return await from_id(ctx, comment_id, include_deleted=True) - - -async def get_count(ctx: Context) -> int: - return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM user_comments") diff --git a/rgdps/repositories/user_credential.py b/rgdps/repositories/user_credential.py deleted file mode 100644 index 508febb..0000000 --- a/rgdps/repositories/user_credential.py +++ /dev/null @@ -1,74 +0,0 @@ -from __future__ import annotations - -from 
rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.constants.user_credentials import CredentialVersion -from rgdps.models.user_credential import UserCredential - -ALL_FIELDS = modelling.get_model_fields(UserCredential) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def create( - ctx: Context, - user_id: int, - credential_version: CredentialVersion, - value: str, -) -> UserCredential: - credential = UserCredential( - id=0, - user_id=user_id, - version=credential_version, - value=value, - ) - credential.id = await ctx.mysql.execute( - f"INSERT INTO user_credentials ({_CUSTOMISABLE_FIELDS_COMMA}) " - f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})", - credential.as_dict(include_id=False), - ) - - return credential - - -async def from_user_id( - ctx: Context, - user_id: int, -) -> UserCredential | None: - res = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_credentials WHERE user_id = :user_id " - "ORDER BY id DESC LIMIT 1", - {"user_id": user_id}, - ) - - if not res: - return None - - return UserCredential.from_mapping(res) - - -async def delete_from_id( - ctx: Context, - credential_id: int, -) -> None: - await ctx.mysql.execute( - "DELETE FROM user_credentials WHERE id = :credential_id", - {"credential_id": credential_id}, - ) - - -async def delete_from_user_id( - ctx: Context, - user_id: int, -) -> None: - await ctx.mysql.execute( - "DELETE FROM user_credentials WHERE user_id = :user_id", - {"user_id": user_id}, - ) diff --git a/rgdps/repositories/user_relationship.py b/rgdps/repositories/user_relationship.py deleted file mode 100644 index 0ddae99..0000000 --- a/rgdps/repositories/user_relationship.py +++ /dev/null @@ -1,241 +0,0 @@ -from __future__ import annotations - -from datetime import datetime -from typing import NotRequired -from typing import TypedDict -from typing import Unpack - -from rgdps.common import modelling -from rgdps.common.context import Context -from rgdps.constants.users import UserRelationshipType -from rgdps.models.user_relationship import UserRelationship - -ALL_FIELDS = modelling.get_model_fields(UserRelationship) -CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) - - -_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) -_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) -_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( - CUSTOMISABLE_FIELDS, -) - - -async def from_id( - ctx: Context, - relationship_id: int, - include_deleted: bool = False, -) -> UserRelationship | None: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - relationship_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE id = :relationship_id {condition}", - {"relationship_id": relationship_id}, - ) - - if not relationship_db: - return None - - return UserRelationship.from_mapping(relationship_db) - - -async def from_user_id( - ctx: Context, - user_id: int, - relationship_type: UserRelationshipType, - include_deleted: bool = False, -) -> list[UserRelationship]: - condition = "" - if not include_deleted: - condition = "AND 
deleted = 0" - - relationships_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE user_id = :user_id AND " - f"relationship_type = :relationship_type {condition} " - "ORDER BY post_ts DESC", - {"user_id": user_id, "relationship_type": relationship_type.value}, - ) - - return [ - UserRelationship.from_mapping(relationship_db) - for relationship_db in relationships_db - ] - - -async def from_user_id_paginated( - ctx: Context, - user_id: int, - relationship_type: UserRelationshipType, - page: int, - page_size: int, - include_deleted: bool = False, -) -> list[UserRelationship]: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - relationships_db = await ctx.mysql.fetch_all( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE user_id = :user_id AND " - f"relationship_type = :relationship_type {condition} " - "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset", - { - "user_id": user_id, - "relationship_type": relationship_type.value, - "limit": page_size, - "offset": page * page_size, - }, - ) - - return [ - UserRelationship.from_mapping(relationship_db) - for relationship_db in relationships_db - ] - - -async def from_user_and_target_user( - ctx: Context, - user_id: int, - target_user_id: int, - relationship_type: UserRelationshipType, - include_deleted: bool = False, -) -> UserRelationship | None: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - relationship_db = await ctx.mysql.fetch_one( - f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE user_id = :user_id AND target_user_id = :target_user_id " - f"AND relationship_type = :relationship_type {condition}", - { - "user_id": user_id, - "target_user_id": target_user_id, - "relationship_type": relationship_type.value, - }, - ) - - if not relationship_db: - return None - - return UserRelationship.from_mapping(relationship_db) - - -async def get_user_relationship_count( - ctx: Context, - user_id: int, - relationship_type: UserRelationshipType, - is_new: bool = False, - include_deleted: bool = False, -) -> int: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - if is_new: - condition += " AND seen_ts IS NULL" - - return await ctx.mysql.fetch_val( - "SELECT COUNT(*) FROM user_relationships WHERE user_id = :user_id " - f"AND relationship_type = :relationship_type {condition}", - {"user_id": user_id, "relationship_type": relationship_type.value}, - ) - - -async def check_relationship_exists( - ctx: Context, - user_id: int, - target_user_id: int, - relationship_type: UserRelationshipType, - include_deleted: bool = False, -) -> bool: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - return await ctx.mysql.fetch_val( - "SELECT EXISTS(SELECT 1 FROM user_relationships WHERE user_id = :user_id " - f"AND target_user_id = :target_user_id AND relationship_type = :relationship_type {condition})", - { - "user_id": user_id, - "target_user_id": target_user_id, - "relationship_type": relationship_type.value, - }, - ) - - -async def mark_all_as_seen( - ctx: Context, - user_id: int, - relationship_type: UserRelationshipType, - seen_ts: datetime, - include_deleted: bool = False, -) -> None: - condition = "" - if not include_deleted: - condition = "AND deleted = 0" - - await ctx.mysql.execute( - "UPDATE user_relationships SET seen_ts = :seen_ts WHERE user_id = :user_id " - f"AND relationship_type = :relationship_type AND seen_ts IS NULL {condition}", - { - "seen_ts": seen_ts, - "user_id": user_id, - 
"relationship_type": relationship_type.value, - }, - ) - - -async def create( - ctx: Context, - user_id: int, - target_user_id: int, - relationship_type: UserRelationshipType, - post_ts: datetime = datetime.now(), - seen_ts: None | datetime = None, -) -> UserRelationship: - relationship = UserRelationship( - id=0, - relationship_type=relationship_type, - user_id=user_id, - target_user_id=target_user_id, - post_ts=post_ts, - seen_ts=seen_ts, - ) - - relationship.id = await ctx.mysql.execute( - f"INSERT INTO user_relationships ({_CUSTOMISABLE_FIELDS_COMMA}) " - f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})", - relationship.as_dict(include_id=False), - ) - - return relationship - - -class _UserRelationshipUpdatePartial(TypedDict): - seen_ts: NotRequired[datetime] - deleted: NotRequired[bool] - - -async def update_partial( - ctx: Context, - relationship_id: int, - **kwargs: Unpack[_UserRelationshipUpdatePartial], -) -> UserRelationship | None: - changed_fields = modelling.unpack_enum_types(kwargs) - - await ctx.mysql.execute( - modelling.update_from_partial_dict( - "user_relationships", - relationship_id, - changed_fields, - ), - changed_fields, - ) - - return await from_id(ctx, relationship_id, include_deleted=True) - - -async def get_count(ctx: Context) -> int: - return await ctx.mysql.fetch_val("SELECT COUNT(*) FROM user_relationships") diff --git a/rgdps/resources/__init__.py b/rgdps/resources/__init__.py new file mode 100644 index 0000000..e9f5ff0 --- /dev/null +++ b/rgdps/resources/__init__.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from abc import ABC +from abc import abstractmethod + +from .daily_chest import DailyChest +from .daily_chest import DailyChestRepository +from .daily_chest import DailyChestRewardType +from .daily_chest import DailyChestType +from .leaderboard import LeaderboardRepository +from .level_data import LevelData +from .level_data import LevelDataRepository +from .like import Like +from .like import LikeRepository +from .like import LikeType +from .message import Message +from .message import MessageRepository +from .save_data import SaveData +from .save_data import SaveDataRepository +from .user import User +from .user import UserRepository +from .user_comment import UserComment +from .user_comment import UserCommentRepository +from .user_credential import UserCredential +from .user_credential import UserCredentialRepository +from .user_replationship import UserRelationship +from .user_replationship import UserRelationshipRepository +from .user_replationship import UserRelationshipType + + +class Context(ABC): + @property + @abstractmethod + def save_data(self) -> SaveDataRepository: ... + + @property + @abstractmethod + def users(self) -> UserRepository: ... + + @property + @abstractmethod + def level_data(self) -> LevelDataRepository: ... + + @property + @abstractmethod + def relationships(self) -> UserRelationshipRepository: ... + + @property + @abstractmethod + def credentials(self) -> UserCredentialRepository: ... + + @property + @abstractmethod + def daily_chests(self) -> DailyChestRepository: ... + + @property + @abstractmethod + def leaderboards(self) -> LeaderboardRepository: ... + + @property + @abstractmethod + def messages(self) -> MessageRepository: ... + + @property + @abstractmethod + def user_comments(self) -> UserCommentRepository: ... + + @property + @abstractmethod + def likes(self) -> LikeRepository: ... 
diff --git a/rgdps/resources/_common.py b/rgdps/resources/_common.py
new file mode 100644
index 0000000..3d88b9d
--- /dev/null
+++ b/rgdps/resources/_common.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from typing import NamedTuple
+
+from pydantic import BaseModel
+from pydantic import ConfigDict
+
+from rgdps.utilities.colour import Colour
+
+
+class DatabaseModel(BaseModel):
+    """An expansion of Pydantic's `BaseModel` providing extended functionality
+    for RealistikGDPS."""
+
+    model_config = ConfigDict(
+        json_encoders={
+            Colour: lambda c: c.as_format_str(),
+        },
+    )
+
+
+class SearchResults[T](NamedTuple):
+    results: list[T]
+    total: int
+    page_size: int
diff --git a/rgdps/resources/daily_chest.py b/rgdps/resources/daily_chest.py
new file mode 100644
index 0000000..1dcf2b6
--- /dev/null
+++ b/rgdps/resources/daily_chest.py
@@ -0,0 +1,167 @@
+from __future__ import annotations
+
+from datetime import datetime
+from enum import IntEnum
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+
+
+class DailyChestView(IntEnum):
+    VIEW = 0
+    CLAIM_SMALL = 1
+    CLAIM_LARGE = 2
+
+    @property
+    def is_claim(self) -> bool:
+        return self in (DailyChestView.CLAIM_SMALL, DailyChestView.CLAIM_LARGE)
+
+
+class DailyChestType(IntEnum):
+    SMALL = 0
+    LARGE = 1
+
+
+class DailyChestShardType(IntEnum):
+    FIRE = 0
+    ICE = 1
+    POISON = 2
+    SHADOW = 3
+    LAVA = 4
+
+
+class DailyChestRewardType(IntEnum):
+    MANA = 0
+    DIAMONDS = 1
+    FIRE_SHARD = 2
+    ICE_SHARD = 3
+    POISON_SHARD = 4
+    SHADOW_SHARD = 5
+    LAVA_SHARD = 6
+    DEMON_KEY = 7
+
+
+class DailyChest(DatabaseModel):
+    id: int
+    user_id: int
+    type: DailyChestType
+    mana: int
+    diamonds: int
+    fire_shards: int
+    ice_shards: int
+    poison_shards: int
+    shadow_shards: int
+    lava_shards: int
+    demon_keys: int
+    claimed_ts: datetime
+
+
+ALL_FIELDS = modelling.get_model_fields(DailyChest)
+CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS)
+
+
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated(
+    CUSTOMISABLE_FIELDS,
+)
+
+
+class DailyChestRepository:
+    __slots__ = ("_mysql",)
+
+    def __init__(self, mysql: AbstractMySQLService) -> None:
+        self._mysql = mysql
+
+    async def from_id(self, chest_id: int) -> DailyChest | None:
+        chest_db = await self._mysql.fetch_one(
+            "SELECT * FROM daily_chests WHERE id = :chest_id",
+            {"chest_id": chest_id},
+        )
+
+        if chest_db is None:
+            return None
+
+        return DailyChest(**chest_db)
+
+    async def from_user_id_and_type_latest(
+        self,
+        user_id: int,
+        chest_type: DailyChestType,
+    ) -> DailyChest | None:
+        chest_db = await self._mysql.fetch_one(
+            "SELECT * FROM daily_chests WHERE user_id = :user_id AND "
+            "type = :chest_type ORDER BY claimed_ts DESC LIMIT 1",
+            {"user_id": user_id, "chest_type": chest_type.value},
+        )
+
+        if chest_db is None:
+            return None
+
+        return DailyChest(**chest_db)
+
+    async def create(
+        self,
+        user_id: int,
+        chest_type: DailyChestType,
+        *,
+        mana: int = 0,
+        diamonds: int = 0,
+        fire_shards: int = 0,
+        ice_shards: int = 0,
+        poison_shards: int = 0,
+        shadow_shards: int = 0,
+        lava_shards: int = 0,
+        demon_keys: int = 0,
+        claimed_ts: datetime | None = None,
+    ) -> DailyChest:
+        if claimed_ts is None:
+            claimed_ts = datetime.now()
+
+        model = DailyChest(
+            id=0,
+            user_id=user_id,
+            type=chest_type,
+            mana=mana,
+            diamonds=diamonds,
+            fire_shards=fire_shards,
+            ice_shards=ice_shards,
+            poison_shards=poison_shards,
+            shadow_shards=shadow_shards,
+            lava_shards=lava_shards,
+            demon_keys=demon_keys,
+            claimed_ts=claimed_ts,
+        )
+        model.id = await self._mysql.execute(
+            f"INSERT INTO daily_chests ({_CUSTOMISABLE_FIELDS_COMMA}) "
+            f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})",
+            model.model_dump(exclude={"id"}),
+        )
+        return model
+
+    async def sum_mana_from_user_id(
+        self,
+        user_id: int,
+    ) -> int:
+        return int(
+            await self._mysql.fetch_val(
+                "SELECT SUM(mana) FROM daily_chests WHERE user_id = :user_id",
+                {"user_id": user_id},
+            )
+            or 0,
+        )
+
+    async def count_of_type(
+        self,
+        user_id: int,
+        chest_type: DailyChestType,
+    ) -> int:
+        return (
+            await self._mysql.fetch_val(
+                "SELECT COUNT(*) FROM daily_chests WHERE user_id = :user_id AND type = :chest_type",
+                {"user_id": user_id, "chest_type": chest_type.value},
+            )
+            or 0
+        )
diff --git a/rgdps/resources/leaderboard.py b/rgdps/resources/leaderboard.py
new file mode 100644
index 0000000..c761747
--- /dev/null
+++ b/rgdps/resources/leaderboard.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from rgdps.adapters import RedisClient
+
+
+class LeaderboardRepository:
+    __slots__ = ("_redis",)
+
+    def __init__(self, redis: RedisClient) -> None:
+        self._redis = redis
+
+    async def get_star_rank(self, user_id: int) -> int | None:
+        redis_rank = await self._redis.zrevrank(
+            "rgdps:leaderboards:stars",
+            user_id,
+        )
+
+        if redis_rank is None:
+            return None
+
+        return redis_rank + 1
+
+    async def get_creator_rank(self, user_id: int) -> int | None:
+        redis_rank = await self._redis.zrevrank(
+            "rgdps:leaderboards:creators",
+            user_id,
+        )
+
+        if redis_rank is None:
+            return None
+
+        return redis_rank + 1
+
+    async def set_star_count(self, user_id: int, stars: int) -> None:
+        await self._redis.zadd(
+            "rgdps:leaderboards:stars",
+            {str(user_id): stars},  # Redis stores members as strings; cast for explicitness.
+        )
+
+    async def remove_star_count(self, user_id: int) -> None:
+        await self._redis.zrem(
+            "rgdps:leaderboards:stars",
+            user_id,
+        )
+
+    async def set_creator_count(self, user_id: int, stars: int) -> None:
+        await self._redis.zadd(
+            "rgdps:leaderboards:creators",
+            {str(user_id): stars},  # Redis stores members as strings; cast for explicitness.
+        )
+
+    async def remove_creator_count(self, user_id: int) -> None:
+        await self._redis.zrem(
+            "rgdps:leaderboards:creators",
+            user_id,
+        )
+
+    async def get_top_stars_paginated(
+        self,
+        page: int,
+        page_size: int,
+    ) -> list[int]:
+        # ZREVRANGE bounds are inclusive, so offset the end index by one.
+        top_stars = await self._redis.zrevrange(
+            "rgdps:leaderboards:stars",
+            page * page_size,
+            ((page + 1) * page_size) - 1,
+        )
+        return [int(top_star) for top_star in top_stars]
+
+    async def get_top_creators_paginated(
+        self,
+        page: int,
+        page_size: int,
+    ) -> list[int]:
+        # ZREVRANGE bounds are inclusive, so offset the end index by one.
+        top_creators = await self._redis.zrevrange(
+            "rgdps:leaderboards:creators",
+            page * page_size,
+            ((page + 1) * page_size) - 1,
+        )
+        return [int(top_creator) for top_creator in top_creators]
diff --git a/rgdps/resources/level.py b/rgdps/resources/level.py
new file mode 100644
index 0000000..71ae39b
--- /dev/null
+++ b/rgdps/resources/level.py
@@ -0,0 +1,653 @@
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+from datetime import datetime
+from enum import Enum
+from enum import IntEnum
+from enum import IntFlag
+from typing import Any
+from typing import Literal
+from typing import NamedTuple
+from typing import NotRequired
+from typing import TypedDict
+from typing import Unpack
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.adapters import MeiliSearchClient
+from rgdps.common import data_utils
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+from rgdps.utilities import time as time_utils
+
+
+class LevelSearchFlag(IntFlag):
+    NONE = 0
+    EPIC = 1 << 0
+    AWARDED = 1 << 1
+    MAGIC = 1 << 2
+    LEGENDARY = 1 << 3
+    MYTHICAL = 1 << 4
+
+    def as_feature(self) -> LevelFeature:
+        if self & LevelSearchFlag.MYTHICAL:
+            return LevelFeature.MYTHICAL
+
+        if self & LevelSearchFlag.LEGENDARY:
+            return LevelFeature.LEGENDARY
+
+        if self & LevelSearchFlag.EPIC:
+            return LevelFeature.EPIC
+
+        return LevelFeature.NONE
+
+
+class LevelFeature(IntEnum):
+    NONE = 0
+    FEATURE = 1
+    EPIC = 2
+    LEGENDARY = 3
+    MYTHICAL = 4
+
+    def as_search_flag(self) -> LevelSearchFlag:
+        return _LEVEL_FEATURE_MAP[self]
+
+
+_LEVEL_FEATURE_MAP = {
+    LevelFeature.NONE: LevelSearchFlag.NONE,
+    LevelFeature.FEATURE: LevelSearchFlag.NONE,
+    LevelFeature.EPIC: LevelSearchFlag.EPIC,
+    LevelFeature.LEGENDARY: LevelSearchFlag.EPIC | LevelSearchFlag.LEGENDARY,
+    LevelFeature.MYTHICAL: LevelSearchFlag.EPIC
+    | LevelSearchFlag.LEGENDARY
+    | LevelSearchFlag.MYTHICAL,
+}
+
+
+class LevelDifficulty(IntEnum):
+    NA = 0
+    EASY = 10
+    NORMAL = 20
+    HARD = 30
+    HARDER = 40
+    INSANE = 50
+
+    @staticmethod
+    def from_stars(stars: int) -> LevelDifficulty:
+        return _DIFFICULTY_STAR_MAP.get(
+            stars,
+            LevelDifficulty.NA,
+        )
+
+
+_DIFFICULTY_STAR_MAP = {
+    2: LevelDifficulty.EASY,
+    3: LevelDifficulty.NORMAL,
+    4: LevelDifficulty.HARD,
+    5: LevelDifficulty.HARD,
+    6: LevelDifficulty.HARDER,
+    7: LevelDifficulty.HARDER,
+    8: LevelDifficulty.INSANE,
+    9: LevelDifficulty.INSANE,
+}
+
+
+class LevelDifficultyName(Enum):
+    """A string equivalent of the `LevelDifficulty` enum used for validation."""
+
+    NA = "na"
+    EASY = "easy"
+    NORMAL = "normal"
+    HARD = "hard"
+    HARDER = "harder"
+    INSANE = "insane"
+
+    def as_difficulty(self) -> LevelDifficulty:
+        return _NAME_DIFFICULTY_MAP[self]
+
+
+_NAME_DIFFICULTY_MAP = {
+    LevelDifficultyName.NA: LevelDifficulty.NA,
+    LevelDifficultyName.EASY: LevelDifficulty.EASY,
+    LevelDifficultyName.NORMAL: LevelDifficulty.NORMAL,
+    LevelDifficultyName.HARD: LevelDifficulty.HARD,
+    LevelDifficultyName.HARDER: LevelDifficulty.HARDER,
+    LevelDifficultyName.INSANE: LevelDifficulty.INSANE,
+}
+
+
+class LevelLength(IntEnum):
+    TINY = 0
+    SHORT = 1
+    MEDIUM = 2
+    LONG = 3
+    XL = 4
+    PLATFORMER = 5
+
+
+class LevelDemonDifficulty(IntEnum):
+    HARD = 0
+    EASY = 3
+    MEDIUM = 4
+    INSANE = 5
+    EXTREME = 6
+
+
+class LevelDemonRating(IntEnum):
+    """Demon difficulty rating used by the client when sending demon ratings
+    (but not when receiving them)."""
+
+    EASY = 1
+    MEDIUM = 2
+    HARD = 3
+    INSANE = 4
+    EXTREME = 5
+
+    def as_difficulty(self) -> LevelDemonDifficulty:
+        return _RATING_DIFFICULTY_MAP[self]
+
+
+_RATING_DIFFICULTY_MAP = {
+    LevelDemonRating.EASY: LevelDemonDifficulty.EASY,
+    LevelDemonRating.MEDIUM: LevelDemonDifficulty.MEDIUM,
+    LevelDemonRating.HARD: LevelDemonDifficulty.HARD,
+    LevelDemonRating.INSANE: LevelDemonDifficulty.INSANE,
+    LevelDemonRating.EXTREME: LevelDemonDifficulty.EXTREME,
+}
+
+
+# Ideas:
+# Listed only for friends
+class LevelPublicity(IntEnum):
+    PUBLIC = 0
+    # Levels only accessible through direct ID.
+    GLOBAL_UNLISTED = 1
+    FRIENDS_UNLISTED = 2
+
+
+class LevelSearchType(IntEnum):
+    SEARCH_QUERY = 0
+    MOST_DOWNLOADED = 1
+    MOST_LIKED = 2
+    TRENDING = 3
+    RECENT = 4
+    USER_LEVELS = 5
+    FEATURED = 6
+    MAGIC = 7
+    MODERATOR_SENT = 8
+    LEVEL_LIST = 9
+    AWARDED = 11
+    FOLLOWED = 12
+    FRIENDS = 13
+    EPIC = 16
+    DAILY = 21
+    WEEKLY = 22
+
+
+class Level(DatabaseModel):
+    id: int
+    name: str
+    user_id: int
+    description: str
+    custom_song_id: int | None
+    official_song_id: int | None
+    version: int
+    length: LevelLength
+    two_player: bool
+    publicity: LevelPublicity
+    render_str: str  # Officially called the "extra string".
+    game_version: int
+    binary_version: int
+    upload_ts: datetime
+    update_ts: datetime
+    original_id: int | None
+
+    # Statistics
+    downloads: int
+    likes: int
+    stars: int
+    difficulty: LevelDifficulty
+    demon_difficulty: LevelDemonDifficulty | None
+    coins: int
+    coins_verified: bool
+    requested_stars: int
+    feature_order: int
+    search_flags: LevelSearchFlag
+    low_detail_mode: bool
+    object_count: int
+    building_time: int
+    update_locked: bool
+    song_ids: list[int]
+    sfx_ids: list[int]
+    deleted: bool
+
+
+class _LevelUpdatePartial(TypedDict):
+    name: NotRequired[str]
+    user_id: NotRequired[int]
+    description: NotRequired[str]
+    custom_song_id: NotRequired[int | None]
+    official_song_id: NotRequired[int | None]
+    version: NotRequired[int]
+    length: NotRequired[LevelLength]
+    two_player: NotRequired[bool]
+    publicity: NotRequired[LevelPublicity]
+    render_str: NotRequired[str]
+    game_version: NotRequired[int]
+    binary_version: NotRequired[int]
+    upload_ts: NotRequired[datetime]
+    update_ts: NotRequired[datetime]
+    original_id: NotRequired[int | None]
+    downloads: NotRequired[int]
+    likes: NotRequired[int]
+    stars: NotRequired[int]
+    difficulty: NotRequired[LevelDifficulty]
+    demon_difficulty: NotRequired[LevelDemonDifficulty | None]
+    coins: NotRequired[int]
+    coins_verified: NotRequired[bool]
+    requested_stars: NotRequired[int]
+    feature_order: NotRequired[int]
+    search_flags: NotRequired[LevelSearchFlag]
+    low_detail_mode: NotRequired[bool]
+    object_count: NotRequired[int]
+    building_time: NotRequired[int]
+    update_locked: NotRequired[bool]
+    song_ids: NotRequired[list[int]]
+    sfx_ids: NotRequired[list[int]]
+    deleted: NotRequired[bool]
+
+
+ALL_FIELDS = modelling.get_model_fields(Level)
+CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS)
+
+
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+
+
+def _make_meili_dict(level_dict: dict[str, Any]) -> dict[str, Any]:
+    level_dict = level_dict.copy()
+    if "upload_ts" in level_dict:
+        level_dict["upload_ts"] = time_utils.into_unix_ts(level_dict["upload_ts"])
+
+    if "update_ts" in level_dict:
+        level_dict["update_ts"] = time_utils.into_unix_ts(level_dict["update_ts"])
+
+    # Split up bitwise enums, as Meilisearch does not support bitwise operations.
+    if "search_flags" in level_dict:
+        level_dict["epic"] = bool(level_dict["search_flags"] & LevelSearchFlag.EPIC)
+        level_dict["magic"] = bool(level_dict["search_flags"] & LevelSearchFlag.MAGIC)
+        level_dict["awarded"] = bool(
+            level_dict["search_flags"] & LevelSearchFlag.AWARDED,
+        )
+        level_dict["legendary"] = bool(
+            level_dict["search_flags"] & LevelSearchFlag.LEGENDARY,
+        )
+        level_dict["mythical"] = bool(
+            level_dict["search_flags"] & LevelSearchFlag.MYTHICAL,
+        )
+
+    return level_dict
+
+
+def _from_meili_dict(level_dict: dict[str, Any]) -> dict[str, Any]:
+    level_dict = level_dict.copy()
+    # Meili returns unix timestamps, so we need to convert them back to datetime.
+    level_dict["upload_ts"] = time_utils.from_unix_ts(level_dict["upload_ts"])
+    level_dict["update_ts"] = time_utils.from_unix_ts(level_dict["update_ts"])
+
+    search_flags = LevelSearchFlag.NONE
+
+    if level_dict["epic"]:
+        search_flags |= LevelSearchFlag.EPIC
+
+    if level_dict["magic"]:
+        search_flags |= LevelSearchFlag.MAGIC
+
+    if level_dict["awarded"]:
+        search_flags |= LevelSearchFlag.AWARDED
+
+    if level_dict["legendary"]:
+        search_flags |= LevelSearchFlag.LEGENDARY
+
+    if level_dict["mythical"]:
+        search_flags |= LevelSearchFlag.MYTHICAL
+
+    level_dict["search_flags"] = search_flags
+
+    del level_dict["epic"]
+    del level_dict["magic"]
+    del level_dict["awarded"]
+    del level_dict["legendary"]
+    del level_dict["mythical"]
+
+    # FIXME: Temporary migration measure.
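+    # Documents indexed before `song_ids`/`sfx_ids` were introduced only carry
+    # the legacy `custom_song_id` field; derive the new fields from it so old
+    # documents keep deserialising until the index is fully rebuilt.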
+ if "song_ids" not in level_dict: + level_dict["song_ids"] = [level_dict["custom_song_id"]] + level_dict["sfx_ids"] = [] + + return level_dict + + +class LevelSearchResults(NamedTuple): + results: list[Level] + total: int + +class LevelRepository: + __slots__ = ( + "_mysql", + "_meili", + ) + + def __init__(self, mysql: AbstractMySQLService, meili: MeiliSearchClient) -> None: + self._mysql = mysql + self._meili = meili.index("levels") + + async def create( + self, + name: str, + user_id: int, + description: str = "", + custom_song_id: int | None = None, + official_song_id: int | None = 1, + version: int = 1, + length: LevelLength = LevelLength.TINY, + two_player: bool = False, + publicity: LevelPublicity = LevelPublicity.PUBLIC, + render_str: str = "", + game_version: int = 22, + binary_version: int = 34, + upload_ts: datetime | None = None, + update_ts: datetime | None = None, + original_id: int | None = None, + downloads: int = 0, + likes: int = 0, + stars: int = 0, + difficulty: LevelDifficulty = LevelDifficulty.NA, + demon_difficulty: LevelDemonDifficulty | None = None, + coins: int = 0, + coins_verified: bool = False, + requested_stars: int = 0, + feature_order: int = 0, + search_flags: LevelSearchFlag = LevelSearchFlag.NONE, + low_detail_mode: bool = False, + object_count: int = 0, + building_time: int = 0, + update_locked: bool = False, + song_ids: list[int] | None = None, + sfx_ids: list[int] | None = None, + deleted: bool = False, + level_id: int | None = None, + ) -> Level: + if upload_ts is None: + upload_ts = datetime.now() + if update_ts is None: + update_ts = datetime.now() + + if sfx_ids is None: + sfx_ids = [] + if song_ids is None: + song_ids = [] + + level = Level( + id=0, + name=name, + user_id=user_id, + description=description, + custom_song_id=custom_song_id, + official_song_id=official_song_id, + version=version, + length=length, + two_player=two_player, + publicity=publicity, + render_str=render_str, + game_version=game_version, + binary_version=binary_version, + upload_ts=upload_ts, + update_ts=update_ts, + original_id=original_id, + downloads=downloads, + likes=likes, + stars=stars, + difficulty=difficulty, + demon_difficulty=demon_difficulty, + coins=coins, + coins_verified=coins_verified, + requested_stars=requested_stars, + feature_order=feature_order, + search_flags=search_flags, + low_detail_mode=low_detail_mode, + object_count=object_count, + building_time=building_time, + update_locked=update_locked, + deleted=deleted, + song_ids=song_ids, + sfx_ids=sfx_ids, + ) + level_dump = level.model_dump() + level_dump["id"] = level_id + + level.id = await self._mysql.execute( + f"INSERT INTO levels ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})", + level_dump, + ) + + meili_dict = _make_meili_dict(level.model_dump()) + await self._meili.add_documents([meili_dict]) + return level + + + async def from_id(self, level_id: int) -> Level | None: + level_dict = await self._mysql.fetch_one( + f"SELECT {_ALL_FIELDS_COMMA} FROM levels WHERE id = :level_id", + {"level_id": level_id}, + ) + + if not level_dict: + return None + + return Level(**level_dict) + + + async def multiple_from_id(self, level_ids: list[int]) -> list[Level]: + if not level_ids: + return [] + + levels = await self._mysql.fetch_all( + f"SELECT {_ALL_FIELDS_COMMA} FROM levels WHERE id IN :level_ids", + {"level_ids": tuple(level_ids)}, + ) + levels = sorted(levels, key=lambda level: level_ids.index(level["id"])) + + return [Level(**level) for level in levels] + + async def update_partial( + self, + 
level_id: int, + **kwargs: Unpack[_LevelUpdatePartial], + ) -> Level | None: + changed_fields = modelling.unpack_enum_types(kwargs) + changed_rows = await self._mysql.execute( + modelling.update_from_partial_dict("levels", level_id, changed_fields), + changed_fields, + ) + + if not changed_rows: + return None + + changed_fields["id"] = level_id + changed_fields = _make_meili_dict(changed_fields) + await self._meili.update_documents([changed_fields]) + + return await self.from_id(level_id) + + + async def search( + self, + query: str | None = None, + *, + page: int = 0, + page_size: int = 10, + required_lengths: list[LevelLength] | None = None, + required_difficulties: list[LevelDifficulty] | None = None, + required_demon_difficulties: list[LevelDemonDifficulty] | None = None, + song_id: int | None = None, + custom_song_id: int | None = None, + rated_only: bool | None = None, + two_player_only: bool | None = None, + excluded_user_ids: list[int] | None = None, + required_user_ids: list[int] | None = None, + required_level_ids: list[int] | None = None, + excluded_level_ids: list[int] | None = None, + order_by: Literal["downloads", "likes", "stars"] = "downloads", + )-> LevelSearchResults: + sort = [] + filters = [ + "deleted = 0", + # TODO: More unlisted logic, such as friends only. + f"publicity = {LevelPublicity.PUBLIC.value}" + ] + + if required_lengths is not None: + required_lengths = data_utils.enum_int_list(required_lengths) # type: ignore + filters.append(f"length IN {required_lengths}") + + if required_difficulties is not None: + required_difficulties = data_utils.enum_int_list(required_difficulties) # type: ignore + filters.append(f"difficulty IN {required_difficulties}") + + if required_demon_difficulties is not None: + required_demon_difficulties = data_utils.enum_int_list(required_demon_difficulties) # type: ignore + filters.append(f"demon_difficulty IN {required_demon_difficulties}") + + # FIXME: THIS IS OBV SO WRONG IHREGIUEHRGIUERH + if song_id is not None: + filters.append(f"{song_id} = ANY(song_ids)") + + if custom_song_id is not None: + filters.append(f"{custom_song_id} = ANY(song_ids)") + + if rated_only is not None: + if rated_only: + filters.append("stars > 0") + else: + filters.append("stars = 0") + + if two_player_only is not None: + filters.append(f"two_player = {int(two_player_only)}") + + if excluded_user_ids is not None: + filters.append(f"user_id NOT IN {excluded_user_ids}") + + elif required_user_ids is not None: + filters.append(f"user_id IN {required_user_ids}") + + if required_level_ids is not None: + filters.append(f"id IN {required_level_ids}") + + elif excluded_level_ids is not None: + filters.append(f"id NOT IN {excluded_level_ids}") + + sort.append(f"{order_by} DESC") + + levels_res = await self._meili.search( + query, + offset=page * page_size, + limit=page_size, + filter=" AND ".join(filters), # ??? 
+            sort=sort,
+        )
+
+        levels = [Level(**_from_meili_dict(level)) for level in levels_res.hits]
+        return LevelSearchResults(
+            results=levels,
+            total=levels_res.estimated_total_hits or 0,
+        )
+
+    async def iterate_all(
+        self,
+        *,
+        include_deleted: bool = False,
+    ) -> AsyncGenerator[Level, None]:
+        condition = ""
+        if not include_deleted:
+            condition = "WHERE deleted = 0"
+
+        async for level_dict in self._mysql.iterate(
+            f"SELECT * FROM levels {condition}",
+        ):
+            yield Level(**level_dict)
+
+    async def count_all(self) -> int:
+        return await self._mysql.fetch_val("SELECT COUNT(*) FROM levels")
+
+    async def from_name_and_user_id(
+        self,
+        name: str,
+        user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> Level | None:
+        query = "SELECT * FROM levels WHERE name = :name AND user_id = :user_id"
+        if not include_deleted:
+            query += " AND deleted = 0"
+
+        level_dict = await self._mysql.fetch_one(
+            query,
+            {"name": name, "user_id": user_id},
+        )
+
+        if not level_dict:
+            return None
+
+        return Level(**level_dict)
+
+    async def from_name(
+        self,
+        name: str,
+        *,
+        include_deleted: bool = False,
+    ) -> Level | None:
+        query = "SELECT * FROM levels WHERE name = :name"
+        if not include_deleted:
+            query += " AND deleted = 0"
+        query += " LIMIT 1"
+
+        level_dict = await self._mysql.fetch_one(
+            query,
+            {"name": name},
+        )
+
+        if not level_dict:
+            return None
+
+        return Level(**level_dict)
+
+    # TODO: Move into a dedicated recommendation module.
+    # A function primarily used by recommendation algorithms. Returns a list of
+    # level IDs ordered by how well received they are, assessed using a formula.
+    async def get_well_received(
+        self,
+        minimum_stars: int,
+        minimum_length: LevelLength,
+        maximum_stars: int = 0,
+        maximum_demon_rating: LevelDemonDifficulty = LevelDemonDifficulty.EXTREME,
+        excluded_level_ids: list[
+            int
+        ] = [],  # Never mutated, so a shared default list is safe here.
+        limit: int = 100,
+    ) -> list[int]:
+        # `IN ()` is a SQL syntax error, so substitute a placeholder ID that
+        # never matches.
+        if not excluded_level_ids:
+            excluded_level_ids = [0]
+
+        # The ORDER BY formula ranks levels by likes relative to the square
+        # root of their downloads, so likes carry significant weight while raw
+        # download counts are dampened. (MySQL yields NULL for the
+        # zero-download edge case, which sorts last under DESC.)
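+        # As an illustration (hypothetical numbers): 400 likes over 10,000
+        # downloads scores 400 / SQRT(10000) = 4.0, while 900 likes over
+        # 90,000 downloads scores 900 / SQRT(90000) = 3.0, so the first level
+        # ranks higher despite fewer raw likes.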
+ values = await self._mysql.fetch_all( + "SELECT id FROM levels WHERE stars >= :minimum_stars AND stars <= :maximum_stars " + "AND demon_difficulty <= :maximum_demon_rating AND length >= :minimum_length " + "AND id NOT IN :excluded_level_ids AND deleted = 0 ORDER BY (SQRT(downloads) / likes) DESC " + "LIMIT :limit", + { + "minimum_stars": minimum_stars, + "maximum_stars": maximum_stars, + "maximum_demon_rating": maximum_demon_rating.value, + "minimum_length": minimum_length.value, + "excluded_level_ids": tuple(excluded_level_ids), + "limit": limit, + }, + ) + + return [x["id"] for x in values] + \ No newline at end of file diff --git a/rgdps/resources/level_data.py b/rgdps/resources/level_data.py new file mode 100644 index 0000000..b7c8dbc --- /dev/null +++ b/rgdps/resources/level_data.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from rgdps.adapters import AbstractStorage + + +class LevelData: + """A wrapper class around pure-string level data for type + clarity.""" + + __slots__ = ("_data",) + + def __init__(self, data: str) -> None: + self._data = data + + def data(self) -> str: + return self._data + + +class LevelDataRepository: + def __init__(self, storage: AbstractStorage) -> None: + self._storage = storage + + async def from_user_id(self, user_id: str) -> LevelData | None: + res = await self._storage.load(f"levels/{user_id}") + if res is not None: + return LevelData(res.decode()) + + return None + + async def create( + self, + user_id: int, + data: str, + ) -> LevelData: + await self._storage.save(f"levels/{user_id}", data.encode()) + return LevelData(data) diff --git a/rgdps/resources/like.py b/rgdps/resources/like.py new file mode 100644 index 0000000..c71ac1a --- /dev/null +++ b/rgdps/resources/like.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +from enum import IntEnum + +from rgdps.adapters import AbstractMySQLService +from rgdps.common import modelling +from rgdps.resources._common import DatabaseModel + + +class LikeType(IntEnum): + LEVEL = 1 + COMMENT = 2 + USER_COMMENT = 3 + + +class Like(DatabaseModel): + id: int + target_type: LikeType + target_id: int + user_id: int + value: int + + +ALL_FIELDS = modelling.get_model_fields(Like) +CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS) + +_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS) +_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS) +_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS) +_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated( + CUSTOMISABLE_FIELDS, +) + + +class LikeRepository: + __slots__ = ("_mysql",) + + def __init__(self, mysql: AbstractMySQLService) -> None: + self._mysql = mysql + + async def from_id(self, like_id: int) -> Like | None: + like_db = await self._mysql.fetch_one( + "SELECT * FROM user_likes WHERE id = :like_id", + { + "like_id": like_id, + }, + ) + + if like_db is None: + return None + + return Like(**like_db) + + async def create( + self, + target_type: LikeType, + target_id: int, + user_id: int, + value: int, + ) -> Like: + like = Like( + id=0, + target_type=target_type, + target_id=target_id, + user_id=user_id, + value=value, + ) + + like.id = await self._mysql.execute( + f"INSERT INTO user_likes ({_CUSTOMISABLE_FIELDS_COMMA}) VALUES " + f"({_CUSTOMISABLE_FIELDS_COLON})", + like.model_dump(exclude={"id"}), + ) + + return like + + async def exists_from_target_and_user( + self, + target_type: LikeType, + target_id: int, + user_id: int, + ) -> bool: + return ( + await 
self._mysql.fetch_val( + "SELECT 1 FROM user_likes WHERE target_type = :target_type AND " + "target_id = :target_id AND user_id = :user_id", + { + "target_type": target_type.value, + "target_id": target_id, + "user_id": user_id, + }, + ) + ) is not None + + async def sum_from_target( + self, + target_type: LikeType, + target_id: int, + ) -> int: + return ( + await self._mysql.fetch_val( + "SELECT SUM(value) FROM user_likes WHERE target_type = :target_type AND target_id = :target_id", + { + "target_type": target_type.value, + "target_id": target_id, + }, + ) + ) or 0 diff --git a/rgdps/resources/message.py b/rgdps/resources/message.py new file mode 100644 index 0000000..eea9bcb --- /dev/null +++ b/rgdps/resources/message.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +from datetime import datetime +from typing import NotRequired +from typing import TypedDict +from typing import Unpack + +from rgdps.adapters import AbstractMySQLService +from rgdps.common import modelling +from rgdps.resources._common import DatabaseModel +from rgdps.utilities.enum import StrEnum + + +class MessageDirection(StrEnum): + # NOTE: message direction is relative to the user who is + # making the request. + SENT = "sent" + RECEIVED = "received" + + +class Message(DatabaseModel): + id: int + sender_user_id: int + recipient_user_id: int + subject: str + content: str + post_ts: datetime + seen_ts: datetime | None + + +class _MessageUpdatePartial(TypedDict): + seen_ts: NotRequired[datetime] + sender_deleted: NotRequired[bool] + recipient_deleted: NotRequired[bool] + deleted: NotRequired[bool] + + +class MessageRepository: + __slots__ = ("_mysql",) + + def __init__(self, mysql: AbstractMySQLService) -> None: + self._mysql = mysql + + async def from_id(self, message_id: int) -> Message | None: + message_db = await self._mysql.fetch_one( + "SELECT * FROM messages WHERE id = :message_id", + { + "message_id": message_id, + }, + ) + + if message_db is None: + return None + + return Message(**message_db) + + async def from_recipient_user_id_paginated( + self, + recipient_user_id: int, + page: int, + page_size: int, + *, + include_deleted: bool = False, + ) -> list[Message]: + condition = "" + if not include_deleted: + condition = "AND deleted = 0 AND recipient_deleted = 0" + + messages_db = self._mysql.iterate( + f"SELECT * FROM messages WHERE recipient_user_id = :recipient_user_id {condition} " + "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset", + { + "recipient_user_id": recipient_user_id, + "limit": page_size, + "offset": page * page_size, + }, + ) + + return [Message(**message_db) async for message_db in messages_db] + + async def from_sender_user_id_paginated( + self, + sender_user_id: int, + page: int, + page_size: int, + *, + include_deleted: bool = False, + ) -> list[Message]: + condition = "" + if not include_deleted: + condition = "AND deleted = 0 AND sender_deleted = 0" + + messages_db = self._mysql.iterate( + f"SELECT * FROM messages WHERE sender_user_id = :sender_user_id {condition} " + "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset", + { + "sender_user_id": sender_user_id, + "limit": page_size, + "offset": page * page_size, + }, + ) + + return [Message(**message_db) async for message_db in messages_db] + + async def count_from_recipient_user_id( + self, + recipient_user_id: int, + *, + include_deleted: bool = False, + ) -> int: + condition = "" + if not include_deleted: + condition = "AND deleted = 0 AND recipient_deleted = 0" + + message_count = await self._mysql.fetch_val( + f"SELECT COUNT(*) 
+            {
+                "recipient_user_id": recipient_user_id,
+            },
+        )
+
+        return message_count
+
+    async def count_new_from_recipient_user_id(
+        self,
+        recipient_user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        condition = ""
+        if not include_deleted:
+            condition = "AND deleted = 0 AND recipient_deleted = 0"
+
+        message_count = await self._mysql.fetch_val(
+            f"SELECT COUNT(*) FROM messages WHERE recipient_user_id = :recipient_user_id {condition} "
+            "AND seen_ts IS NULL",
+            {
+                "recipient_user_id": recipient_user_id,
+            },
+        )
+
+        return message_count
+
+    async def count_from_sender_user_id(
+        self,
+        sender_user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        condition = ""
+        if not include_deleted:
+            condition = "AND deleted = 0 AND sender_deleted = 0"
+
+        message_count = await self._mysql.fetch_val(
+            f"SELECT COUNT(*) FROM messages WHERE sender_user_id = :sender_user_id {condition}",
+            {
+                "sender_user_id": sender_user_id,
+            },
+        )
+
+        return message_count
+
+    async def count_new_from_sender_user_id(
+        self,
+        sender_user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        condition = ""
+        if not include_deleted:
+            condition = "AND deleted = 0 AND sender_deleted = 0"
+
+        message_count = await self._mysql.fetch_val(
+            f"SELECT COUNT(*) FROM messages WHERE sender_user_id = :sender_user_id {condition} "
+            "AND seen_ts IS NULL",
+            {
+                "sender_user_id": sender_user_id,
+            },
+        )
+
+        return message_count
+
+    async def create(
+        self,
+        sender_user_id: int,
+        recipient_user_id: int,
+        subject: str,
+        content: str,
+        deleted: bool = False,
+        sender_deleted: bool = False,
+    ) -> int:
+        message_id = await self._mysql.execute(
+            "INSERT INTO messages (sender_user_id, recipient_user_id, subject, content, deleted, sender_deleted) "
+            "VALUES (:sender_user_id, :recipient_user_id, :subject, :content, :deleted, :sender_deleted)",
+            {
+                "sender_user_id": sender_user_id,
+                "recipient_user_id": recipient_user_id,
+                "subject": subject,
+                "content": content,
+                "deleted": deleted,
+                "sender_deleted": sender_deleted,
+            },
+        )
+
+        return message_id
+
+    async def update_partial(
+        self,
+        message_id: int,
+        **kwargs: Unpack[_MessageUpdatePartial],
+    ) -> Message | None:
+        changed_fields = modelling.unpack_enum_types(kwargs)
+
+        await self._mysql.execute(
+            modelling.update_from_partial_dict("messages", message_id, changed_fields),
+            changed_fields,
+        )
+        return await self.from_id(message_id)
+
+    async def count_all(self) -> int:
+        return (await self._mysql.fetch_val("SELECT COUNT(*) FROM messages")) or 0
diff --git a/rgdps/resources/save_data.py b/rgdps/resources/save_data.py
new file mode 100644
index 0000000..4f85564
--- /dev/null
+++ b/rgdps/resources/save_data.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from rgdps.adapters import AbstractStorage
+
+
+class SaveData:
+    """A wrapper class around a pure-string save data for type
+    clarity."""
+
+    __slots__ = ("_data",)
+
+    def __init__(self, data: str) -> None:
+        self._data = data
+
+    def data(self) -> str:
+        return self._data
+
+
+class SaveDataRepository:
+    def __init__(self, storage: AbstractStorage) -> None:
+        self._storage = storage
+
+    async def from_user_id(self, user_id: int) -> SaveData | None:
+        res = await self._storage.load(f"saves/{user_id}")
+        if res is not None:
+            return SaveData(res.decode())
+
+        return None
+
+    async def create(
+        self,
+        user_id: int,
+        data: str,
+    ) -> SaveData:
+        await self._storage.save(f"saves/{user_id}", data.encode())
+        return SaveData(data)
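
# Illustrative sketch (not part of the diff): how a caller might use the
# save-data repository above. The `LocalStorage("./rgdps_data")` constructor
# argument is an assumption for the example; the repository itself only
# relies on `AbstractStorage.load` and `.save`.
import asyncio

from rgdps.adapters import LocalStorage
from rgdps.resources.save_data import SaveDataRepository


async def demo_save_data() -> None:
    saves = SaveDataRepository(LocalStorage("./rgdps_data"))  # hypothetical argument

    await saves.create(user_id=1, data="H4sIAA...")  # placeholder save blob
    save = await saves.from_user_id(1)
    assert save is not None and save.data().startswith("H4sIAA")


asyncio.run(demo_save_data())
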
diff --git a/rgdps/resources/song.py b/rgdps/resources/song.py
new file mode 100644
index 0000000..848b444
--- /dev/null
+++ b/rgdps/resources/song.py
@@ -0,0 +1,177 @@
+from __future__ import annotations
+
+from enum import IntEnum
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.adapters import GeometryDashClient
+from rgdps.adapters.boomlings import GDRequestStatus
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+
+
+class SongSource(IntEnum):
+    BOOMLINGS = 0
+    NEWGROUNDS = 1
+    CUSTOM = 2
+
+
+class Song(DatabaseModel):
+    id: int
+    name: str
+    author_id: int
+    author: str
+    author_youtube: str | None
+    size: float
+    download_url: str
+    source: SongSource
+    blocked: bool
+
+
+ALL_FIELDS = modelling.get_model_fields(Song)
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+
+
+class SongRepository:
+    def __init__(
+        self,
+        mysql: AbstractMySQLService,
+        geometry_dash: GeometryDashClient,
+    ) -> None:
+        self._mysql = mysql
+        self._geometry_dash = geometry_dash
+
+    async def __from_db(
+        self,
+        song_id: int,
+        *,
+        allow_blocked: bool = False,
+    ) -> Song | None:
+        song_db = await self._mysql.fetch_one(
+            "SELECT * FROM songs WHERE id = :song_id AND blocked IN :blocked",
+            {
+                "song_id": song_id,
+                "blocked": (0, 1) if allow_blocked else (0,),
+            },
+        )
+
+        if song_db is None:
+            return None
+
+        return Song(**song_db)
+
+    async def __multiple_from_db(
+        self,
+        song_ids: list[int],
+        *,
+        allow_blocked: bool = False,
+    ) -> list[Song]:
+        songs_db = self._mysql.iterate(
+            "SELECT * FROM songs WHERE id IN :song_ids AND blocked IN :blocked",
+            {
+                "song_ids": tuple(song_ids),
+                "blocked": (0, 1) if allow_blocked else (0,),
+            },
+        )
+
+        return [Song(**song_db) async for song_db in songs_db]
+
+    async def __from_boomlings(self, song_id: int) -> Song | None:
+        song_boomlings = await self._geometry_dash.song_from_id(song_id)
+
+        if isinstance(song_boomlings, GDRequestStatus):
+            return None
+
+        return Song(
+            id=song_boomlings.id,
+            name=song_boomlings.name,
+            author_id=song_boomlings.author_id,
+            author=song_boomlings.author,
+            author_youtube=song_boomlings.author_youtube,
+            size=song_boomlings.size,
+            download_url=song_boomlings.download_url,
+            source=SongSource.BOOMLINGS,
+            blocked=False,
+        )
+
+    async def __insert_model(self, song_model: Song) -> int:
+        return await self._mysql.execute(
+            f"INSERT INTO songs ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})",
+            song_model.model_dump(),
+        )
+
+    async def create(
+        self,
+        name: str,
+        author_id: int,
+        author: str,
+        download_url: str,
+        author_youtube: str | None = None,
+        size: float = 0.0,
+        source: SongSource = SongSource.CUSTOM,
+        blocked: bool = False,
+        *,
+        song_id: int | None = None,
+    ) -> Song:
+        song = Song(
+            id=0,
+            name=name,
+            author_id=author_id,
+            author=author,
+            author_youtube=author_youtube,
+            size=size,
+            download_url=download_url,
+            source=source,
+            blocked=blocked,
+        )
+        song_dump = song.model_dump()
+        song_dump["id"] = song_id
+
+        song.id = await self._mysql.execute(
+            f"INSERT INTO songs ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})",
+            song_dump,
+        )
+        return song
+
+    async def from_id(
+        self,
+        song_id: int,
+        *,
+        allow_blocked: bool = False,
+    ) -> Song | None:
+        song_db = await self.__from_db(song_id, allow_blocked=allow_blocked)
+
+        if song_db is not None:
+            return song_db
+
+        song_gd = await self.__from_boomlings(song_id)
+
+        if song_gd is not None:
+            await self.__insert_model(song_gd)
+
+        return song_gd
+
+    async def multiple_from_id(
+        self,
+        song_ids: list[int],
+        *,
+        allow_blocked: bool = False,
+    ) -> list[Song]:
+        songs_db = await self.__multiple_from_db(song_ids, allow_blocked=allow_blocked)
+
+        # All found within the database.
+        if len(song_ids) == len(songs_db):
+            return songs_db
+
+        fetched_ids = [song.id for song in songs_db]
+
+        # Fetch remaining results.
+        for song_id in filter(lambda x: x not in fetched_ids, song_ids):
+            song_boomlings = await self.__from_boomlings(song_id)
+            if song_boomlings is None:
+                continue
+
+            songs_db.append(song_boomlings)
+
+        return sorted(songs_db, key=lambda x: song_ids.index(x.id))
+
+    async def count_all(self) -> int:
+        return await self._mysql.fetch_val("SELECT COUNT(*) FROM songs")
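
# Illustrative sketch (not part of the diff): `SongRepository.from_id` tries
# MySQL first and falls through to the official servers on a miss, inserting
# the fetched row so later lookups are served locally. The song ID is an
# arbitrary example value.
async def demo_song_fallthrough(songs: SongRepository) -> None:
    song = await songs.from_id(467339)    # miss -> fetched from Boomlings, cached
    cached = await songs.from_id(467339)  # hit  -> served from the `songs` table
    if song is not None and cached is not None:
        assert song.id == cached.id
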
diff --git a/rgdps/resources/user.py b/rgdps/resources/user.py
new file mode 100644
index 0000000..9927037
--- /dev/null
+++ b/rgdps/resources/user.py
@@ -0,0 +1,477 @@
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+from datetime import datetime
+from enum import IntEnum
+from enum import IntFlag
+from typing import Any
+from typing import NotRequired
+from typing import TypedDict
+from typing import Unpack
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.adapters import MeiliSearchClient
+from rgdps.common import modelling
+from rgdps.common import time as time_utils
+from rgdps.common.colour import Colour
+from rgdps.common.mixins import IntEnumStringMixin
+from rgdps.resources._common import DatabaseModel
+from rgdps.resources._common import SearchResults
+
+
+# TODO: Move all of these to string enums and then have a GD equivalent.
+class UserPrivileges(IntFlag):
+    USER_AUTHENTICATE = 1 << 0
+    USER_PROFILE_PUBLIC = 1 << 1
+    USER_STAR_LEADERBOARD_PUBLIC = 1 << 2
+    USER_CREATOR_LEADERBOARD_PUBLIC = 1 << 3
+    USER_DISPLAY_ELDER_BADGE = 1 << 4
+    USER_DISPLAY_MOD_BADGE = 1 << 5
+    USER_REQUEST_ELDER = 1 << 6
+    USER_REQUEST_MODERATOR = 1 << 7
+    USER_CREATE_USER_COMMENTS = 1 << 8
+    USER_MODIFY_PRIVILEGES = 1 << 9
+    USER_CHANGE_CREDENTIALS_OWN = 1 << 10
+    USER_CHANGE_CREDENTIALS_OTHER = 1 << 11
+
+    LEVEL_UPLOAD = 1 << 12
+    LEVEL_UPDATE = 1 << 13
+    LEVEL_DELETE_OWN = 1 << 14
+    LEVEL_DELETE_OTHER = 1 << 15
+    LEVEL_RATE_STARS = 1 << 16
+    LEVEL_ENQUEUE_DAILY = 1 << 17
+    LEVEL_ENQUEUE_WEEKLY = 1 << 18
+    LEVEL_MODIFY_VISIBILITY = 1 << 19
+    LEVEL_RENAME_OTHER = 1 << 20
+    LEVEL_MARK_MAGIC = 1 << 21
+    LEVEL_MARK_AWARDED = 1 << 22
+
+    COMMENTS_POST = 1 << 23
+    COMMENTS_DELETE_OWN = 1 << 24
+    COMMENTS_DELETE_OTHER = 1 << 25
+    COMMANDS_TRIGGER = 1 << 26
+    COMMENTS_BYPASS_SPAM_FILTER = 1 << 27
+
+    MESSAGES_SEND = 1 << 28
+    MESSAGES_DELETE_OWN = 1 << 29
+
+    FRIEND_REQUESTS_SEND = 1 << 30
+    FRIEND_REQUESTS_ACCEPT = 1 << 31
+    FRIEND_REQUESTS_DELETE_OWN = 1 << 32
+
+    MAP_PACK_CREATE = 1 << 33
+
+    GAUNTLET_CREATE = 1 << 34
+
+    SERVER_RESYNC_SEARCH = 1 << 35
+    SERVER_STOP = 1 << 36
+
+    USER_VIEW_PRIVATE_PROFILE = 1 << 37
+    COMMENTS_LIKE = 1 << 38
+
+    LEVEL_CHANGE_DESCRIPTION_OTHER = 1 << 39
+
+    SERVER_RESYNC_LEADERBOARDS = 1 << 40
+
+    LEVEL_MOVE_USER = 1 << 41
+
+    def as_bytes(self) -> bytes:
+        return self.to_bytes(16, "little", signed=False)
+
+    @staticmethod
+    def from_db_bytes(b: bytes) -> UserPrivileges:
+        return UserPrivileges(int.from_bytes(b, "little", signed=False))
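
# Worked example (not part of the diff): the bitfield spans more than 32
# bits, so it is persisted as a 16-byte little-endian blob via `as_bytes`
# and restored with `from_db_bytes`.
privileges = UserPrivileges.USER_AUTHENTICATE | UserPrivileges.LEVEL_MOVE_USER

raw = privileges.as_bytes()
assert len(raw) == 16
assert UserPrivileges.from_db_bytes(raw) == privileges
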
+
+
+class UserPrivacySetting(IntEnum):
+    PUBLIC = 0
+    FRIENDS = 1
+    PRIVATE = 2
+
+
+class UserPrivilegeLevel(IntEnumStringMixin, IntEnum):
+    """Enum for determining whether a user should be displayed as a
+    moderator, elder moderator, or neither.
+    """
+
+    NONE = 0
+    MODERATOR = 1
+    ELDER_MODERATOR = 2
+
+
+# TODO: Move
+STAR_PRIVILEGES = (
+    UserPrivileges.USER_STAR_LEADERBOARD_PUBLIC | UserPrivileges.USER_PROFILE_PUBLIC
+)
+"""A set of privileges required for a user to appear on the star leaderboards."""
+
+CREATOR_PRIVILEGES = (
+    UserPrivileges.USER_CREATOR_LEADERBOARD_PUBLIC | UserPrivileges.USER_PROFILE_PUBLIC
+)
+"""A set of privileges required for a user to appear on the creator leaderboards."""
+
+DEFAULT_PRIVILEGES = (
+    UserPrivileges.USER_AUTHENTICATE
+    | UserPrivileges.USER_PROFILE_PUBLIC
+    | UserPrivileges.USER_STAR_LEADERBOARD_PUBLIC
+    | UserPrivileges.USER_CREATOR_LEADERBOARD_PUBLIC
+    | UserPrivileges.USER_CREATE_USER_COMMENTS
+    | UserPrivileges.USER_CHANGE_CREDENTIALS_OWN
+    | UserPrivileges.LEVEL_UPLOAD
+    | UserPrivileges.LEVEL_UPDATE
+    | UserPrivileges.LEVEL_DELETE_OWN
+    | UserPrivileges.COMMENTS_POST
+    | UserPrivileges.COMMENTS_DELETE_OWN
+    | UserPrivileges.COMMANDS_TRIGGER
+    | UserPrivileges.MESSAGES_SEND
+    | UserPrivileges.MESSAGES_DELETE_OWN
+    | UserPrivileges.FRIEND_REQUESTS_SEND
+    | UserPrivileges.FRIEND_REQUESTS_ACCEPT
+    | UserPrivileges.FRIEND_REQUESTS_DELETE_OWN
+    | UserPrivileges.COMMENTS_LIKE
+)
+"""A set of default privileges to be assigned to users upon registration."""
+
+
+class User(DatabaseModel):
+    id: int
+    username: str
+    email: str
+    privileges: UserPrivileges
+
+    message_privacy: UserPrivacySetting
+    friend_privacy: UserPrivacySetting
+    comment_privacy: UserPrivacySetting
+
+    youtube_name: str | None
+    twitter_name: str | None
+    twitch_name: str | None
+
+    register_ts: datetime
+    comment_colour: Colour
+
+    # TODO: Move?
+    stars: int
+    demons: int
+    moons: int
+    primary_colour: int
+    secondary_colour: int
+    glow_colour: int
+    display_type: int
+    icon: int
+    ship: int
+    ball: int
+    ufo: int
+    wave: int
+    robot: int
+    spider: int
+    swing_copter: int
+    jetpack: int
+    explosion: int
+    glow: bool
+    creator_points: int
+    coins: int
+    user_coins: int
+    diamonds: int
+
+
+# In case we want to move to a less direct model approach later.
+type UserModel = User
+
+ALL_FIELDS = modelling.get_model_fields(User)
+CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS)
+
+
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+
+DEFAULT_PAGE_SIZE = 10
+
+
+class _UserUpdatePartial(TypedDict):
+    """Set of optional key-word arguments that may be used to update a user."""
+
+    username: NotRequired[str]
+    email: NotRequired[str]
+    privileges: NotRequired[UserPrivileges]
+    message_privacy: NotRequired[UserPrivacySetting]
+    friend_privacy: NotRequired[UserPrivacySetting]
+    comment_privacy: NotRequired[UserPrivacySetting]
+    youtube_name: NotRequired[str | None]
+    twitter_name: NotRequired[str | None]
+    twitch_name: NotRequired[str | None]
+    stars: NotRequired[int]
+    demons: NotRequired[int]
+    moons: NotRequired[int]
+    primary_colour: NotRequired[int]
+    secondary_colour: NotRequired[int]
+    glow_colour: NotRequired[int]
+    display_type: NotRequired[int]
+    icon: NotRequired[int]
+    ship: NotRequired[int]
+    ball: NotRequired[int]
+    ufo: NotRequired[int]
+    wave: NotRequired[int]
+    robot: NotRequired[int]
+    spider: NotRequired[int]
+    swing_copter: NotRequired[int]
+    jetpack: NotRequired[int]
+    explosion: NotRequired[int]
+    glow: NotRequired[bool]
+    creator_points: NotRequired[int]
+    coins: NotRequired[int]
+    user_coins: NotRequired[int]
+    diamonds: NotRequired[int]
+    comment_colour: NotRequired[Colour]
+
+
+# Meili type accommodation.
+def _meili_dict_from_model(user_model: UserModel) -> dict[str, Any]:
+    return _meili_dict_from_dict(user_model.model_dump())
+
+
+def _meili_dict_from_dict(user_dict: dict[str, Any]) -> dict[str, Any]:
+    if "privileges" in user_dict:
+        user_dict["privileges"] = int.from_bytes(
+            user_dict["privileges"],
+            byteorder="little",
+            signed=False,
+        )
+        user_dict["is_public"] = (
+            user_dict["privileges"] & UserPrivileges.USER_PROFILE_PUBLIC > 0
+        )
+
+    if "register_ts" in user_dict:
+        user_dict["register_ts"] = time_utils.into_unix_ts(user_dict["register_ts"])
+
+    return user_dict
+
+
+def _model_from_meili_dict(user_dict: dict[str, Any]) -> UserModel:
+    user_dict = user_dict.copy()
+
+    user_dict["privileges"] = UserPrivileges(int(user_dict["privileges"])).as_bytes()
+
+    user_dict["register_ts"] = time_utils.from_unix_ts(user_dict["register_ts"])
+
+    del user_dict["is_public"]
+
+    return User(**user_dict)
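
# Worked example (not part of the diff): the Meili-facing shape of a user.
# The stored privilege blob flattens to a plain integer, a filterable
# `is_public` flag is derived from USER_PROFILE_PUBLIC, and `register_ts`
# becomes a unix timestamp; `_model_from_meili_dict` reverses the transform.
from datetime import datetime

meili_doc = _meili_dict_from_dict(
    {
        "privileges": UserPrivileges.USER_PROFILE_PUBLIC.as_bytes(),
        "register_ts": datetime(2024, 1, 1),
    },
)
assert meili_doc["is_public"] is True
assert isinstance(meili_doc["privileges"], int)
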
+
+
+class UserRepository:
+    def __init__(
+        self,
+        mysql: AbstractMySQLService,
+        meili: MeiliSearchClient,
+    ) -> None:
+        self._mysql = mysql
+        self._meili = meili.index("users")
+
+    async def from_id(self, user_id: int) -> User | None:
+        user_db = await self._mysql.fetch_one(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM users WHERE id = :id",
+            {"id": user_id},
+        )
+
+        if user_db is None:
+            return None
+
+        return User(**user_db)
+
+    async def multiple_from_id(self, user_ids: list[int]) -> list[User]:
+        if not user_ids:
+            return []
+
+        users_db = self._mysql.iterate(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM users WHERE id IN :ids",
+            {"ids": tuple(user_ids)},
+        )
+
+        return [User(**user_row) async for user_row in users_db]
+
+    async def __update_meili(self, model: User) -> None:
+        user_dict = _meili_dict_from_model(model)
+        await self._meili.add_documents([user_dict])
+
+    async def create(
+        self,
+        username: str,
+        email: str,
+        *,
+        privileges: UserPrivileges = DEFAULT_PRIVILEGES,
+        message_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC,
+        friend_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC,
+        comment_privacy: UserPrivacySetting = UserPrivacySetting.PUBLIC,
+        youtube_name: str | None = None,
+        twitter_name: str | None = None,
+        twitch_name: str | None = None,
+        register_ts: datetime | None = None,
+        stars: int = 0,
+        demons: int = 0,
+        moons: int = 0,
+        primary_colour: int = 0,
+        # NOTE: secondary_colour is 4 by default in the game
+        secondary_colour: int = 4,
+        glow_colour: int = 0,
+        display_type: int = 0,
+        icon: int = 0,
+        ship: int = 0,
+        ball: int = 0,
+        ufo: int = 0,
+        wave: int = 0,
+        robot: int = 0,
+        spider: int = 0,
+        swing_copter: int = 0,
+        jetpack: int = 0,
+        explosion: int = 0,
+        glow: bool = False,
+        creator_points: int = 0,
+        coins: int = 0,
+        user_coins: int = 0,
+        diamonds: int = 0,
+        # NOTE: `None` (rather than `0`) so the auto-increment path below is taken.
+        user_id: int | None = None,
+        comment_colour: Colour = Colour.default(),
+    ) -> User:
+        if register_ts is None:
+            register_ts = datetime.now()
+
+        user_id_provided = user_id is not None
+        if user_id is None:
+            user_id = 0
+
+        user = User(
+            id=user_id,
+            username=username,
+            email=email,
+            privileges=privileges,
+            message_privacy=message_privacy,
+            friend_privacy=friend_privacy,
+            comment_privacy=comment_privacy,
+            youtube_name=youtube_name,
+            twitter_name=twitter_name,
+            twitch_name=twitch_name,
+            register_ts=register_ts,
+            stars=stars,
+            demons=demons,
+            moons=moons,
+            primary_colour=primary_colour,
+            secondary_colour=secondary_colour,
+            glow_colour=glow_colour,
+            display_type=display_type,
+            icon=icon,
+            ship=ship,
+            ball=ball,
+            ufo=ufo,
+            wave=wave,
+            robot=robot,
+            spider=spider,
+            swing_copter=swing_copter,
+            jetpack=jetpack,
+            explosion=explosion,
+            glow=glow,
+            creator_points=creator_points,
+            coins=coins,
+            user_coins=user_coins,
+            diamonds=diamonds,
+            comment_colour=comment_colour,
+        )
+
+        if user_id_provided:
+            user_dict = user.model_dump()
+        else:
+            user_dict = user.model_dump(exclude={"id"}) | {
+                "id": None,
+            }
+
+        user.id = await self._mysql.execute(
+            f"INSERT INTO users ({_ALL_FIELDS_COMMA}) VALUES ({_ALL_FIELDS_COLON})",
+            user_dict,
+        )
+
+        await self.__update_meili(user)
+        return user
+
+    async def update_partial(
+        self,
+        user_id: int,
+        **kwargs: Unpack[_UserUpdatePartial],
+    ) -> User | None:
+        changed_fields = modelling.unpack_enum_types(kwargs)
+
+        await self._mysql.execute(
+            modelling.update_from_partial_dict("users", user_id, changed_fields),
+            changed_fields,
+        )
+
+        meili_dict = _meili_dict_from_dict(dict(kwargs)) | {
+            "id": user_id,
+        }
+
+        await self._meili.update_documents([meili_dict])
+        return await self.from_id(user_id)
+
+    async def from_username(self, username: str) -> User | None:
+        user_id = await self._mysql.fetch_val(
+            "SELECT id FROM users WHERE username = :username",
+            {"username": username},
+        )
+
+        if user_id is None:
+            return None
+
+        return await self.from_id(user_id)
+
+    async def all(self) -> AsyncGenerator[User, None]:
+        async for user_db in self._mysql.iterate(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM users",
+        ):
+            yield User(**user_db)
+
+    # Search related.
+    async def search(
+        self,
+        query: str,
+        *,
+        page: int = 0,
+        page_size: int = DEFAULT_PAGE_SIZE,
+        include_hidden: bool = False,
+    ) -> SearchResults[User]:
+        filters = []
+        if not include_hidden:
+            filters.append("is_public = true")
+
+        results_db = await self._meili.search(
+            query,
+            offset=page * page_size,
+            limit=page_size,
+            filter=filters,
+        )
+
+        results = [_model_from_meili_dict(result) for result in results_db.hits]
+
+        return SearchResults(
+            results,
+            results_db.estimated_total_hits or 0,
+            page_size,
+        )
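
# Illustrative sketch (not part of the diff): a partial update is written to
# MySQL and mirrored into the Meili index in one call, and `search` then
# reads back through Meili. The `SearchResults` attribute names used below
# are assumptions.
async def demo_update_and_search(users: UserRepository) -> None:
    updated = await users.update_partial(1, stars=500)
    if updated is not None:
        assert updated.stars == 500

    results = await users.search("Realistik", page=0, page_size=10)
    print([user.username for user in results.results])  # attribute name assumed
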
+
+    # Non-model related checks.
+    async def is_email_available(self, email: str) -> bool:
+        return not await self._mysql.fetch_val(
+            "SELECT EXISTS(SELECT 1 FROM users WHERE email = :email)",
+            {
+                "email": email,
+            },
+        )
+
+    async def is_username_available(self, username: str) -> bool:
+        return not await self._mysql.fetch_val(
+            "SELECT EXISTS(SELECT 1 FROM users WHERE username = :username)",
+            {
+                "username": username,
+            },
+        )
+
+    async def count_all(self) -> int:
+        return await self._mysql.fetch_val("SELECT COUNT(*) FROM users")
diff --git a/rgdps/resources/user_comment.py b/rgdps/resources/user_comment.py
new file mode 100644
index 0000000..ba8728f
--- /dev/null
+++ b/rgdps/resources/user_comment.py
@@ -0,0 +1,159 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import NotRequired
+from typing import TypedDict
+from typing import Unpack
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+
+
+class UserComment(DatabaseModel):
+    id: int
+    user_id: int
+    content: str
+    likes: int
+    post_ts: datetime
+    deleted: bool
+
+
+class _UserCommentUpdatePartial(TypedDict):
+    user_id: NotRequired[int]
+    content: NotRequired[str]
+    likes: NotRequired[int]
+    post_ts: NotRequired[datetime]
+    deleted: NotRequired[bool]
+
+
+ALL_FIELDS = modelling.get_model_fields(UserComment)
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+
+
+class UserCommentRepository:
+    __slots__ = ("_mysql",)
+
+    def __init__(self, mysql: AbstractMySQLService) -> None:
+        self._mysql = mysql
+
+    async def from_id(self, comment_id: int) -> UserComment | None:
+        comment_db = await self._mysql.fetch_one(
+            "SELECT * FROM user_comments WHERE id = :comment_id",
+            {
+                "comment_id": comment_id,
+            },
+        )
+
+        if comment_db is None:
+            return None
+
+        return UserComment(**comment_db)
+
+    async def from_user_id(
+        self,
+        user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> list[UserComment]:
+        comments_db = self._mysql.iterate(
+            "SELECT * FROM user_comments WHERE user_id = :user_id "
+            "AND deleted IN :deleted",
+            {
+                "user_id": user_id,
+                "deleted": (0, 1) if include_deleted else (0,),
+            },
+        )
+
+        return [UserComment(**comment_db) async for comment_db in comments_db]
+
+    async def from_user_id_paginated(
+        self,
+        user_id: int,
+        *,
+        page: int,
+        page_size: int,
+        include_deleted: bool = False,
+    ) -> list[UserComment]:
+        condition = ""
+        if not include_deleted:
+            condition = "AND NOT deleted"
+
+        comments_db = await self._mysql.fetch_all(
+            f"SELECT * FROM user_comments WHERE user_id = :user_id {condition} "
+            "ORDER BY id DESC LIMIT :limit OFFSET :offset",
+            {
+                "user_id": user_id,
+                "limit": page_size,
+                "offset": page * page_size,
+            },
+        )
+
+        return [UserComment(**comment_db) for comment_db in comments_db]
+
+    async def count_from_user_id(
+        self,
+        user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        return (
+            await self._mysql.fetch_val(
+                "SELECT COUNT(*) FROM user_comments WHERE user_id = :user_id "
+                "AND deleted IN :deleted",
+                {
+                    "user_id": user_id,
+                    "deleted": (0, 1) if include_deleted else (0,),
+                },
+            )
+        ) or 0
+
+    async def create(
+        self,
+        user_id: int,
+        content: str,
+        likes: int = 0,
+        post_ts: datetime | None = None,
+        deleted: bool = False,
+        *,
+        comment_id: int | None = None,
+    ) -> UserComment:
+        model = UserComment(
+            id=0,
+            user_id=user_id,
+            content=content,
+            likes=likes,
+            post_ts=post_ts or datetime.now(),
+            deleted=deleted,
+        )
+
+        model_dump = model.model_dump()
+        model_dump["id"] = comment_id
+
+        model.id = await self._mysql.execute(
+            f"INSERT INTO user_comments ({_ALL_FIELDS_COMMA}) VALUES "
+            f"({_ALL_FIELDS_COLON})",
+            model_dump,
+        )
+        return model
+
+    async def update_partial(
+        self,
+        comment_id: int,
+        **kwargs: Unpack[_UserCommentUpdatePartial],
+    ) -> UserComment | None:
+        changed_fields = modelling.unpack_enum_types(kwargs)
+
+        await self._mysql.execute(
+            modelling.update_from_partial_dict(
+                "user_comments",
+                comment_id,
+                changed_fields,
+            ),
+            changed_fields,
+        )
+        return await self.from_id(comment_id)
+
+    async def count_all(self) -> int:
+        return (await self._mysql.fetch_val("SELECT COUNT(*) FROM user_comments")) or 0
diff --git a/rgdps/resources/user_credential.py b/rgdps/resources/user_credential.py
new file mode 100644
index 0000000..e8c89bf
--- /dev/null
+++ b/rgdps/resources/user_credential.py
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from enum import IntEnum
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+
+
+class CredentialVersion(IntEnum):
+    PLAIN_BCRYPT = 1
+    GJP2_BCRYPT = 2  # 2.2 + GJP2
+
+
+class UserCredential(DatabaseModel):
+    id: int
+    user_id: int
+    version: CredentialVersion
+    value: str
+
+
+ALL_FIELDS = modelling.get_model_fields(UserCredential)
+CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS)
+
+
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS)
+_ALL_FIELDS_COLON = modelling.colon_prefixed_comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated(
+    CUSTOMISABLE_FIELDS,
+)
+
+
+class UserCredentialRepository:
+    def __init__(self, mysql: AbstractMySQLService) -> None:
+        self._mysql = mysql
+
+    async def create(
+        self,
+        user_id: int,
+        credential_version: CredentialVersion,
+        value: str,
+    ) -> UserCredential:
+        credential = UserCredential(
+            id=0,
+            user_id=user_id,
+            version=credential_version,
+            value=value,
+        )
+
+        credential.id = await self._mysql.execute(
+            f"INSERT INTO user_credentials ({_CUSTOMISABLE_FIELDS_COMMA}) "
+            f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})",
+            credential.model_dump(exclude={"id"}),
+        )
+        return credential
+
+    async def from_user_id(
+        self,
+        user_id: int,
+    ) -> UserCredential | None:
+        res = await self._mysql.fetch_one(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM user_credentials WHERE user_id = :user_id "
+            "ORDER BY id DESC LIMIT 1",
+            {"user_id": user_id},
+        )
+
+        if not res:
+            return None
+
+        return UserCredential(**res)
+
+    async def delete_from_user_id(self, user_id: int) -> None:
+        await self._mysql.execute(
+            "DELETE FROM user_credentials WHERE user_id = :user_id",
+            {"user_id": user_id},
+        )
+
+    async def delete_from_id(self, credential_id: int) -> None:
+        await self._mysql.execute(
+            "DELETE FROM user_credentials WHERE id = :credential_id",
+            {"credential_id": credential_id},
+        )
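
# Illustrative sketch (not part of the diff): storing and verifying a
# credential using the hashing helpers from `rgdps.utilities.cryptography`
# (added later in this diff). The `gjp2_hash` value is assumed to be computed
# upstream from the client's password.
from rgdps.utilities.cryptography import compare_bcrypt, hash_bcrypt_async


async def demo_store_credential(
    credentials: UserCredentialRepository,
    gjp2_hash: str,
) -> None:
    hashed = await hash_bcrypt_async(gjp2_hash)
    await credentials.create(1, CredentialVersion.GJP2_BCRYPT, hashed)

    credential = await credentials.from_user_id(1)
    assert credential is not None
    assert await compare_bcrypt(credential.value, gjp2_hash)
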
diff --git a/rgdps/resources/user_replationship.py b/rgdps/resources/user_replationship.py
new file mode 100644
index 0000000..620899e
--- /dev/null
+++ b/rgdps/resources/user_replationship.py
@@ -0,0 +1,273 @@
+from __future__ import annotations
+
+from datetime import datetime
+from enum import IntEnum
+from typing import NotRequired
+from typing import TypedDict
+from typing import Unpack
+
+from rgdps.adapters import AbstractMySQLService
+from rgdps.common import modelling
+from rgdps.resources._common import DatabaseModel
+
+
+class UserRelationshipType(IntEnum):
+    FRIEND = 0
+    BLOCKED = 1
+
+
+class UserRelationship(DatabaseModel):
+    id: int
+    relationship_type: UserRelationshipType
+    user_id: int
+    target_user_id: int
+    post_ts: datetime
+    seen_ts: datetime | None
+
+
+DEFAULT_PAGE_SIZE = 10
+
+ALL_FIELDS = modelling.get_model_fields(UserRelationship)
+CUSTOMISABLE_FIELDS = modelling.remove_id_field(ALL_FIELDS)
+
+
+_ALL_FIELDS_COMMA = modelling.comma_separated(ALL_FIELDS)
+_CUSTOMISABLE_FIELDS_COMMA = modelling.comma_separated(CUSTOMISABLE_FIELDS)
+_CUSTOMISABLE_FIELDS_COLON = modelling.colon_prefixed_comma_separated(
+    CUSTOMISABLE_FIELDS,
+)
+
+
+class _UserRelationshipUpdatePartial(TypedDict):
+    seen_ts: NotRequired[datetime]
+    deleted: NotRequired[bool]
+
+
+class UserRelationshipRepository:
+    def __init__(self, mysql: AbstractMySQLService) -> None:
+        self._mysql = mysql
+
+    async def from_id(
+        self,
+        relationship_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> UserRelationship | None:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        relationship_db = await self._mysql.fetch_one(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE id = "
+            f":relationship_id {condition}",
+            {"relationship_id": relationship_id},
+        )
+
+        if not relationship_db:
+            return None
+
+        return UserRelationship(**relationship_db)
+
+    async def create(
+        self,
+        user_id: int,
+        target_user_id: int,
+        relationship_type: UserRelationshipType,
+        post_ts: datetime | None = None,
+        seen_ts: datetime | None = None,
+    ) -> UserRelationship:
+        if post_ts is None:
+            post_ts = datetime.now()
+
+        relationship = UserRelationship(
+            id=0,
+            relationship_type=relationship_type,
+            user_id=user_id,
+            target_user_id=target_user_id,
+            post_ts=post_ts,
+            seen_ts=seen_ts,
+        )
+
+        relationship.id = await self._mysql.execute(
+            f"INSERT INTO user_relationships ({_CUSTOMISABLE_FIELDS_COMMA}) "
+            f"VALUES ({_CUSTOMISABLE_FIELDS_COLON})",
+            relationship.model_dump(exclude={"id"}),
+        )
+        return relationship
+
+    # TODO: The API here might be made nicer.
+    async def from_user_id(
+        self,
+        user_id: int,
+        relationship_type: UserRelationshipType,
+        *,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        relationships_db = self._mysql.iterate(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE user_id = :user_id AND "
+            f"relationship_type = :relationship_type {condition} "
+            "ORDER BY post_ts DESC",
+            {"user_id": user_id, "relationship_type": relationship_type.value},
+        )
+
+        return [
+            UserRelationship(**relationship_row)
+            async for relationship_row in relationships_db
+        ]
+
+    async def from_user_id_paginated(
+        self,
+        user_id: int,
+        relationship_type: UserRelationshipType,
+        *,
+        page: int = 0,
+        page_size: int = DEFAULT_PAGE_SIZE,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        relationships_db = self._mysql.iterate(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE user_id = :user_id AND "
+            f"relationship_type = :relationship_type {condition} "
+            "ORDER BY post_ts DESC LIMIT :limit OFFSET :offset",
+            {
+                "user_id": user_id,
+                "relationship_type": relationship_type.value,
+                "limit": page_size,
+                "offset": page * page_size,
+            },
+        )
+
+        return [
+            UserRelationship(**relationship_row)
+            async for relationship_row in relationships_db
+        ]
+
+    # The nicer API in question.
+    async def blocked_from_user_id(
+        self,
+        user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        return await self.from_user_id(
+            user_id,
+            UserRelationshipType.BLOCKED,
+            include_deleted=include_deleted,
+        )
+
+    async def blocked_from_user_id_paginated(
+        self,
+        user_id: int,
+        *,
+        page: int = 0,
+        page_size: int = DEFAULT_PAGE_SIZE,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        return await self.from_user_id_paginated(
+            user_id,
+            UserRelationshipType.BLOCKED,
+            include_deleted=include_deleted,
+            page=page,
+            page_size=page_size,
+        )
+
+    async def friends_from_user_id(
+        self,
+        user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        return await self.from_user_id(
+            user_id,
+            UserRelationshipType.FRIEND,
+            include_deleted=include_deleted,
+        )
+
+    async def friends_from_user_id_paginated(
+        self,
+        user_id: int,
+        *,
+        page: int = 0,
+        page_size: int = DEFAULT_PAGE_SIZE,
+        include_deleted: bool = False,
+    ) -> list[UserRelationship]:
+        return await self.from_user_id_paginated(
+            user_id,
+            UserRelationshipType.FRIEND,
+            include_deleted=include_deleted,
+            page=page,
+            page_size=page_size,
+        )
+
+    async def from_user_and_target(
+        self,
+        user_id: int,
+        target_user_id: int,
+        *,
+        include_deleted: bool = False,
+    ) -> UserRelationship | None:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        result_db = await self._mysql.fetch_one(
+            f"SELECT {_ALL_FIELDS_COMMA} FROM user_relationships WHERE "
+            f"user_id = :user_id AND target_user_id = :target_user_id {condition} "
+            "ORDER BY id DESC",
+            {"user_id": user_id, "target_user_id": target_user_id},
+        )
+
+        if result_db is None:
+            return None
+
+        return UserRelationship(**result_db)
+
+    async def count_user_relationships(
+        self,
+        user_id: int,
+        relationship_type: UserRelationshipType,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        return await self._mysql.fetch_val(
+            "SELECT COUNT(*) FROM user_relationships WHERE user_id = :user_id "
+            f"AND relationship_type = :relationship_type {condition}",
+            {"user_id": user_id, "relationship_type": relationship_type.value},
+        )
+
+    async def count_unseen_user_relationships(
+        self,
+        user_id: int,
+        relationship_type: UserRelationshipType,
+        *,
+        include_deleted: bool = False,
+    ) -> int:
+        condition = "AND NOT deleted" if not include_deleted else ""
+
+        return await self._mysql.fetch_val(
+            "SELECT COUNT(*) FROM user_relationships WHERE user_id = :user_id "
+            f"AND relationship_type = :relationship_type AND seen_ts IS NULL {condition}",
+            {"user_id": user_id, "relationship_type": relationship_type.value},
+        )
+
+    async def update_partial(
+        self,
+        relationship_id: int,
+        **kwargs: Unpack[_UserRelationshipUpdatePartial],
+    ) -> UserRelationship | None:
+        changed_fields = modelling.unpack_enum_types(kwargs)
+
+        await self._mysql.execute(
+            modelling.update_from_partial_dict(
+                "user_relationships",
+                relationship_id,
+                changed_fields,
+            ),
+            changed_fields,
+        )
+
+        return await self.from_id(relationship_id, include_deleted=True)
+
+    async def count_all(self) -> int:
+        return await self._mysql.fetch_val("SELECT COUNT(*) FROM user_relationships")
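
# Illustrative sketch (not part of the diff): the friend/block helpers above
# are thin wrappers over the generic queries, keeping call sites readable.
async def demo_relationships(relationships: UserRelationshipRepository) -> None:
    await relationships.create(1, 2, UserRelationshipType.FRIEND)

    friends = await relationships.friends_from_user_id_paginated(1, page=0, page_size=10)
    unseen = await relationships.count_unseen_user_relationships(
        1,
        UserRelationshipType.FRIEND,
    )
    print(len(friends), unseen)
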
diff --git a/rgdps/services/__init__.py b/rgdps/services/__init__.py
index 4eb2b04..47c558c 100644
--- a/rgdps/services/__init__.py
+++ b/rgdps/services/__init__.py
@@ -1,6 +1,17 @@
 from __future__ import annotations
 
-from . import boomlings
-from . import mysql
-from . import pubsub
-from . import storage
+from . import daily_chests
+from . import friend_requests
+from . import leaderboards
+from . import level_comments
+from . import levels
+from . import likes
+from . import messages
+from . import save_data
+from . import songs
+from . import user_comments
+from . import user_credentials
+from . import user_relationships
+from . import users
+from ._common import ErrorOr
+from ._common import ServiceError
diff --git a/rgdps/constants/errors.py b/rgdps/services/_common.py
similarity index 93%
rename from rgdps/constants/errors.py
rename to rgdps/services/_common.py
index b0aaccf..5346cae 100644
--- a/rgdps/constants/errors.py
+++ b/rgdps/services/_common.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from enum import Enum
+from typing import TypeGuard
 
 
 class ServiceError(str, Enum):
@@ -66,3 +67,10 @@ class ServiceError(str, Enum):
 
     def __bool__(self) -> bool:
         return False
+
+
+type ErrorOr[T] = T | ServiceError
+
+
+def is_service_error[T](result: ErrorOr[T]) -> TypeGuard[ServiceError]:
+    return isinstance(result, ServiceError)
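
# Worked example (not part of the diff): services return `ErrorOr[T]`, and
# the `TypeGuard` narrows the type for checkers. `ServiceError.USER_NOT_FOUND`
# is a hypothetical member; the enum's members are elided from this diff.
# Note that `ServiceError.__bool__` returning `False` also allows
# `if not result:` style checks for truthy payloads.
from rgdps.services._common import ErrorOr, ServiceError, is_service_error


def resolve_username(user_id: int) -> ErrorOr[str]:
    if user_id <= 0:
        return ServiceError.USER_NOT_FOUND  # hypothetical member
    return "RealistikDash"


result = resolve_username(-1)
if is_service_error(result):
    print("failed:", result)
else:
    print("username:", result)  # type checker narrows this branch to `str`
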
diff --git a/rgdps/usecases/daily_chests.py b/rgdps/services/daily_chests.py
similarity index 100%
rename from rgdps/usecases/daily_chests.py
rename to rgdps/services/daily_chests.py
diff --git a/rgdps/usecases/friend_requests.py b/rgdps/services/friend_requests.py
similarity index 100%
rename from rgdps/usecases/friend_requests.py
rename to rgdps/services/friend_requests.py
diff --git a/rgdps/usecases/leaderboards.py b/rgdps/services/leaderboards.py
similarity index 100%
rename from rgdps/usecases/leaderboards.py
rename to rgdps/services/leaderboards.py
diff --git a/rgdps/usecases/level_comments.py b/rgdps/services/level_comments.py
similarity index 100%
rename from rgdps/usecases/level_comments.py
rename to rgdps/services/level_comments.py
diff --git a/rgdps/usecases/level_schedules.py b/rgdps/services/level_schedules.py
similarity index 100%
rename from rgdps/usecases/level_schedules.py
rename to rgdps/services/level_schedules.py
diff --git a/rgdps/usecases/levels.py b/rgdps/services/levels.py
similarity index 100%
rename from rgdps/usecases/levels.py
rename to rgdps/services/levels.py
index de75ceb..1b68d35 100644
--- a/rgdps/usecases/levels.py
+++ b/rgdps/services/levels.py
@@ -11,11 +11,11 @@
 from rgdps.constants.level_schedules import LevelScheduleType
 from rgdps.constants.levels import LevelDemonDifficulty
 from rgdps.constants.levels import LevelDifficulty
+from rgdps.constants.levels import LevelFeature
 from rgdps.constants.levels import LevelLength
 from rgdps.constants.levels import LevelPublicity
 from rgdps.constants.levels import LevelSearchFlag
 from rgdps.constants.levels import LevelSearchType
-from rgdps.constants.levels import LevelFeature
 from rgdps.constants.users import CREATOR_PRIVILEGES
 from rgdps.models.level import Level
 from rgdps.models.song import Song
diff --git a/rgdps/usecases/likes.py b/rgdps/services/likes.py
similarity index 100%
rename from rgdps/usecases/likes.py
rename to rgdps/services/likes.py
diff --git a/rgdps/usecases/messages.py b/rgdps/services/messages.py
similarity index 100%
rename from rgdps/usecases/messages.py
rename to rgdps/services/messages.py
diff --git a/rgdps/services/pubsub.py b/rgdps/services/pubsub.py
deleted file mode 100644
index ae5627f..0000000
--- a/rgdps/services/pubsub.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Not named `redis.py` as it would fight with the `redis` package.
-from __future__ import annotations
-
-import asyncio
-from collections.abc import Awaitable
-from collections.abc import Callable
-
-from redis.asyncio import Redis
-
-from rgdps import logger
-from rgdps.common.context import Context
-
-RedisHandler = Callable[[Context, bytes], Awaitable[None]]
-
-
-async def _listen_router(
-    ctx: Context,
-    redis: Redis,
-    router: RedisPubsubRouter,
-) -> None:
-    redis_handlers = router.route_map()
-    async with redis.pubsub() as pubsub:
-        for channel in redis_handlers:
-            await pubsub.subscribe(channel)
-            logger.debug(
-                "Subscribed to Redis a channel.",
-                extra={
-                    "channel": channel.decode(),
-                },
-            )
-
-        while True:
-            # TODO: Handle errors (different message types)
-            message = await pubsub.get_message()
-            if message is not None:
-                if message.get("type") != "message":
-                    continue
-                try:
-                    # TODO: Investigate if spinning up tasks for each message
-                    # is a good idea.
-                    handler = redis_handlers[message["channel"]]
-                    await handler(ctx, message["data"])
-                except Exception:
-                    logger.exception(
-                        "Error while handling Redis message.",
-                        extra={
-                            "channel": message["channel"].decode(),
-                            "data": message["data"].decode(),
-                        },
-                    )
-
-            # NOTE: This is a hack to prevent the event loop from blocking.
-            await asyncio.sleep(0.1)
-
-
-def listen_router(
-    ctx: Context,
-    redis: Redis,
-    router: RedisPubsubRouter,
-) -> None:
-    asyncio.create_task(_listen_router(ctx, redis, router))
-
-
-async def listen_pubsubs(
-    ctx: Context,
-    redis: Redis,
-    *routers: RedisPubsubRouter,
-) -> None:
-    main_handler = RedisPubsubRouter()
-
-    for router in routers:
-        main_handler.merge(router)
-
-    listen_router(ctx, redis, main_handler)
-
-
-class RedisPubsubRouter:
-    """A router for Redis subscriptions."""
-
-    def __init__(self) -> None:
-        # NOTE: Redis pubsub channels are bytes, not strings.
-        self._routes: dict[bytes, RedisHandler] = {}
-
-    def register(
-        self,
-        channel: str,
-    ) -> Callable[[RedisHandler], RedisHandler]:
-        def decorator(handler: RedisHandler) -> RedisHandler:
-            self._routes[channel.encode()] = handler
-            return handler
-
-        return decorator
-
-    def merge(self, other: RedisPubsubRouter) -> None:
-        for channel, handler in other.route_map().items():
-            if channel in self._routes:
-                logger.warning(
-                    "Overwritten route when merging Redis routers!",
-                    extra={
-                        "channel": channel.decode(),
-                    },
-                )
-            self._routes[channel] = handler
-
-    def route_map(self) -> dict[bytes, RedisHandler]:
-        return self._routes
diff --git a/rgdps/usecases/save_data.py b/rgdps/services/save_data.py
similarity index 100%
rename from rgdps/usecases/save_data.py
rename to rgdps/services/save_data.py
diff --git a/rgdps/usecases/songs.py b/rgdps/services/songs.py
similarity index 100%
rename from rgdps/usecases/songs.py
rename to rgdps/services/songs.py
diff --git a/rgdps/usecases/user_comments.py b/rgdps/services/user_comments.py
similarity index 100%
rename from rgdps/usecases/user_comments.py
rename to rgdps/services/user_comments.py
diff --git a/rgdps/usecases/user_credentials.py b/rgdps/services/user_credentials.py
similarity index 100%
rename from rgdps/usecases/user_credentials.py
rename to rgdps/services/user_credentials.py
diff --git a/rgdps/usecases/user_relationships.py b/rgdps/services/user_relationships.py
similarity index 100%
rename from rgdps/usecases/user_relationships.py
rename to rgdps/services/user_relationships.py
diff --git a/rgdps/usecases/users.py b/rgdps/services/users.py
similarity index 100%
rename from rgdps/usecases/users.py
rename to rgdps/services/users.py
diff --git a/rgdps/settings.py b/rgdps/settings.py
index 5d2b384..dc3ed17 100644
--- a/rgdps/settings.py
+++ b/rgdps/settings.py
@@ -1,14 +1,13 @@
 from __future__ import annotations
 
 import os
-from typing import Any
 
 from dotenv import load_dotenv
 
 load_dotenv()
 
 
-def read_comma_separated_list(value: str) -> list[Any]:
+def read_comma_separated_list(value: str) -> list[str]:
     return [x.strip() for x in value.split(",")]
 
 
@@ -46,7 +45,6 @@ def read_boolean(value: str) -> bool:
 SERVER_NAME = os.environ["SERVER_NAME"]
 SERVER_COMMAND_PREFIX = os.environ["SERVER_COMMAND_PREFIX"]
 SERVER_GD_URL = os.environ["SERVER_GD_URL"]
-SERVER_STATELESS = read_boolean(os.environ["SERVER_STATELESS"])
 
 LOG_LEVEL = os.environ["LOG_LEVEL"]
diff --git a/rgdps/usecases/__init__.py b/rgdps/usecases/__init__.py
deleted file mode 100644
index 60d0c58..0000000
--- a/rgdps/usecases/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from __future__ import annotations
-
-from . import daily_chests
-from . import friend_requests
-from . import leaderboards
-from . import level_comments
-from . import levels
-from . import likes
-from . import messages
-from . import save_data
-from . import songs
-from . import user_comments
-from . import user_credentials
-from . import user_relationships
-from . import users
diff --git a/rgdps/utilities/cache/__init__.py b/rgdps/utilities/cache/__init__.py
new file mode 100644
index 0000000..9824e8e
--- /dev/null
+++ b/rgdps/utilities/cache/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from .base import AbstractAsyncCache
+from .base import AbstractCache
+from .memory import LRUAsyncMemoryCache
+from .memory import LRUMemoryCache
+from .memory import SimpleMemoryCache
+from .redis import SimpleRedisCache
diff --git a/rgdps/common/cache/base.py b/rgdps/utilities/cache/base.py
similarity index 100%
rename from rgdps/common/cache/base.py
rename to rgdps/utilities/cache/base.py
diff --git a/rgdps/common/cache/memory.py b/rgdps/utilities/cache/memory.py
similarity index 100%
rename from rgdps/common/cache/memory.py
rename to rgdps/utilities/cache/memory.py
diff --git a/rgdps/common/cache/redis.py b/rgdps/utilities/cache/redis.py
similarity index 100%
rename from rgdps/common/cache/redis.py
rename to rgdps/utilities/cache/redis.py
diff --git a/rgdps/utilities/colour.py b/rgdps/utilities/colour.py
new file mode 100644
index 0000000..48b0845
--- /dev/null
+++ b/rgdps/utilities/colour.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+from typing import Any
+
+
+class Colour:
+    """An immutable representation of a colour using the RGB spectrum."""
+
+    def __init__(
+        self,
+        red: int,
+        green: int,
+        blue: int,
+    ) -> None:
+        # Set via `object.__setattr__` as `__setattr__` is overridden below.
+        object.__setattr__(self, "red", clamp_rgb(red))
+        object.__setattr__(self, "green", clamp_rgb(green))
+        object.__setattr__(self, "blue", clamp_rgb(blue))
+
+    # Immutability
+    def __setattr__(self, name: str, value: Any) -> None:
+        raise TypeError("`Colour` is immutable.")
+
+    def as_hex(self) -> str:
+        return "#{:02x}{:02x}{:02x}".format(
+            clamp_rgb(self.red),
+            clamp_rgb(self.green),
+            clamp_rgb(self.blue),
+        )
+
+    def as_format_str(self) -> str:
+        return f"{self.red},{self.green},{self.blue}"
+
+    @staticmethod
+    def from_format_string(format_string: str) -> Colour:
+        format_string = format_string.replace(", ", ",").strip()
+        colour_components = format_string.split(",")
+
+        if len(colour_components) != 3:
+            raise ValueError(
+                f"RGB colour string requires 3 values. Got {len(colour_components)}.",
+            )
+
+        return Colour(
+            red=int(colour_components[0]),
+            green=int(colour_components[1]),
+            blue=int(colour_components[2]),
+        )
+
+    @staticmethod
+    def default() -> Colour:
+        return Colour(255, 255, 255)
+
+    # Pydantic Logic
+    @classmethod
+    def __get_validators__(cls):
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value):
+        if isinstance(value, cls):
+            return value
+        if isinstance(value, str):
+            return cls.from_format_string(value)
+
+        raise ValueError(f"Invalid value for Colour: {value}")
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: dict[str, Any]):
+        field_schema.update(
+            type="string",
+            example="255,0,0",
+        )
+
+
+def clamp_rgb(value: int) -> int:
+    return max(0, min(value, 255))
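
# Worked example (not part of the diff): parsing and re-serialising a colour.
# `from_format_string` tolerates a space after each comma, components are
# clamped to the 0-255 range, and instances reject mutation once constructed.
colour = Colour.from_format_string("255, 64, 300")
assert colour.blue == 255  # 300 clamped down to 255
assert colour.as_format_str() == "255,64,255"
assert colour.as_hex() == "#ff40ff"

try:
    colour.red = 0
except TypeError:
    pass  # `Colour` is immutable
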
diff --git a/rgdps/utilities/cryptography.py b/rgdps/utilities/cryptography.py
new file mode 100644
index 0000000..f7776ca
--- /dev/null
+++ b/rgdps/utilities/cryptography.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+import asyncio
+import base64
+import hashlib
+import random
+import string
+
+import bcrypt
+
+
+def _compare_bcrypt(hashed: str, plain: str) -> bool:
+    return bcrypt.checkpw(plain.encode(), hashed.encode())
+
+
+def hash_bcrypt(plain: str) -> str:
+    return bcrypt.hashpw(plain.encode(), bcrypt.gensalt()).decode()
+
+
+async def compare_bcrypt(hashed: str, plain: str) -> bool:
+    return await asyncio.to_thread(_compare_bcrypt, hashed, plain)
+
+
+async def hash_bcrypt_async(plain: str) -> str:
+    return await asyncio.to_thread(hash_bcrypt, plain)
+
+
+def hash_md5(plain: str) -> str:
+    return hashlib.md5(plain.encode()).hexdigest()
+
+
+def hash_sha1(plain: str) -> str:
+    return hashlib.sha1(plain.encode()).hexdigest()
+
+
+def encode_base64(data: str) -> str:
+    return base64.urlsafe_b64encode(data.encode()).decode()
+
+
+def decode_base64(data: str) -> str:
+    return base64.urlsafe_b64decode(data.encode()).decode()
+
+
+CHARSET = string.ascii_letters + string.digits
+
+
+def random_string(length: int) -> str:
+    return "".join(random.choice(CHARSET) for _ in range(length))
diff --git a/rgdps/utilities/enum.py b/rgdps/utilities/enum.py
new file mode 100644
index 0000000..88eab0a
--- /dev/null
+++ b/rgdps/utilities/enum.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from enum import Enum
+
+from rgdps.utilities.typing import HasIntValue
+
+
+class StrEnum(str, Enum):
+    pass
+
+
+def list_enum_values(l: list[HasIntValue]) -> list[int]:
+    return [x.value for x in l]
diff --git a/rgdps/common/time.py b/rgdps/utilities/time.py
similarity index 100%
rename from rgdps/common/time.py
rename to rgdps/utilities/time.py
diff --git a/rgdps/common/typing.py b/rgdps/utilities/typing.py
similarity index 100%
rename from rgdps/common/typing.py
rename to rgdps/utilities/typing.py
diff --git a/scripts/await_service.sh b/scripts/await_service.sh
deleted file mode 100755
index 4662285..0000000
--- a/scripts/await_service.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env bash
-set -uo pipefail
-
-await_service()
-{
-    local start_ts=$(date +%s)
-    while [ $(date +%s) -lt $((start_ts + $3)) ];
-    do
-        (echo -n > /dev/tcp/$1/$2) > /dev/null
-        if [[ $? -eq 0 ]]; then
-            break
-        fi
-        sleep 1
-    done
-    local end_ts=$(date +%s)
-
-    if [ $(date +%s) -ge $((start_ts + $3)) ]; then
-        echo "Timeout occurred while waiting for $1:$2 to become available"
-        exit 1
-    fi
-
-    echo "$1:$2 is available after $((end_ts - start_ts)) seconds"
-}
-
-if [[ $# -ne 3 ]]; then
-    echo "Usage: $0 <host> <port> <timeout>"
-    exit 1
-fi
-
-await_service $1 $2 $3
diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh
index a2573b2..f0ab9fe 100755
--- a/scripts/bootstrap.sh
+++ b/scripts/bootstrap.sh
@@ -6,14 +6,6 @@ if [ -z "$APP_COMPONENT" ]; then
   exit 1
 fi
 
-echo "Waiting for services to become available..."
-
-SERVICE_READINESS_TIMEOUT=60
-./scripts/await_service.sh $SQL_HOST $SQL_PORT $SERVICE_READINESS_TIMEOUT
-./scripts/await_service.sh $REDIS_HOST $REDIS_PORT $SERVICE_READINESS_TIMEOUT
-./scripts/await_service.sh $MEILI_HOST $MEILI_PORT $SERVICE_READINESS_TIMEOUT
-
-
 ./scripts/ensure_sql.sh
 ./scripts/ensure_meili.sh
 ./scripts/migrate.sh up