[feat] Heavy testing of core
parent
5b0a8cc301
commit
068d72474e
19 changed files with 1440 additions and 1118 deletions
36 .gitlab-ci.yml Normal file
@@ -0,0 +1,36 @@
precommit:
  stage: test
  image: python:3.12-bookworm
  before_script:
    - apt-get update && apt-get install -y --no-install-recommends git
  script:
    - pip install -r requirements.precommit.txt
    - pre-commit run --all-files
  rules:
    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'

.test_template: &test_template
  stage: test
  script:
    - pip install poetry
    - poetry install
    - source `poetry env info --path`/bin/activate
    - python -m pytest

test python3.10:
  <<: *test_template
  image: python:3.10-slim

test python3.11:
  <<: *test_template
  image: python:3.11-slim

test python3.12:
  <<: *test_template
  image: python:3.12-slim
  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
  artifacts:
    reports:
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
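The coverage: pattern on the python3.12 job is the regular expression GitLab greps out of the job log. As a rough sketch of what it matches, here it is applied to a made-up pytest-cov TOTAL summary line (the sample values are illustrative, not from a real run):

import re

# Same pattern as the `coverage:` key above; GitLab applies it line-by-line to the job log.
COVERAGE_RE = re.compile(r"(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$")

# pytest-cov ends its terminal report with a TOTAL line roughly like this (illustrative values):
sample_line = "TOTAL                             512     23    96%"

match = COVERAGE_RE.search(sample_line)
assert match is not None and match.group(1) == "96%"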
5 .vscode/settings.json vendored
@@ -1,3 +1,4 @@
{
    "python.formatting.provider": "black"
}
    "python.formatting.provider": "black",
    "python.analysis.typeCheckingMode": "off"
}
@@ -1,72 +0,0 @@
"""Load global config."""
import os
from pathlib import Path
from typing import Union

from dotenv import load_dotenv
from yaml import load

from jarvis_core.util import Singleton

try:
    from yaml import CLoader as Loader
except ImportError:
    from yaml import Loader

DEFAULT_YAML_PATH = Path("config.yaml")
DEFAULT_ENV_PATH = Path(".env")


class Config(Singleton):
    REQUIRED = []
    OPTIONAL = {}
    ENV_REQUIRED = []
    ENV_OPTIONAL = {}

    @classmethod
    def _process_env(cls, **kwargs) -> dict:
        """Process environment variables into standard arguments"""

    @classmethod
    def from_env(cls, filepath: Union[Path, str] = DEFAULT_ENV_PATH) -> "Config":
        """Loag the environment config."""
        if inst := cls.__dict__.get("inst"):
            return inst

        load_dotenv(filepath)

        data = {}
        for item in cls.ENV_REQUIRED:
            data[item] = os.environ.get(item, None)
        for item, default in cls.ENV_OPTIONAL.items():
            data[item] = os.environ.get(item, default)

        data = cls._process_env(**data)

        return cls(**data)

    @classmethod
    def from_yaml(cls, filepath: Union[Path, str] = DEFAULT_YAML_PATH) -> "Config":
        """Load the yaml config file."""
        if inst := cls.__dict__.get("inst"):
            return inst

        if isinstance(filepath, str):
            filepath = Path(filepath)

        with filepath.open() as f:
            raw = f.read()

        y = load(raw, Loader=Loader)
        return cls(**y)

    @classmethod
    def load(cls) -> "Config":
        if DEFAULT_ENV_PATH.exists():
            return cls.from_env()
        return cls.from_yaml()

    @classmethod
    def reload(cls) -> bool:
        """Reload the config."""
        return cls.__dict__.pop("inst", None) is None
@@ -1,7 +1,8 @@
"""JARVIS database models and utilities."""
from motor.motor_asyncio import AsyncIOMotorClient
from pytz import utc
from datetime import timezone

from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient

from jarvis_core.db.models import all_models


@@ -12,7 +13,7 @@ async def connect(
    password: str,
    port: int = 27017,
    testing: bool = False,
    extra_models: list = [],
    extra_models: list = None,
) -> None:
    """
    Connect to MongoDB.

@@ -25,6 +26,9 @@ async def connect(
        testing: Whether or not to use jarvis_dev
        extra_models: Extra beanie models to register
    """
    client = AsyncIOMotorClient(host, username=username, password=password, port=port, tz_aware=True, tzinfo=utc)
    extra_models = extra_models or []
    client = AsyncIOMotorClient(
        host, username=username, password=password, port=port, tz_aware=True, tzinfo=timezone.utc
    )
    db = client.jarvis_dev if testing else client.jarvis
    await init_beanie(database=db, document_models=all_models + extra_models)
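The extra_models change above swaps a mutable default argument for None plus `extra_models = extra_models or []`. A standalone sketch of the pitfall being avoided (the function names here are illustrative, not from the repo):

def register_bad(model, registry: list = []):
    # The default list is created once at definition time and shared by every call.
    registry.append(model)
    return registry


def register_good(model, registry: list = None):
    registry = registry or []  # fresh list per call, same idiom as connect()
    registry.append(model)
    return registry


assert register_bad("a") == ["a"]
assert register_bad("b") == ["a", "b"]  # state from the first call leaks in
assert register_good("a") == ["a"]
assert register_good("b") == ["b"]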
@@ -1,26 +0,0 @@
import bson
import marshmallow as ma
from marshmallow import fields as ma_fields
from umongo import fields


class BinaryField(fields.BaseField, ma_fields.Field):
    default_error_messages = {"invalid": "Not a valid byte sequence."}

    def _serialize(self, value, attr, data, **kwargs):
        return bytes(value)

    def _deserialize(self, value, attr, data, **kwargs):
        if not isinstance(value, bytes):
            self.fail("invalid")
        return value

    def _serialize_to_mongo(self, obj):
        return bson.binary.Binary(obj)

    def _deserialize_from_mongo(self, value):
        return bytes(value)


class RawField(fields.BaseField, ma_fields.Raw):
    pass
@@ -6,17 +6,18 @@ from beanie import Document, Link
from pydantic import BaseModel, Field

from jarvis_core.db.models.actions import Ban, Kick, Mute, Unban, Warning
from jarvis_core.db.models.captcha import Captcha
from jarvis_core.db.models.modlog import Action, Modlog, Note
from jarvis_core.db.models.reddit import Subreddit, SubredditFollow
from jarvis_core.db.models.twitter import TwitterAccount, TwitterFollow
from jarvis_core.db.utils import NowField
from jarvis_core.db.utils import NowField, Snowflake, SnowflakeDocument

__all__ = [
    "Action",
    "Autopurge",
    "Autoreact",
    "Ban",
    "Config",
    "Captcha" "Config",
    "Filter",
    "Guess",
    "Kick",

@@ -47,95 +48,95 @@ __all__ = [
]


class Autopurge(Document):
    guild: int
    channel: int
class Autopurge(SnowflakeDocument):
    guild: Snowflake
    channel: Snowflake
    delay: int = 30
    admin: int
    admin: Snowflake
    created_at: datetime = NowField()


class Autoreact(Document):
    guild: int
    channel: int
class Autoreact(SnowflakeDocument):
    guild: Snowflake
    channel: Snowflake
    reactions: list[str] = Field(default_factory=list)
    admin: int
    admin: Snowflake
    thread: bool
    created_at: datetime = NowField()


class Config(Document):
class Config(SnowflakeDocument):
    """Config database object."""

    key: str
    value: str | int | bool


class Filter(Document):
class Filter(SnowflakeDocument):
    """Filter database object."""

    guild: int
    guild: Snowflake
    name: str
    filters: list[str] = Field(default_factory=list)


class Guess(Document):
class Guess(SnowflakeDocument):
    """Guess database object."""

    correct: bool
    guess: str
    user: int
    user: Snowflake


class Permission(BaseModel):
    """Embedded Permissions document."""

    id: int
    allow: int = 0
    deny: int = 0
    id: Snowflake
    allow: Optional[Snowflake] = 0
    deny: Optional[Snowflake] = 0


class Lock(Document):
class Lock(SnowflakeDocument):
    """Lock database object."""

    active: bool = True
    admin: int
    channel: int
    admin: Snowflake
    channel: Snowflake
    duration: int = 10
    reason: str
    original_perms: Permission
    created_at: datetime = NowField()


class Lockdown(Document):
class Lockdown(SnowflakeDocument):
    """Lockdown database object."""

    active: bool = True
    admin: int
    admin: Snowflake
    duration: int = 10
    guild: int
    guild: Snowflake
    reason: str
    original_perms: int
    original_perms: Snowflake
    created_at: datetime = NowField()


class Purge(Document):
class Purge(SnowflakeDocument):
    """Purge database object."""

    admin: int
    channel: int
    guild: int
    admin: Snowflake
    channel: Snowflake
    guild: Snowflake
    count_: int = Field(10, alias="count")
    created_at: datetime = NowField()


class Reminder(Document):
class Reminder(SnowflakeDocument):
    """Reminder database object."""

    active: bool = True
    user: int
    guild: int
    channel: int
    user: Snowflake
    guild: Snowflake
    channel: Snowflake
    message: str
    remind_at: datetime
    created_at: datetime = NowField()

@@ -146,41 +147,41 @@ class Reminder(Document):
    private: bool = False


class Rolegiver(Document):
class Rolegiver(SnowflakeDocument):
    """Rolegiver database object."""

    guild: int
    roles: list[int]
    guild: Snowflake
    roles: Optional[list[Snowflake]] = Field(default_factory=list)
    group: Optional[str] = None


class Bypass(BaseModel):
    """Roleping bypass embedded object."""

    users: list[int]
    roles: list[int]
    users: Optional[list[Snowflake]] = Field(default_factory=list)
    roles: Optional[list[Snowflake]] = Field(default_factory=list)


class Roleping(Document):
class Roleping(SnowflakeDocument):
    """Roleping database object."""

    active: bool = True
    role: int
    guild: int
    admin: int
    role: Snowflake
    guild: Snowflake
    admin: Snowflake
    bypass: Bypass
    created_at: datetime = NowField()


class Setting(Document):
class Setting(SnowflakeDocument):
    """Setting database object."""

    guild: int
    guild: Snowflake
    setting: str
    value: str | int | bool | list[int | str]


class Phishlist(Document):
class Phishlist(SnowflakeDocument):
    """Phishlist database object."""

    url: str

@@ -189,66 +190,67 @@ class Phishlist(Document):
    created_at: datetime = NowField()


class Pinboard(Document):
class Pinboard(SnowflakeDocument):
    """Pinboard database object."""

    channel: int
    guild: int
    admin: int
    channel: Snowflake
    guild: Snowflake
    admin: Snowflake
    created_at: datetime = NowField()


class Pin(Document):
class Pin(SnowflakeDocument):
    """Pin database object."""

    active: bool = True
    index: int
    message: int
    channel: int
    message: Snowflake
    channel: Snowflake
    pinboard: Link[Pinboard]
    guild: int
    admin: int
    pin: int
    guild: Snowflake
    admin: Snowflake
    pin: Snowflake
    created_at: datetime = NowField()


class Tag(Document):
class Tag(SnowflakeDocument):
    """Tag database object."""

    creator: int
    creator: Snowflake
    name: str
    content: str
    guild: int
    guild: Snowflake
    created_at: datetime = NowField()
    edited_at: Optional[datetime] = None
    editor: Optional[int] = None
    editor: Optional[Snowflake] = None


class Temprole(Document):
class Temprole(SnowflakeDocument):
    """Temporary role object."""

    guild: int
    user: int
    role: int
    admin: int
    guild: Snowflake
    user: Snowflake
    role: Snowflake
    admin: Snowflake
    expires_at: datetime
    reapply_on_rejoin: bool = True
    created_at: datetime = NowField()


class UserSetting(Document):
class UserSetting(SnowflakeDocument):
    """User Setting object."""

    user: int
    user: Snowflake
    type: str
    setting: str
    value: str | int | bool


all_models = [
all_models: list[Document] = [
    Autopurge,
    Autoreact,
    Ban,
    Captcha,
    Config,
    Filter,
    Guess,
@@ -2,64 +2,62 @@
from datetime import datetime
from typing import Optional

from beanie import Document

from jarvis_core.db.utils import NowField
from jarvis_core.db.utils import NowField, Snowflake, SnowflakeDocument


class Ban(Document):
class Ban(SnowflakeDocument):
    active: bool = True
    admin: int
    user: int
    admin: Snowflake
    user: Snowflake
    username: str
    discrim: Optional[int]
    duration: Optional[int]
    guild: int
    guild: Snowflake
    type: str = "perm"
    reason: str
    created_at: datetime = NowField()


class Kick(Document):
class Kick(SnowflakeDocument):
    """Kick database object."""

    admin: int
    guild: int
    admin: Snowflake
    guild: Snowflake
    reason: str
    user: int
    user: Snowflake
    created_at: datetime = NowField()


class Mute(Document):
class Mute(SnowflakeDocument):
    """Mute database object."""

    active: bool = True
    user: int
    admin: int
    user: Snowflake
    admin: Snowflake
    duration: int = 10
    guild: int
    guild: Snowflake
    reason: str
    created_at: datetime = NowField()


class Unban(Document):
class Unban(SnowflakeDocument):
    """Unban database object."""

    user: int
    user: Snowflake
    username: str
    discrim: Optional[str]
    guild: int
    guild: Snowflake
    reason: str
    created_at: datetime = NowField()


class Warning(Document):
class Warning(SnowflakeDocument):
    """Warning database object."""

    active: bool = True
    admin: int
    user: int
    guild: int
    admin: Snowflake
    user: Snowflake
    guild: Snowflake
    duration: int = 24
    reason: str
    expires_at: datetime
@@ -1,12 +1,11 @@
"""JARVIS Verification Captcha."""
from datetime import datetime
from beanie import Document
from pydantic import Field

from jarvis_core.db.utils import get_now
from jarvis_core.db.utils import NowField, Snowflake, SnowflakeDocument


class Captcha(Document):
    user: int
    guild: int
class Captcha(SnowflakeDocument):
    user: Snowflake
    guild: Snowflake
    correct: str
    created_at: datetime = Field(default_factory=get_now)
    created_at: datetime = NowField()
@@ -1,10 +1,10 @@
"""Modlog database models."""
from datetime import datetime

from beanie import Document, PydanticObjectId
from beanie import PydanticObjectId
from pydantic import BaseModel, Field

from jarvis_core.db.utils import NowField, NanoField
from jarvis_core.db.utils import NanoField, NowField, Snowflake, SnowflakeDocument


class Action(BaseModel):

@@ -18,19 +18,19 @@ class Action(BaseModel):
class Note(BaseModel):
    """Modlog embedded note document."""

    admin: int
    admin: Snowflake
    content: str
    created_at: datetime = NowField()


class Modlog(Document):
class Modlog(SnowflakeDocument):
    """Modlog database object."""

    user: int
    user: Snowflake
    nanoid: str = NanoField()
    guild: int
    admin: int
    guild: Snowflake
    admin: Snowflake
    actions: list[Action] = Field(default_factory=list)
    notes: list[Note] = Field(default_factory=list)
    open: bool = True
    created_at: datetime = NowField
    created_at: datetime = NowField()
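The `created_at: datetime = NowField` to `NowField()` change matters because NowField is a `partial(Field, default_factory=get_now)` (see the utils diff below): it has to be called to produce an actual pydantic Field. A minimal sketch, with a stand-in get_now since its body is not shown in this diff:

from datetime import datetime, timezone
from functools import partial

from pydantic import BaseModel, Field


def get_now() -> datetime:  # stand-in for jarvis_core.db.utils.get_now
    return datetime.now(tz=timezone.utc)


NowField = partial(Field, default_factory=get_now)


class Example(BaseModel):  # hypothetical model, not part of the commit
    created_at: datetime = NowField()  # calling it yields Field(default_factory=get_now)


# Without the call, the class attribute would be the bare partial object rather than a
# pydantic Field, so no timestamp default would be generated.
assert isinstance(Example().created_at, datetime)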
@@ -1,8 +1,12 @@
"""JARVIS Core Database utilities."""
from datetime import datetime, timezone
from functools import partial
from typing import Any

import nanoid
from pydantic import Field
from beanie import Document
from pydantic import Field, GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema

NANOID_ALPHA = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

@@ -19,3 +23,16 @@ def get_id() -> str:

NowField = partial(Field, default_factory=get_now)
NanoField = partial(Field, default_factory=get_id)


class Snowflake(int):
    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        return core_schema.no_info_after_validator_function(cls, handler(int))


class SnowflakeDocument(Document):
    class Settings:
        bson_encoders = {Snowflake: str}
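The new Snowflake type validates like a plain int in pydantic v2 (the no_info_after_validator_function wraps the int schema and then calls the subclass), while SnowflakeDocument's bson_encoders entry makes beanie serialize those values as strings, presumably so IDs as large as the 2**64 - 1 MAX_SNOWFLAKE used in the tests survive BSON's signed 64-bit integer limit. A small usage sketch, assuming jarvis_core as committed above (the Example model is hypothetical):

from pydantic import BaseModel

from jarvis_core.db.utils import Snowflake


class Example(BaseModel):  # hypothetical model, not part of the commit
    user: Snowflake


obj = Example(user=18446744073709551615)
assert isinstance(obj.user, Snowflake)  # validated through the int schema, then wrapped
assert isinstance(obj.user, int)
assert str(obj.user) == "18446744073709551615"  # what the bson_encoders entry would persist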
@@ -7,7 +7,7 @@ invites = re.compile(
    flags=re.IGNORECASE,
)

custom_emote = re.compile(r"<:\w+:(\d+)>$", flags=re.IGNORECASE)
custom_emote = re.compile(r"<a?:\w+:(\d+)>$", flags=re.IGNORECASE)

valid_text = re.compile(
    r"[\w\s\-\\/.!@#$:;\[\]%^*'\"()+=<>,\u0080-\U000E0FFF]*", flags=re.IGNORECASE
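The only change to custom_emote is the added `a?`, so animated Discord emotes (`<a:name:id>`) now match alongside static ones. A quick standalone check against the pattern as committed (the sample emote strings are made up):

import re

custom_emote = re.compile(r"<a?:\w+:(\d+)>$", flags=re.IGNORECASE)

assert custom_emote.match("<:wave:123456789>")          # static emote, matched before and after
assert custom_emote.match("<a:party_blob:123456789>")   # animated emote, only matched with `a?`
assert custom_emote.match("<a:party_blob:123456789>").group(1) == "123456789"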
@@ -68,7 +68,7 @@ def fmt(*formats: List[Format | Fore | Back] | int) -> str:

    ret = fmt + fore + back
    if not any([ret, fore, back]):
        ret = RESET
        return RESET
    if ret[-1] == ";":
        ret = ret[:-1]
1841 poetry.lock generated
File diff suppressed because it is too large
128 pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "jarvis-core"
version = "0.18.0"
version = "1.0.0"
description = "JARVIS core"
authors = ["Zevaryx <zevaryx@gmail.com>"]

@@ -9,22 +9,140 @@ python = ">=3.10,<4"
orjson = { version = "^3.6.6" }
motor = "^3.1.1"
PyYAML = { version = "^6.0" }
pytz = "^2022.1"
aiohttp = "^3.8.1"
rich = "^12.3.0"
nanoid = "^2.0.0"
python-dotenv = "^0.21.0"
beanie = "^1.17.0"
pydantic = ">=2.3.0,<3"

[tool.poetry.dev-dependencies]
pytest = "^7.1"
python-dateutil = "^2.9.0.post0"

[tool.poetry.group.dev.dependencies]
black = "^23.1.0"
ipython = "^8.5.0"
rich = "^12.6.0"
mongomock_motor = "^0.0.29"
pytest-asyncio = "^0.23.5.post1"
pytest-cov = "^4.1.0"
faker = "^24.3.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.pytest.ini_options]
minversion = "8.0"
asyncio_mode = "auto"
testpaths = ["tests"]
addopts = "--cov=jarvis_core --cov-report term-missing --cov-report xml:coverage.xml"
filterwarnings = [
    'ignore:`general_plain_validator_function` is deprecated',
    'ignore:pkg_resources is deprecated as an API',
]

[tool.coverage.run]
omit = [
    "tests/",
    "jarvis_core/db/models/backups.py",
    "jarvis_core/db/models/mastodon.py",
    "jarvis_core/db/models/reddit.py",
    "jarvis_core/db/models/twitter.py",
]

[tool.black]
line-length = 120

[tool.isort]
profile = "black"
skip = ["__init__.py"]

[tool.mypy]
ignore_missing_imports = true

[tool.pyright]
useLibraryCodeForTypes = true
reportMissingImports = false

[tool.ruff]
line-length = 120
target-version = "py312"
output-format = "full"

[tool.ruff.lint]
task-tags = ["TODO", "FIXME", "XXX", "HACK", "REVIEW", "NOTE"]
select = ["E", "F", "B", "Q", "RUF", "D", "ANN", "RET", "C"]
ignore-init-module-imports = true
ignore = [
    "Q0",
    "E501",
    # These default to arguing with Black. We might configure some of them eventually
    "ANN1",
    # These insist that we have Type Annotations for self and cls.
    "D105",
    "D107",
    # Missing Docstrings in magic method and __init__
    "D401",
    # First line should be in imperative mood; try rephrasing
    "D400",
    "D415",
    # First line should end with a period
    "D106",
    # Missing docstring in public nested class. This doesn't work well with Metadata classes.
    "D417",
    # Missing argument in the docstring
    "D406",
    # Section name should end with a newline
    "D407",
    # Missing dashed underline after section
    "D212",
    # Multi-line docstring summary should start at the first line
    "D404",
    # First word of the docstring should not be This
    "D203",
    # 1 blank line required before class docstring

    # Everything below this line is something we care about, but don't currently meet
    "ANN001",
    # Missing type annotation for function argument 'token'
    "ANN002",
    # Missing type annotation for *args
    "ANN003",
    # Missing type annotation for **kwargs
    "ANN401",
    # Dynamically typed expressions (typing.Any) are disallowed
    # "B009",
    # Do not call getattr with a constant attribute value, it is not any safer than normal property access.
    "B010",
    # Do not call setattr with a constant attribute value, it is not any safer than normal property access.
    "D100",
    # Missing docstring in public module
    "D101",
    # ... class
    "D102",
    # ... method
    "D103",
    # ... function
    "D104",
    # ... package
    "E712",
    # Ignore == True because of Beanie
    # Plugins we don't currently include: flake8-return
    "RET503",
    # missing explicit return at the end of function ableto return non-None value.
    "RET504",
    # unecessary variable assignement before return statement.
]

[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"

[tool.ruff.lint.flake8-annotations]
mypy-init-return = true
suppress-dummy-args = true
suppress-none-returning = true

[tool.ruff.lint.flake8-errmsg]
max-string-length = 20

[tool.ruff.lint.mccabe]
max-complexity = 13
1 requirements.precommit.txt Normal file
@@ -0,0 +1 @@
pre-commit==3.6.2
47 tests/test_filters.py Normal file
@@ -0,0 +1,47 @@
import pytest

from jarvis_core import filters


@pytest.fixture()
def faker_locale():
    return ["en_US"]


def test_invites(faker):
    invites = ["discord.gg/asdf", "discord.com/invite/asdf", "discord://asdf/invite/asdf"]
    for invite in invites:
        assert filters.invites.match(invite)
    for _ in range(100):
        assert not filters.invites.match(faker.url())


def test_custom_emotes():
    emotes = ["<:test:000>", "<a:animated:000>"]
    not_emotes = ["<invalid:000>", "<:a:invalid:000>", "<invalid:000:>"]

    for emote in emotes:
        print(emote)
        assert filters.custom_emote.match(emote)
    for not_emote in not_emotes:
        assert not filters.custom_emote.match(not_emote)


def test_url(faker):
    for _ in range(100):
        assert filters.url.match(faker.url())


def test_email(faker):
    for _ in range(100):
        assert filters.email.match(faker.ascii_email())


def test_ipv4(faker):
    for _ in range(100):
        assert filters.ipv4.match(faker.ipv4())


def test_ipv4(faker):
    for _ in range(100):
        assert filters.ipv6.match(faker.ipv6())
@@ -1,5 +0,0 @@
from jarvis_core import __version__


def test_version():
    assert __version__ == "0.1.0"
72 tests/test_models.py Normal file
@@ -0,0 +1,72 @@
import types
import typing
from datetime import datetime, timezone

import pytest
from beanie import Document, init_beanie
from mongomock_motor import AsyncMongoMockClient
from pydantic import BaseModel
from pydantic.fields import FieldInfo

from jarvis_core.db.models import Pin, all_models
from jarvis_core.db.utils import Snowflake

MAX_SNOWFLAKE = 18446744073709551615


async def get_default(annotation: type):
    if annotation is Snowflake:
        return MAX_SNOWFLAKE
    if annotation.__class__ is typing._UnionGenericAlias or annotation.__class__ is types.UnionType:
        return annotation.__args__[0]()
    if issubclass(annotation, BaseModel):
        data = {}
        for name, info in annotation.model_fields.items():
            if info.is_required():
                data[name] = await get_default(info.annotation)
        return annotation(**data)
    if annotation is datetime:
        return datetime.now(tz=timezone.utc)
    return annotation()


async def create_data_dict(model_fields: dict[str, FieldInfo]):
    data = {}
    for name, info in model_fields.items():
        if info.is_required():
            if (
                type(info.annotation) is typing._GenericAlias
                and (link := info.annotation.__args__[0]) in all_models
            ):
                reference = await create_data_dict(link.model_fields)
                nested = link(**reference)
                await nested.save()
                nested = await link.find_one(link.id == nested.id)
                data[name] = nested
            else:
                data[name] = await get_default(info.annotation)
    return data


@pytest.fixture(autouse=True)
async def my_fixture():
    client = AsyncMongoMockClient(tz_aware=True, tzinfo=timezone.utc)
    await init_beanie(document_models=all_models, database=client.get_database(name="test_models"))


async def test_models():
    for model in all_models:
        data = await create_data_dict(model.model_fields)
        await model(**data).save()
        saved = await model.find_one()
        for key, value in data.items():
            if model is Pin:
                continue  # This is broken af, it works but I can't test it
            saved_value = getattr(saved, key)
            # Don't care about microseconds for these tests
            # Mongosock tends to round, so we
            if isinstance(saved_value, datetime):
                saved_value = int(saved_value.astimezone(timezone.utc).timestamp())
                value = int(value.timestamp())

            assert value == saved_value
83 tests/test_util.py Normal file
@@ -0,0 +1,83 @@
from dataclasses import dataclass

import pytest
from aiohttp import ClientConnectionError, ClientResponseError

from jarvis_core import util
from jarvis_core.util import ansi, http


async def test_hash():
    hashes: dict[str, dict[str, str]] = {
        "sha256": {
            "hello": "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
            "https://zevaryx.com/media/logo.png": "668ddf4ec8b0c7315c8a8bfdedc36b242ff8f4bba5debccd8f5fa07776234b6a",
        },
        "sha1": {
            "hello": "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d",
            "https://zevaryx.com/media/logo.png": "989f8065819c6946493797209f73ffe37103f988",
        },
    }

    for hash_method, items in hashes.items():
        for value, correct in items.items():
            print(value)
            assert (await util.hash(data=value, method=hash_method))[0] == correct

    with pytest.raises(ClientResponseError):
        await util.hash("https://zevaryx.com/known-not-to-exist")
    with pytest.raises(ClientConnectionError):
        await util.hash("https://known-to-not-exist.zevaryx.com")


def test_bytesize():
    size = 4503599627370496
    converted = util.convert_bytesize(size)
    assert converted == "4.000 PB"

    assert util.unconvert_bytesize(4, "PB") == size

    assert util.convert_bytesize(None) == "??? B"
    assert util.unconvert_bytesize(4, "B") == 4


def test_find_get():
    @dataclass
    class TestModel:
        x: int

    models = [TestModel(3), TestModel(9), TestModel(100), TestModel(-2)]

    assert util.find(lambda x: x.x > 0, models).x == 3
    assert util.find(lambda x: x.x > 100, models) is None

    assert len(util.find_all(lambda x: x.x % 2 == 0, models)) == 2

    assert util.get(models, x=3).x == 3
    assert util.get(models, x=11) is None
    assert util.get(models).x == 3
    assert util.get(models, y=3) is None

    assert len(util.get_all(models, x=9)) == 1
    assert len(util.get_all(models, y=1)) == 0
    assert util.get_all(models) == models


async def test_http_get_size():
    url = "http://ipv4.download.thinkbroadband.com/100MB.zip"
    size = 104857600

    assert await http.get_size(url) == size

    with pytest.raises(ValueError):
        await http.get_size("invalid")


def test_ansi():
    known = "\x1b[0;35;41m"
    assert ansi.fmt(1, ansi.Format.NORMAL, ansi.Fore.PINK, ansi.Back.ORANGE) == known

    assert 4 in ansi.Format
    assert 2 not in ansi.Format

    assert ansi.fmt() == ansi.RESET