diff --git a/.github/workflows/check-pr.yaml b/.github/workflows/check-pr.yaml
index 3b6902a..919a6ca 100644
--- a/.github/workflows/check-pr.yaml
+++ b/.github/workflows/check-pr.yaml
@@ -45,3 +45,7 @@ jobs:
working-directory: backend
run: |
task test-container
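+        # Secrets required by the new integration tests (read via backend/tests/settings.py).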
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_TOKEN }}
+ TELEGRAM_CHAT_ID: ${{ secrets.TELEGRAM_CHAT_ID }}
diff --git a/.prettierignore b/.prettierignore
index ef8fb06..7e1eb12 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -9,6 +9,7 @@ __pycache__/
.pytest_cache/
.ruff_cache/
.coverage/
+htmlcov/
# Backend temporary files
backend/example/state
diff --git a/backend/Taskfile.yaml b/backend/Taskfile.yaml
index 46beb1a..f53a96f 100644
--- a/backend/Taskfile.yaml
+++ b/backend/Taskfile.yaml
@@ -143,7 +143,7 @@ tasks:
cmds:
- echo 'Running pytest...'
- task: _python
- vars: { COMMAND: "-m pytest" }
+ vars: { COMMAND: "-m pytest {{.CLI_ARGS}}" }
test-container:
desc: Run tests in container
diff --git a/backend/lib/github/clients/gql.py b/backend/lib/github/clients/gql.py
index cfc7d78..5d5e816 100644
--- a/backend/lib/github/clients/gql.py
+++ b/backend/lib/github/clients/gql.py
@@ -12,19 +12,22 @@
import pydantic.alias_generators as pydantic_alias_generators
import lib.github.models as github_models
+import lib.utils.pydantic as pydantic_utils
logger = logging.getLogger(__name__)
class BaseRequest(abc.ABC):
- document: graphql.DocumentNode = NotImplemented
+ @property
+ @abc.abstractmethod
+ def document(self) -> graphql.DocumentNode: ...
@property
def params(self) -> dict[str, typing.Any]:
raise NotImplementedError
-class BaseModel(pydantic.BaseModel):
+class BaseModel(pydantic_utils.BaseModel):
model_config = pydantic.ConfigDict(alias_generator=pydantic_alias_generators.to_camel)
@@ -33,32 +36,34 @@ def to_dataclass(self) -> typing.Any:
raise NotImplementedError
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class GetRepositoriesRequest(BaseRequest):
owner: str
limit: int = 100
after: str | None = None
- document: graphql.DocumentNode = gql.gql(
- """
- query myOrgRepos($query: String!, $limit: Int!, $after: String) {
- search(query: $query, type: REPOSITORY, first: $limit, after: $after) {
- nodes {
- ... on Repository {
- name
- owner {
- login
+ @property
+ def document(self) -> graphql.DocumentNode:
+ return gql.gql(
+ """
+ query myOrgRepos($query: String!, $limit: Int!, $after: String) {
+ search(query: $query, type: REPOSITORY, first: $limit, after: $after) {
+ nodes {
+ ... on Repository {
+ name
+ owner {
+ login
+ }
}
}
- }
- pageInfo {
- endCursor
- hasNextPage
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
}
}
- }
- """
- )
+ """
+ )
@property
def params(self) -> dict[str, typing.Any]:
@@ -79,7 +84,7 @@ class Owner(BaseModel):
owner: Owner
class PageInfo(BaseModel):
- end_cursor: str
+ end_cursor: str | None
has_next_page: bool
nodes: list[Repository]
@@ -97,33 +102,35 @@ def to_dataclass(self) -> list[github_models.Repository]:
]
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class GetRepositoryIssuesRequest(BaseRequest):
owner: str
repository: str
created_after: datetime.datetime
limit: int = 100
- document: graphql.DocumentNode = gql.gql(
- """
- query getIssues($query: String!, $limit: Int!) {
- search(query: $query, type: ISSUE, first: $limit) {
- nodes {
- ... on Issue {
- id
- url
- title
- body
- createdAt
- author {
- login
+ @property
+ def document(self) -> graphql.DocumentNode:
+ return gql.gql(
+ """
+ query getIssues($query: String!, $limit: Int!) {
+ search(query: $query, type: ISSUE, first: $limit) {
+ nodes {
+ ... on Issue {
+ id
+ url
+ title
+ body
+ createdAt
+ author {
+ login
+ }
}
}
}
}
- }
- """
- )
+ """
+ )
@property
def params(self) -> dict[str, typing.Any]:
@@ -170,33 +177,35 @@ def to_dataclass(self) -> list[github_models.Issue]:
]
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class GetRepositoryPRsRequest(BaseRequest):
owner: str
repository: str
created_after: datetime.datetime
limit: int = 100
- document: graphql.DocumentNode = gql.gql(
- """
- query getPRs($query: String!, $limit: Int!) {
- search(query: $query, type: ISSUE, first: $limit) {
- nodes {
- ... on PullRequest {
- id
- url
- title
- body
- createdAt
- author {
- login
+ @property
+ def document(self) -> graphql.DocumentNode:
+ return gql.gql(
+ """
+ query getPRs($query: String!, $limit: Int!) {
+ search(query: $query, type: ISSUE, first: $limit) {
+ nodes {
+ ... on PullRequest {
+ id
+ url
+ title
+ body
+ createdAt
+ author {
+ login
+ }
}
}
}
}
- }
- """
- )
+ """
+ )
@property
def params(self) -> dict[str, typing.Any]:
@@ -243,15 +252,16 @@ def to_dataclass(self) -> list[github_models.PullRequest]:
]
+@dataclasses.dataclass(frozen=True)
class GqlGithubClient:
- def __init__(self, token: str) -> None:
- self._token = token
+ token: str
@contextlib.asynccontextmanager
- async def gql_client(self) -> typing.AsyncGenerator[gql.Client, None]:
+ async def _gql_client(self) -> typing.AsyncGenerator[gql.Client, None]:
gql_transport = gql_aiohttp.AIOHTTPTransport(
url="https://api.github.com/graphql",
- headers={"Authorization": f"Bearer {self._token}"},
+ headers={"Authorization": f"Bearer {self.token}"},
+ ssl=True,
)
gql_client = gql.Client(
transport=gql_transport,
@@ -268,7 +278,7 @@ async def _request[ResponseT: BaseResponse](
response_model: type[ResponseT],
) -> ResponseT:
logger.debug("Requesting document(%s) params(%s)", request.document, request.params)
- async with self.gql_client() as gql_client:
+ async with self._gql_client() as gql_client:
response = await gql_client.execute_async(
document=request.document,
variable_values=request.params,
@@ -277,19 +287,32 @@ async def _request[ResponseT: BaseResponse](
return parsed_response
+ async def _get_repositories(
+ self,
+ request: GetRepositoriesRequest,
+ ) -> GetRepositoriesResponse:
+ return await self._request(request, GetRepositoriesResponse)
+
async def get_repositories(
self,
request: GetRepositoriesRequest,
- ) -> list[github_models.Repository]:
- result: list[github_models.Repository] = []
+ ) -> typing.AsyncGenerator[github_models.Repository, None]:
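+        # The request dataclass is frozen, so the pagination cursor is tracked locally instead of mutating request.after.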
+ after = request.after
while True:
- response = await self._request(request, GetRepositoriesResponse)
- result.extend(response.to_dataclass())
+ response = await self._get_repositories(
+ request=GetRepositoriesRequest(
+ owner=request.owner,
+ limit=request.limit,
+ after=after,
+ ),
+ )
+ for repository in response.to_dataclass():
+ yield repository
if not response.search.page_info.has_next_page:
break
- request.after = response.search.page_info.end_cursor
- return result
+ after = response.search.page_info.end_cursor
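+            # end_cursor is nullable (empty result pages), but has_next_page is True here, so a cursor is expected.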
+ assert after is not None
async def get_repository_issues(
self,
diff --git a/backend/lib/github/clients/rest.py b/backend/lib/github/clients/rest.py
index 43a9847..45fa0c9 100644
--- a/backend/lib/github/clients/rest.py
+++ b/backend/lib/github/clients/rest.py
@@ -5,34 +5,33 @@
import typing
import aiohttp
-import pydantic
import lib.github.models as github_models
+import lib.utils.pydantic as pydantic_utils
logger = logging.getLogger(__name__)
class BaseRequest(abc.ABC):
- method: str
+ @property
+ @abc.abstractmethod
+ def method(self) -> str: ...
@property
@abc.abstractmethod
def url(self) -> str: ...
@property
- def params(self) -> dict[str, typing.Any]:
- return {}
-
-
-class BaseModel(pydantic.BaseModel): ...
+ @abc.abstractmethod
+ def params(self) -> dict[str, typing.Any]: ...
-class BaseResponse(BaseModel):
+class BaseResponse(pydantic_utils.BaseModel):
def to_dataclass(self) -> typing.Any:
raise NotImplementedError
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class GetRepositoryWorkflowRunsRequest(BaseRequest):
owner: str
repository: str
@@ -40,7 +39,10 @@ class GetRepositoryWorkflowRunsRequest(BaseRequest):
per_page: int = 100
page: int = 1
exclude_pull_requests: bool = True
- method: str = "GET"
+
+ @property
+ def method(self) -> str:
+ return "GET"
@property
def url(self) -> str:
@@ -57,7 +59,7 @@ def params(self) -> dict[str, typing.Any]:
class GetRepositoryWorkflowRunsResponse(BaseResponse):
- class WorkflowRun(pydantic.BaseModel):
+ class WorkflowRun(pydantic_utils.BaseModel):
id: int
name: str
html_url: str
@@ -82,14 +84,10 @@ def to_dataclass(self) -> list[github_models.WorkflowRun]:
]
+@dataclasses.dataclass(frozen=True)
class RestGithubClient:
- def __init__(
- self,
- aiohttp_client: aiohttp.ClientSession,
- token: str,
- ) -> None:
- self._aiohttp_client = aiohttp_client
- self._token = token
+ aiohttp_client: aiohttp.ClientSession
+ token: str
@classmethod
def from_token(cls, token: str) -> typing.Self:
@@ -97,7 +95,7 @@ def from_token(cls, token: str) -> typing.Self:
return cls(aiohttp_client=aiohttp_client, token=token)
async def dispose(self) -> None:
- await self._aiohttp_client.close()
+ await self.aiohttp_client.close()
async def _request[ResponseT: BaseResponse](
self,
@@ -105,11 +103,11 @@ async def _request[ResponseT: BaseResponse](
response_model: type[ResponseT],
) -> ResponseT:
headers = {
- "Authorization": f"Bearer {self._token}",
+ "Authorization": f"Bearer {self.token}",
"Accept": "application/vnd.github.v3+json",
}
logger.debug("Requesting method(%s) url(%s) params(%s)", request.method, request.url, request.params)
- async with self._aiohttp_client.request(
+ async with self.aiohttp_client.request(
method=request.method,
url=request.url,
params=request.params,
@@ -119,19 +117,35 @@ async def _request[ResponseT: BaseResponse](
data = await response.json()
return response_model.model_validate(data)
+ async def _get_repository_workflow_runs(
+ self,
+ request: GetRepositoryWorkflowRunsRequest,
+ ) -> GetRepositoryWorkflowRunsResponse:
+ return await self._request(request, GetRepositoryWorkflowRunsResponse)
+
async def get_repository_workflow_runs(
self,
request: GetRepositoryWorkflowRunsRequest,
) -> typing.AsyncGenerator[github_models.WorkflowRun, None]:
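+        # The frozen request can't be updated in place, so the page counter advances locally between fetches.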
+ page = request.page
+
while True:
- response = await self._request(request, GetRepositoryWorkflowRunsResponse)
+ response = await self._get_repository_workflow_runs(
+ request=GetRepositoryWorkflowRunsRequest(
+ owner=request.owner,
+ repository=request.repository,
+ created_after=request.created_after,
+ page=page,
+ per_page=request.per_page,
+ ),
+ )
if not response.workflow_runs:
return
for workflow_run in response.to_dataclass():
yield workflow_run
- request.page += 1
+ page += 1
__all__ = [
diff --git a/backend/lib/github/models.py b/backend/lib/github/models.py
index 6b8e436..5c913c8 100644
--- a/backend/lib/github/models.py
+++ b/backend/lib/github/models.py
@@ -2,13 +2,13 @@
import datetime
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class Repository:
owner: str
name: str
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class Issue:
id: str
author: str | None
@@ -18,7 +18,7 @@ class Issue:
created_at: datetime.datetime
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class PullRequest:
id: str
author: str | None
diff --git a/backend/lib/github/triggers.py b/backend/lib/github/triggers.py
index 600427c..56ecec3 100644
--- a/backend/lib/github/triggers.py
+++ b/backend/lib/github/triggers.py
@@ -1,4 +1,5 @@
import contextlib
+import dataclasses
import datetime
import logging
import re
@@ -219,18 +220,12 @@ class GithubTriggerState(pydantic_utils.BaseModel):
)
+@dataclasses.dataclass(frozen=True)
class GithubTriggerProcessor(task_base.TriggerProcessor[GithubTriggerConfig]):
- def __init__(
- self,
- raw_state: lib.task.protocols.StateProtocol,
- gql_github_client: github_clients.GqlGithubClient,
- rest_github_client: github_clients.RestGithubClient,
- config: GithubTriggerConfig,
- ) -> None:
- self._raw_state = raw_state
- self._gql_github_client = gql_github_client
- self._rest_github_client = rest_github_client
- self._config = config
+ config: GithubTriggerConfig
+ raw_state: lib.task.protocols.StateProtocol
+ gql_github_client: github_clients.GqlGithubClient
+ rest_github_client: github_clients.RestGithubClient
@classmethod
def from_config(
@@ -249,11 +244,11 @@ def from_config(
)
async def dispose(self) -> None:
- await self._rest_github_client.dispose()
+ await self.rest_github_client.dispose()
@contextlib.asynccontextmanager
async def _acquire_state(self) -> typing.AsyncIterator[GithubTriggerState]:
- async with self._raw_state.acquire() as raw_state:
+ async with self.raw_state.acquire() as raw_state:
if raw_state is None:
raw_state = {}
@@ -261,15 +256,18 @@ async def _acquire_state(self) -> typing.AsyncIterator[GithubTriggerState]:
try:
yield state
finally:
- await self._raw_state.set(state.model_dump(mode="json"))
+ await self.raw_state.set(state.model_dump(mode="json"))
async def produce_events(self) -> typing.AsyncGenerator[task_base.Event, None]:
- repositories = await self._gql_github_client.get_repositories(
+ repositories: list[github_models.Repository] = []
+
+ async for repository in self.gql_github_client.get_repositories(
github_clients.GetRepositoriesRequest(
- owner=self._config.owner,
+ owner=self.config.owner,
)
- )
- repositories = [repository for repository in repositories if self._config.is_repository_applicable(repository)]
+ ):
+ if self.config.is_repository_applicable(repository):
+ repositories.append(repository)
async with self._acquire_state() as state:
async for event in asyncio_utils.GatherIterators(
@@ -278,7 +276,7 @@ async def produce_events(self) -> typing.AsyncGenerator[task_base.Event, None]:
state=state,
repositories=repositories,
)
- for subtrigger in self._config.subtriggers
+ for subtrigger in self.config.subtriggers
):
yield event
@@ -333,15 +331,15 @@ async def _process_repository_issue_created(
) -> typing.AsyncGenerator[task_base.Event, None]:
if repository not in state.repository_issue_created:
state.repository_issue_created[repository] = RepositoryIssueCreatedState.default_factory(
- self._config.default_timedelta,
+ self.config.default_timedelta,
)
repository_state = state.repository_issue_created[repository]
last_issue_created = repository_state.last_issue_created
while True:
- issues = await self._gql_github_client.get_repository_issues(
+ issues = await self.gql_github_client.get_repository_issues(
github_clients.GetRepositoryIssuesRequest(
- owner=self._config.owner,
+ owner=self.config.owner,
repository=repository,
created_after=last_issue_created,
)
@@ -353,7 +351,7 @@ async def _process_repository_issue_created(
if subtrigger.is_applicable(issue):
yield task_base.Event(
id=f"issue_created__{issue.id}",
- title=f"📋New issue in {self._config.owner}/{repository}",
+ title=f"📋New issue in {self.config.owner}/{repository}",
body=f"Issue created by {issue.author}: {issue.title}",
url=issue.url,
)
@@ -384,15 +382,15 @@ async def _process_repository_pr_created(
) -> typing.AsyncGenerator[task_base.Event, None]:
if repository not in state.repository_pr_created:
state.repository_pr_created[repository] = RepositoryPRCreatedState.default_factory(
- self._config.default_timedelta,
+ self.config.default_timedelta,
)
repository_state = state.repository_pr_created[repository]
last_pr_created = repository_state.last_pr_created
while True:
- prs = await self._gql_github_client.get_repository_pull_requests(
+ prs = await self.gql_github_client.get_repository_pull_requests(
github_clients.GetRepositoryPRsRequest(
- owner=self._config.owner,
+ owner=self.config.owner,
repository=repository,
created_after=last_pr_created,
)
@@ -404,7 +402,7 @@ async def _process_repository_pr_created(
if subtrigger.is_applicable(pr):
yield task_base.Event(
id=f"pr_created__{pr.id}",
- title=f"🛠New PR in {self._config.owner}/{repository}",
+ title=f"🛠New PR in {self.config.owner}/{repository}",
body=f"PR created by {pr.author}: {pr.title}",
url=pr.url,
)
@@ -435,27 +433,27 @@ async def _process_repository_failed_workflow_run(
) -> typing.AsyncGenerator[task_base.Event, None]:
if repository not in state.repository_failed_workflow_run:
state.repository_failed_workflow_run[repository] = RepositoryFailedWorkflowRunState.default_factory(
- self._config.default_timedelta,
+ self.config.default_timedelta,
)
repository_state = state.repository_failed_workflow_run[repository]
- request = github_clients.GetRepositoryWorkflowRunsRequest(
- owner=self._config.owner,
- repository=repository,
- created_after=repository_state.oldest_incomplete_created,
- )
-
oldest_incomplete_created = None
last_created = None
- async for workflow_run in self._rest_github_client.get_repository_workflow_runs(request):
+ async for workflow_run in self.rest_github_client.get_repository_workflow_runs(
+ request=github_clients.GetRepositoryWorkflowRunsRequest(
+ owner=self.config.owner,
+ repository=repository,
+ created_after=repository_state.oldest_incomplete_created,
+ ),
+ ):
if (
subtrigger.is_applicable(workflow_run)
and workflow_run not in repository_state.already_reported_failed_runs
):
yield task_base.Event(
- id=f"failed_workflow_run__{self._config.owner}__{repository}__{workflow_run.id}",
- title=f"🔥Failed workflow run in {self._config.owner}/{repository}",
+ id=f"failed_workflow_run__{self.config.owner}__{repository}__{workflow_run.id}",
+ title=f"🔥Failed workflow run in {self.config.owner}/{repository}",
body=f"Workflow run {workflow_run.name} failed",
url=workflow_run.url,
)
diff --git a/backend/lib/task/base/action.py b/backend/lib/task/base/action.py
index d95e072..d729c78 100644
--- a/backend/lib/task/base/action.py
+++ b/backend/lib/task/base/action.py
@@ -32,7 +32,7 @@ async def dispose(self) -> None: ...
async def process(self, event: task_configs_event.Event) -> None: ...
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[ConfigT: BaseActionConfig]:
config_class: type[ConfigT]
processor_class: type[ActionProcessor[ConfigT]]
diff --git a/backend/lib/task/base/task.py b/backend/lib/task/base/task.py
index b4a6034..8b3af70 100644
--- a/backend/lib/task/base/task.py
+++ b/backend/lib/task/base/task.py
@@ -37,7 +37,7 @@ def is_ready(self, last_run: datetime.datetime | None) -> bool:
return schedule.next() <= datetime.datetime.now(tz=last_run.tzinfo)
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[ConfigT: BaseTaskConfig]:
config_class: type[ConfigT]
diff --git a/backend/lib/task/base/trigger.py b/backend/lib/task/base/trigger.py
index d6f71c0..8a374b2 100644
--- a/backend/lib/task/base/trigger.py
+++ b/backend/lib/task/base/trigger.py
@@ -33,7 +33,7 @@ def produce_events(self) -> typing.AsyncGenerator[task_base.Event, None]: ...
async def dispose(self) -> None: ...
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[ConfigT: BaseTriggerConfig]:
config_class: type[ConfigT]
processor_class: type[TriggerProcessor[ConfigT]]
diff --git a/backend/lib/task/repositories/config/base.py b/backend/lib/task/repositories/config/base.py
index 20d1700..8366788 100644
--- a/backend/lib/task/repositories/config/base.py
+++ b/backend/lib/task/repositories/config/base.py
@@ -29,7 +29,7 @@ async def dispose(self) -> None: ...
async def get_config(self) -> task_base.RootConfig: ...
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[SettingsT: BaseConfigSettings]:
settings_class: type[SettingsT]
repository_class: type[BaseConfigRepository[SettingsT]]
diff --git a/backend/lib/task/repositories/queue/base.py b/backend/lib/task/repositories/queue/base.py
index 63e5b23..429b70e 100644
--- a/backend/lib/task/repositories/queue/base.py
+++ b/backend/lib/task/repositories/queue/base.py
@@ -100,7 +100,7 @@ async def consume(self, topic: JobTopic, item: QueueItem) -> None: ...
async def close_topic(self, topic: JobTopic) -> None: ...
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[SettingsT: BaseQueueSettings]:
settings_class: type[SettingsT]
repository_class: type[BaseQueueRepository[SettingsT]]
diff --git a/backend/lib/task/repositories/state/base.py b/backend/lib/task/repositories/state/base.py
index f07d394..e99e8f2 100644
--- a/backend/lib/task/repositories/state/base.py
+++ b/backend/lib/task/repositories/state/base.py
@@ -55,7 +55,7 @@ async def get_state(self, path: str) -> task_protocols.StateProtocol:
return State(repository=self, path=path)
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class RegistryRecord[SettingsT: BaseStateSettings]:
settings_class: type[SettingsT]
repository_class: type[BaseStateRepository[SettingsT]]
diff --git a/backend/lib/telegram/actions.py b/backend/lib/telegram/actions.py
index e713592..16a21cd 100644
--- a/backend/lib/telegram/actions.py
+++ b/backend/lib/telegram/actions.py
@@ -1,9 +1,11 @@
+import dataclasses
import logging
import typing
import aiohttp
import lib.task.base as task_base
+import lib.telegram.clients as telegram_clients
import lib.utils.pydantic as pydantic_utils
logger = logging.getLogger(__name__)
@@ -20,17 +22,14 @@ class TelegramWebhookActionConfig(task_base.BaseActionConfig):
max_message_body_length: int = 500
+@dataclasses.dataclass(frozen=True)
class TelegramWebhookProcessor(task_base.ActionProcessor[TelegramWebhookActionConfig]):
- def __init__(
- self,
- config: TelegramWebhookActionConfig,
- aiohttp_client: aiohttp.ClientSession,
- ):
- self._config = config
- self._aiohttp_client = aiohttp_client
+ config: TelegramWebhookActionConfig
+ aiohttp_client: aiohttp.ClientSession
+ telegram_client: telegram_clients.RestTelegramClient
async def dispose(self) -> None:
- await self._aiohttp_client.close()
+ await self.aiohttp_client.close()
@classmethod
def from_config(
@@ -38,25 +37,32 @@ def from_config(
config: TelegramWebhookActionConfig,
) -> typing.Self:
aiohttp_client = aiohttp.ClientSession()
+ telegram_client = telegram_clients.RestTelegramClient(
+ token=config.token_secret.value,
+ aiohttp_client=aiohttp_client,
+ )
return cls(
config=config,
aiohttp_client=aiohttp_client,
+ telegram_client=telegram_client,
+ )
+
+ def _format_message(self, event: task_base.Event) -> str:
+ return (
+ f"{trim_string(event.title, self.config.max_message_title_length)}"
+ f"\n{trim_string(event.body, self.config.max_message_body_length)}"
+ f"\n{event.url}"
)
async def process(self, event: task_base.Event) -> None:
- async with self._aiohttp_client.post(
- f"https://api.telegram.org/bot{self._config.token_secret.value}/sendMessage",
- params={
- "chat_id": self._config.chat_id_secret.value,
- "text": f"{trim_string(event.title, self._config.max_message_title_length)}"
- f"\n{trim_string(event.body, self._config.max_message_body_length)}"
- f"\n{event.url}",
- "parse_mode": "HTML",
- "disable_web_page_preview": "true",
- },
- ) as response:
- response.raise_for_status()
+ text = self._format_message(event)
+ await self.telegram_client.send_message(
+ request=telegram_clients.SendMessageRequest(
+ chat_id=self.config.chat_id_secret.value,
+ text=text,
+ ),
+ )
def register_default_plugins() -> None:
diff --git a/backend/lib/telegram/clients.py b/backend/lib/telegram/clients.py
new file mode 100644
index 0000000..2b4629e
--- /dev/null
+++ b/backend/lib/telegram/clients.py
@@ -0,0 +1,52 @@
+import dataclasses
+
+import aiohttp
+import aiohttp.typedefs as aiohttp_typedefs
+
+
+@dataclasses.dataclass(frozen=True)
+class SendMessageRequest:
+ chat_id: str
+ text: str
+ parse_mode: str = "HTML"
+ disable_web_page_preview: bool = True
+
+ @property
+ def method(self) -> str:
+ return "POST"
+
+ @property
+ def path(self) -> str:
+ return "/sendMessage"
+
+ @property
+ def params(self) -> aiohttp_typedefs.Query:
+ return {
+ "chat_id": self.chat_id,
+ "text": self.text,
+ "parse_mode": self.parse_mode,
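+            # aiohttp does not serialize booleans in query params, hence the explicit "true"/"false" values.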
+ "disable_web_page_preview": "true" if self.disable_web_page_preview else "false",
+ }
+
+
+@dataclasses.dataclass(frozen=True)
+class RestTelegramClient:
+ aiohttp_client: aiohttp.ClientSession
+ token: str
+
+ def _prepare_url(self, path: str) -> str:
+ return f"https://api.telegram.org/bot{self.token}{path}"
+
+ async def send_message(self, request: SendMessageRequest) -> None:
+ async with self.aiohttp_client.request(
+ method=request.method,
+ url=self._prepare_url(request.path),
+ params=request.params,
+ ) as response:
+ response.raise_for_status()
+
+
+__all__ = [
+ "RestTelegramClient",
+ "SendMessageRequest",
+]
diff --git a/backend/lib/utils/aiojobs/scheduler.py b/backend/lib/utils/aiojobs/scheduler.py
index c413d9f..1402ace 100644
--- a/backend/lib/utils/aiojobs/scheduler.py
+++ b/backend/lib/utils/aiojobs/scheduler.py
@@ -12,7 +12,7 @@
AioJobsScheduler = aiojobs.Scheduler
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class Settings:
limit: int | None
pending_limit: int | None
diff --git a/backend/lib/utils/lifecycle.py b/backend/lib/utils/lifecycle.py
index 89723aa..00770fc 100644
--- a/backend/lib/utils/lifecycle.py
+++ b/backend/lib/utils/lifecycle.py
@@ -7,7 +7,7 @@
Task = asyncio.Task[typing.Any]
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class Callback:
awaitable: Awaitable
error_message: str
diff --git a/backend/lib/utils/logging/config.py b/backend/lib/utils/logging/config.py
index 7a47dff..5b13e03 100644
--- a/backend/lib/utils/logging/config.py
+++ b/backend/lib/utils/logging/config.py
@@ -14,7 +14,7 @@ def initialize(
logging_config.dictConfig(config)
-@dataclasses.dataclass
+@dataclasses.dataclass(frozen=True)
class LoggerConfig:
propagate: bool
level: LogLevel
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
new file mode 100644
index 0000000..078f8a4
--- /dev/null
+++ b/backend/tests/conftest.py
@@ -0,0 +1,8 @@
+import pytest
+
+import tests.settings as test_settings
+
+
+@pytest.fixture(name="settings")
+def settings_fixture() -> test_settings.Settings:
+ return test_settings.Settings()
diff --git a/backend/tests/integration/github/__init__.py b/backend/tests/integration/github/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/github/clients/__init__.py b/backend/tests/integration/github/clients/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/github/clients/gql/__init__.py b/backend/tests/integration/github/clients/gql/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/github/clients/gql/conftest.py b/backend/tests/integration/github/clients/gql/conftest.py
new file mode 100644
index 0000000..75429df
--- /dev/null
+++ b/backend/tests/integration/github/clients/gql/conftest.py
@@ -0,0 +1,9 @@
+import pytest
+
+import lib.github.clients as github_clients
+import tests.settings as test_settings
+
+
+@pytest.fixture(name="github_gql_client")
+def github_gql_client_fixture(settings: test_settings.Settings) -> github_clients.GqlGithubClient:
+ return github_clients.GqlGithubClient(token=settings.github_token)
diff --git a/backend/tests/integration/github/clients/gql/test_get_repositories.py b/backend/tests/integration/github/clients/gql/test_get_repositories.py
new file mode 100644
index 0000000..90ba582
--- /dev/null
+++ b/backend/tests/integration/github/clients/gql/test_get_repositories.py
@@ -0,0 +1,48 @@
+import pytest
+
+import lib.github.clients as github_clients
+import lib.github.models as github_models
+
+
+@pytest.mark.asyncio
+async def test_default(github_gql_client: github_clients.GqlGithubClient):
+ iterator = github_gql_client.get_repositories(
+ request=github_clients.GetRepositoriesRequest(
+ owner="python",
+ ),
+ )
+
+ assert await anext(iterator) == github_models.Repository(
+ name="cpython",
+ owner="python",
+ )
+
+
+@pytest.mark.asyncio
+async def test_multiple_requests(github_gql_client: github_clients.GqlGithubClient):
+ iterator = github_gql_client.get_repositories(
+ request=github_clients.GetRepositoriesRequest(
+ owner="python",
+ limit=1,
+ ),
+ )
+
+ assert await anext(iterator) == github_models.Repository(
+ name="cpython",
+ owner="python",
+ )
+ assert await anext(iterator) == github_models.Repository(
+ name="mypy",
+ owner="python",
+ )
+
+
+@pytest.mark.asyncio
+async def test_finite(github_gql_client: github_clients.GqlGithubClient):
+ iterator = github_gql_client.get_repositories(
+ request=github_clients.GetRepositoriesRequest(
+            owner="definitely-not-a-real-org",
+ ),
+ )
+
+ assert len([item async for item in iterator]) == 0
diff --git a/backend/tests/integration/github/clients/gql/test_get_repository_issues.py b/backend/tests/integration/github/clients/gql/test_get_repository_issues.py
new file mode 100644
index 0000000..3b0f166
--- /dev/null
+++ b/backend/tests/integration/github/clients/gql/test_get_repository_issues.py
@@ -0,0 +1,26 @@
+import datetime
+
+import pytest
+
+import lib.github.clients as github_clients
+
+
+@pytest.mark.asyncio
+async def test_default(github_gql_client: github_clients.GqlGithubClient):
+ """Test getting issues from a repository with default parameters."""
+ issues = await github_gql_client.get_repository_issues(
+ request=github_clients.GetRepositoryIssuesRequest(
+ owner="python",
+ repository="cpython",
+ created_after=datetime.datetime(2025, 1, 1),
+ limit=1,
+ ),
+ )
+
+ assert len(issues) == 1
+ issue = issues[0]
+ assert issue.id == "I_kwDOBN0Z8c6kzGKa"
+ assert issue.author == "paulie4"
+ assert issue.url == "https://github.com/python/cpython/issues/128388"
+ assert issue.title == "`pyrepl` on Windows: add Ctrl+← and Ctrl+→ word-skipping and other keybindings"
+ assert issue.created_at == datetime.datetime(2025, 1, 1, 5, 33, 57, tzinfo=datetime.UTC)
diff --git a/backend/tests/integration/github/clients/gql/test_get_repository_pull_requests.py b/backend/tests/integration/github/clients/gql/test_get_repository_pull_requests.py
new file mode 100644
index 0000000..d0c3db2
--- /dev/null
+++ b/backend/tests/integration/github/clients/gql/test_get_repository_pull_requests.py
@@ -0,0 +1,25 @@
+import datetime
+
+import pytest
+
+import lib.github.clients as github_clients
+
+
+@pytest.mark.asyncio
+async def test_default(github_gql_client: github_clients.GqlGithubClient):
+ pull_requests = await github_gql_client.get_repository_pull_requests(
+ request=github_clients.GetRepositoryPRsRequest(
+ owner="python",
+ repository="cpython",
+ created_after=datetime.datetime(2025, 1, 1),
+ limit=1,
+ ),
+ )
+
+ assert len(pull_requests) == 1
+ pull_request = pull_requests[0]
+ assert pull_request.id == "PR_kwDOBN0Z8c6GhTCQ"
+ assert pull_request.author == "paulie4"
+ assert pull_request.url == "https://github.com/python/cpython/pull/128389"
+ assert pull_request.title == "gh-128388: pyrepl on Windows: add meta and ctrl+arrow keybindings"
+ assert pull_request.created_at == datetime.datetime(2025, 1, 1, 5, 54, 29, tzinfo=datetime.UTC)
diff --git a/backend/tests/integration/github/clients/rest/__init__.py b/backend/tests/integration/github/clients/rest/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/github/clients/rest/conftest.py b/backend/tests/integration/github/clients/rest/conftest.py
new file mode 100644
index 0000000..f2902b9
--- /dev/null
+++ b/backend/tests/integration/github/clients/rest/conftest.py
@@ -0,0 +1,17 @@
+import typing
+
+import pytest_asyncio
+
+import lib.github.clients as github_clients
+import tests.settings as test_settings
+
+
+@pytest_asyncio.fixture(name="github_rest_client")
+async def github_rest_client_fixture(
+ settings: test_settings.Settings,
+) -> typing.AsyncGenerator[github_clients.RestGithubClient, None]:
+ client = github_clients.RestGithubClient.from_token(settings.github_token)
+ try:
+ yield client
+ finally:
+ await client.dispose()
diff --git a/backend/tests/integration/github/clients/rest/test_get_repository_workflow_runs.py b/backend/tests/integration/github/clients/rest/test_get_repository_workflow_runs.py
new file mode 100644
index 0000000..43ad9ed
--- /dev/null
+++ b/backend/tests/integration/github/clients/rest/test_get_repository_workflow_runs.py
@@ -0,0 +1,48 @@
+import datetime
+
+import pytest
+
+import lib.github.clients as github_clients
+import lib.github.models as github_models
+
+
+@pytest.mark.asyncio
+async def test_default(github_rest_client: github_clients.RestGithubClient):
+ iterator = github_rest_client.get_repository_workflow_runs(
+ request=github_clients.GetRepositoryWorkflowRunsRequest(
+ owner="ovsds",
+ repository="github-watcher",
+ created_after=datetime.datetime(2025, 1, 1),
+ ),
+ )
+
+ workflow_run = await anext(iterator)
+ assert isinstance(workflow_run, github_models.WorkflowRun)
+
+
+@pytest.mark.asyncio
+async def test_per_page(github_rest_client: github_clients.RestGithubClient):
+ iterator = github_rest_client.get_repository_workflow_runs(
+ request=github_clients.GetRepositoryWorkflowRunsRequest(
+ owner="ovsds",
+ repository="github-watcher",
+ created_after=datetime.datetime(2025, 1, 1),
+ per_page=1,
+ ),
+ )
+
+ for _ in range(2):
+ workflow_run = await anext(iterator)
+ assert isinstance(workflow_run, github_models.WorkflowRun)
+
+
+@pytest.mark.asyncio
+async def test_finite(github_rest_client: github_clients.RestGithubClient):
+ iterator = github_rest_client.get_repository_workflow_runs(
+ request=github_clients.GetRepositoryWorkflowRunsRequest(
+ owner="ovsds",
+ repository="github-watcher",
+ created_after=datetime.datetime(3025, 1, 1),
+ ),
+ )
+ assert len([item async for item in iterator]) == 0
diff --git a/backend/tests/integration/telegram/__init__.py b/backend/tests/integration/telegram/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/telegram/clients/__init__.py b/backend/tests/integration/telegram/clients/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/telegram/clients/rest/__init__.py b/backend/tests/integration/telegram/clients/rest/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/tests/integration/telegram/clients/rest/conftest.py b/backend/tests/integration/telegram/clients/rest/conftest.py
new file mode 100644
index 0000000..ffc97d1
--- /dev/null
+++ b/backend/tests/integration/telegram/clients/rest/conftest.py
@@ -0,0 +1,27 @@
+import typing
+
+import aiohttp
+import pytest_asyncio
+
+import lib.telegram.clients as telegram_clients
+import tests.settings as test_settings
+
+
+@pytest_asyncio.fixture(name="aiohttp_client")
+async def aiohttp_client_fixture() -> typing.AsyncGenerator[aiohttp.ClientSession, None]:
+ client = aiohttp.ClientSession()
+ try:
+ yield client
+ finally:
+ await client.close()
+
+
+@pytest_asyncio.fixture(name="telegram_client")
+async def telegram_client_fixture(
+ aiohttp_client: aiohttp.ClientSession,
+ settings: test_settings.Settings,
+) -> telegram_clients.RestTelegramClient:
+ return telegram_clients.RestTelegramClient(
+ token=settings.telegram_token,
+ aiohttp_client=aiohttp_client,
+ )
diff --git a/backend/tests/integration/telegram/clients/rest/test_send_message.py b/backend/tests/integration/telegram/clients/rest/test_send_message.py
new file mode 100644
index 0000000..91c1802
--- /dev/null
+++ b/backend/tests/integration/telegram/clients/rest/test_send_message.py
@@ -0,0 +1,17 @@
+import pytest
+
+import lib.telegram.clients as telegram_clients
+import tests.settings as test_settings
+
+
+@pytest.mark.asyncio
+async def test_default(
+ telegram_client: telegram_clients.RestTelegramClient,
+ settings: test_settings.Settings,
+):
+ await telegram_client.send_message(
+ request=telegram_clients.SendMessageRequest(
+ chat_id=settings.telegram_chat_id,
+ text="Integration test",
+ ),
+ )
diff --git a/backend/tests/settings.py b/backend/tests/settings.py
new file mode 100644
index 0000000..e9868b2
--- /dev/null
+++ b/backend/tests/settings.py
@@ -0,0 +1,13 @@
+import lib.utils.pydantic as pydantic_utils
+
+
+class Settings(pydantic_utils.BaseSettings):
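+    # Expected to be provided via the environment (the GITHUB_TOKEN/TELEGRAM_* secrets wired into check-pr.yaml).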
+ github_token: str = NotImplemented
+
+ telegram_token: str = NotImplemented
+ telegram_chat_id: str = NotImplemented
+
+
+__all__ = [
+ "Settings",
+]