mirror of
https://github.com/Kludex/awesome-fastapi-projects.git
synced 2024-11-27 22:11:08 +00:00
2fdd348a15
* Set up the web project dependencies - Add linters, pre-commit, and GitHub Actions - Add a Makefile - Add a pyproject.toml * Fix pyupgrade job - Remove continue_on_error everywhere * Remove old code * Rename a GithubActions job * Change README * Adjust pre-commit and GitHub actions * Add tables and set up alembic * Set up tests * Extend tests * Add coverage config * Adjust the GithubActions workflow * Fix GithubActions workflow * Try fixing pyproject-fmt config * Fix formatting of pyproject.toml * Fix formatting of pyproject.toml * Add coverage report * Test listing the repositories * Add a working prototype of SourceGraph client * Add parsing of the SourceGraph SSE data * Fix tests * Ged rid of packages replaced by ruff * Fix waits in the SourceGraph client * Refactor the models and add a mapper - A new mapper allows to create database repositories from the SourceGraph data * Add mypy * Try fixing mypy action * Remove redundant configs * Exclude tests from type checking * Fix mypy pre-commit and GitHub action * Ignore factories * Make upserting possible for source graph data * Add logic for parsing the dependencies and populating the database * Add a database and a cron GitHub Action job * Try manually trigger a workflow * Bring back the old config * Add ReadTimeout for errors to retry for in SourceGraph client * Add typer * Adjust the docstrings * Update the database * Refactor and optimize scraping and dependencies parsing * Make scraping run on push for now * Add a unique constraint for the repo url and source graph repo id * Change the index columns in on_conflict statement for repo creation * Optimize dependencies parsing - Do not parse dependencies for a repo when revision did not change * Scraped repositories from Source Graph * Refactor scraping * Set up frontend * Scraped repositories from Source Graph * Add TODOs * Skip scraping when testing * Fix a test with updating the repos * Scraped repositories from Source Graph * Add some more TODOs * Scraped 
repositories from Source Graph * Add some more TODO comments * Add chadcn/ui * Scraped repositories from Source Graph * Create index.json * Scraped repositories from Source Graph * Add a draft of data table and display all the repos * Scraped repositories from Source Graph * Implement stars badges and description with overflow * Format the links to Github repos * Fix link clicking * Scraped repositories from Source Graph * Add simple pagination and stars column sorting * Scraped repositories from Source Graph * Implement basic searching * Scraped repositories from Source Graph * Implement a multiselect for dependencies * Scraped repositories from Source Graph * Implement actual filtering by dependencies * Scraped repositories from Source Graph * Add a workflow to deploy nextjs on github pages * Try fixing the deployment job * Enable static exports for app router * Fix uploading arifacts for nextjs job * Set base path to properly load JS and CSS * Fix the base path * Scraped repositories from Source Graph * Add header * Remove language version * Scraped repositories from Source Graph * Add some more TODOs * Scraped repositories from Source Graph * Adjust the pre-commit config * Fix pipelines * Scraped repositories from Source Graph * Add a footer * Create the indexes * Scraped repositories from Source Graph * Add more TODOs * Introduce minor footer adjustments * Adjust the scraping actions * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Implement query params state * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Do not commit query state on unmount * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Hopefully fix query states and multiselect input * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Extend the Makefile * Resolve most of TODOs * Resolve the conflicts with anyio version, 
bring back httpx * Adjust the Makefile and README.md * Fix a typo in README.md * Adjust readme * Fix the Makefile * Fix some stuff * Make some adjustments * Possibly fix failing scraping jobs * Load the repo project URL from env --------- Co-authored-by: vladfedoriuk <vladfedoriuk@users.noreply.github.com> Co-authored-by: Vladyslav Fedoriuk <vladyslav.fedoriuk@deployed.pl>
106 lines
3.3 KiB
Python
106 lines
3.3 KiB
Python
"""The application-level conftest."""
|
|
import asyncio
|
|
import contextlib
|
|
from collections.abc import AsyncGenerator, Generator
|
|
from typing import Literal
|
|
|
|
import pytest
|
|
import stamina
|
|
from dirty_equals import IsList
|
|
from pytest_mock import MockerFixture
|
|
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncSession
|
|
|
|
from app.database import Dependency, Repo
|
|
from app.factories import DependencyCreateDataFactory
|
|
from app.source_graph.factories import SourceGraphRepoDataFactory
|
|
from app.source_graph.models import SourceGraphRepoData
|
|
|
|
|
|
@pytest.fixture(autouse=True, scope="session")
def anyio_backend() -> Literal["asyncio"]:
    """Select the async backend for the anyio pytest plugin.

    The whole suite runs on asyncio, so the backend is pinned once
    per session.
    """
    backend: Literal["asyncio"] = "asyncio"
    return backend
|
|
|
|
|
|
@pytest.fixture(autouse=True, scope="session")
def _deactivate_retries() -> None:
    """Switch off stamina's retry machinery for the whole session.

    Retrying in tests would only slow the suite down and mask
    genuinely flaky behavior behind repeated attempts.
    """
    stamina.set_active(False)
|
|
|
|
|
|
@pytest.fixture(autouse=True)
def _test_db(mocker: MockerFixture) -> None:
    """Redirect the application database to in-memory storage.

    Patches ``app.database.DB_PATH`` to an empty string so tests never
    touch the on-disk database file.
    """
    mocker.patch("app.database.DB_PATH", "")
|
|
|
|
|
|
@pytest.fixture(scope="session")
def event_loop(
    request: pytest.FixtureRequest,
) -> Generator[asyncio.AbstractEventLoop, None, None]:
    """
    Provide one event loop shared across the entire test session.

    The loop is closed automatically once the session finishes.
    https://docs.pytest.org/en/6.2.x/fixture.html#fixture-scopes
    """
    policy = asyncio.get_event_loop_policy()
    loop = policy.get_event_loop()
    with contextlib.closing(loop):
        yield loop
|
|
|
|
|
|
@pytest.fixture(scope="session")
async def test_db_connection() -> AsyncGenerator[AsyncConnection, None]:
    """Provide a session-wide connection with a freshly built schema."""
    from app.database import Base, engine

    try:
        async with engine.begin() as connection:
            # Start from a clean slate: drop any leftover tables,
            # then create the full schema before handing out the connection.
            await connection.run_sync(Base.metadata.drop_all)
            await connection.run_sync(Base.metadata.create_all)
            yield connection
    finally:
        # for AsyncEngine created in function scope, close and
        # clean-up pooled connections
        await engine.dispose()
|
|
|
|
|
|
@pytest.fixture()
async def test_db_session(
    test_db_connection: AsyncConnection,
) -> AsyncGenerator[AsyncSession, None]:
    """Yield a database session scoped to a single test.

    Depending on ``test_db_connection`` guarantees the schema exists;
    the unit-of-work context manager owns the session's lifecycle.
    """
    from app.uow import async_session_uow

    async with async_session_uow() as uow_session:
        yield uow_session
|
|
|
|
|
|
@pytest.fixture()
async def some_repos(
    test_db_session: AsyncSession,
    source_graph_repo_data_factory: SourceGraphRepoDataFactory,
    dependency_create_data_factory: DependencyCreateDataFactory,
) -> list[Repo]:
    """Create and persist a batch of repos for tests to use.

    Builds ten ``Repo`` rows from factory-generated SourceGraph data,
    each with five freshly created dependencies, then flushes and
    refreshes them so database-generated values (e.g. primary keys)
    are populated on the returned objects.
    """
    source_graph_repos_data: list[
        SourceGraphRepoData
    ] = source_graph_repo_data_factory.batch(10)
    assert source_graph_repos_data == IsList(length=10)
    repos = [
        Repo(
            url=str(source_graph_repo_data.repo_url),
            description=source_graph_repo_data.description,
            stars=source_graph_repo_data.stars,
            source_graph_repo_id=source_graph_repo_data.repo_id,
            dependencies=[
                Dependency(**dependency_create_data.model_dump())
                for dependency_create_data in dependency_create_data_factory.batch(5)
            ],
        )
        for source_graph_repo_data in source_graph_repos_data
    ]
    test_db_session.add_all(repos)
    await test_db_session.flush()
    # An AsyncSession is not safe for concurrent use from multiple tasks,
    # so refresh sequentially rather than via asyncio.gather(), which
    # issued overlapping awaits on the same session.
    for repo in repos:
        await test_db_session.refresh(repo)
    return repos
|