mirror of
https://github.com/Kludex/awesome-fastapi-projects.git
synced 2024-12-18 00:00:14 +00:00
2fdd348a15
* Set up the web project dependencies - Add linters, pre-commit, and GitHub Actions - Add a Makefile - Add a pyproject.toml * Fix pyupgrade job - Remove continue_on_error everywhere * Remove old code * Rename a GithubActions job * Change README * Adjust pre-commit and GitHub actions * Add tables and set up alembic * Set up tests * Extend tests * Add coverage config * Adjust the GithubActions workflow * Fix GithubActions workflow * Try fixing pyproject-fmt config * Fix formatting of pyproject.toml * Fix formatting of pyproject.toml * Add coverage report * Test listing the repositories * Add a working prototype of SourceGraph client * Add parsing of the SourceGraph SSE data * Fix tests * Ged rid of packages replaced by ruff * Fix waits in the SourceGraph client * Refactor the models and add a mapper - A new mapper allows to create database repositories from the SourceGraph data * Add mypy * Try fixing mypy action * Remove redundant configs * Exclude tests from type checking * Fix mypy pre-commit and GitHub action * Ignore factories * Make upserting possible for source graph data * Add logic for parsing the dependencies and populating the database * Add a database and a cron GitHub Action job * Try manually trigger a workflow * Bring back the old config * Add ReadTimeout for errors to retry for in SourceGraph client * Add typer * Adjust the docstrings * Update the database * Refactor and optimize scraping and dependencies parsing * Make scraping run on push for now * Add a unique constraint for the repo url and source graph repo id * Change the index columns in on_conflict statement for repo creation * Optimize dependencies parsing - Do not parse dependencies for a repo when revision did not change * Scraped repositories from Source Graph * Refactor scraping * Set up frontend * Scraped repositories from Source Graph * Add TODOs * Skip scraping when testing * Fix a test with updating the repos * Scraped repositories from Source Graph * Add some more TODOs * Scraped 
repositories from Source Graph * Add some more TODO comments * Add chadcn/ui * Scraped repositories from Source Graph * Create index.json * Scraped repositories from Source Graph * Add a draft of data table and display all the repos * Scraped repositories from Source Graph * Implement stars badges and description with overflow * Format the links to Github repos * Fix link clicking * Scraped repositories from Source Graph * Add simple pagination and stars column sorting * Scraped repositories from Source Graph * Implement basic searching * Scraped repositories from Source Graph * Implement a multiselect for dependencies * Scraped repositories from Source Graph * Implement actual filtering by dependencies * Scraped repositories from Source Graph * Add a workflow to deploy nextjs on github pages * Try fixing the deployment job * Enable static exports for app router * Fix uploading arifacts for nextjs job * Set base path to properly load JS and CSS * Fix the base path * Scraped repositories from Source Graph * Add header * Remove language version * Scraped repositories from Source Graph * Add some more TODOs * Scraped repositories from Source Graph * Adjust the pre-commit config * Fix pipelines * Scraped repositories from Source Graph * Add a footer * Create the indexes * Scraped repositories from Source Graph * Add more TODOs * Introduce minor footer adjustments * Adjust the scraping actions * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Implement query params state * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Do not commit query state on unmount * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Hopefully fix query states and multiselect input * Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes * Extend the Makefile * Resolve most of TODOs * Resolve the conflicts with anyio version, 
bring back httpx * Adjust the Makefile and README.md * Fix a typo in README.md * Adjust readme * Fix the Makefile * Fix some stuff * Make some adjustments * Possibly fix failing scraping jobs * Load the repo project URL from env --------- Co-authored-by: vladfedoriuk <vladfedoriuk@users.noreply.github.com> Co-authored-by: Vladyslav Fedoriuk <vladyslav.fedoriuk@deployed.pl>
105 lines
2.8 KiB
Python
105 lines
2.8 KiB
Python
"""
|
|
Create repos and dependencies indexes.
|
|
|
|
This script creates can create two indexes:
|
|
|
|
- ``repos_index.json``: Contains all the repositories and their dependencies.
|
|
- ``dependencies_index.json``: Contains all the dependencies and the
|
|
repositories that depend on them.
|
|
|
|
The indexes are used by the frontend to display the data and perform searches.
|
|
"""
|
|
import asyncio
|
|
import json
|
|
from pathlib import Path
|
|
from typing import Final
|
|
|
|
import aiofiles
|
|
import sqlalchemy.orm
|
|
import typer
|
|
|
|
from app.database import Dependency, Repo
|
|
from app.models import DependencyDetail, RepoDetail
|
|
from app.uow import async_session_uow
|
|
|
|
#: Directory where the generated index files live (the project root).
_INDEXES_DIR: Final[Path] = Path(__file__).parent.parent

#: The path to the repos index file.
REPOS_INDEX_PATH: Final[Path] = _INDEXES_DIR / "repos_index.json"

#: The path to the dependencies index file.
DEPENDENCIES_INDEX_PATH: Final[Path] = _INDEXES_DIR / "dependencies_index.json"

#: Typer application exposing the indexing commands.
app = typer.Typer()
|
|
|
|
|
|
async def create_repos_index() -> None:
    """
    Create the ``repos_index.json`` file from the database.

    Streams every repository ordered by id (with its dependencies
    eagerly loaded via ``selectinload``), serializes each one through
    :class:`RepoDetail`, and writes them all as a single JSON document.

    :return: None
    """
    # Hoist the query so the I/O section below stays readable.
    query = (
        sqlalchemy.select(Repo)
        .order_by(Repo.id)
        .options(sqlalchemy.orm.selectinload(Repo.dependencies))
    )
    async with async_session_uow() as session, aiofiles.open(
        REPOS_INDEX_PATH, "w"
    ) as index_file:
        repos = [
            RepoDetail.model_validate(repo).model_dump()
            async for repo in await session.stream_scalars(query)
        ]
        await index_file.write(json.dumps({"repos": repos}, indent=4))
|
|
|
|
|
|
async def create_dependencies_index() -> None:
    """
    Create the ``dependencies_index.json`` file from the database.

    Streams every dependency ordered by id, skips entries whose name is
    empty, serializes each through :class:`DependencyDetail`, and writes
    them all as a single JSON document.

    :return: None
    """
    query = sqlalchemy.select(Dependency).order_by(Dependency.id)
    async with async_session_uow() as session, aiofiles.open(
        DEPENDENCIES_INDEX_PATH, "w"
    ) as index_file:
        payload = {
            "dependencies": [
                DependencyDetail.model_validate(dependency).model_dump()
                async for dependency in await session.stream_scalars(query)
                # Dependencies with an empty name are not useful to index.
                if dependency.name
            ],
        }
        await index_file.write(json.dumps(payload, indent=4))
|
|
|
|
|
|
@app.command()
def index_repos() -> None:
    """Generate the ``repos_index.json`` index file."""
    # Typer commands are synchronous; drive the async worker to completion.
    asyncio.run(create_repos_index())
|
|
|
|
|
|
@app.command()
def index_dependencies() -> None:
    """Generate the ``dependencies_index.json`` index file."""
    # Typer commands are synchronous; drive the async worker to completion.
    asyncio.run(create_dependencies_index())
|
|
|
|
|
|
if __name__ == "__main__":
|
|
app()
|