awesome-fastapi-projects/app/source_graph/tests/test_client.py
Vladyslav Fedoriuk 2fdd348a15
Web App (#25)
* Set up the web project dependencies

- Add linters, pre-commit, and GitHub Actions
- Add a Makefile
- Add a pyproject.toml

* Fix pyupgrade job

- Remove continue_on_error everywhere

* Remove old code

* Rename a GitHub Actions job

* Change README

* Adjust pre-commit and GitHub actions

* Add tables and set up alembic

* Set up tests

* Extend tests

* Add coverage config

* Adjust the GitHub Actions workflow

* Fix the GitHub Actions workflow

* Try fixing pyproject-fmt config

* Fix formatting of pyproject.toml

* Fix formatting of pyproject.toml

* Add coverage report

* Test listing the repositories

* Add a working prototype of SourceGraph client

* Add parsing of the SourceGraph SSE data
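
  A minimal sketch of how that SSE parsing can look, assuming the client consumes SourceGraph's stream-search endpoint; the function name is illustrative, not the project's actual API:

```python
import json
from collections.abc import Iterable, Iterator
from typing import Any


def iter_sse_events(lines: Iterable[str]) -> Iterator[tuple[str, Any]]:
    """Yield ``(event, data)`` pairs from the lines of an SSE stream."""
    event, data_lines = "message", []
    for raw_line in lines:
        line = raw_line.rstrip("\n")
        if not line:
            # A blank line terminates the current event.
            if data_lines:
                yield event, json.loads("\n".join(data_lines))
            event, data_lines = "message", []
        elif line.startswith("event:"):
            event = line.removeprefix("event:").strip()
        elif line.startswith("data:"):
            data_lines.append(line.removeprefix("data:").strip())


# Matched repositories arrive in "matches" events:
# repos = [data for event, data in iter_sse_events(lines) if event == "matches"]
```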

* Fix tests

* Get rid of packages replaced by ruff

* Fix waits in the SourceGraph client

* Refactor the models and add a mapper

- The new mapper allows creating database repositories from the SourceGraph data
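
  A sketch of what such a mapper can look like; the target column names are assumptions, not the project's actual schema:

```python
from app.source_graph.models import SourceGraphRepoData


def map_repo_data_to_rows(
    repos_data: list[SourceGraphRepoData],
) -> list[dict[str, object]]:
    """Map SourceGraph payloads to rows for the repositories table."""
    return [
        {
            "url": str(repo_data.repo_url),
            "description": repo_data.description,
            "stars": repo_data.stars,
            "source_graph_repo_id": repo_data.repo_id,
        }
        for repo_data in repos_data
    ]
```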

* Add mypy

* Try fixing mypy action

* Remove redundant configs

* Exclude tests from type checking

* Fix mypy pre-commit and GitHub action

* Ignore factories

* Make upserting possible for SourceGraph data

* Add logic for parsing the dependencies and populating the database
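
  The parser itself is not shown on this page; as a naive illustration only (the real logic is likely more involved, handling pyproject.toml, extras, and markers), extracting dependency names from requirements-style text could look like:

```python
import re


def parse_requirement_names(requirements_text: str) -> set[str]:
    """Extract bare package names from requirements.txt-style text."""
    names: set[str] = set()
    for line in requirements_text.splitlines():
        line = line.split("#", 1)[0].strip()  # drop inline comments
        if not line or line.startswith("-"):  # skip options like -r / -e
            continue
        if match := re.match(r"[A-Za-z0-9][A-Za-z0-9._-]*", line):
            names.add(match.group(0).lower())
    return names
```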

* Add a database and a cron GitHub Action job

* Try manually triggering a workflow

* Bring back the old config

* Add ReadTimeout for errors to retry for in SourceGraph client
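
  A sketch of the retry policy, assuming the waits mentioned earlier come from tenacity; the attempt count and backoff values are illustrative:

```python
import httpx
from tenacity import (
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)


@retry(
    retry=retry_if_exception_type(httpx.ReadTimeout),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    stop=stop_after_attempt(5),
    reraise=True,
)
async def fetch(client: httpx.AsyncClient, url: str) -> httpx.Response:
    """Fetch ``url``, retrying on read timeouts."""
    response = await client.get(url)
    response.raise_for_status()
    return response
```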

* Add typer
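
  A hypothetical entry point showing the typer wiring; the command name and the scraping coroutine are placeholders:

```python
import asyncio

import typer

app = typer.Typer()


async def scrape_source_graph_repos() -> None:
    """Placeholder for the actual scraping coroutine."""


@app.command()
def scrape_repos() -> None:
    """Scrape the matched repositories from SourceGraph."""
    asyncio.run(scrape_source_graph_repos())


if __name__ == "__main__":
    app()
```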

* Adjust the docstrings

* Update the database

* Refactor and optimize scraping and dependencies parsing

* Make scraping run on push for now

* Add a unique constraint for the repo url and source graph repo id

* Change the index columns in on_conflict statement for repo creation
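
  Together with the unique constraint above, this enables an idempotent upsert; a sketch with SQLAlchemy's SQLite dialect, where the table and column names are assumptions:

```python
from sqlalchemy import Connection, Table
from sqlalchemy.dialects.sqlite import insert


def upsert_repos(connection: Connection, repo: Table, rows: list[dict]) -> None:
    """Insert repos, updating the mutable columns on conflict."""
    statement = insert(repo).values(rows)
    statement = statement.on_conflict_do_update(
        # The conflict target matches the unique constraint on
        # the repo URL and the SourceGraph repo id.
        index_elements=["url", "source_graph_repo_id"],
        set_={
            "stars": statement.excluded.stars,
            "description": statement.excluded.description,
        },
    )
    connection.execute(statement)
```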

* Optimize dependencies parsing

- Do not parse dependencies for a repo when the revision did not change
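
  A sketch of that check, with hypothetical names (`last_checked_revision` in particular is an assumption):

```python
from collections.abc import Awaitable, Callable
from dataclasses import dataclass


@dataclass
class Repo:
    """Stand-in for the ORM repository model."""

    url: str
    last_checked_revision: str | None = None


async def maybe_parse_dependencies(
    repo: Repo,
    current_revision: str,
    parse_dependencies: Callable[[Repo], Awaitable[list[str]]],
) -> list[str] | None:
    """Parse dependencies only when the revision changed since the last run."""
    if repo.last_checked_revision == current_revision:
        return None  # the repo contents did not change; skip the parse
    dependencies = await parse_dependencies(repo)
    repo.last_checked_revision = current_revision
    return dependencies
```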

* Scraped repositories from Source Graph

* Refactor scraping

* Set up frontend

* Scraped repositories from Source Graph

* Add TODOs

* Skip scraping when testing

* Fix a test with updating the repos

* Scraped repositories from Source Graph

* Add some more TODOs

* Scraped repositories from Source Graph

* Add some more TODO comments

* Add shadcn/ui

* Scraped repositories from Source Graph

* Create index.json

* Scraped repositories from Source Graph

* Add a draft of data table and display all the repos

* Scraped repositories from Source Graph

* Implement stars badges and description with overflow

* Format the links to Github repos

* Fix link clicking

* Scraped repositories from Source Graph

* Add simple pagination and stars column sorting

* Scraped repositories from Source Graph

* Implement basic searching

* Scraped repositories from Source Graph

* Implement a multiselect for dependencies

* Scraped repositories from Source Graph

* Implement actual filtering by dependencies

* Scraped repositories from Source Graph

* Add a workflow to deploy Next.js on GitHub Pages

* Try fixing the deployment job

* Enable static exports for app router

* Fix uploading artifacts for the Next.js job

* Set base path to properly load JS and CSS

* Fix the base path

* Scraped repositories from Source Graph

* Add header

* Remove language version

* Scraped repositories from Source Graph

* Add some more TODOs

* Scraped repositories from Source Graph

* Adjust the pre-commit config

* Fix pipelines

* Scraped repositories from Source Graph

* Add a footer

* Create the indexes

* Scraped repositories from Source Graph

* Add more TODOs

* Introduce minor footer adjustments

* Adjust the scraping actions

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Implement query params state

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Do not commit query state on unmount

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Hopefully fix query states and multiselect input

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Extend the Makefile

* Resolve most of TODOs

* Resolve the conflicts with the anyio version, bring back httpx

* Adjust the Makefile and README.md

* Fix a typo in README.md

* Adjust readme

* Fix the Makefile

* Fix some stuff

* Make some adjustments

* Possibly fix failing scraping jobs

* Load the repo project URL from env

---------

Co-authored-by: vladfedoriuk <vladfedoriuk@users.noreply.github.com>
Co-authored-by: Vladyslav Fedoriuk <vladyslav.fedoriuk@deployed.pl>
2023-10-28 21:39:02 +02:00


"""Test the client module for the source graph."""
from typing import Any
import pytest
from dirty_equals import HasLen, IsDatetime, IsInstance, IsPositiveInt
from pydantic import Json, TypeAdapter
from app.source_graph.models import SourceGraphRepoData
@pytest.fixture()
def source_graph_matched_repos_data() -> Json[Any]:
"""Return the sample data of the matched repositories."""
return [
{
"type": "repo",
"repositoryID": 55636527,
"repository": "github.com/tiangolo/sqlmodel",
"repoStars": 10277,
"repoLastFetched": "2023-07-31T18:47:22.875731Z",
"description": (
"SQL databases in Python, designed "
"for simplicity, compatibility, "
"and robustness."
),
"metadata": {
"fastapi": "null",
"json": "null",
"json-schema": "null",
"pydantic": "null",
"python": "null",
"sql": "null",
"sqlalchemy": "null",
},
},
{
"type": "repo",
"repositoryID": 59434622,
"repository": "github.com/reflex-dev/reflex",
"repoStars": 10061,
"repoLastFetched": "2023-07-31T08:58:42.692906Z",
"description": "(Previously Pynecone) 🕸 Web apps in pure Python 🐍",
},
{
"type": "repo",
"repositoryID": 42982149,
"repository": "github.com/PaddlePaddle/PaddleNLP",
"repoStars": 9804,
"repoLastFetched": "2023-07-31T16:48:08.839209Z",
"description": (
"👑 Easy-to-use and powerful NLP library with 🤗 "
"Awesome model zoo, supporting wide-range of NLP tasks "
"from research to industrial applications, including"
" 🗂Text Classification, 🔍 Neural Search, ❓ Question "
"Answering, Information Extraction, "
"📄 Document Intelligence, 💌 Sentiment Analysis etc."
),
"metadata": {
"bert": "null",
"embedding": "null",
"ernie": "null",
"information-extraction": "null",
"neural-search": "null",
"nlp": "null",
"paddlenlp": "null",
"pretrained-models": "null",
"question-answering": "null",
"search-engine": "null",
"semantic-analysis": "null",
"sentiment-analysis": "null",
"seq2seq": "null",
"transformer": "null",
"transformers": "null",
"uie": "null",
},
},
{
"type": "repo",
"repositoryID": 36246068,
"repository": "github.com/realpython/materials",
"repoStars": 4359,
"repoLastFetched": "2023-07-31T05:15:16.993896Z",
},
]
def test_source_graph_repo_data(source_graph_matched_repos_data: Json[Any]) -> None:
"""Test the SourceGraphRepoData deserialization."""
assert source_graph_matched_repos_data == HasLen(4)
_SourceGraphRepoDataListValidator = TypeAdapter(list[SourceGraphRepoData])
repos_parsed = _SourceGraphRepoDataListValidator.validate_python(
source_graph_matched_repos_data
)
assert repos_parsed == HasLen(4)
assert all(repo == IsInstance[SourceGraphRepoData] for repo in repos_parsed)
assert all(
repo.repo_id == repo_data["repositoryID"]
for repo, repo_data in zip(
repos_parsed, source_graph_matched_repos_data, strict=True
)
)
assert all(
repo.repo_handle == repo_data["repository"]
for repo, repo_data in zip(
repos_parsed, source_graph_matched_repos_data, strict=True
)
)
assert all(
repo.stars == IsPositiveInt and repo.stars == repo_data["repoStars"]
for repo, repo_data in zip(
repos_parsed, source_graph_matched_repos_data, strict=True
)
)
assert all(
str(repo.repo_url) == f"https://{repo_data['repository']}"
for repo, repo_data in zip(
repos_parsed, source_graph_matched_repos_data, strict=True
)
)
assert all(repo.last_fetched_at == IsDatetime for repo in repos_parsed)
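
For orientation, a sketch of what `SourceGraphRepoData` might look like, inferred purely from the assertions above; the real definition lives in `app/source_graph/models.py` and may differ:

```python
from datetime import datetime

from pydantic import BaseModel, Field


class SourceGraphRepoData(BaseModel):
    """Sketch of the model exercised by the test above."""

    type: str
    repo_id: int = Field(alias="repositoryID")
    repo_handle: str = Field(alias="repository")
    stars: int = Field(alias="repoStars")
    last_fetched_at: datetime = Field(alias="repoLastFetched")
    description: str = ""

    @property
    def repo_url(self) -> str:
        # The test asserts str(repo.repo_url) == f"https://{repository}".
        return f"https://{self.repo_handle}"
```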