Vladyslav Fedoriuk 2fdd348a15
Web App (#25)
* Set up the web project dependencies

- Add linters, pre-commit, and GitHub Actions
- Add a Makefile
- Add a pyproject.toml

* Fix pyupgrade job

- Remove continue_on_error everywhere

* Remove old code

* Rename a GithubActions job

* Change README

* Adjust pre-commit and GitHub actions

* Add tables and set up alembic

* Set up tests

* Extend tests

* Add coverage config

* Adjust the GithubActions workflow

* Fix GithubActions workflow

* Try fixing pyproject-fmt config

* Fix formatting of pyproject.toml

* Fix formatting of pyproject.toml

* Add coverage report

* Test listing the repositories

* Add a working prototype of SourceGraph client

* Add parsing of the SourceGraph SSE data

* Fix tests

* Get rid of packages replaced by ruff

* Fix waits in the SourceGraph client

* Refactor the models and add a mapper

- A new mapper allows to create database repositories from the SourceGraph data

* Add mypy

* Try fixing mypy action

* Remove redundant configs

* Exclude tests from type checking

* Fix mypy pre-commit and GitHub action

* Ignore factories

* Make upserting possible for source graph data

* Add logic for parsing the dependencies and populating the database

* Add a database and a cron GitHub Action job

* Try manually triggering a workflow

* Bring back the old config

* Add ReadTimeout for errors to retry for in SourceGraph client

* Add typer

* Adjust the docstrings

* Update the database

* Refactor and optimize scraping and dependencies parsing

* Make scraping run on push for now

* Add a unique constraint for the repo url and source graph repo id

* Change the index columns in on_conflict statement for repo creation

* Optimize dependencies parsing

- Do not parse dependencies for a repo when its revision did not change

* Scraped repositories from Source Graph

* Refactor scraping

* Set up frontend

* Scraped repositories from Source Graph

* Add TODOs

* Skip scraping when testing

* Fix a test with updating the repos

* Scraped repositories from Source Graph

* Add some more TODOs

* Scraped repositories from Source Graph

* Add some more TODO comments

* Add shadcn/ui

* Scraped repositories from Source Graph

* Create index.json

* Scraped repositories from Source Graph

* Add a draft of data table and display all the repos

* Scraped repositories from Source Graph

* Implement stars badges and description with overflow

* Format the links to Github repos

* Fix link clicking

* Scraped repositories from Source Graph

* Add simple pagination and stars column sorting

* Scraped repositories from Source Graph

* Implement basic searching

* Scraped repositories from Source Graph

* Implement a multiselect for dependencies

* Scraped repositories from Source Graph

* Implement actual filtering by dependencies

* Scraped repositories from Source Graph

* Add a workflow to deploy nextjs on github pages

* Try fixing the deployment job

* Enable static exports for app router

* Fix uploading artifacts for nextjs job

* Set base path to properly load JS and CSS

* Fix the base path

* Scraped repositories from Source Graph

* Add header

* Remove language version

* Scraped repositories from Source Graph

* Add some more TODOs

* Scraped repositories from Source Graph

* Adjust the pre-commit config

* Fix pipelines

* Scraped repositories from Source Graph

* Add a footer

* Create the indexes

* Scraped repositories from Source Graph

* Add more TODOs

* Introduce minor footer adjustments

* Adjust the scraping actions

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Implement query params state

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Do not commit query state on unmount

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Hopefully fix query states and multiselect input

* Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes

* Extend the Makefile

* Resolve most of TODOs

* Resolve the conflicts with anyio version, bring back httpx

* Adjust the Makefile and README.md

* Fix a typo in README.md

* Adjust readme

* Fix the Makefile

* Fix some stuff

* Make some adjustments

* Possibly fix failing scraping jobs

* Load the repo project URL from env

---------

Co-authored-by: vladfedoriuk <vladfedoriuk@users.noreply.github.com>
Co-authored-by: Vladyslav Fedoriuk <vladyslav.fedoriuk@deployed.pl>
2023-10-28 21:39:02 +02:00

83 lines
2.4 KiB
TypeScript

"use client";
import { Repo, Dependency } from "@/lib/schemas";
import { search } from "@orama/orama";
import { SearchForm } from "./search-form";
import { columns } from "./columns";
import { DataTable } from "./data-table";
import { useReposOrama } from "@/lib/search";
import { useState } from "react";
import { useQuerySearchFormData } from "@/lib/hooks";
import React from "react";
/**
 * Renders the searchable, filterable table of scraped repositories.
 *
 * Full-text search over repo descriptions is delegated to Orama; filtering
 * by dependencies is done client-side because Orama does not support
 * filtering by properties of objects inside arrays.
 *
 * @param repos - All repositories; also the initial (unfiltered) table data.
 * @param dependencies - All known dependencies, used to resolve the
 *   dependency filter restored from the URL query params.
 */
export function ReposTable({
  repos,
  dependencies,
}: {
  repos: Repo[];
  dependencies: Dependency[];
}) {
  const reposOrama = useReposOrama();
  const [searchedRepos, setSearchedRepos] = useState<Repo[]>(repos);
  const { searchQueryFromQueryParam, dependenciesQueryFromQueryParam } =
    useQuerySearchFormData(dependencies);

  // Runs a description search plus dependency filter and commits the result.
  // Throws if the Orama index is not ready yet; callers must handle rejection.
  const onSearchSubmit = React.useCallback(
    async ({
      search: description,
      dependencies,
    }: {
      search: string;
      dependencies: Dependency[];
    }) => {
      if (!reposOrama.isIndexed || !reposOrama.orama) {
        throw new Error("Repos Orama is not initialized");
      }
      // limit: repos.length ensures no hits are truncated.
      const results = await search<Repo>(reposOrama.orama, {
        term: description,
        properties: ["description"],
        limit: repos.length,
      });
      const searchedRepos = results.hits.map((hit) => hit.document as Repo);
      // Workaround because Orama doesn't support filtering by properties of
      // objects in arrays: keep only repos containing every selected dependency.
      const filteredRepos = searchedRepos.filter((repo) =>
        dependencies.every((dependency) =>
          repo.dependencies.some(
            (repoDependency) => repoDependency.id === dependency.id,
          ),
        ),
      );
      setSearchedRepos(filteredRepos);
    },
    [repos, reposOrama.isIndexed, reposOrama.orama],
  );

  // Callback ref used as an "index became ready" hook: React re-invokes it
  // whenever its identity changes (i.e. when isIndexed/orama flip), so the
  // initial search from the URL query params runs once indexing completes.
  const _ref = React.useCallback(
    (node: HTMLDivElement | null) => {
      if (node !== null && reposOrama.isIndexed && reposOrama.orama) {
        // Fire-and-forget, but catch rejections explicitly: onSearchSubmit
        // is async and may throw; leaving the promise floating would surface
        // as an unhandled promise rejection.
        void onSearchSubmit({
          search: searchQueryFromQueryParam(),
          dependencies: dependenciesQueryFromQueryParam(),
        }).catch((error: unknown) => {
          console.error("Initial repository search failed", error);
        });
      }
    },
    [
      dependenciesQueryFromQueryParam,
      onSearchSubmit,
      reposOrama.isIndexed,
      reposOrama.orama,
      searchQueryFromQueryParam,
    ],
  );

  return (
    <>
      <div className="container mb-4 max-w-xl" ref={_ref}>
        <SearchForm onSubmit={onSearchSubmit} dependencies={dependencies} />
      </div>
      <DataTable columns={columns} data={searchedRepos} />
    </>
  );
}