# NOTE(review): the three lines below were non-Python residue from the file
# hosting page (author avatar caption, commit message, commit hash) and broke
# the module at import time; preserved here as a comment.
# brestok's picture | init | ab0a73d
"""
Chat API views module.
"""
import asyncio
from fastapi import Query
from app.api.common.db_requests import (
get_all_objs,
delete_obj,
get_obj_by_id,
)
from app.api.common.dto import Paging
from app.api.common.schemas import AllObjectsResponse, FilterRequest
from app.api.scraper import scraper_router
from app.api.scraper.db_requests import (
filter_jobs,
search_field_options,
get_statistics,
)
from app.api.scraper.schemas import SearchOptionRequest, StatisticsResponse
from app.api.scraper.dto import JobFilter
from app.api.scraper.models import JobModel
from app.core.wrappers import CbhResponseWrapper
from app.api.scraper.services import run_update
@scraper_router.get("/all")
async def get_all_jobs(
    pageSize: int = Query(  # pylint: disable=C0103
        10, description="Number of objects to return per page"
    ),
    pageIndex: int = Query(  # pylint: disable=C0103
        0, description="Page index to retrieve"
    ),
) -> CbhResponseWrapper[AllObjectsResponse[JobModel]]:
    """
    Return one page of jobs together with paging metadata.

    The page size and zero-based page index come from query parameters;
    ``totalCount`` reflects the full number of stored jobs, not just the
    returned page.
    """
    items, total = await get_all_objs(JobModel, pageSize, pageIndex)
    paging = Paging(pageSize=pageSize, pageIndex=pageIndex, totalCount=total)
    return CbhResponseWrapper(
        data=AllObjectsResponse(paging=paging, data=items)
    )
@scraper_router.post("/filter")
async def scrape(
    search_request: FilterRequest[JobFilter],
) -> CbhResponseWrapper[AllObjectsResponse[JobModel]]:
    """
    Filter stored jobs by the criteria in ``search_request`` and return
    one page of matches with paging metadata.

    NOTE(review): the previous docstring ("Scrape a specific URL") was
    copy-pasted from another handler and described the wrong behavior —
    this endpoint filters, it does not scrape. The function name ``scrape``
    is kept unchanged for route/caller compatibility.
    """
    jobs, total_count = await filter_jobs(search_request)
    return CbhResponseWrapper(
        data=AllObjectsResponse(
            paging=Paging(
                # Echo the client's paging request; totalCount is the size
                # of the full filtered result set, not just this page.
                pageSize=search_request.pageSize,
                pageIndex=search_request.pageIndex,
                totalCount=total_count,
            ),
            data=jobs,
        )
    )
@scraper_router.get("/statistics")
async def get_update_statistics() -> CbhResponseWrapper[StatisticsResponse]:
    """
    Return the scraper update statistics.
    """
    stats = await get_statistics()
    return CbhResponseWrapper(data=stats)
@scraper_router.post("/option/{field}/search")
async def scrape_option_search(
    field: str,
    request: SearchOptionRequest,
) -> CbhResponseWrapper[AllObjectsResponse[str]]:
    """
    Search the available options of a job ``field`` for values matching
    ``request.value`` and return them as a list of strings.

    NOTE(review): the previous docstring ("Scrape a specific URL") was
    copy-pasted from another handler and described the wrong behavior —
    this endpoint searches field options, it does not scrape.
    """
    options = await search_field_options(field, request.value)
    # NOTE(review): pageSize is hard-coded to 5 while ``data`` carries every
    # match and totalCount == len(options), so the paging block here is
    # informational only — confirm clients do not expect server-side
    # truncation to 5 items.
    return CbhResponseWrapper(
        data=AllObjectsResponse(
            paging=Paging(
                pageSize=5,
                pageIndex=0,
                totalCount=len(options),
            ),
            data=options,
        )
    )
@scraper_router.get("/{jobId}")
async def get_job(
    jobId: str,  # pylint: disable=C0103
) -> CbhResponseWrapper[JobModel]:
    """
    Fetch a single job by its identifier.
    """
    return CbhResponseWrapper(data=await get_obj_by_id(JobModel, jobId))
@scraper_router.delete("/{jobId}")
async def delete_job(
    jobId: str,  # pylint: disable=C0103
) -> CbhResponseWrapper:
    """
    Remove the job identified by ``jobId``.
    """
    # No payload on success — an empty wrapper signals the deletion went through.
    await delete_obj(JobModel, jobId)
    return CbhResponseWrapper()
# Strong references to fire-and-forget tasks: the event loop holds only a
# weak reference to tasks, so without this set a running update could be
# garbage-collected before it finishes (see asyncio.create_task docs).
_background_tasks: set = set()


@scraper_router.post("/run")
async def run_scraper() -> CbhResponseWrapper:
    """
    Kick off a scraper update in the background and return immediately.

    The update runs as a fire-and-forget asyncio task; this endpoint does
    not wait for it to complete or report its outcome.
    """
    task = asyncio.create_task(run_update())
    _background_tasks.add(task)
    # Drop the reference once the task finishes so the set does not grow.
    task.add_done_callback(_background_tasks.discard)
    return CbhResponseWrapper()