Charles Azam committed
Commit 4fbfc2b · Parent(s): 892c58b

feat: skip expensive tests in pipeline
Files changed:
- .github/workflows/pipeline.yml +2 -14
- pytest.ini +5 -0
- tests/deepsearch/test_main_agent.py +1 -1
- tests/deepsearch/test_pdf_agent.py +1 -2
- tests/deepsearch/test_web_agent.py +1 -1
- tests/webcrawler/test_async_crawl.py +1 -1
- tests/webcrawler/test_crawl_database.py +4 -2
- tests/webcrawler/test_draw_agent.py +1 -0
.github/workflows/pipeline.yml CHANGED

@@ -15,25 +15,13 @@ jobs:
       - name: Install uv
         uses: astral-sh/setup-uv@v5
 
-      - uses: actions/setup-node@v4
-        with:
-          node-version: lts/*
-
-
-      - name: Install Playwright Browsers
-        run: npx playwright install --with-deps
-
       - name: Install the project
         run: uv sync --locked --all-extras --dev
 
-      - name: Run formatter check
-        run: uv run black --check src tests
-
       - name: Run tests
-        run: uv run pytest tests
+        run: uv run pytest -m "not (playwright or expensive)" tests
         env:
           MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
           LINKUP_API_KEY: ${{ secrets.LINKUP_API_KEY }}
           TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }}
-          DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
-
+          DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}

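Note on the new test invocation: pytest's -m option takes a boolean expression over marker names, so "not (playwright or expensive)" deselects any test carrying either marker; that is what lets the Node and Playwright install steps above be dropped from CI. A minimal sketch of the selection behavior (file and test names here are hypothetical, not from this repo):

# test_marker_selection_demo.py -- hypothetical demo file, not part of this commit.
# Run with:  uv run pytest -m "not (playwright or expensive)" test_marker_selection_demo.py
# Only test_cheap is collected; the two marked tests are reported as deselected.
import pytest


@pytest.mark.expensive
def test_calls_paid_api():  # stand-in for a test that spends API credits
    assert True


@pytest.mark.playwright
def test_needs_browser():  # stand-in for a test that drives a browser
    assert True


def test_cheap():  # unmarked, so it passes the "not (...)" filter
    assert True
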
pytest.ini ADDED

@@ -0,0 +1,5 @@
+[pytest]
+markers =
+    playwright: marks tests that need playwright (deselect with '-m "not playwright"')
+    expensive: marks tests that are expensive to run (deselect with '-m "not expensive"')
+    asyncio: marks tests that are asyncio (deselect with '-m "not asyncio"')

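Registering the markers here keeps pytest from emitting unknown-mark warnings and documents how each group is deselected. For comparison, the same registration could be done programmatically in a conftest.py; a sketch of that alternative (not what this commit uses), relying only on stock pytest hooks:

# conftest.py -- hypothetical alternative to the pytest.ini above.
def pytest_configure(config):
    # addinivalue_line("markers", ...) registers a marker at startup,
    # exactly like a "markers =" entry in pytest.ini.
    config.addinivalue_line("markers", "playwright: marks tests that need playwright")
    config.addinivalue_line("markers", "expensive: marks tests that are expensive to run")
    config.addinivalue_line("markers", "asyncio: marks tests that are asyncio")

If pytest-asyncio is installed it registers the asyncio marker itself, so that last entry is likely redundant but harmless.
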
tests/deepsearch/test_main_agent.py CHANGED

@@ -5,7 +5,7 @@ import pytest
 from deepengineer.deepsearch.main_agent import main_search
 
 
-@pytest.mark.
+@pytest.mark.expensive
 def test_main_agent():
     log_queue = queue.Queue()
     while not log_queue.empty():

tests/deepsearch/test_pdf_agent.py CHANGED

@@ -14,7 +14,6 @@ def load_mock_ocr_response() -> OCRResponse:
     with open(DATA_DIR / "report_thermal_neutron.json") as f:
         return OCRResponse.model_validate_json(f.read())
 
-
 def test_pdf_agent():
     ocr_response = load_mock_ocr_response()
     pdf_agent = create_agent(ocr_response)
@@ -28,7 +27,7 @@ def test_pdf_agent():
     GetPagesContentTool(ocr_response).forward([1, 2, 3])
     FindInMarkdownTool(ocr_response).forward(["thermal neutron", "neutron"])
 
-
+@pytest.mark.expensive
 def test_run_pdf_agent():
     ocr_response = load_mock_ocr_response()
     pdf_agent = create_agent(ocr_response)

tests/deepsearch/test_web_agent.py CHANGED

@@ -6,7 +6,7 @@ def test_create_web_search_agent():
     create_web_search_agent()
 
 
-@pytest.mark.
+@pytest.mark.expensive
 def test_run_web_search_agent():
     agent = create_web_search_agent()
     assert (

tests/webcrawler/test_async_crawl.py CHANGED

@@ -8,13 +8,13 @@ from deepengineer.webcrawler.async_crawl import (
 from deepengineer.webcrawler.testing import ARXIV_URL, URL_PDF, URL_WIKIPEDIA
 
 
+@pytest.mark.playwright
 @pytest.mark.asyncio
 async def test_crawl4ai_extract_markdown_of_url_async():
     markdown = await crawl4ai_extract_markdown_of_url_async(URL_WIKIPEDIA)
     assert isinstance(markdown, str)
     assert "Graphite-moderated reactor" in markdown
 
-
 @pytest.mark.asyncio
 async def test_download_pdf_async():
     output_path = DATA_DIR / "temp.pdf"

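Markers stack: the crawl test above now carries both playwright and asyncio, so CI's "not (playwright or expensive)" expression deselects it while it still runs as a coroutine locally. A sketch of the same pattern (hypothetical test, assuming pytest-asyncio provides the asyncio marker, as this repo's usage suggests):

import asyncio

import pytest


@pytest.mark.playwright
@pytest.mark.asyncio
async def test_stacked_markers_demo():
    # Deselected by -m "not playwright"; otherwise runs on pytest-asyncio's event loop.
    await asyncio.sleep(0)  # placeholder for a real browser-driven crawl
    assert True
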
tests/webcrawler/test_crawl_database.py CHANGED

@@ -1,6 +1,7 @@
 from deepengineer.webcrawler.crawl_database import DataBase
+import pytest
 
-
+@pytest.mark.expensive
 def test_crawl_database_arxiv_pdf():
     db = DataBase()
     db.crawl_url("https://arxiv.org/pdf/2105.00643")
@@ -12,7 +13,7 @@ def test_crawl_database_arxiv_pdf():
     )
     assert len(db.get_markdown_of_url("https://arxiv.org/pdf/2105.00643").pages) == 20
 
-
+@pytest.mark.expensive
 def test_crawl_database_arxiv_link():
     db = DataBase()
     db.crawl_url("https://arxiv.org/abs/2105.00643")
@@ -25,6 +26,7 @@ def test_crawl_database_arxiv_link():
     assert len(db.get_markdown_of_url("https://arxiv.org/abs/2105.00643").pages) == 20
 
 
+@pytest.mark.expensive
 def test_crawl_database_wikipedia_url():
     db = DataBase()
     db.crawl_url("https://en.wikipedia.org/wiki/Deep_learning")

tests/webcrawler/test_draw_agent.py CHANGED

@@ -20,6 +20,7 @@ def test_draw_image_agent():
     assert output_path.exists()
 
 
+@pytest.mark.expensive
 def test_save_matplotlib_fig_tool():
    model = LiteLLMModel(model_id="mistral/mistral-medium-latest")
    agent = CodeAgent(