Spaces:
Running
Running
GitHub Actions committed on
Commit ·
767c4e6
1
Parent(s): fd63c95
Deploy f4d188a
Browse files
app/pipeline/nodes/log_eval.py
CHANGED
|
@@ -178,7 +178,7 @@ def make_log_eval_node(db_path: str) -> Callable[[PipelineState], dict]:
|
|
| 178 |
"source_hit_proxy": _source_hit_proxy(state),
|
| 179 |
}
|
| 180 |
|
| 181 |
-
def log_eval_node(state: PipelineState) -> dict:
|
| 182 |
try:
|
| 183 |
row_id = _write_to_sqlite(state)
|
| 184 |
try:
|
|
|
|
| 178 |
"source_hit_proxy": _source_hit_proxy(state),
|
| 179 |
}
|
| 180 |
|
| 181 |
+
async def log_eval_node(state: PipelineState) -> dict:
|
| 182 |
try:
|
| 183 |
row_id = _write_to_sqlite(state)
|
| 184 |
try:
|
tests/test_axiom_sink.py
CHANGED
|
@@ -156,7 +156,7 @@ async def test_source_hit_proxy_logged_to_sqlite(tmp_path) -> None:
|
|
| 156 |
"follow_ups": [],
|
| 157 |
}
|
| 158 |
|
| 159 |
-
result = node(state)
|
| 160 |
assert "interaction_id" in result
|
| 161 |
|
| 162 |
with sqlite3.connect(db_path) as conn:
|
|
|
|
| 156 |
"follow_ups": [],
|
| 157 |
}
|
| 158 |
|
| 159 |
+
result = await node(state)
|
| 160 |
assert "interaction_id" in result
|
| 161 |
|
| 162 |
with sqlite3.connect(db_path) as conn:
|
tests/test_log_eval_privacy.py
CHANGED
|
@@ -7,7 +7,8 @@ import pytest
|
|
| 7 |
from app.pipeline.nodes.log_eval import _PENDING_TASKS, make_log_eval_node
|
| 8 |
|
| 9 |
|
| 10 |
-
|
|
|
|
| 11 |
db_path = str(tmp_path / "interactions.db")
|
| 12 |
node = make_log_eval_node(db_path)
|
| 13 |
|
|
@@ -33,7 +34,7 @@ def test_log_eval_stores_chunk_metadata_without_text(tmp_path) -> None:
|
|
| 33 |
"is_enumeration_query": False,
|
| 34 |
}
|
| 35 |
|
| 36 |
-
node(state)
|
| 37 |
|
| 38 |
with sqlite3.connect(db_path) as conn:
|
| 39 |
row = conn.execute("SELECT reranked_chunks_json FROM interactions LIMIT 1").fetchone()
|
|
@@ -57,7 +58,7 @@ async def test_log_eval_sends_sanitized_axiom_payload(monkeypatch, tmp_path) ->
|
|
| 57 |
|
| 58 |
monkeypatch.setattr("app.pipeline.nodes.log_eval.ship_to_axiom", _fake_ship_to_axiom)
|
| 59 |
|
| 60 |
-
node(
|
| 61 |
{
|
| 62 |
"session_id": "s1",
|
| 63 |
"query": "What work experience does Darshan have?",
|
|
|
|
| 7 |
from app.pipeline.nodes.log_eval import _PENDING_TASKS, make_log_eval_node
|
| 8 |
|
| 9 |
|
| 10 |
+
@pytest.mark.asyncio
|
| 11 |
+
async def test_log_eval_stores_chunk_metadata_without_text(tmp_path) -> None:
|
| 12 |
db_path = str(tmp_path / "interactions.db")
|
| 13 |
node = make_log_eval_node(db_path)
|
| 14 |
|
|
|
|
| 34 |
"is_enumeration_query": False,
|
| 35 |
}
|
| 36 |
|
| 37 |
+
await node(state)
|
| 38 |
|
| 39 |
with sqlite3.connect(db_path) as conn:
|
| 40 |
row = conn.execute("SELECT reranked_chunks_json FROM interactions LIMIT 1").fetchone()
|
|
|
|
| 58 |
|
| 59 |
monkeypatch.setattr("app.pipeline.nodes.log_eval.ship_to_axiom", _fake_ship_to_axiom)
|
| 60 |
|
| 61 |
+
await node(
|
| 62 |
{
|
| 63 |
"session_id": "s1",
|
| 64 |
"query": "What work experience does Darshan have?",
|