Hugging Face Space — app.py (commit: "add in CPU switch"; Space status at capture time: Runtime error)
app.py CHANGED
|
@@ -10,6 +10,16 @@ from docquery.document import load_document
|
|
| 10 |
# Chatbot model
|
| 11 |
#model = pipeline("document-question-answering", model="impira/layoutlm-document-qa")
|
| 12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
def run_pipeline(question, document):
    """Answer *question* about *document* with the document-QA model.

    Builds (or fetches from cache) the LayoutLM document-question-answering
    pipeline and returns its top 3 candidate answers.

    Args:
        question: Natural-language question to ask about the document.
        document: A loaded document object exposing a ``context`` mapping
            (presumably a docquery document — TODO confirm against caller).

    Returns:
        The pipeline output: up to 3 ranked answer candidates (``top_k=3``).
    """
    # Fix: the local was previously named `pipeline`, shadowing the
    # transformers `pipeline` factory used elsewhere in this module.
    qa_pipeline = construct_pipeline("document-question-answering", "impira/layoutlm-document-qa")
    return qa_pipeline(question=question, **document.context, top_k=3)
|
|
|
|
| 10 |
# Chatbot model
|
| 11 |
#model = pipeline("document-question-answering", model="impira/layoutlm-document-qa")
|
| 12 |
|
| 13 |
+
def construct_pipeline(task, model):
    """Return a transformers pipeline for *task*/*model*, memoized per model.

    Looks the model up in the module-level ``PIPELINES`` cache first; on a
    miss, builds the pipeline from its entry in ``CHECKPOINTS`` and stores
    it. Runs on CUDA when available, otherwise on the CPU.

    Args:
        task: Pipeline task identifier (e.g. "document-question-answering").
        model: Cache key, also the key into ``CHECKPOINTS``.

    Returns:
        The cached or freshly constructed pipeline object.
    """
    global PIPELINES

    # EAFP cache lookup: return the already-built pipeline if we have one.
    try:
        return PIPELINES[model]
    except KeyError:
        pass

    # CPU switch: fall back to "cpu" when no CUDA device is present.
    target_device = "cuda" if torch.cuda.is_available() else "cpu"
    built = pipeline(task=task, model=CHECKPOINTS[model], device=target_device)
    PIPELINES[model] = built
    return built
|
| 22 |
+
|
| 23 |
def run_pipeline(question, document):
    """Answer *question* about *document* with the document-QA model.

    Builds (or fetches from cache) the LayoutLM document-question-answering
    pipeline and returns its top 3 candidate answers.

    Args:
        question: Natural-language question to ask about the document.
        document: A loaded document object exposing a ``context`` mapping
            (presumably a docquery document — TODO confirm against caller).

    Returns:
        The pipeline output: up to 3 ranked answer candidates (``top_k=3``).
    """
    # Fix: the local was previously named `pipeline`, shadowing the
    # transformers `pipeline` factory used elsewhere in this module.
    qa_pipeline = construct_pipeline("document-question-answering", "impira/layoutlm-document-qa")
    return qa_pipeline(question=question, **document.context, top_k=3)
|