Spaces:
Build error
from transformers import AutoTokenizer, AutoModelForTableQuestionAnswering
import pandas as pd
from io import StringIO
def initialize_tapas():
    """Load the TAPAS model and tokenizer for table question answering.

    Downloads (or loads from the local cache) the TAPAS-large checkpoint
    fine-tuned on the WikiTableQuestions (WTQ) dataset.

    Returns:
        tuple: ``(tokenizer, model)`` — the pretrained tokenizer and the
        ``AutoModelForTableQuestionAnswering`` instance.
    """
    # Single source of truth for the checkpoint name; both halves must match.
    model_name = "google/tapas-large-finetuned-wtq"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForTableQuestionAnswering.from_pretrained(model_name)
    return tokenizer, model
def ask_llm_chunk(tokenizer, model, chunk, questions):
    # ... [same as in your code]

def summarize_map_reduce(tokenizer, model, data, questions):
    # ... [same as in your code]