gursul committed on
Commit
e919158
·
verified ·
1 Parent(s): 99aa3c8

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +9 -17
app.py CHANGED
@@ -239,16 +239,21 @@ def toggle_article_input(word_type):
239
  return gr.update(interactive=(word_type == "noun"))
240
 
241
  def get_words_table_en_de(search_query=""):
242
- eng_data = load_english() # Load fresh
243
  ger_data = load_german() # German→English (artikel lives here)
244
  filtered = {}
245
  for eng_word, meta in sorted(eng_data.items(), key=lambda kv: kv[0].lower()):
246
  translations = meta.get("translations", [])
247
  # check if search matches the word or any translation
248
  if search_query.lower() in eng_word.lower() or any(search_query.lower() in t.lower() for t in translations):
249
- arts = _articles_for_translations(translations, ger_data)
250
- artikel_text = ", ".join(arts) if arts else "" # ✅ Bug 1: no null
251
- translations_text = ", ".join(translations) # ✅ Bug 2: join translations
 
 
 
 
 
252
  filtered[eng_word] = [eng_word, artikel_text, translations_text, meta.get("type", "")]
253
  return list(filtered.values())
254
 
@@ -267,19 +272,6 @@ def get_words_table_de_en(search_query=""):
267
  def reset_filter():
268
  return "", get_words_table_en_de(), get_words_table_de_en()
269
 
270
- def _articles_for_translations(translations, ger_data):
271
- order = {"der": 0, "die": 1, "das": 2}
272
- seen = set()
273
- arts = []
274
- for g in translations:
275
- art = (ger_data.get(g, {}).get("artikel") or "").strip().lower()
276
- if art and art not in seen:
277
- seen.add(art)
278
- arts.append(art)
279
- # sort to show as der, die, das (when applicable)
280
- arts.sort(key=lambda a: order.get(a, 99))
281
- return arts
282
-
283
  def sync_from_hf():
284
  en_de_cache_path = hf_hub_download(repo_id=DATASET_REPO, filename="en_de.json", repo_type="dataset")
285
  de_en_cache_path = hf_hub_download(repo_id=DATASET_REPO, filename="de_en.json", repo_type="dataset")
 
239
  return gr.update(interactive=(word_type == "noun"))
240
 
241
def get_words_table_en_de(search_query=""):
    """Build the rows of the English→German vocabulary table.

    Args:
        search_query: Case-insensitive substring filter matched against the
            English word and each of its German translations. Empty string
            (the default) keeps every entry.

    Returns:
        A list of ``[english_word, artikel_text, translations_text, word_type]``
        rows, sorted case-insensitively by the English word.
    """
    eng_data = load_english()  # English→German (no artikel here)
    ger_data = load_german()   # German→English (artikel lives here)
    query = search_query.lower()  # hoisted: invariant across the loop
    filtered = {}
    for eng_word, meta in sorted(eng_data.items(), key=lambda kv: kv[0].lower()):
        translations = meta.get("translations", [])
        # Keep the row if the query matches the word or any translation.
        if query in eng_word.lower() or any(query in t.lower() for t in translations):
            artikel_list = []
            for g in translations:
                art = (ger_data.get(g, {}).get("artikel") or "").strip()
                # Fix: skip missing/blank articles and duplicates — joining
                # them unconditionally produced artifacts like ", , der".
                if art and art not in artikel_list:
                    artikel_list.append(art)
            artikel_text = ", ".join(artikel_list)       # no null/empty entries
            translations_text = ", ".join(translations)  # all German translations
            filtered[eng_word] = [eng_word, artikel_text, translations_text, meta.get("type", "")]
    return list(filtered.values())
259
 
 
272
def reset_filter():
    """Clear the search box and rebuild both unfiltered vocabulary tables."""
    empty_query = ""
    return (empty_query, get_words_table_en_de(), get_words_table_de_en())
274
 
 
 
 
 
 
 
 
 
 
 
 
 
 
275
  def sync_from_hf():
276
  en_de_cache_path = hf_hub_download(repo_id=DATASET_REPO, filename="en_de.json", repo_type="dataset")
277
  de_en_cache_path = hf_hub_download(repo_id=DATASET_REPO, filename="de_en.json", repo_type="dataset")