bluestpanda committed on
Commit
b2db71c
·
1 Parent(s): a827599

Add FREE Hugging Face LLM option - no API key required!

Browse files
Files changed (2) hide show
  1. Dockerfile.full +1 -1
  2. app.py +66 -7
Dockerfile.full CHANGED
@@ -41,5 +41,5 @@ RUN echo '[supervisord]' > /etc/supervisor/conf.d/supervisord.conf && \
41
  EXPOSE 7860
42
 
43
  # Start services with supervisor
44
- CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
45
 
 
41
  EXPOSE 7860
42
 
43
  # Start services with supervisor
44
+ CMD ["sh", "-c", "ollama serve & sleep 5 && streamlit run /app/app.py --server.address 0.0.0.0 --server.port 7860 --server.headless true --server.enableCORS false --server.enableXsrfProtection false"]
45
 
app.py CHANGED
@@ -282,6 +282,8 @@ Output ONLY valid JSON:
282
  return self._call_openai(prompt)
283
  elif self.llm_provider == "anthropic":
284
  return self._call_anthropic(prompt)
 
 
285
  else:
286
  raise ValueError(f"Unknown LLM provider: {self.llm_provider}")
287
 
@@ -356,6 +358,53 @@ Output ONLY valid JSON:
356
  except Exception as e:
357
  raise Exception(f"Failed to call Anthropic API - {e}")
358
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
359
  def parse_llm_output(self, output: str) -> Dict[str, Any]:
360
  """Parse and validate the LLM JSON output."""
361
  try:
@@ -388,7 +437,7 @@ def main():
388
  st.title("πŸ“Š JSON Field Analyzer")
389
 
390
  if IS_HUGGINGFACE:
391
- st.info("πŸ†“ Running on Hugging Face - Please use cloud LLM providers (OpenAI or Anthropic)")
392
 
393
  st.markdown("**Upload a JSON file and analyze important fields using LLM**")
394
 
@@ -401,17 +450,17 @@ def main():
401
  st.info("🌐 Running online - Cloud LLM required")
402
 
403
  # LLM Provider Selection
404
- # Default to Anthropic if on Streamlit Cloud or Hugging Face, Ollama on local
405
  if IS_ONLINE:
406
- default_index = 2 # Anthropic Claude
407
  else:
408
  default_index = 0 # Ollama
409
 
410
  llm_provider = st.selectbox(
411
  "πŸ€– LLM Provider",
412
- ["Ollama (Local)", "OpenAI (Cloud)", "Anthropic Claude (Cloud)"],
413
  index=default_index,
414
- help="Choose your LLM provider"
415
  )
416
 
417
  # Extract provider name and model
@@ -435,7 +484,7 @@ def main():
435
  if not api_key:
436
  st.warning("⚠️ Please enter your OpenAI API key")
437
  st.info("πŸ’‘ Get key: https://platform.openai.com/api-keys")
438
- else: # Anthropic
439
  provider_name = "anthropic"
440
  api_key = os.getenv("ANTHROPIC_API_KEY") or st.text_input(
441
  "Anthropic API Key",
@@ -445,6 +494,16 @@ def main():
445
  if not api_key:
446
  st.warning("⚠️ Please enter your Anthropic API key")
447
  st.info("πŸ’‘ Get key: https://console.anthropic.com")
 
 
 
 
 
 
 
 
 
 
448
 
449
  st.markdown("---")
450
 
@@ -515,7 +574,7 @@ ollama pull llama3.2:3b
515
  st.error("❌ Ollama is not available on this platform")
516
  st.info("πŸ’‘ Please select 'Anthropic Claude (Cloud)' or 'OpenAI (Cloud)' from the sidebar")
517
 
518
- # Validate API key for cloud providers
519
  elif provider_name in ["openai", "anthropic"] and not api_key:
520
  st.error("❌ Please enter an API key for the selected cloud provider")
521
  else:
 
282
  return self._call_openai(prompt)
283
  elif self.llm_provider == "anthropic":
284
  return self._call_anthropic(prompt)
285
+ elif self.llm_provider == "huggingface":
286
+ return self._call_huggingface(prompt)
287
  else:
288
  raise ValueError(f"Unknown LLM provider: {self.llm_provider}")
289
 
 
358
  except Exception as e:
359
  raise Exception(f"Failed to call Anthropic API - {e}")
360
 
361
def _call_huggingface(self, prompt: str) -> str:
    """Call the Hugging Face Inference API (free tier) to generate a response.

    Works without an API key; when ``self.api_key`` is set it is sent as a
    Bearer token, which raises rate limits and speeds up inference.

    Args:
        prompt: The user prompt to send to the model.

    Returns:
        The generated text (completion only, the prompt is not echoed back).

    Raises:
        Exception: Wrapping any network/HTTP failure, including the HTTP 503
            returned while the model is cold-starting on Hugging Face's side.
    """
    try:
        # Fixed default free model.
        # BUG FIX: the previous code did `self.api_key or "<model>"`, which
        # used the API *key* as the model name whenever a key was supplied,
        # guaranteeing a 404 for exactly the users who authenticated.
        model_name = "mistralai/Mistral-7B-Instruct-v0.3"

        headers = {"Content-Type": "application/json"}
        if self.api_key:
            # Optional token — anonymous requests work but are rate-limited.
            headers["Authorization"] = f"Bearer {self.api_key}"

        # Mistral-instruct chat template: system+user text inside [INST] tags.
        full_prompt = f"""<s>[INST]You are a JSON data analysis assistant. Always respond with valid JSON only, no explanations.

{prompt}[/INST]"""

        payload = {
            "inputs": full_prompt,
            "parameters": {
                "max_new_tokens": 1000,
                "temperature": 0.3,
                # Return only the completion, not the echoed prompt.
                "return_full_text": False,
            },
        }

        api_url = f"https://api-inference.huggingface.co/models/{model_name}"
        response = requests.post(api_url, json=payload, headers=headers, timeout=60)

        # 503 means the model is still loading on HF's infrastructure.
        if response.status_code == 503:
            raise Exception("Model is loading. Please wait a moment and try again.")

        response.raise_for_status()
        result = response.json()

        # The Inference API returns either a list of generations or a dict.
        if isinstance(result, list) and len(result) > 0:
            return result[0].get('generated_text', '')
        elif isinstance(result, dict):
            return result.get('generated_text', '')
        else:
            return str(result)

    except Exception as e:
        raise Exception(f"Failed to call Hugging Face API - {e}")
407
+
408
  def parse_llm_output(self, output: str) -> Dict[str, Any]:
409
  """Parse and validate the LLM JSON output."""
410
  try:
 
437
  st.title("πŸ“Š JSON Field Analyzer")
438
 
439
  if IS_HUGGINGFACE:
440
+ st.info("πŸ†“ Running on Hugging Face - FREE Hugging Face AI model available! No API key needed.")
441
 
442
  st.markdown("**Upload a JSON file and analyze important fields using LLM**")
443
 
 
450
  st.info("🌐 Running online - Cloud LLM required")
451
 
452
  # LLM Provider Selection
453
+ # Default to Hugging Face (free) if online, Ollama on local
454
  if IS_ONLINE:
455
+ default_index = 3 # Hugging Face (Free)
456
  else:
457
  default_index = 0 # Ollama
458
 
459
  llm_provider = st.selectbox(
460
  "πŸ€– LLM Provider",
461
+ ["Ollama (Local)", "OpenAI (Cloud)", "Anthropic Claude (Cloud)", "Hugging Face (Free 🌟)"],
462
  index=default_index,
463
+ help="Choose your LLM provider - Hugging Face is FREE and no API key needed!"
464
  )
465
 
466
  # Extract provider name and model
 
484
  if not api_key:
485
  st.warning("⚠️ Please enter your OpenAI API key")
486
  st.info("πŸ’‘ Get key: https://platform.openai.com/api-keys")
487
+ elif llm_provider == "Anthropic Claude (Cloud)":
488
  provider_name = "anthropic"
489
  api_key = os.getenv("ANTHROPIC_API_KEY") or st.text_input(
490
  "Anthropic API Key",
 
494
  if not api_key:
495
  st.warning("⚠️ Please enter your Anthropic API key")
496
  st.info("πŸ’‘ Get key: https://console.anthropic.com")
497
+ else: # Hugging Face (Free)
498
+ provider_name = "huggingface"
499
+ api_key = os.getenv("HUGGINGFACE_API_KEY") or st.text_input(
500
+ "Hugging Face API Key (Optional)",
501
+ type="password",
502
+ help="Optional: Enter your HF token for faster inference (or set HUGGINGFACE_API_KEY env var)"
503
+ )
504
+ if not api_key:
505
+ st.info("✨ Using free Hugging Face Inference API - no key needed!")
506
+ st.info("πŸ’‘ Optional: Add your token in Settings > Secrets for better performance")
507
 
508
  st.markdown("---")
509
 
 
574
  st.error("❌ Ollama is not available on this platform")
575
  st.info("πŸ’‘ Please select 'Anthropic Claude (Cloud)' or 'OpenAI (Cloud)' from the sidebar")
576
 
577
+ # Validate API key for cloud providers (except Hugging Face which is optional)
578
  elif provider_name in ["openai", "anthropic"] and not api_key:
579
  st.error("❌ Please enter an API key for the selected cloud provider")
580
  else: