Pratyush Maini committed on
Commit
619989b
·
1 Parent(s): 14ecd64
Files changed (1) hide show
  1. app.py +1 -3
app.py CHANGED
@@ -2,7 +2,6 @@ import os
2
  import gradio as gr
3
  import torch
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
5
- from keys import HF_TOKEN
6
 
7
  # Set cache directory for HF Spaces persistent storage
8
  os.environ.setdefault("HF_HOME", "/data/.huggingface")
@@ -17,8 +16,7 @@ model_list = {
17
 
18
  # Use token from environment variables (HF Spaces) or keys.py (local)
19
  HF_TOKEN_FROM_ENV = os.getenv("HUGGINGFACEHUB_API_TOKEN") or os.getenv("HF_TOKEN")
20
- if HF_TOKEN_FROM_ENV:
21
- HF_TOKEN = HF_TOKEN_FROM_ENV
22
 
23
  # Model cache for loaded models
24
  model_cache = {}
 
2
  import gradio as gr
3
  import torch
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
5
 
6
  # Set cache directory for HF Spaces persistent storage
7
  os.environ.setdefault("HF_HOME", "/data/.huggingface")
 
16
 
17
  # Use token from environment variables (HF Spaces) or keys.py (local)
18
  HF_TOKEN_FROM_ENV = os.getenv("HUGGINGFACEHUB_API_TOKEN") or os.getenv("HF_TOKEN")
19
+ HF_TOKEN = HF_TOKEN_FROM_ENV
 
20
 
21
  # Model cache for loaded models
22
  model_cache = {}