CaptMetal committed on
Commit
4ca2e6f
·
verified ·
1 Parent(s): 86c4fc8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -32
app.py CHANGED
@@ -1,38 +1,26 @@
1
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
2
- import gradio as gr
3
  import os
4
 
5
- # Load OpenHermes
6
- model_id = "BuddAi" if os.path.exists("BuddAi") else "CaptMetal/BuddAi"
7
- try:
8
- tokenizer = AutoTokenizer.from_pretrained(
9
- model_id,
10
- trust_remote_code=True # Important for custom tokenizers
11
- )
12
- model = AutoModelForCausalLM.from_pretrained(
13
- model_id,
14
- device_map="auto"
15
- )
16
- except Exception as e:
17
- raise ValueError(f"Failed to load model: {str(e)}. Please verify all files exist in {model_id}")
18
 
19
- # Set chat template (OpenHermes-specific)
20
- tokenizer.chat_template = """{% for message in messages %}
21
- {{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>'}}
22
- {% endfor %}"""
23
 
24
- chatbot = pipeline(
25
- "text-generation",
26
- model=model,
27
- tokenizer=tokenizer,
28
- temperature=0.7,
29
- max_new_tokens=200
30
- )
31
 
32
- def respond(message, history):
33
- messages = [{"role": "user", "content": message}]
34
- prompt = tokenizer.apply_chat_template(messages, tokenize=False)
35
- outputs = chatbot(prompt)
36
- return outputs[0]["generated_text"][len(prompt):].split("<|im_end|>")[0]
 
 
37
 
38
- gr.ChatInterface(respond).launch()
 
1
"""One-off script: download OpenHermes-2.5 and re-upload it to CaptMetal/BuddAi."""
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import HfApi, login
import os

# 1. Login (add HF_TOKEN in Space Settings -> Secrets).
# FIX: os.environ["HF_TOKEN"] raised a bare KeyError when the secret was absent;
# fail with an actionable message instead.
token = os.environ.get("HF_TOKEN")
if not token:
    raise RuntimeError(
        "HF_TOKEN is not set. Add it under Space Settings -> Secrets before running."
    )
login(token=token)

# 2. Load and save model files (a 7B model: expect a large download and RAM use).
model_id = "teknium/OpenHermes-2.5-Mistral-7B"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Save files temporarily to a local staging directory.
os.makedirs("BuddAi", exist_ok=True)
model.save_pretrained("BuddAi", safe_serialization=True)  # write .safetensors weights
tokenizer.save_pretrained("BuddAi")

# 3. Upload the staged folder to the target model repo.
api = HfApi()
api.upload_folder(
    folder_path="BuddAi",
    repo_id="CaptMetal/BuddAi",
    repo_type="model",
)

print("Upload complete! Visit https://huggingface.co/CaptMetal/BuddAi")