peterpeter8585 committed on
Commit
b88889e
·
verified ·
1 Parent(s): 0b876fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -1
app.py CHANGED
@@ -1,4 +1,9 @@
1
  from tqdm import tqdm
 
 
 
 
 
2
  from subprocess import Popen, PIPE as P
3
  #b=Popen("wget 1https://huggingface.co/peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF/resolve/main/deepseek-llm-7b-chat-q8_0.gguf -O ./model.gguf",shell=True,stderr=P,stdout=P)
4
  print(b.stdout.read()+b.stderr.read())
@@ -74,13 +79,16 @@ import os
74
  #t=T.from_pretrained("peterpeter8585/syai4.3")
75
  #pipe=pipeline(model=m,tokenizer=t,task="text-generation")
76
  import multiprocessing
 
77
 
 
 
78
  from langchain_huggingface.llms import HuggingFacePipeline
79
 
80
  llm = HuggingFacePipeline.from_model_id(
81
  model_id="peterpeter8585/zephyr",
82
  task="text-generation"
83
- )
84
  from langchain.retrievers import WikipediaRetriever as Wiki
85
  import gradio as gr
86
  chatbot = gr.Chatbot(
 
1
  from tqdm import tqdm
2
+ import zipfile
3
+ file_name = ".zip"
4
+ output_dir = "./"
5
+ zip_file = zipfile.ZipFile(file_name)
6
+ zip_file.extractall(path=output_dir)
7
  from subprocess import Popen, PIPE as P
8
  #b=Popen("wget 1https://huggingface.co/peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF/resolve/main/deepseek-llm-7b-chat-q8_0.gguf -O ./model.gguf",shell=True,stderr=P,stdout=P)
9
  print(b.stdout.read()+b.stderr.read())
 
79
  #t=T.from_pretrained("peterpeter8585/syai4.3")
80
  #pipe=pipeline(model=m,tokenizer=t,task="text-generation")
81
  import multiprocessing
82
+ from langchain.llms import GPT4All
83
 
84
+ llm = GPT4All(model=("./"))
85
+ '''
86
  from langchain_huggingface.llms import HuggingFacePipeline
87
 
88
  llm = HuggingFacePipeline.from_model_id(
89
  model_id="peterpeter8585/zephyr",
90
  task="text-generation"
91
+ )'''
92
  from langchain.retrievers import WikipediaRetriever as Wiki
93
  import gradio as gr
94
  chatbot = gr.Chatbot(