JagmeetMinhas22 commited on
Commit
37d1c08
·
verified ·
1 Parent(s): 4c13239

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -4
app.py CHANGED
@@ -1,5 +1,10 @@
1
  from transformers import pipeline
2
  import streamlit as st
 
 
 
 
 
3
 
4
  summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
5
 
@@ -9,7 +14,14 @@ if textToParaphrase:
9
  summary = summarizer(textToParaphrase, max_length=130, min_length=30, do_sample=False)
10
  st.write(summary[0]['summary_text']) # Displaying the summarized text in Streamlit
11
 
12
- # Use a pipeline as a high-level helper
13
- messages = [{"role": "user", "content": "Who are you?"}]
14
- pipe = pipeline("text-generation", model="microsoft/Phi-3.5-mini-instruct", trust_remote_code=True)
15
- pipe(messages)
 
 
 
 
 
 
 
 
"""Streamlit demo app: summarizes user text with BART and (below) queries
Phi-3.5 through the Hugging Face Inference API."""

import os

from transformers import pipeline
import streamlit as st
from huggingface_hub import InferenceClient

# Read the HF API token from the environment (set "my_API_Key" in the Space /
# deployment secrets).  `os` was used here without being imported, which made
# the app crash with NameError on startup — fixed by the `import os` above.
# os.getenv returns None when the variable is unset; InferenceClient then
# falls back to anonymous, rate-limited access.
apiToken = os.getenv("my_API_Key")
client = InferenceClient(api_key=apiToken)
8
 
# Summarization pipeline; downloads/loads facebook/bart-large-cnn the first
# time the app starts (blocking), then stays resident for all requests.
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
 
 
# NOTE(review): the diff rendering stripped indentation — per the hunk header
# these two lines sit inside an `if textToParaphrase:` block whose header is
# outside this view; confirm indentation against the full file.
# Summarize the user's text (130-token cap, 30-token floor, deterministic
# decoding via do_sample=False) and render the result.
summary = summarizer(textToParaphrase, max_length=130, min_length=30, do_sample=False)
st.write(summary[0]['summary_text']) # Displaying the summarized text in Streamlit
16
 
# Example chat request to Phi-3.5 through the HF Inference API, streamed.
messages = [{ "role": "user", "content": "What is the capital of France?" }]

stream = client.chat.completions.create(
    model="microsoft/Phi-3.5-mini-instruct",
    messages=messages,
    max_tokens=500,
    stream=True,
)

# NOTE(review): tokens go to the server's stdout, not the Streamlit page —
# if the answer is meant for the UI, collect the deltas and st.write them
# (or pass the generator to st.write_stream); confirm intent.
for chunk in stream:
    delta = chunk.choices[0].delta.content
    # Role-only / final chunks carry content=None — the original printed the
    # literal "None" for those and put every token on its own line; skip empty
    # deltas and use end="" so the tokens concatenate into one response.
    if delta:
        print(delta, end="")