Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -34,14 +34,14 @@ def main():
|
|
| 34 |
)
|
| 35 |
|
| 36 |
st.sidebar.header("Input Parameters")
|
| 37 |
-
role = st.sidebar.text_input("Who is this intended for ?", "
|
| 38 |
-
topic = st.sidebar.text_input("On what Topic should the blog be on ?", "
|
| 39 |
word_count = st.sidebar.slider("Number of Words", min_value=50, max_value=1000, value=200, step=50)
|
| 40 |
|
| 41 |
if st.sidebar.button("Generate Blog"):
|
| 42 |
model_id = "mistralai/Mistral-7B-Instruct-v0.1"
|
| 43 |
-
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
| 44 |
-
model = AutoModelForCausalLM.from_pretrained(model_id)
|
| 45 |
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer,max_new_tokens=1000)
|
| 46 |
hf = HuggingFacePipeline(pipeline=pipe)
|
| 47 |
chain = LLMChain(llm=hf,prompt=prompt,verbose=True)
|
|
|
|
| 34 |
)
|
| 35 |
|
| 36 |
st.sidebar.header("Input Parameters")
|
| 37 |
+
role = st.sidebar.text_input("Who is this intended for ?", "Data Scientist")
|
| 38 |
+
topic = st.sidebar.text_input("On what Topic should the blog be on ?", "Machine Learning")
|
| 39 |
word_count = st.sidebar.slider("Number of Words", min_value=50, max_value=1000, value=200, step=50)
|
| 40 |
|
| 41 |
if st.sidebar.button("Generate Blog"):
|
| 42 |
model_id = "mistralai/Mistral-7B-Instruct-v0.1"
|
| 43 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id,auth_token =HF_TOKEN )
|
| 44 |
+
model = AutoModelForCausalLM.from_pretrained(model_id,auth_token =HF_TOKEN )
|
| 45 |
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer,max_new_tokens=1000)
|
| 46 |
hf = HuggingFacePipeline(pipeline=pipe)
|
| 47 |
chain = LLMChain(llm=hf,prompt=prompt,verbose=True)
|