Spaces:
Sleeping
Sleeping
pantadeusz
committed on
Commit
·
60a8435
1
Parent(s):
63fbc67
some tweaks
Browse files
app.py
CHANGED
|
@@ -2,7 +2,7 @@ import os
|
|
| 2 |
from langchain_huggingface import HuggingFaceEndpoint
|
| 3 |
import streamlit as st
|
| 4 |
model_id="mistralai/Mistral-7B-Instruct-v0.3"
|
| 5 |
-
def get_llm_hf_inference(model_id=model_id, max_new_tokens=
|
| 6 |
"""
|
| 7 |
Returns a language model for HuggingFace inference.
|
| 8 |
|
|
@@ -40,7 +40,7 @@ if 'user_text' not in st.session_state:
|
|
| 40 |
|
| 41 |
# Initialize session state for model parameters
|
| 42 |
if "max_response_length" not in st.session_state:
|
| 43 |
-
st.session_state.max_response_length =
|
| 44 |
|
| 45 |
if "system_message" not in st.session_state:
|
| 46 |
st.session_state.system_message = "rude AI conversing with a human user"
|
|
|
|
| 2 |
from langchain_huggingface import HuggingFaceEndpoint
|
| 3 |
import streamlit as st
|
| 4 |
model_id="mistralai/Mistral-7B-Instruct-v0.3"
|
| 5 |
+
def get_llm_hf_inference(model_id=model_id, max_new_tokens=4096, temperature=0.1):
|
| 6 |
"""
|
| 7 |
Returns a language model for HuggingFace inference.
|
| 8 |
|
|
|
|
| 40 |
|
| 41 |
# Initialize session state for model parameters
|
| 42 |
if "max_response_length" not in st.session_state:
|
| 43 |
+
st.session_state.max_response_length = 4096
|
| 44 |
|
| 45 |
if "system_message" not in st.session_state:
|
| 46 |
st.session_state.system_message = "rude AI conversing with a human user"
|