debug embeddings
Browse files
app.py
CHANGED
|
@@ -82,11 +82,12 @@ with st.sidebar:
|
|
| 82 |
# Create LLM
|
| 83 |
if llm_token is not None:
|
| 84 |
if provider == 'openai':
|
| 85 |
-
os.environ[
|
| 86 |
llm = OpenAI(
|
| 87 |
model=llm_name,
|
| 88 |
temperature=temperature,
|
| 89 |
-
max_tokens=max_tokens
|
|
|
|
| 90 |
)
|
| 91 |
# Global tokenization needs to be consistent with LLM
|
| 92 |
# https://docs.llamaindex.ai/en/stable/module_guides/models/llms/
|
|
|
|
| 82 |
# Create LLM
|
| 83 |
if llm_token is not None:
|
| 84 |
if provider == 'openai':
|
| 85 |
+
os.environ["OPENAI_API_KEY"] = str(llm_token)
|
| 86 |
llm = OpenAI(
|
| 87 |
model=llm_name,
|
| 88 |
temperature=temperature,
|
| 89 |
+
max_tokens=max_tokens,
|
| 90 |
+
api_key=os.environ.get("OPENAI_API_KEY")
|
| 91 |
)
|
| 92 |
# Global tokenization needs to be consistent with LLM
|
| 93 |
# https://docs.llamaindex.ai/en/stable/module_guides/models/llms/
|