sahilmayekar committed on
Commit
f67351d
·
verified ·
1 Parent(s): 1928fe7

Update src/app.py

Browse files
Files changed (1) hide show
  1. src/app.py +29 -14
src/app.py CHANGED
"""Streamlit front-end for chatting with a local Ollama server via the Python client."""
import streamlit as st
from ollama import Client

# Page chrome.
st.set_page_config(page_title="Ollama Chat", layout="centered")
st.title("🧠 Chat with Ollama via Python Client")

# Client pointed at the default local Ollama daemon.
client = Client(host="http://localhost:11434")

# Input widgets: a free-form prompt box plus a submit button.
prompt = st.text_area("Enter your prompt:", height=150)
submit = st.button("Generate")

# Only call the model when the button was pressed AND the prompt is non-blank.
if submit and prompt.strip():
    with st.spinner("Thinking..."):
        try:
            result = client.generate(model="llama3", prompt=prompt)
            answer = result["response"]
            st.markdown("### Response")
            st.write(answer)
        except Exception as e:
            # Surface any client/server failure inline instead of a traceback.
            st.error(f"Error: {e}")
"""Notebook-style Streamlit app: runs free-form "cells" through a LangChain
LLMChain backed by a local Ollama server (model: llama3)."""
import streamlit as st
from langchain_community.llms import Ollama
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

st.set_page_config(page_title="🧪 LangChain + Ollama Notebook")
st.title("📓 LangChain + Ollama Jupyter-like App")

# Setup LangChain LLM with Ollama backend (default local daemon).
llm = Ollama(model="llama3", base_url="http://localhost:11434")

# Notebook-style prompt block: one "cell" of text/code plus a run button.
code_cell = st.text_area("🧠 Enter your notebook-style cell (text or code):", height=200)
run_button = st.button("▶️ Run")

# Prompt template with notebook context; {cell_input} is filled by the chain.
template = """
You are a helpful AI assistant embedded in a Jupyter-style notebook.
Interpret the input below and respond helpfully.

Input:
{cell_input}

Response:"""

prompt = PromptTemplate(template=template, input_variables=["cell_input"])
chain = LLMChain(llm=llm, prompt=prompt)

# Only invoke the model when the button was pressed AND the cell is non-blank.
if run_button and code_cell.strip():
    with st.spinner("Running cell..."):
        # Guard the LLM call: an unreachable/overloaded Ollama server (or a
        # missing model) would otherwise surface as a raw traceback in the
        # Streamlit page. The previous revision of this file had this guard;
        # restore it so failures render as an inline error message.
        try:
            result = chain.run(cell_input=code_cell)
        except Exception as e:
            st.error(f"Error: {e}")
        else:
            st.markdown("### 📬 Response")
            st.write(result)