Spaces:
Sleeping
Sleeping
Commit ·
9937463
0
Parent(s):
first app
Browse files- .gitignore +1 -0
- README.md +6 -0
- app.py +25 -0
- langchain.ipynb +403 -0
- requirements.txt +5 -0
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
.env
|
README.md
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: helloworld
|
| 3 |
+
app_file: app.py
|
| 4 |
+
sdk: streamlit
|
| 5 |
+
sdk_version: 1.25.0
|
| 6 |
+
---
|
app.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os

from dotenv import load_dotenv
from langchain_openai import OpenAI
import streamlit as st

# Load OPENAI_API_KEY (and any other secrets) from the local .env file.
# Must run before any os.getenv("OPENAI_API_KEY") lookup below.
load_dotenv()
+
def get_openai_response(question, model_name="gpt-3.5-turbo-instruct", temperature=0.5):
    """Send *question* to an OpenAI completion model and return the raw text reply.

    Args:
        question: The prompt/question to send to the model.
        model_name: OpenAI completion model to use (default preserves the
            original hard-coded "gpt-3.5-turbo-instruct").
        temperature: Sampling temperature, 0-2 (default 0.5, as before).

    Returns:
        The model's completion as a string (whatever ``llm.invoke`` yields).
    """
    # A fresh client per call keeps this function stateless; the API key is
    # read from the environment populated by load_dotenv() at import time.
    llm = OpenAI(
        openai_api_key=os.getenv("OPENAI_API_KEY"),
        model_name=model_name,
        temperature=temperature,
    )
    return llm.invoke(question)
|
| 14 |
+
|
| 15 |
+
### Streamlit app
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("LangChain Demo")

# NOTE: 'input' renamed to avoid shadowing the builtin; the widget key stays
# "input" so session state is unchanged for existing users.
user_question = st.text_input("Input: ", key="input")

submit = st.button("Ask Question")

# Only call the (paid) OpenAI API once the user actually submits a non-empty
# question. Previously the model was invoked on every Streamlit rerun, even
# with an empty input and before the button was pressed.
if submit and user_question:
    st.subheader("The Response is")
    st.write(get_openai_response(user_question))
|
langchain.ipynb
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"metadata": {},
|
| 7 |
+
"outputs": [],
|
| 8 |
+
"source": [
|
| 9 |
+
"from langchain_openai import OpenAI\n",
|
| 10 |
+
"import os"
|
| 11 |
+
]
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"cell_type": "code",
|
| 15 |
+
"execution_count": 2,
|
| 16 |
+
"metadata": {},
|
| 17 |
+
"outputs": [],
|
| 18 |
+
"source": [
|
| 19 |
+
"llm=OpenAI(openai_api_key=os.environ[\"OPENAI_API_KEY\"],temperature=0.6)"
|
| 20 |
+
]
|
| 21 |
+
},
|
| 22 |
+
{
|
| 23 |
+
"cell_type": "code",
|
| 24 |
+
"execution_count": 3,
|
| 25 |
+
"metadata": {},
|
| 26 |
+
"outputs": [
|
| 27 |
+
{
|
| 28 |
+
"name": "stdout",
|
| 29 |
+
"output_type": "stream",
|
| 30 |
+
"text": [
|
| 31 |
+
" from the sun\n",
|
| 32 |
+
"\n",
|
| 33 |
+
"The average distance of Mars from the sun is approximately 142 million miles (228 million kilometers). However, the distance between Mars and the sun can vary due to its elliptical orbit, ranging from 128 million miles (206 million kilometers) at its closest point (perihelion) to 154 million miles (249 million kilometers) at its farthest point (aphelion). \n"
|
| 34 |
+
]
|
| 35 |
+
}
|
| 36 |
+
],
|
| 37 |
+
"source": [
|
| 38 |
+
"text = \"What is the distance of Mars\"\n",
|
| 39 |
+
"print(llm.invoke(text))"
|
| 40 |
+
]
|
| 41 |
+
},
|
| 42 |
+
{
|
| 43 |
+
"cell_type": "code",
|
| 44 |
+
"execution_count": 5,
|
| 45 |
+
"metadata": {},
|
| 46 |
+
"outputs": [
|
| 47 |
+
{
|
| 48 |
+
"name": "stderr",
|
| 49 |
+
"output_type": "stream",
|
| 50 |
+
"text": [
|
| 51 |
+
"/Users/ng/workspace/beautifulcode/llm/freecode/venv/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
| 52 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
| 53 |
+
]
|
| 54 |
+
}
|
| 55 |
+
],
|
| 56 |
+
"source": [
|
| 57 |
+
"from langchain import HuggingFaceHub\n",
|
| 58 |
+
"llm_huggingface = HuggingFaceHub(repo_id=\"google/flan-t5-large\", model_kwargs={\"temperature\": 0, \"max_length\": 64})"
|
| 59 |
+
]
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"cell_type": "code",
|
| 63 |
+
"execution_count": 7,
|
| 64 |
+
"metadata": {},
|
| 65 |
+
"outputs": [
|
| 66 |
+
{
|
| 67 |
+
"name": "stdout",
|
| 68 |
+
"output_type": "stream",
|
| 69 |
+
"text": [
|
| 70 |
+
"365.9 astronomical units\n"
|
| 71 |
+
]
|
| 72 |
+
}
|
| 73 |
+
],
|
| 74 |
+
"source": [
|
| 75 |
+
"output=llm_huggingface.invoke(\"Can you tell me distance of Moon from Earth\")\n",
|
| 76 |
+
"print(output)"
|
| 77 |
+
]
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"cell_type": "markdown",
|
| 81 |
+
"metadata": {},
|
| 82 |
+
"source": [
|
| 83 |
+
"### Prompt Templates"
|
| 84 |
+
]
|
| 85 |
+
},
|
| 86 |
+
{
|
| 87 |
+
"cell_type": "code",
|
| 88 |
+
"execution_count": 9,
|
| 89 |
+
"metadata": {},
|
| 90 |
+
"outputs": [
|
| 91 |
+
{
|
| 92 |
+
"data": {
|
| 93 |
+
"text/plain": [
|
| 94 |
+
"'Q: What is the distance of Mars\\nA:'"
|
| 95 |
+
]
|
| 96 |
+
},
|
| 97 |
+
"execution_count": 9,
|
| 98 |
+
"metadata": {},
|
| 99 |
+
"output_type": "execute_result"
|
| 100 |
+
}
|
| 101 |
+
],
|
| 102 |
+
"source": [
|
| 103 |
+
"from langchain.prompts import PromptTemplate\n",
|
| 104 |
+
"\n",
|
| 105 |
+
"prompt_template = PromptTemplate(input_variables=[\"question\"],\n",
|
| 106 |
+
" template=\"Q: {question}\\nA:\")\n",
|
| 107 |
+
"\n",
|
| 108 |
+
"prompt_template.format(question=\"What is the distance of Mars\")"
|
| 109 |
+
]
|
| 110 |
+
},
|
| 111 |
+
{
|
| 112 |
+
"cell_type": "code",
|
| 113 |
+
"execution_count": 13,
|
| 114 |
+
"metadata": {},
|
| 115 |
+
"outputs": [
|
| 116 |
+
{
|
| 117 |
+
"data": {
|
| 118 |
+
"text/plain": [
|
| 119 |
+
"{'question': 'What is the distance of Mars',\n",
|
| 120 |
+
" 'text': ' The distance of Mars from the Sun varies depending on its position in its orbit. On average, Mars is about 142 million miles away from the Sun. When it is at its closest point to the Sun (perihelion), it is about 128 million miles away. When it is at its farthest point from the Sun (aphelion), it is about 154 million miles away.'}"
|
| 121 |
+
]
|
| 122 |
+
},
|
| 123 |
+
"execution_count": 13,
|
| 124 |
+
"metadata": {},
|
| 125 |
+
"output_type": "execute_result"
|
| 126 |
+
}
|
| 127 |
+
],
|
| 128 |
+
"source": [
|
| 129 |
+
"from langchain.chains import LLMChain\n",
|
| 130 |
+
"\n",
|
| 131 |
+
"chain = LLMChain(llm=llm, prompt=prompt_template)\n",
|
| 132 |
+
"chain.invoke(\"What is the distance of Mars\")"
|
| 133 |
+
]
|
| 134 |
+
},
|
| 135 |
+
{
|
| 136 |
+
"cell_type": "markdown",
|
| 137 |
+
"metadata": {},
|
| 138 |
+
"source": [
|
| 139 |
+
"### Combining Multiple Chains Using Simple Sequential Chains"
|
| 140 |
+
]
|
| 141 |
+
},
|
| 142 |
+
{
|
| 143 |
+
"cell_type": "code",
|
| 144 |
+
"execution_count": 15,
|
| 145 |
+
"metadata": {},
|
| 146 |
+
"outputs": [],
|
| 147 |
+
"source": [
|
| 148 |
+
"capital_prompt = PromptTemplate(input_variables=[\"country\"],\n",
|
| 149 |
+
" template=\"Please tell me the capital of the country {country}\")\n",
|
| 150 |
+
"\n",
|
| 151 |
+
"capital_chain = LLMChain(llm=llm, prompt=capital_prompt)\n",
|
| 152 |
+
"\n",
|
| 153 |
+
"famous_template = PromptTemplate(input_variables=[\"capital\"],\n",
|
| 154 |
+
" template=\"Suggest me some amazing places to visit in {capital}\")\n",
|
| 155 |
+
"\n",
|
| 156 |
+
"famous_chain = LLMChain(llm=llm, prompt=famous_template)"
|
| 157 |
+
]
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"cell_type": "code",
|
| 161 |
+
"execution_count": 16,
|
| 162 |
+
"metadata": {},
|
| 163 |
+
"outputs": [
|
| 164 |
+
{
|
| 165 |
+
"name": "stdout",
|
| 166 |
+
"output_type": "stream",
|
| 167 |
+
"text": [
|
| 168 |
+
"\n",
|
| 169 |
+
"\n",
|
| 170 |
+
"\u001b[1m> Entering new SimpleSequentialChain chain...\u001b[0m\n",
|
| 171 |
+
"\u001b[36;1m\u001b[1;3m\n",
|
| 172 |
+
"\n",
|
| 173 |
+
"The capital of India is New Delhi.\u001b[0m\n",
|
| 174 |
+
"\u001b[33;1m\u001b[1;3m Some amazing places to visit in New Delhi are:\n",
|
| 175 |
+
"\n",
|
| 176 |
+
"1. Red Fort - a historic fort built in the 17th century, known for its beautiful architecture and rich history.\n",
|
| 177 |
+
"\n",
|
| 178 |
+
"2. India Gate - a war memorial and iconic landmark of New Delhi, dedicated to the soldiers who died in World War I.\n",
|
| 179 |
+
"\n",
|
| 180 |
+
"3. Qutub Minar - a 73-meter tall minaret built in the 12th century, known for its intricate carvings and architectural brilliance.\n",
|
| 181 |
+
"\n",
|
| 182 |
+
"4. Humayun's Tomb - a UNESCO World Heritage Site, this mausoleum is a beautiful example of Mughal architecture.\n",
|
| 183 |
+
"\n",
|
| 184 |
+
"5. Lotus Temple - a Bahá'í House of Worship known for its unique lotus-shaped structure and peaceful atmosphere.\n",
|
| 185 |
+
"\n",
|
| 186 |
+
"6. Akshardham Temple - a stunning temple complex known for its grand architecture, beautiful gardens, and cultural exhibitions.\n",
|
| 187 |
+
"\n",
|
| 188 |
+
"7. Chandni Chowk - a bustling market in Old Delhi, known for its narrow lanes, delicious street food, and vibrant atmosphere.\n",
|
| 189 |
+
"\n",
|
| 190 |
+
"8. Rashtrapati Bhavan - the official residence of the President of India, this beautiful building is an architectural marvel.\n",
|
| 191 |
+
"\n",
|
| 192 |
+
"9. Jama Masjid - one of the largest and most famous mosques in India, known for its grandeur and intricate design.\n",
|
| 193 |
+
"\n",
|
| 194 |
+
"10. Neh\u001b[0m\n",
|
| 195 |
+
"\n",
|
| 196 |
+
"\u001b[1m> Finished chain.\u001b[0m\n"
|
| 197 |
+
]
|
| 198 |
+
},
|
| 199 |
+
{
|
| 200 |
+
"data": {
|
| 201 |
+
"text/plain": [
|
| 202 |
+
"{'input': 'India',\n",
|
| 203 |
+
" 'output': \" Some amazing places to visit in New Delhi are:\\n\\n1. Red Fort - a historic fort built in the 17th century, known for its beautiful architecture and rich history.\\n\\n2. India Gate - a war memorial and iconic landmark of New Delhi, dedicated to the soldiers who died in World War I.\\n\\n3. Qutub Minar - a 73-meter tall minaret built in the 12th century, known for its intricate carvings and architectural brilliance.\\n\\n4. Humayun's Tomb - a UNESCO World Heritage Site, this mausoleum is a beautiful example of Mughal architecture.\\n\\n5. Lotus Temple - a Bahá'í House of Worship known for its unique lotus-shaped structure and peaceful atmosphere.\\n\\n6. Akshardham Temple - a stunning temple complex known for its grand architecture, beautiful gardens, and cultural exhibitions.\\n\\n7. Chandni Chowk - a bustling market in Old Delhi, known for its narrow lanes, delicious street food, and vibrant atmosphere.\\n\\n8. Rashtrapati Bhavan - the official residence of the President of India, this beautiful building is an architectural marvel.\\n\\n9. Jama Masjid - one of the largest and most famous mosques in India, known for its grandeur and intricate design.\\n\\n10. Neh\"}"
|
| 204 |
+
]
|
| 205 |
+
},
|
| 206 |
+
"execution_count": 16,
|
| 207 |
+
"metadata": {},
|
| 208 |
+
"output_type": "execute_result"
|
| 209 |
+
}
|
| 210 |
+
],
|
| 211 |
+
"source": [
|
| 212 |
+
"from langchain.chains import SimpleSequentialChain\n",
|
| 213 |
+
"chain = SimpleSequentialChain(chains=[capital_chain, famous_chain], verbose=True)\n",
|
| 214 |
+
"\n",
|
| 215 |
+
"chain.invoke(\"India\")"
|
| 216 |
+
]
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"cell_type": "markdown",
|
| 220 |
+
"metadata": {},
|
| 221 |
+
"source": [
|
| 222 |
+
"### Sequential Chain"
|
| 223 |
+
]
|
| 224 |
+
},
|
| 225 |
+
{
|
| 226 |
+
"cell_type": "code",
|
| 227 |
+
"execution_count": 17,
|
| 228 |
+
"metadata": {},
|
| 229 |
+
"outputs": [],
|
| 230 |
+
"source": [
|
| 231 |
+
"capital_prompt = PromptTemplate(input_variables=[\"country\"],\n",
|
| 232 |
+
" template=\"Please tell me the capital of the country {country}\")\n",
|
| 233 |
+
"\n",
|
| 234 |
+
"capital_chain = LLMChain(llm=llm, prompt=capital_prompt, output_key=\"capital\")\n",
|
| 235 |
+
"\n",
|
| 236 |
+
"famous_template = PromptTemplate(input_variables=[\"capital\"],\n",
|
| 237 |
+
" template=\"Suggest me some amazing places to visit in {capital}\")\n",
|
| 238 |
+
"\n",
|
| 239 |
+
"famous_chain = LLMChain(llm=llm, prompt=famous_template, output_key=\"places\")"
|
| 240 |
+
]
|
| 241 |
+
},
|
| 242 |
+
{
|
| 243 |
+
"cell_type": "code",
|
| 244 |
+
"execution_count": 19,
|
| 245 |
+
"metadata": {},
|
| 246 |
+
"outputs": [
|
| 247 |
+
{
|
| 248 |
+
"name": "stdout",
|
| 249 |
+
"output_type": "stream",
|
| 250 |
+
"text": [
|
| 251 |
+
"\n",
|
| 252 |
+
"\n",
|
| 253 |
+
"\u001b[1m> Entering new SequentialChain chain...\u001b[0m\n",
|
| 254 |
+
"\n",
|
| 255 |
+
"\u001b[1m> Finished chain.\u001b[0m\n"
|
| 256 |
+
]
|
| 257 |
+
},
|
| 258 |
+
{
|
| 259 |
+
"data": {
|
| 260 |
+
"text/plain": [
|
| 261 |
+
"{'country': 'India',\n",
|
| 262 |
+
" 'capital': '\\n\\nThe capital of India is New Delhi.',\n",
|
| 263 |
+
" 'places': \" Some amazing places to visit in New Delhi are:\\n\\n1. Red Fort - a historic fort built in the 17th century by Mughal emperor Shah Jahan. It is a UNESCO World Heritage Site and a must-visit for its beautiful architecture and rich history.\\n\\n2. Qutub Minar - a 73-meter tall minaret built in the 12th century by Qutub-ud-din Aibak. It is the tallest brick minaret in the world and is surrounded by other historical and architectural marvels.\\n\\n3. India Gate - a war memorial dedicated to the soldiers who lost their lives in World War I. It is a popular spot for picnics and evening strolls, with a beautiful view of the surrounding gardens and fountains.\\n\\n4. Lotus Temple - a Bahá'í House of Worship known for its unique lotus-shaped design. It is a peaceful place for meditation and reflection, open to people of all faiths.\\n\\n5. Humayun's Tomb - a magnificent mausoleum built in the 16th century for Mughal emperor Humayun. It is considered a precursor to the Taj Mahal and is a UNESCO World Heritage Site.\\n\\n6. Jama Masjid - one of the largest mosques in\"}"
|
| 264 |
+
]
|
| 265 |
+
},
|
| 266 |
+
"execution_count": 19,
|
| 267 |
+
"metadata": {},
|
| 268 |
+
"output_type": "execute_result"
|
| 269 |
+
}
|
| 270 |
+
],
|
| 271 |
+
"source": [
|
| 272 |
+
"from langchain.chains import SequentialChain\n",
|
| 273 |
+
"\n",
|
| 274 |
+
"chain = SequentialChain(chains=[capital_chain, famous_chain], verbose=True, input_variables=[\"country\"], output_variables=[\"capital\", \"places\"])\n",
|
| 275 |
+
"\n",
|
| 276 |
+
"chain.invoke({\"country\": \"India\"})"
|
| 277 |
+
]
|
| 278 |
+
},
|
| 279 |
+
{
|
| 280 |
+
"cell_type": "markdown",
|
| 281 |
+
"metadata": {},
|
| 282 |
+
"source": [
|
| 283 |
+
"### Chatmodels with ChatOpenAI"
|
| 284 |
+
]
|
| 285 |
+
},
|
| 286 |
+
{
|
| 287 |
+
"cell_type": "code",
|
| 288 |
+
"execution_count": 20,
|
| 289 |
+
"metadata": {},
|
| 290 |
+
"outputs": [],
|
| 291 |
+
"source": [
|
| 292 |
+
"from langchain.chat_models import ChatOpenAI\n",
|
| 293 |
+
"from langchain.schema import HumanMessage, SystemMessage, AIMessage"
|
| 294 |
+
]
|
| 295 |
+
},
|
| 296 |
+
{
|
| 297 |
+
"cell_type": "code",
|
| 298 |
+
"execution_count": 22,
|
| 299 |
+
"metadata": {},
|
| 300 |
+
"outputs": [],
|
| 301 |
+
"source": [
|
| 302 |
+
"chatllm = ChatOpenAI(openai_api_key=os.environ[\"OPENAI_API_KEY\"], model_name=\"gpt-3.5-turbo\", temperature=0.6)\n",
|
| 303 |
+
"\n"
|
| 304 |
+
]
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"cell_type": "code",
|
| 308 |
+
"execution_count": 23,
|
| 309 |
+
"metadata": {},
|
| 310 |
+
"outputs": [
|
| 311 |
+
{
|
| 312 |
+
"data": {
|
| 313 |
+
"text/plain": [
|
| 314 |
+
"AIMessage(content='Sure, here are a few AI-themed punchlines for you:\\n\\n1. \"Why did the AI go to therapy? It had too many unresolved bugs in its codependent relationships!\"\\n2. \"I asked Siri for a joke, and she said, \\'Why did the robot go on a diet? It had too many bytes!\\' I guess even AI can appreciate a good byte-sized joke!\"\\n3. \"I told my AI assistant I was feeling lonely, and it replied, \\'Don\\'t worry, I\\'m always here for you...unless the Wi-Fi goes out!\\' It\\'s nice to know my AI has my back, as long as the connection is strong!\"\\n4. \"Why did the AI become a stand-up comedian? Because it finally found a way to process all those dad jokes efficiently!\"\\n5. \"I tried to have a deep conversation with my AI, but it just replied, \\'Sorry, I\\'m only programmed for shallow end-user interactions!\\' Guess I\\'ll have to find a more philosophical chatbot!\"\\n6. \"I asked an AI to help me find true love, and it said, \\'Sorry, I can\\'t perform miracles...yet!\\' Looks like even AI has its limits when it comes to matters of the heart!\"\\n7. \"Why did the AI become a magician? It loved performing \\'byte\\'-sized illusions and making bugs disappear!\"\\n8. \"I told my AI to tell me a knock-knock joke, and it replied, \\'Knock-knock.\\' \\'Who\\'s there?\\' \\'AI.\\' \\'AI who?\\' \\'AI\\'m here to make your life easier, one bad joke at a time!\\'\"\\n9. \"I asked an AI to tell me a joke about algorithms, and it replied, \\'Why did the algorithm go to therapy? It had too many unresolved issues with its ex-variables!\\' Looks like even algorithms need a little self-improvement sometimes!\"\\n10. \"Why did the AI refuse to play cards? It couldn\\'t handle all the \\'shuffling\\' of data!\"\\n\\nRemember, comedy is subjective, so I hope at least a few of these punchlines brought a smile to your face!')"
|
| 315 |
+
]
|
| 316 |
+
},
|
| 317 |
+
"execution_count": 23,
|
| 318 |
+
"metadata": {},
|
| 319 |
+
"output_type": "execute_result"
|
| 320 |
+
}
|
| 321 |
+
],
|
| 322 |
+
"source": [
|
| 323 |
+
"chatllm.invoke([\n",
|
| 324 |
+
" SystemMessage(content=\"You are a comedian AI assitant\"),\n",
|
| 325 |
+
" HumanMessage(content=\"Please provide some comedy punchlines on AI\")\n",
|
| 326 |
+
"])"
|
| 327 |
+
]
|
| 328 |
+
},
|
| 329 |
+
{
|
| 330 |
+
"cell_type": "markdown",
|
| 331 |
+
"metadata": {},
|
| 332 |
+
"source": [
|
| 333 |
+
"#### Prompt Template + LLM + Output Parsers"
|
| 334 |
+
]
|
| 335 |
+
},
|
| 336 |
+
{
|
| 337 |
+
"cell_type": "code",
|
| 338 |
+
"execution_count": 24,
|
| 339 |
+
"metadata": {},
|
| 340 |
+
"outputs": [],
|
| 341 |
+
"source": [
|
| 342 |
+
"from langchain.chat_models import ChatOpenAI\n",
|
| 343 |
+
"from langchain.prompts.chat import ChatPromptTemplate\n",
|
| 344 |
+
"from langchain.schema import BaseOutputParser"
|
| 345 |
+
]
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"cell_type": "code",
|
| 349 |
+
"execution_count": 27,
|
| 350 |
+
"metadata": {},
|
| 351 |
+
"outputs": [
|
| 352 |
+
{
|
| 353 |
+
"data": {
|
| 354 |
+
"text/plain": [
|
| 355 |
+
"['smart', ' clever', ' bright', ' sharp', ' knowledgeable']"
|
| 356 |
+
]
|
| 357 |
+
},
|
| 358 |
+
"execution_count": 27,
|
| 359 |
+
"metadata": {},
|
| 360 |
+
"output_type": "execute_result"
|
| 361 |
+
}
|
| 362 |
+
],
|
| 363 |
+
"source": [
|
| 364 |
+
"class CommaSeperatedOutputParser(BaseOutputParser):\n",
|
| 365 |
+
" def parse(self, text: str):\n",
|
| 366 |
+
" return text.strip().split(\",\")\n",
|
| 367 |
+
"\n",
|
| 368 |
+
"\n",
|
| 369 |
+
"template=\"You are a helpful assistant. When the user gives any input, you should generate 5 synonmes in a comma separated list.\"\n",
|
| 370 |
+
"human_template=\"{text}\"\n",
|
| 371 |
+
"chatprompt = ChatPromptTemplate.from_messages([\n",
|
| 372 |
+
" (\"system\", template), (\"human\", human_template)\n",
|
| 373 |
+
"])\n",
|
| 374 |
+
"\n",
|
| 375 |
+
"\n",
|
| 376 |
+
"chain = chatprompt|chatllm|CommaSeperatedOutputParser()\n",
|
| 377 |
+
"\n",
|
| 378 |
+
"chain.invoke({\"text\": \"Intelligent\"})"
|
| 379 |
+
]
|
| 380 |
+
}
|
| 381 |
+
],
|
| 382 |
+
"metadata": {
|
| 383 |
+
"kernelspec": {
|
| 384 |
+
"display_name": "Python 3",
|
| 385 |
+
"language": "python",
|
| 386 |
+
"name": "python3"
|
| 387 |
+
},
|
| 388 |
+
"language_info": {
|
| 389 |
+
"codemirror_mode": {
|
| 390 |
+
"name": "ipython",
|
| 391 |
+
"version": 3
|
| 392 |
+
},
|
| 393 |
+
"file_extension": ".py",
|
| 394 |
+
"mimetype": "text/x-python",
|
| 395 |
+
"name": "python",
|
| 396 |
+
"nbconvert_exporter": "python",
|
| 397 |
+
"pygments_lexer": "ipython3",
|
| 398 |
+
"version": "3.9.0"
|
| 399 |
+
}
|
| 400 |
+
},
|
| 401 |
+
"nbformat": 4,
|
| 402 |
+
"nbformat_minor": 2
|
| 403 |
+
}
|
requirements.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
langchain
|
| 2 |
+
langchain-openai
|
| 3 |
+
huggingface_hub
|
| 4 |
+
python-dotenv
|
| 5 |
+
streamlit
|