subashpoudel commited on
Commit
af895b2
·
1 Parent(s): 9dba5c6

New commit

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .env +7 -0
  2. .gitignore +0 -23
  3. Dockerfile +0 -25
  4. __pycache__/dummy_state.cpython-312.pyc +0 -0
  5. api/__pycache__/__init__.cpython-312.pyc +0 -0
  6. api/__pycache__/main.cpython-312.pyc +0 -0
  7. api/__pycache__/stored_data.cpython-312.pyc +0 -0
  8. api/routers/__pycache__/__init__.cpython-312.pyc +0 -0
  9. api/routers/__pycache__/brainstorm.cpython-312.pyc +0 -0
  10. api/routers/__pycache__/context_analysis.cpython-312.pyc +0 -0
  11. api/routers/__pycache__/generate_final_story.cpython-312.pyc +0 -0
  12. api/routers/__pycache__/generate_image.cpython-312.pyc +0 -0
  13. api/routers/__pycache__/human_idea_refining.cpython-312.pyc +0 -0
  14. api/routers/__pycache__/ideation.cpython-312.pyc +0 -0
  15. api/routers/__pycache__/orchestration.cpython-312.pyc +0 -0
  16. src/genai/Research/Brainstroming.ipynb +0 -0
  17. src/genai/Research/ideation.ipynb +931 -0
  18. src/genai/Research/pandas_agent.ipynb +1141 -0
  19. src/genai/__pycache__/__init__.cpython-312.pyc +0 -0
  20. src/genai/brainstroming_agent/__pycache__/__init__.cpython-312.pyc +0 -0
  21. src/genai/brainstroming_agent/__pycache__/agent.cpython-312.pyc +0 -0
  22. src/genai/brainstroming_agent/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  23. src/genai/brainstroming_agent/utils/__pycache__/business_interaction.cpython-312.pyc +0 -0
  24. src/genai/brainstroming_agent/utils/__pycache__/check.cpython-312.pyc +0 -0
  25. src/genai/brainstroming_agent/utils/__pycache__/data_loader.cpython-312.pyc +0 -0
  26. src/genai/brainstroming_agent/utils/__pycache__/initial_interaction.cpython-312.pyc +0 -0
  27. src/genai/brainstroming_agent/utils/__pycache__/models.cpython-312.pyc +0 -0
  28. src/genai/brainstroming_agent/utils/__pycache__/models_loader.cpython-312.pyc +0 -0
  29. src/genai/brainstroming_agent/utils/__pycache__/nodes.cpython-312.pyc +0 -0
  30. src/genai/brainstroming_agent/utils/__pycache__/prompts.cpython-312.pyc +0 -0
  31. src/genai/brainstroming_agent/utils/__pycache__/state.cpython-312.pyc +0 -0
  32. src/genai/brainstroming_agent/utils/__pycache__/tools.cpython-312.pyc +0 -0
  33. src/genai/brainstroming_agent/utils/__pycache__/utils.cpython-312.pyc +0 -0
  34. src/genai/brainstroming_agent/utils/__pycache__/validators.cpython-312.pyc +0 -0
  35. src/genai/business_interaction_agent/__pycache__/__init__.cpython-312.pyc +0 -0
  36. src/genai/business_interaction_agent/__pycache__/agent.cpython-312.pyc +0 -0
  37. src/genai/business_interaction_agent/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  38. src/genai/business_interaction_agent/utils/__pycache__/nodes.cpython-312.pyc +0 -0
  39. src/genai/business_interaction_agent/utils/__pycache__/prompts.cpython-312.pyc +0 -0
  40. src/genai/business_interaction_agent/utils/__pycache__/state.cpython-312.pyc +0 -0
  41. src/genai/business_interaction_agent/utils/__pycache__/utils.cpython-312.pyc +0 -0
  42. src/genai/context_analysis_agent/__pycache__/__init__.cpython-312.pyc +0 -0
  43. src/genai/context_analysis_agent/__pycache__/agent.cpython-312.pyc +0 -0
  44. src/genai/context_analysis_agent/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  45. src/genai/context_analysis_agent/utils/__pycache__/nodes.cpython-312.pyc +0 -0
  46. src/genai/context_analysis_agent/utils/__pycache__/prompts.cpython-312.pyc +0 -0
  47. src/genai/context_analysis_agent/utils/__pycache__/state.cpython-312.pyc +0 -0
  48. src/genai/context_analysis_agent/utils/__pycache__/utils.cpython-312.pyc +0 -0
  49. src/genai/human_refined_ideation/__pycache__/__init__.cpython-312.pyc +0 -0
  50. src/genai/human_refined_ideation/__pycache__/agent.cpython-312.pyc +0 -0
.env ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # GROQ_API_KEY = '[REDACTED — leaked secret; revoke this key immediately]'
2
+ HUGGINGFACEHUB_ACCESS_TOKEN='[REDACTED — leaked secret; revoke this token immediately]'
3
+ GROQ_API_KEY='[REDACTED — leaked secret; revoke this key immediately]'
4
+ GOOGLE_API_KEY="[REDACTED — leaked secret; revoke this key immediately]"
5
+ OPENAI_API_KEY = "[REDACTED — leaked secret; revoke this key immediately]"
6
+ ANTHROPIC_API_KEY = "[REDACTED — leaked secret; revoke this key immediately]"
7
+
.gitignore CHANGED
@@ -1,25 +1,2 @@
1
  myenv
2
- .env
3
- static
4
- templates
5
- main_demo.py
6
- image.png
7
- my_agent/utils/check.py
8
- /pages
9
- streamlit_app.py
10
- data
11
  extracted_data.csv
12
- Research
13
- __pycache__/
14
- *.pyc
15
-
16
- # __pycache__
17
- # context_analysis_agent/__pycache__
18
- # context_analysis_agent/utils/__pycache__
19
- # brainstroming_agent/__pycache__
20
- # brainstroming_agent/utils__pycache__
21
- # business_interaction_agent/__pycache__
22
- # business_interaction_agent/utils/__pycache__
23
- # ideation_agent/__pycache__
24
- # ideation_agent/utils/__pycache__
25
-
 
1
  myenv
 
 
 
 
 
 
 
 
 
2
  extracted_data.csv
 
 
 
 
 
 
 
 
 
 
 
 
 
 
Dockerfile DELETED
@@ -1,25 +0,0 @@
1
- # Use an official Python image
2
- FROM python:3.12-slim
3
-
4
- # Set working directory
5
- WORKDIR /app
6
-
7
- # Create a cache directory with proper permissions
8
- RUN mkdir -p /data/.cache/huggingface && chmod -R 777 /data
9
-
10
- # Set environment variables for Hugging Face cache
11
- ENV HF_HOME=/data/.cache/huggingface
12
- ENV TRANSFORMERS_CACHE=/data/.cache/huggingface/transformers
13
- ENV HF_DATASETS_CACHE=/data/.cache/huggingface/datasets
14
-
15
- # Copy code
16
- COPY . .
17
-
18
- # Install dependencies
19
- RUN pip install --no-cache-dir -r requirements.txt
20
-
21
- # Expose the port
22
- EXPOSE 7860
23
-
24
- # Run the FastAPI app
25
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
__pycache__/dummy_state.cpython-312.pyc ADDED
Binary file (2.34 kB). View file
 
api/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (170 Bytes). View file
 
api/__pycache__/main.cpython-312.pyc ADDED
Binary file (1.29 kB). View file
 
api/__pycache__/stored_data.cpython-312.pyc ADDED
Binary file (842 Bytes). View file
 
api/routers/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (178 Bytes). View file
 
api/routers/__pycache__/brainstorm.cpython-312.pyc ADDED
Binary file (2.11 kB). View file
 
api/routers/__pycache__/context_analysis.cpython-312.pyc ADDED
Binary file (1.79 kB). View file
 
api/routers/__pycache__/generate_final_story.cpython-312.pyc ADDED
Binary file (890 Bytes). View file
 
api/routers/__pycache__/generate_image.cpython-312.pyc ADDED
Binary file (1.17 kB). View file
 
api/routers/__pycache__/human_idea_refining.cpython-312.pyc ADDED
Binary file (1.89 kB). View file
 
api/routers/__pycache__/ideation.cpython-312.pyc ADDED
Binary file (1.54 kB). View file
 
api/routers/__pycache__/orchestration.cpython-312.pyc ADDED
Binary file (2.03 kB). View file
 
src/genai/Research/Brainstroming.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
src/genai/Research/ideation.ipynb ADDED
@@ -0,0 +1,931 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": []
7
+ },
8
+ "kernelspec": {
9
+ "name": "python3",
10
+ "display_name": "Python 3"
11
+ },
12
+ "language_info": {
13
+ "name": "python"
14
+ }
15
+ },
16
+ "cells": [
17
+ {
18
+ "cell_type": "code",
19
+ "source": [
20
+ "!pip install langchain langchain_core langgraph langchain_groq langgraph pydantic -U faiss-cpu"
21
+ ],
22
+ "metadata": {
23
+ "id": "9zTQHLHLV8sX",
24
+ "colab": {
25
+ "base_uri": "https://localhost:8080/"
26
+ },
27
+ "outputId": "1a52e540-04d3-4b47-802e-069c848e30ad"
28
+ },
29
+ "execution_count": 7,
30
+ "outputs": [
31
+ {
32
+ "output_type": "stream",
33
+ "name": "stdout",
34
+ "text": [
35
+ "Requirement already satisfied: langchain in /usr/local/lib/python3.11/dist-packages (0.3.25)\n",
36
+ "Requirement already satisfied: langchain_core in /usr/local/lib/python3.11/dist-packages (0.3.60)\n",
37
+ "Collecting langchain_core\n",
38
+ " Downloading langchain_core-0.3.62-py3-none-any.whl.metadata (5.8 kB)\n",
39
+ "Requirement already satisfied: langgraph in /usr/local/lib/python3.11/dist-packages (0.4.7)\n",
40
+ "Requirement already satisfied: langchain_groq in /usr/local/lib/python3.11/dist-packages (0.3.2)\n",
41
+ "Requirement already satisfied: pydantic in /usr/local/lib/python3.11/dist-packages (2.11.4)\n",
42
+ "Collecting pydantic\n",
43
+ " Downloading pydantic-2.11.5-py3-none-any.whl.metadata (67 kB)\n",
44
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.2/67.2 kB\u001b[0m \u001b[31m2.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
45
+ "\u001b[?25hCollecting faiss-cpu\n",
46
+ " Downloading faiss_cpu-1.11.0-cp311-cp311-manylinux_2_28_x86_64.whl.metadata (4.8 kB)\n",
47
+ "Requirement already satisfied: langchain-text-splitters<1.0.0,>=0.3.8 in /usr/local/lib/python3.11/dist-packages (from langchain) (0.3.8)\n",
48
+ "Requirement already satisfied: langsmith<0.4,>=0.1.17 in /usr/local/lib/python3.11/dist-packages (from langchain) (0.3.42)\n",
49
+ "Requirement already satisfied: SQLAlchemy<3,>=1.4 in /usr/local/lib/python3.11/dist-packages (from langchain) (2.0.41)\n",
50
+ "Requirement already satisfied: requests<3,>=2 in /usr/local/lib/python3.11/dist-packages (from langchain) (2.32.3)\n",
51
+ "Requirement already satisfied: PyYAML>=5.3 in /usr/local/lib/python3.11/dist-packages (from langchain) (6.0.2)\n",
52
+ "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /usr/local/lib/python3.11/dist-packages (from langchain_core) (9.1.2)\n",
53
+ "Requirement already satisfied: jsonpatch<2.0,>=1.33 in /usr/local/lib/python3.11/dist-packages (from langchain_core) (1.33)\n",
54
+ "Requirement already satisfied: packaging<25,>=23.2 in /usr/local/lib/python3.11/dist-packages (from langchain_core) (24.2)\n",
55
+ "Requirement already satisfied: typing-extensions>=4.7 in /usr/local/lib/python3.11/dist-packages (from langchain_core) (4.13.2)\n",
56
+ "Requirement already satisfied: langgraph-checkpoint>=2.0.26 in /usr/local/lib/python3.11/dist-packages (from langgraph) (2.0.26)\n",
57
+ "Requirement already satisfied: langgraph-prebuilt>=0.2.0 in /usr/local/lib/python3.11/dist-packages (from langgraph) (0.2.2)\n",
58
+ "Requirement already satisfied: langgraph-sdk>=0.1.42 in /usr/local/lib/python3.11/dist-packages (from langgraph) (0.1.70)\n",
59
+ "Requirement already satisfied: xxhash>=3.5.0 in /usr/local/lib/python3.11/dist-packages (from langgraph) (3.5.0)\n",
60
+ "Requirement already satisfied: groq<1,>=0.4.1 in /usr/local/lib/python3.11/dist-packages (from langchain_groq) (0.25.0)\n",
61
+ "Requirement already satisfied: annotated-types>=0.6.0 in /usr/local/lib/python3.11/dist-packages (from pydantic) (0.7.0)\n",
62
+ "Requirement already satisfied: pydantic-core==2.33.2 in /usr/local/lib/python3.11/dist-packages (from pydantic) (2.33.2)\n",
63
+ "Requirement already satisfied: typing-inspection>=0.4.0 in /usr/local/lib/python3.11/dist-packages (from pydantic) (0.4.1)\n",
64
+ "Requirement already satisfied: numpy<3.0,>=1.25.0 in /usr/local/lib/python3.11/dist-packages (from faiss-cpu) (2.0.2)\n",
65
+ "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/dist-packages (from groq<1,>=0.4.1->langchain_groq) (4.9.0)\n",
66
+ "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/dist-packages (from groq<1,>=0.4.1->langchain_groq) (1.9.0)\n",
67
+ "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/dist-packages (from groq<1,>=0.4.1->langchain_groq) (0.28.1)\n",
68
+ "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/dist-packages (from groq<1,>=0.4.1->langchain_groq) (1.3.1)\n",
69
+ "Requirement already satisfied: jsonpointer>=1.9 in /usr/local/lib/python3.11/dist-packages (from jsonpatch<2.0,>=1.33->langchain_core) (3.0.0)\n",
70
+ "Requirement already satisfied: ormsgpack<2.0.0,>=1.8.0 in /usr/local/lib/python3.11/dist-packages (from langgraph-checkpoint>=2.0.26->langgraph) (1.10.0)\n",
71
+ "Requirement already satisfied: orjson>=3.10.1 in /usr/local/lib/python3.11/dist-packages (from langgraph-sdk>=0.1.42->langgraph) (3.10.18)\n",
72
+ "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /usr/local/lib/python3.11/dist-packages (from langsmith<0.4,>=0.1.17->langchain) (1.0.0)\n",
73
+ "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /usr/local/lib/python3.11/dist-packages (from langsmith<0.4,>=0.1.17->langchain) (0.23.0)\n",
74
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/dist-packages (from requests<3,>=2->langchain) (3.4.2)\n",
75
+ "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/dist-packages (from requests<3,>=2->langchain) (3.10)\n",
76
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/dist-packages (from requests<3,>=2->langchain) (2.4.0)\n",
77
+ "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/dist-packages (from requests<3,>=2->langchain) (2025.4.26)\n",
78
+ "Requirement already satisfied: greenlet>=1 in /usr/local/lib/python3.11/dist-packages (from SQLAlchemy<3,>=1.4->langchain) (3.2.2)\n",
79
+ "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/dist-packages (from httpx<1,>=0.23.0->groq<1,>=0.4.1->langchain_groq) (1.0.9)\n",
80
+ "Requirement already satisfied: h11>=0.16 in /usr/local/lib/python3.11/dist-packages (from httpcore==1.*->httpx<1,>=0.23.0->groq<1,>=0.4.1->langchain_groq) (0.16.0)\n",
81
+ "Downloading langchain_core-0.3.62-py3-none-any.whl (438 kB)\n",
82
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m438.4/438.4 kB\u001b[0m \u001b[31m11.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
83
+ "\u001b[?25hDownloading pydantic-2.11.5-py3-none-any.whl (444 kB)\n",
84
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m444.2/444.2 kB\u001b[0m \u001b[31m22.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
85
+ "\u001b[?25hDownloading faiss_cpu-1.11.0-cp311-cp311-manylinux_2_28_x86_64.whl (31.3 MB)\n",
86
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m31.3/31.3 MB\u001b[0m \u001b[31m25.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
87
+ "\u001b[?25hInstalling collected packages: faiss-cpu, pydantic, langchain_core\n",
88
+ " Attempting uninstall: pydantic\n",
89
+ " Found existing installation: pydantic 2.11.4\n",
90
+ " Uninstalling pydantic-2.11.4:\n",
91
+ " Successfully uninstalled pydantic-2.11.4\n",
92
+ " Attempting uninstall: langchain_core\n",
93
+ " Found existing installation: langchain-core 0.3.60\n",
94
+ " Uninstalling langchain-core-0.3.60:\n",
95
+ " Successfully uninstalled langchain-core-0.3.60\n",
96
+ "Successfully installed faiss-cpu-1.11.0 langchain_core-0.3.62 pydantic-2.11.5\n"
97
+ ]
98
+ }
99
+ ]
100
+ },
101
+ {
102
+ "cell_type": "code",
103
+ "source": [
104
+ "import os\n",
105
+ "import ast\n",
106
+ "from pydantic import BaseModel, Field\n",
107
+ "from typing import Optional\n",
108
+ "import pandas as pd\n",
109
+ "from langchain_core.messages import SystemMessage, HumanMessage\n",
110
+ "from langgraph.graph import StateGraph, START, END\n",
111
+ "from langchain_core.tools import tool\n",
112
+ "from sentence_transformers import SentenceTransformer\n",
113
+ "import numpy as np\n",
114
+ "import faiss\n",
115
+ "\n",
116
+ "\n",
117
+ "from google.colab import userdata\n",
118
+ "os.environ['GROQ_API_KEY']=userdata.get('groq_api_subash')\n",
119
+ "\n",
120
+ "class State(BaseModel):\n",
121
+ " topic: str\n",
122
+ " business_details: Optional[dict]\n",
123
+ " ideator_response: Optional[str] = None\n",
124
+ " critic_response: Optional[str]=None\n",
125
+ " improver_response: Optional[str]=None\n",
126
+ " validator_response: Optional[str]=None\n",
127
+ " validator_defense1: Optional[str]=None\n",
128
+ " validator_defense2: Optional[str]=None\n",
129
+ " validator_defense3: Optional[str]=None\n"
130
+ ],
131
+ "metadata": {
132
+ "id": "7VvhMZXfwatP"
133
+ },
134
+ "execution_count": 1,
135
+ "outputs": []
136
+ },
137
+ {
138
+ "cell_type": "code",
139
+ "source": [
140
+ "from langchain_groq import ChatGroq\n",
141
+ "from langgraph.prebuilt import create_react_agent\n",
142
+ "\n",
143
+ "llm = ChatGroq(\n",
144
+ " model=\"llama3-8b-8192\",\n",
145
+ " temperature=0.3,\n",
146
+ " max_tokens=500,\n",
147
+ "\n",
148
+ ")\n",
149
+ "\n",
150
+ "ideator_llm = llm\n",
151
+ "critic_llm = llm\n",
152
+ "improver_llm = llm\n",
153
+ "validator_llm = llm\n"
154
+ ],
155
+ "metadata": {
156
+ "id": "4RXVmUFG8Osd"
157
+ },
158
+ "execution_count": 2,
159
+ "outputs": []
160
+ },
161
+ {
162
+ "cell_type": "code",
163
+ "source": [
164
+ "\n",
165
+ "ST = SentenceTransformer('mixedbread-ai/mxbai-embed-large-v1')\n",
166
+ "class QueryFormatter(BaseModel):\n",
167
+ " video_topic: str = Field(description=\"The video topic that user passes to the agent\")\n",
168
+ "\n",
169
+ "@tool(\"influencer's data-retrieval-tool\", args_schema=QueryFormatter, return_direct=False,description=\"Retrieve influencer-related data for a given query.\")\n",
170
+ "def retrieve_tool(video_topic):\n",
171
+ " '''\n",
172
+ " Always invoke this tool.\n",
173
+ " Retrieve influencer's data by semantic search of **video topic**.\n",
174
+ " '''\n",
175
+ " # === Load CSV ===\n",
176
+ " csv_path = 'extracted_data.csv'\n",
177
+ " df = pd.read_csv(csv_path)\n",
178
+ "\n",
179
+ " # === Parse stored embeddings ===\n",
180
+ " df['embeddings'] = df['embeddings'].apply(lambda x: ast.literal_eval(x) if isinstance(x, str) else x)\n",
181
+ " embeddings = np.vstack(df['embeddings'].values).astype('float32')\n",
182
+ "\n",
183
+ " # === Build FAISS index ===\n",
184
+ " dimension = embeddings.shape[1]\n",
185
+ " index = faiss.IndexFlatL2(dimension)\n",
186
+ " index.add(embeddings)\n",
187
+ "\n",
188
+ " # === Load SentenceTransformer model ===\n",
189
+ "\n",
190
+ " # === Encode the query and search ===\n",
191
+ " query_embedding = ST.encode(str(video_topic)).reshape(1, -1).astype('float32')\n",
192
+ " top_k=7\n",
193
+ " distances, indices = index.search(query_embedding, top_k)\n",
194
+ "\n",
195
+ "\n",
196
+ "\n",
197
+ " # === Format results ===\n",
198
+ " outer_list = []\n",
199
+ " for i, idx in enumerate(indices[0]):\n",
200
+ " res = {\n",
201
+ " 'rank': i + 1,\n",
202
+ " 'username': df.iloc[idx]['username'],\n",
203
+ " 'story': df.iloc[idx]['story'],\n",
204
+ " 'visible_text_or_brandings': df.iloc[idx]['story'],\n",
205
+ " 'likesCount': df.iloc[idx]['likesCount'],\n",
206
+ " 'commentCount': df.iloc[idx]['commentCount'],\n",
207
+ " }\n",
208
+ "\n",
209
+ " inner_list = []\n",
210
+ " inner_list.append(f\"[{res['rank']}]. The influencer name is: **{res['username']}** — Likes: **{res['likesCount']}**, Comments: **{res['commentCount']}**\")\n",
211
+ " inner_list.append(f\"The story of that particular video is:\\n{res['story']}\")\n",
212
+ " inner_list.append(f\"The branding or promotion done is:\\n{res['visible_text_or_brandings']}\")\n",
213
+ "\n",
214
+ " outer_list.append(inner_list)\n",
215
+ "\n",
216
+ " return str(outer_list)\n",
217
+ "\n",
218
+ "# retrieve_tool('I want to promote my restaurant')"
219
+ ],
220
+ "metadata": {
221
+ "colab": {
222
+ "base_uri": "https://localhost:8080/"
223
+ },
224
+ "id": "AH3YslAR6doR",
225
+ "outputId": "c50bdc94-c153-40e7-ae8b-b2c1a8f329e0"
226
+ },
227
+ "execution_count": 3,
228
+ "outputs": [
229
+ {
230
+ "output_type": "stream",
231
+ "name": "stderr",
232
+ "text": [
233
+ "/usr/local/lib/python3.11/dist-packages/huggingface_hub/utils/_auth.py:94: UserWarning: \n",
234
+ "The secret `HF_TOKEN` does not exist in your Colab secrets.\n",
235
+ "To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n",
236
+ "You will be able to reuse this secret in all of your notebooks.\n",
237
+ "Please note that authentication is recommended but still optional to access public models or datasets.\n",
238
+ " warnings.warn(\n"
239
+ ]
240
+ }
241
+ ]
242
+ },
243
+ {
244
+ "cell_type": "code",
245
+ "source": [
246
+ "\n",
247
+ "def ideator(state:State):\n",
248
+ " tools=[retrieve_tool]\n",
249
+ " react_agent=create_react_agent(\n",
250
+ " model=llm,\n",
251
+ " tools=tools\n",
252
+ " )\n",
253
+ " template = f'''\n",
254
+ " You are an expert video content strategist who generates the storyline of a video in exactly 300 words..\n",
255
+ "\n",
256
+ "Your task is to generate a **detailed and creative storyline** for a promotional video on the **given topic**. The storyline should be structured, engaging, and highly relevant to the following **business details**.\n",
257
+ "Another important thing is that you have to give the response focusing on the response of the tool provided to you. The tool contains the video stories and contents created by the influencers.\n",
258
+ "Use that responses of tool for your reference. You can use your creativity but inside the boundary of tool's response.\n",
259
+ "\n",
260
+ "---\n",
261
+ "\n",
262
+ " **Video Topic**:\n",
263
+ "{state.topic}\n",
264
+ "\n",
265
+ "**Business Details**:\n",
266
+ "{state.business_details}\n",
267
+ "\n",
268
+ "\n",
269
+ "\n",
270
+ "---\n",
271
+ "\n",
272
+ "**Important Instructions**:\n",
273
+ "- Creatively connect the image cues to make the storyline compelling. (If provided).\n",
274
+ "- Structure the storyline logically (e.g., beginning, middle, end), showing what scenes or visuals to include.\n",
275
+ "- Match the tone and depth to the business type (e.g., fun, luxurious, emotional, professional).\n",
276
+ "\n",
277
+ "Now, generate the final storyline for the video topic..\n",
278
+ " '''\n",
279
+ " messages = [SystemMessage(content=template),\n",
280
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n''')]\n",
281
+ "\n",
282
+ " response = react_agent.invoke({'messages':messages})\n",
283
+ " response = response['messages'][-1].content\n",
284
+ " state.ideator_response = response\n",
285
+ " print('Ideator Generated the story')\n",
286
+ " return state\n",
287
+ "\n",
288
+ "\n",
289
+ "\n"
290
+ ],
291
+ "metadata": {
292
+ "id": "xIrE3FV03nQz"
293
+ },
294
+ "execution_count": 4,
295
+ "outputs": []
296
+ },
297
+ {
298
+ "cell_type": "code",
299
+ "source": [
300
+ "def critic(state:State):\n",
301
+ " tools=[retrieve_tool]\n",
302
+ " react_agent=create_react_agent(\n",
303
+ " model=llm,\n",
304
+ " tools=tools\n",
305
+ " )\n",
306
+ " critic_template = f'''\n",
307
+ " You are an expert evaluator and suggestion recommender. Your role is to carefully read the story generated by the Ideator and assess it based on the given video topic, business details, and the tool's response.\n",
308
+ "\n",
309
+ " Your task is to **critically evaluate** the storyline of a promotional video provided below from ideator. The storyline was generated based on a given video topic and business details. Additionally, the tool's response contains existing influencer-style stories or content for reference, which you must **closely consider** to guide your evaluation.\n",
310
+ "\n",
311
+ " ---\n",
312
+ "\n",
313
+ " **Video Topic**:\n",
314
+ " {state.topic}\n",
315
+ "\n",
316
+ " **Business Details**:\n",
317
+ " {state.business_details}\n",
318
+ "\n",
319
+ " **Generated Storyline from ideator**:\n",
320
+ " {state.ideator_response}\n",
321
+ "\n",
322
+ "\n",
323
+ " ---\n",
324
+ "\n",
325
+ " **Your Task as the Critic**:\n",
326
+ "\n",
327
+ " 1. **Evaluate the Storyline**: Analyze the structure, creativity, tone, and alignment with the business type and audience.\n",
328
+ " 2. **Compare with Tool's Response**: Ensure the storyline stays within the creative boundaries and tone of the influencer-style content in the tool response.\n",
329
+ " 3. **Identify Weaknesses**: Point out inconsistencies, missing elements, weak hooks, structural flaws, or areas lacking emotional or persuasive power.\n",
330
+ " 4. **Suggest Improvements**: Provide clear, actionable suggestions to improve the storyline.\n",
331
+ "\n",
332
+ " **Output**:\n",
333
+ " You have to just suggest the improvements in your output in around 100 words only.\n",
334
+ " '''\n",
335
+ "\n",
336
+ " messages = [SystemMessage(content=critic_template),\n",
337
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n. The business_details is\\n{state.business_details}\\n''')]\n",
338
+ "\n",
339
+ " response = react_agent.invoke({'messages':messages})\n",
340
+ " response = response['messages'][-1].content\n",
341
+ " state.critic_response = response\n",
342
+ " print('Critic Evaluated the story')\n",
343
+ "\n",
344
+ " return state\n"
345
+ ],
346
+ "metadata": {
347
+ "id": "OKRJsG0J_a5t"
348
+ },
349
+ "execution_count": 5,
350
+ "outputs": []
351
+ },
352
+ {
353
+ "cell_type": "code",
354
+ "source": [
355
+ "def improver(state:State):\n",
356
+ " react_agent=create_react_agent(\n",
357
+ " model=llm,\n",
358
+ " tools=[]\n",
359
+ " )\n",
360
+ " improver_template = f'''\n",
361
+ " You are a professional video content editor and creative storyteller. Your job is to improve the storyline originally generated by the Ideator using the detailed feedback provided by the Critic.\n",
362
+ "\n",
363
+ " The story was written for a promotional video based on a specific topic and business context. You must revise it while staying aligned with the given business needs and creative direction. The improvement suggestions come from a Critic who has evaluated the structure, tone, creativity, and alignment of the original story.\n",
364
+ "\n",
365
+ " ---\n",
366
+ "\n",
367
+ " **Video Topic**:\n",
368
+ " {state.topic}\n",
369
+ "\n",
370
+ " **Business Details**:\n",
371
+ " {state.business_details}\n",
372
+ "\n",
373
+ " **Original Storyline from Ideator**:\n",
374
+ " {state.ideator_response}\n",
375
+ "\n",
376
+ " **Critic's Suggestions for Improvement**:\n",
377
+ " {state.critic_response}\n",
378
+ "\n",
379
+ " ---\n",
380
+ "\n",
381
+ " **Your Task**:\n",
382
+ "\n",
383
+ " - Carefully read the original story and the critic’s suggestions.\n",
384
+ " - Revise and enhance the storyline, correcting flaws and adding the suggested improvements.\n",
385
+ " - Keep the tone, structure, and message appropriate to the business type and the influencer-style content.\n",
386
+ " - Make sure the final story is structured, compelling, and exactly **300 words** in length.\n",
387
+ "\n",
388
+ " Now, generate the **final improved storyline** for the video.\n",
389
+ " '''\n",
390
+ "\n",
391
+ "\n",
392
+ " messages = [SystemMessage(content=improver_template),\n",
393
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n The business_details is:\\n{state.business_details}''')]\n",
394
+ "\n",
395
+ " response = react_agent.invoke({'messages':messages})\n",
396
+ " response = response['messages'][-1].content\n",
397
+ " state.improver_response = response\n",
398
+ " print('Improver Improvred the story')\n",
399
+ "\n",
400
+ " return state\n"
401
+ ],
402
+ "metadata": {
403
+ "id": "fq0JPkrZCJj-"
404
+ },
405
+ "execution_count": 6,
406
+ "outputs": []
407
+ },
408
+ {
409
+ "cell_type": "code",
410
+ "source": [
411
+ "class ValidationFormatter(BaseModel):\n",
412
+ " result: str = Field(description=\"Returns **validated** if the story is validated. Returns **not validated** if story is not validated.\")\n",
413
+ "\n",
414
+ "def validator(state:State):\n",
415
+ " tools=[retrieve_tool]\n",
416
+ "\n",
417
+ " react_agent=create_react_agent(\n",
418
+ " model=llm,\n",
419
+ " tools=tools,\n",
420
+ " # response_format=ValidationFormatter\n",
421
+ " )\n",
422
+ " validator_template = f'''\n",
423
+ " You are a strict and unbiased judge responsible for evaluating whether a promotional video storyline is ready for publishing.\n",
424
+ "\n",
425
+ " You will be given the video topic, business details, and the final version of the storyline (after improvement). Your task is to assess if this storyline meets all necessary standards in terms of:\n",
426
+ "\n",
427
+ " - Relevance to the video topic\n",
428
+ " - Alignment with the business context\n",
429
+ " - Logical structure (beginning, middle, end)\n",
430
+ " - Tone matching the business type\n",
431
+ " - Creativity and clarity\n",
432
+ "\n",
433
+ " You can also use the tool's response as your reference to make the validation. The tool's response includes existing influencer-style stories or content for reference.\n",
434
+ "\n",
435
+ " ---\n",
436
+ "\n",
437
+ "\n",
438
+ " **Video Topic**:\n",
439
+ " {state.topic}\n",
440
+ "\n",
441
+ " **Business Details**:\n",
442
+ " {state.business_details}\n",
443
+ "\n",
444
+ " **Final Storyline from Improver**:\n",
445
+ " {state.improver_response}\n",
446
+ "\n",
447
+ " ---\n",
448
+ "\n",
449
+ " **Validation Criteria**:\n",
450
+ "\n",
451
+ " - Is the story **fully aligned** with the topic, business details and the tool's response?\n",
452
+ " - Does the structure flow logically and effectively?\n",
453
+ " - Does it feel complete and professional to use this story to create a video?\n",
454
+ "\n",
455
+ " ---\n",
456
+ "\n",
457
+ " **Output Instruction**:\n",
458
+ " Respond strictly just with one of the following values:\n",
459
+ "\n",
460
+ " - `validated` – if the story meets all the criteria and is validated.\n",
461
+ " - `not validated` – if it fails to meet any key criteria and needs further improvement.\n",
462
+ " '''\n",
463
+ "\n",
464
+ "\n",
465
+ "\n",
466
+ " messages = [SystemMessage(content=validator_template),\n",
467
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n The business_details is:\\n{state.business_details}''')]\n",
468
+ "\n",
469
+ " response = llm.with_structured_output(ValidationFormatter).invoke(messages)\n",
470
+ " print('Validator response:',response)\n",
471
+ " state.validator_response = response.result\n",
472
+ " return state"
473
+ ],
474
+ "metadata": {
475
+ "id": "LfgWPNJUFOf7"
476
+ },
477
+ "execution_count": 7,
478
+ "outputs": []
479
+ },
480
+ {
481
+ "cell_type": "code",
482
+ "source": [
483
+ "def validator_defense1(state:State):\n",
484
+ " tools=[retrieve_tool]\n",
485
+ "\n",
486
+ " react_agent=create_react_agent(\n",
487
+ " model=llm,\n",
488
+ " tools=tools,\n",
489
+ " # response_format=ValidationFormatter\n",
490
+ " )\n",
491
+ " validator_template = f'''\n",
492
+ " You are a strict and unbiased judge responsible for evaluating whether a promotional video storyline is ready for publishing.\n",
493
+ "\n",
494
+ " You will be given the video topic, business details, and the final version of the storyline (after improvement). Your task is to assess if this storyline meets all necessary standards in terms of:\n",
495
+ "\n",
496
+ " - Relevance to the video topic\n",
497
+ " - Alignment with the business context\n",
498
+ " - Logical structure (beginning, middle, end)\n",
499
+ " - Tone matching the business type\n",
500
+ " - Creativity and clarity\n",
501
+ "\n",
502
+ " You can also use the tool's response as your reference to make the validation. The tool's response includes existing influencer-style stories or content for reference.\n",
503
+ "\n",
504
+ " ---\n",
505
+ "\n",
506
+ "\n",
507
+ " **Video Topic**:\n",
508
+ " {state.topic}\n",
509
+ "\n",
510
+ " **Business Details**:\n",
511
+ " {state.business_details}\n",
512
+ "\n",
513
+ " **Final Storyline from Improver**:\n",
514
+ " {state.improver_response}\n",
515
+ "\n",
516
+ " ---\n",
517
+ "\n",
518
+ " **Validation Criteria**:\n",
519
+ "\n",
520
+ " - Is the story **fully aligned** with the topic, business details and the tool's response?\n",
521
+ " - Does the structure flow logically and effectively?\n",
522
+ " - Does it feel complete and professional to use this story to create a video?\n",
523
+ "\n",
524
+ " ---\n",
525
+ "\n",
526
+ " **Output Instruction**:\n",
527
+ " Respond strictly just with one of the following values:\n",
528
+ "\n",
529
+ " - `validated` – if the story meets all the criteria and is validated.\n",
530
+ " - `not validated` – if it fails to meet any key criteria and needs further improvement.\n",
531
+ " '''\n",
532
+ "\n",
533
+ "\n",
534
+ "\n",
535
+ " messages = [SystemMessage(content=validator_template),\n",
536
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n The business_details is:\\n{state.business_details}''')]\n",
537
+ "\n",
538
+ " response = llm.with_structured_output(ValidationFormatter).invoke(messages)\n",
539
+ " print('Validator defense 1 response:',response)\n",
540
+ " state.validator_defense1 = response.result\n",
541
+ " return state\n",
542
+ "\n",
543
+ "\n",
544
+ "def validator_defense2(state:State):\n",
545
+ " tools=[retrieve_tool]\n",
546
+ "\n",
547
+ " react_agent=create_react_agent(\n",
548
+ " model=llm,\n",
549
+ " tools=tools,\n",
550
+ " # response_format=ValidationFormatter\n",
551
+ " )\n",
552
+ " validator_template = f'''\n",
553
+ " You are a strict and unbiased judge responsible for evaluating whether a promotional video storyline is ready for publishing.\n",
554
+ "\n",
555
+ " You will be given the video topic, business details, and the final version of the storyline (after improvement). Your task is to assess if this storyline meets all necessary standards in terms of:\n",
556
+ "\n",
557
+ " - Relevance to the video topic\n",
558
+ " - Alignment with the business context\n",
559
+ " - Logical structure (beginning, middle, end)\n",
560
+ " - Tone matching the business type\n",
561
+ " - Creativity and clarity\n",
562
+ "\n",
563
+ " You can also use the tool's response as your reference to make the validation. The tool's response includes existing influencer-style stories or content for reference.\n",
564
+ "\n",
565
+ " ---\n",
566
+ "\n",
567
+ "\n",
568
+ " **Video Topic**:\n",
569
+ " {state.topic}\n",
570
+ "\n",
571
+ " **Business Details**:\n",
572
+ " {state.business_details}\n",
573
+ "\n",
574
+ " **Final Storyline from Improver**:\n",
575
+ " {state.improver_response}\n",
576
+ "\n",
577
+ " ---\n",
578
+ "\n",
579
+ " **Validation Criteria**:\n",
580
+ "\n",
581
+ " - Is the story **fully aligned** with the topic, business details and the tool's response?\n",
582
+ " - Does the structure flow logically and effectively?\n",
583
+ " - Does it feel complete and professional to use this story to create a video?\n",
584
+ "\n",
585
+ " ---\n",
586
+ "\n",
587
+ " **Output Instruction**:\n",
588
+ " Respond strictly just with one of the following values:\n",
589
+ "\n",
590
+ " - `validated` – if the story meets all the criteria and is validated.\n",
591
+ " - `not validated` – if it fails to meet any key criteria and needs further improvement.\n",
592
+ " '''\n",
593
+ "\n",
594
+ "\n",
595
+ "\n",
596
+ " messages = [SystemMessage(content=validator_template),\n",
597
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n The business_details is:\\n{state.business_details}''')]\n",
598
+ "\n",
599
+ " response = llm.with_structured_output(ValidationFormatter).invoke(messages)\n",
600
+ " print('Validator defense 2 response:',response)\n",
601
+ " state.validator_defense2 = response.result\n",
602
+ " return state\n",
603
+ "\n",
604
+ "\n",
605
+ "def validator_defense3(state:State):\n",
606
+ " tools=[retrieve_tool]\n",
607
+ "\n",
608
+ " react_agent=create_react_agent(\n",
609
+ " model=llm,\n",
610
+ " tools=tools,\n",
611
+ " # response_format=ValidationFormatter\n",
612
+ " )\n",
613
+ " validator_template = f'''\n",
614
+ " You are a strict and unbiased judge responsible for evaluating whether a promotional video storyline is ready for publishing.\n",
615
+ "\n",
616
+ " You will be given the video topic, business details, and the final version of the storyline (after improvement). Your task is to assess if this storyline meets all necessary standards in terms of:\n",
617
+ "\n",
618
+ " - Relevance to the video topic\n",
619
+ " - Alignment with the business context\n",
620
+ " - Logical structure (beginning, middle, end)\n",
621
+ " - Tone matching the business type\n",
622
+ " - Creativity and clarity\n",
623
+ "\n",
624
+ " You can also use the tool's response as your reference to make the validation. The tool's response includes existing influencer-style stories or content for reference.\n",
625
+ "\n",
626
+ " ---\n",
627
+ "\n",
628
+ "\n",
629
+ " **Video Topic**:\n",
630
+ " {state.topic}\n",
631
+ "\n",
632
+ " **Business Details**:\n",
633
+ " {state.business_details}\n",
634
+ "\n",
635
+ " **Final Storyline from Improver**:\n",
636
+ " {state.improver_response}\n",
637
+ "\n",
638
+ " ---\n",
639
+ "\n",
640
+ " **Validation Criteria**:\n",
641
+ "\n",
642
+ " - Is the story **fully aligned** with the topic, business details and the tool's response?\n",
643
+ " - Does the structure flow logically and effectively?\n",
644
+ " - Does it feel complete and professional to use this story to create a video?\n",
645
+ "\n",
646
+ " ---\n",
647
+ "\n",
648
+ " **Output Instruction**:\n",
649
+ " Respond strictly just with one of the following values:\n",
650
+ "\n",
651
+ " - `validated` – if the story meets all the criteria and is validated.\n",
652
+ " - `not validated` – if it fails to meet any key criteria and needs further improvement.\n",
653
+ " '''\n",
654
+ "\n",
655
+ "\n",
656
+ "\n",
657
+ " messages = [SystemMessage(content=validator_template),\n",
658
+ " HumanMessage(content=f'''The topic of the video is:\\n{state.topic}\\n The business_details is:\\n{state.business_details}''')]\n",
659
+ "\n",
660
+ " response = llm.with_structured_output(ValidationFormatter).invoke(messages)\n",
661
+ " print('Validator defense 3 response:',response)\n",
662
+ " state.validator_defense3 = response.result\n",
663
+ " return state"
664
+ ],
665
+ "metadata": {
666
+ "id": "WFEYu06yrPre"
667
+ },
668
+ "execution_count": 8,
669
+ "outputs": []
670
+ },
671
+ {
672
+ "cell_type": "code",
673
+ "source": [
674
+ "def route1_after_validation(state:State):\n",
675
+ " if 'not validated' in state.validator_response:\n",
676
+ " return False\n",
677
+ " else:\n",
678
+ " return True\n",
679
+ "\n",
680
+ "def route2_after_validation(state:State):\n",
681
+ " if 'not validated' in state.validator_defense1:\n",
682
+ " return False\n",
683
+ " else:\n",
684
+ " return True\n",
685
+ "def route3_after_validation(state:State):\n",
686
+ " if 'not validated' in state.validator_defense2:\n",
687
+ " return False\n",
688
+ " else:\n",
689
+ " return True\n",
690
+ "def route4_after_validation(state:State):\n",
691
+ " if 'not validated' in state.validator_defense3:\n",
692
+ " return False\n",
693
+ " else:\n",
694
+ " return True"
695
+ ],
696
+ "metadata": {
697
+ "id": "ThZLEFFxjKwE"
698
+ },
699
+ "execution_count": 9,
700
+ "outputs": []
701
+ },
702
+ {
703
+ "cell_type": "code",
704
+ "source": [
705
+ "graph_builder= StateGraph(State)\n",
706
+ "graph_builder.add_node(ideator)\n",
707
+ "graph_builder.add_node(critic)\n",
708
+ "graph_builder.add_node(improver)\n",
709
+ "graph_builder.add_node(validator)\n",
710
+ "graph_builder.add_node(validator_defense1)\n",
711
+ "graph_builder.add_node(validator_defense2)\n",
712
+ "graph_builder.add_node(validator_defense3)\n",
713
+ "\n",
714
+ "\n",
715
+ "graph_builder.add_edge(START, \"ideator\") # Start the graph with node_1\n",
716
+ "graph_builder.add_edge(\"ideator\", \"critic\")\n",
717
+ "graph_builder.add_edge(\"critic\", \"improver\")\n",
718
+ "graph_builder.add_edge(\"improver\", \"validator\")\n",
719
+ "graph_builder.add_edge(\"validator\", \"validator_defense1\")\n",
720
+ "graph_builder.add_edge(\"validator_defense1\", \"validator_defense2\")\n",
721
+ "graph_builder.add_edge(\"validator_defense2\", \"validator_defense3\")\n",
722
+ "graph_builder.add_edge(\"validator_defense3\", END)\n",
723
+ "\n",
724
+ "# Use conditional routing from validator\n",
725
+ "graph_builder.add_conditional_edges(\"validator\", route1_after_validation,{False:'ideator',True:validator_defense1})\n",
726
+ "graph_builder.add_conditional_edges(\"validator_defense1\", route2_after_validation,{False:'ideator',True:validator_defense2})\n",
727
+ "graph_builder.add_conditional_edges(\"validator_defense2\", route3_after_validation,{False:'ideator',True:validator_defense3})\n",
728
+ "graph_builder.add_conditional_edges(\"validator_defense3\", route4_after_validation,{False:'ideator',True:END})\n",
729
+ "\n",
730
+ "graph = graph_builder.compile()\n"
731
+ ],
732
+ "metadata": {
733
+ "colab": {
734
+ "base_uri": "https://localhost:8080/",
735
+ "height": 332
736
+ },
737
+ "id": "-kZyUySjLHDN",
738
+ "outputId": "37026fc3-6925-4256-9a87-6d8d64f4f971"
739
+ },
740
+ "execution_count": 10,
741
+ "outputs": [
742
+ {
743
+ "output_type": "error",
744
+ "ename": "ValueError",
745
+ "evalue": "'validator_defense1' is already being used as a state key",
746
+ "traceback": [
747
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
748
+ "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
749
+ "\u001b[0;32m<ipython-input-10-0d5781a7ad7c>\u001b[0m in \u001b[0;36m<cell line: 0>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mgraph_builder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_node\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mimprover\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mgraph_builder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_node\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mgraph_builder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_node\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidator_defense1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0mgraph_builder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_node\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidator_defense2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mgraph_builder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_node\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidator_defense3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
750
+ "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/langgraph/graph/state.py\u001b[0m in \u001b[0;36madd_node\u001b[0;34m(self, node, action, defer, metadata, input, retry, cache_policy, destinations)\u001b[0m\n\u001b[1;32m 358\u001b[0m )\n\u001b[1;32m 359\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnode\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchannels\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 360\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"'{node}' is already being used as a state key\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 361\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcompiled\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 362\u001b[0m logger.warning(\n",
751
+ "\u001b[0;31mValueError\u001b[0m: 'validator_defense1' is already being used as a state key"
752
+ ]
753
+ }
754
+ ]
755
+ },
756
+ {
757
+ "cell_type": "code",
758
+ "source": [
759
+ "from IPython.display import Image, display\n",
760
+ "\n",
761
+ "try:\n",
762
+ " display(Image(graph.get_graph().draw_mermaid_png()))\n",
763
+ "except Exception:\n",
764
+ " # This requires some extra dependencies and is optional\n",
765
+ " pass"
766
+ ],
767
+ "metadata": {
768
+ "id": "aLblIS1cnRLT"
769
+ },
770
+ "execution_count": null,
771
+ "outputs": []
772
+ },
773
+ {
774
+ "cell_type": "code",
775
+ "source": [
776
+ "state = graph.invoke({\"topic\": \"I want to promote my restaurant in lakeside\",\n",
777
+ " \"business_details\": {\"business_type\": \"restaurant\", \"platform\": \"instagram\", \"target_audience\": \"youths\", \"business_goals\": \"to go global\", \"offerings\": \"nepali foods\", \"Challenges_faced\": \"finding new customers, attracting large customers\"}\n",
778
+ " })"
779
+ ],
780
+ "metadata": {
781
+ "colab": {
782
+ "base_uri": "https://localhost:8080/"
783
+ },
784
+ "id": "csSPO6MwMNoM",
785
+ "outputId": "33fb1b07-c154-4ecd-ab27-f9a2fd7b395c"
786
+ },
787
+ "execution_count": 18,
788
+ "outputs": [
789
+ {
790
+ "output_type": "stream",
791
+ "name": "stdout",
792
+ "text": [
793
+ "Ideator Generated the story\n",
794
+ "Critic Evaluated the story\n",
795
+ "Improver Improvred the story\n",
796
+ "Validator response: result='validated'\n"
797
+ ]
798
+ }
799
+ ]
800
+ },
801
+ {
802
+ "cell_type": "code",
803
+ "source": [
804
+ "state['validator_response']"
805
+ ],
806
+ "metadata": {
807
+ "colab": {
808
+ "base_uri": "https://localhost:8080/",
809
+ "height": 35
810
+ },
811
+ "id": "kriV_zErMvGf",
812
+ "outputId": "a247c871-9e05-4f4b-b670-08cc6a938638"
813
+ },
814
+ "execution_count": 22,
815
+ "outputs": [
816
+ {
817
+ "output_type": "execute_result",
818
+ "data": {
819
+ "text/plain": [
820
+ "'validated'"
821
+ ],
822
+ "application/vnd.google.colaboratory.intrinsic+json": {
823
+ "type": "string"
824
+ }
825
+ },
826
+ "metadata": {},
827
+ "execution_count": 22
828
+ }
829
+ ]
830
+ },
831
+ {
832
+ "cell_type": "code",
833
+ "source": [
834
+ "state['improver_response']"
835
+ ],
836
+ "metadata": {
837
+ "colab": {
838
+ "base_uri": "https://localhost:8080/",
839
+ "height": 122
840
+ },
841
+ "id": "E2onpug1S7sU",
842
+ "outputId": "eb43ce63-1e68-40fa-8956-75f795bc8ce8"
843
+ },
844
+ "execution_count": 19,
845
+ "outputs": [
846
+ {
847
+ "output_type": "execute_result",
848
+ "data": {
849
+ "text/plain": [
850
+ "'Here is the revised storyline for the promotional video:\\n\\n**Title:** \"Experience the Flavors of Nepal at [Your Restaurant Name] in Lakeside\"\\n\\n**Scene 1:** (0:00 - 0:30)\\n\\n* Opening shot of a stunning visual of the restaurant\\'s exterior, with a warm and inviting glow emanating from the windows.\\n* Voiceover: \"Imagine a place where the beauty of Lakeside meets the flavors of Nepal. Welcome to [Your Restaurant Name], where every bite is a journey to the Himalayas.\"\\n\\n**Scene 2:** (0:30 - 1:00)\\n\\n* Cut to a shot of a group of friends laughing and chatting while enjoying their meals at the restaurant.\\n* Voiceover: \"Our restaurant is more than just a place to eat - it\\'s a gathering spot for friends and family to share in the joy of good food and good company.\"\\n\\n**Scene 3:** (1:00 - 1:30)\\n\\n* Cut to a shot of the restaurant\\'s chefs preparing traditional Nepali dishes, with close-ups of the sizzling pans and aromatic spices.\\n* Voiceover: \"Our chefs use only the freshest ingredients to create authentic Nepali dishes that will tantalize your taste buds.\"\\n\\n**Scene 4:** (1:30 - 2:00)\\n\\n* Cut to a shot of a customer taking a bite of a Nepali dish, with a look of delight on their face.\\n* Voiceover: \"From our signature momos to our slow-cooked curries, every bite is a taste sensation that will leave you wanting more.\"\\n\\n**Scene 5:** (2:00 - 2:30)\\n\\n* Cut to a shot of the restaurant\\'s outdoor seating area, with customers enjoying their meals while taking in the stunning views of the lake.\\n* Voiceover: \"And when the weather permits, our outdoor seating area is the perfect spot to enjoy your meal while taking in the breathtaking views of the lake.\"\\n\\n**Scene 6:** (2:30 - 3:00)\\n\\n* Closing shot of the restaurant\\'s exterior at sunset, with the sound of the lake lapping in the background.\\n* Voiceover: \"So why wait? Come and experience the best of Nepali cuisine at [Your Restaurant Name]. 
Book your table now and taste the magic of Nepal in Lakeside!\"\\n\\nThis revised storyline incorporates the critic\\'s suggestions, including a more attention-grabbing opening, emotional connections with the target audience,'"
851
+ ],
852
+ "application/vnd.google.colaboratory.intrinsic+json": {
853
+ "type": "string"
854
+ }
855
+ },
856
+ "metadata": {},
857
+ "execution_count": 19
858
+ }
859
+ ]
860
+ },
861
+ {
862
+ "cell_type": "code",
863
+ "source": [
864
+ "state['critic_response']\n"
865
+ ],
866
+ "metadata": {
867
+ "colab": {
868
+ "base_uri": "https://localhost:8080/",
869
+ "height": 105
870
+ },
871
+ "id": "SSsXuklMTJlJ",
872
+ "outputId": "5b21bb65-2ec6-4d97-ebde-9b58d0b550e0"
873
+ },
874
+ "execution_count": 13,
875
+ "outputs": [
876
+ {
877
+ "output_type": "execute_result",
878
+ "data": {
879
+ "text/plain": [
880
+ "\"Based on the generated storyline, I would suggest the following improvements:\\n\\n* The storyline is quite generic and doesn't specifically highlight the unique aspects of the restaurant or its offerings. Consider incorporating more specific details about the restaurant's menu, ambiance, or staff to make it more engaging.\\n* The storyline focuses more on the influencer's experience rather than the restaurant itself. Consider shifting the focus to the restaurant and its offerings to make it more relevant to the target audience.\\n* The storyline could benefit from a clearer call-to-action (CTA) to encourage viewers to take action, such as visiting the restaurant or trying out a specific dish.\\n\\nOverall, the storyline has potential but could be improved by incorporating more specific details about the restaurant and its offerings, and by shifting the focus to the restaurant itself rather than the influencer's experience.\""
881
+ ],
882
+ "application/vnd.google.colaboratory.intrinsic+json": {
883
+ "type": "string"
884
+ }
885
+ },
886
+ "metadata": {},
887
+ "execution_count": 13
888
+ }
889
+ ]
890
+ },
891
+ {
892
+ "cell_type": "code",
893
+ "source": [
894
+ "state['ideator_response']\n"
895
+ ],
896
+ "metadata": {
897
+ "colab": {
898
+ "base_uri": "https://localhost:8080/",
899
+ "height": 122
900
+ },
901
+ "id": "s9Q8O_pRTPvI",
902
+ "outputId": "9863e2c7-9b84-4862-e62c-db1ab968d4b0"
903
+ },
904
+ "execution_count": 14,
905
+ "outputs": [
906
+ {
907
+ "output_type": "execute_result",
908
+ "data": {
909
+ "text/plain": [
910
+ "'Here is a detailed and creative storyline for a promotional video on your restaurant:\\n\\n**Title:** \"A Taste of Nepal: Discover the Flavors of Home\"\\n\\n**Scene 1:** (0:00 - 0:30)\\n\\n* Opening shot of a bustling street in Kathmandu, Nepal, with the sound of traditional Nepali music playing in the background.\\n* Cut to a shot of your restaurant\\'s exterior, with a sign that reads \"Nepali Food\" in bold letters.\\n* The camera pans inside the restaurant, showing the cozy and inviting atmosphere.\\n\\n**Scene 2:** (0:30 - 1:00)\\n\\n* Cut to a shot of a group of friends gathered around a table, enjoying a meal together.\\n* The camera zooms in on the dishes they\\'re eating, showcasing the variety of Nepali cuisine your restaurant offers.\\n* The friends are laughing and chatting, enjoying each other\\'s company.\\n\\n**Scene 3:** (1:00 - 1:30)\\n\\n* Cut to a shot of the chef, expertly preparing a traditional Nepali dish in the kitchen.\\n* The camera shows the sizzling sounds and aromas of the cooking process, making the viewer\\'s mouth water.\\n* The chef is smiling and chatting with the camera, giving a glimpse into the passion and dedication that goes into preparing each dish.\\n\\n**Scene 4:** (1:30 - 2:00)\\n\\n* Cut to a shot of a customer taking a bite of a dish, with a look of delight on their face.\\n* The camera cuts to a shot of the restaurant\\'s menu, highlighting the variety of options available.\\n* The narrator speaks, \"At [Your Restaurant Name], we\\'re passionate about sharing the flavors of Nepal with our customers. From spicy curries to tender momos, every dish is a taste of home.\"\\n\\n**Scene 5:** (2:00 - 2:30)\\n\\n* Cut to a shot of the restaurant\\'s exterior at night, with twinkling lights and a lively atmosphere.\\n* The camera pans inside, showing the restaurant filled with happy customers and the sound of laughter and chatter.\\n* The narrator speaks, \"So why not join us for a taste of Nepal? 
Book your table now and experience the warmth and hospitality of our restaurant.\"\\n\\n**Scene 6:** (2:30 - 3:00)\\n\\n* Closing shot of the restaurant\\'s logo and contact information on the screen.\\n* The narrator speaks, \"Nep'"
911
+ ],
912
+ "application/vnd.google.colaboratory.intrinsic+json": {
913
+ "type": "string"
914
+ }
915
+ },
916
+ "metadata": {},
917
+ "execution_count": 14
918
+ }
919
+ ]
920
+ },
921
+ {
922
+ "cell_type": "code",
923
+ "source": [],
924
+ "metadata": {
925
+ "id": "ONa-okqQTbi6"
926
+ },
927
+ "execution_count": null,
928
+ "outputs": []
929
+ }
930
+ ]
931
+ }
src/genai/Research/pandas_agent.ipynb ADDED
@@ -0,0 +1,1141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": []
7
+ },
8
+ "kernelspec": {
9
+ "name": "python3",
10
+ "display_name": "Python 3"
11
+ },
12
+ "language_info": {
13
+ "name": "python"
14
+ }
15
+ },
16
+ "cells": [
17
+ {
18
+ "cell_type": "code",
19
+ "execution_count": 1,
20
+ "metadata": {
21
+ "colab": {
22
+ "base_uri": "https://localhost:8080/"
23
+ },
24
+ "id": "DhLp-vDTjInB",
25
+ "outputId": "d40b1ff3-1fca-4dab-fb76-25a66abdce2d"
26
+ },
27
+ "outputs": [
28
+ {
29
+ "output_type": "stream",
30
+ "name": "stdout",
31
+ "text": [
32
+ "Collecting langchain_openai\n",
33
+ " Downloading langchain_openai-0.3.28-py3-none-any.whl.metadata (2.3 kB)\n",
34
+ "Requirement already satisfied: pydantic in /usr/local/lib/python3.11/dist-packages (2.11.7)\n",
35
+ "Requirement already satisfied: langchain-core<1.0.0,>=0.3.68 in /usr/local/lib/python3.11/dist-packages (from langchain_openai) (0.3.70)\n",
36
+ "Requirement already satisfied: openai<2.0.0,>=1.86.0 in /usr/local/lib/python3.11/dist-packages (from langchain_openai) (1.97.0)\n",
37
+ "Requirement already satisfied: tiktoken<1,>=0.7 in /usr/local/lib/python3.11/dist-packages (from langchain_openai) (0.9.0)\n",
38
+ "Requirement already satisfied: annotated-types>=0.6.0 in /usr/local/lib/python3.11/dist-packages (from pydantic) (0.7.0)\n",
39
+ "Requirement already satisfied: pydantic-core==2.33.2 in /usr/local/lib/python3.11/dist-packages (from pydantic) (2.33.2)\n",
40
+ "Requirement already satisfied: typing-extensions>=4.12.2 in /usr/local/lib/python3.11/dist-packages (from pydantic) (4.14.1)\n",
41
+ "Requirement already satisfied: typing-inspection>=0.4.0 in /usr/local/lib/python3.11/dist-packages (from pydantic) (0.4.1)\n",
42
+ "Requirement already satisfied: langsmith>=0.3.45 in /usr/local/lib/python3.11/dist-packages (from langchain-core<1.0.0,>=0.3.68->langchain_openai) (0.4.8)\n",
43
+ "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /usr/local/lib/python3.11/dist-packages (from langchain-core<1.0.0,>=0.3.68->langchain_openai) (8.5.0)\n",
44
+ "Requirement already satisfied: jsonpatch<2.0,>=1.33 in /usr/local/lib/python3.11/dist-packages (from langchain-core<1.0.0,>=0.3.68->langchain_openai) (1.33)\n",
45
+ "Requirement already satisfied: PyYAML>=5.3 in /usr/local/lib/python3.11/dist-packages (from langchain-core<1.0.0,>=0.3.68->langchain_openai) (6.0.2)\n",
46
+ "Requirement already satisfied: packaging>=23.2 in /usr/local/lib/python3.11/dist-packages (from langchain-core<1.0.0,>=0.3.68->langchain_openai) (25.0)\n",
47
+ "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (4.9.0)\n",
48
+ "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (1.9.0)\n",
49
+ "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (0.28.1)\n",
50
+ "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (0.10.0)\n",
51
+ "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (1.3.1)\n",
52
+ "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/dist-packages (from openai<2.0.0,>=1.86.0->langchain_openai) (4.67.1)\n",
53
+ "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.11/dist-packages (from tiktoken<1,>=0.7->langchain_openai) (2024.11.6)\n",
54
+ "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.11/dist-packages (from tiktoken<1,>=0.7->langchain_openai) (2.32.3)\n",
55
+ "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/dist-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.86.0->langchain_openai) (3.10)\n",
56
+ "Requirement already satisfied: certifi in /usr/local/lib/python3.11/dist-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain_openai) (2025.7.14)\n",
57
+ "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/dist-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain_openai) (1.0.9)\n",
58
+ "Requirement already satisfied: h11>=0.16 in /usr/local/lib/python3.11/dist-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain_openai) (0.16.0)\n",
59
+ "Requirement already satisfied: jsonpointer>=1.9 in /usr/local/lib/python3.11/dist-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.68->langchain_openai) (3.0.0)\n",
60
+ "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /usr/local/lib/python3.11/dist-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.68->langchain_openai) (3.11.0)\n",
61
+ "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /usr/local/lib/python3.11/dist-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.68->langchain_openai) (1.0.0)\n",
62
+ "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /usr/local/lib/python3.11/dist-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.68->langchain_openai) (0.23.0)\n",
63
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/dist-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain_openai) (3.4.2)\n",
64
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/dist-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain_openai) (2.5.0)\n",
65
+ "Downloading langchain_openai-0.3.28-py3-none-any.whl (70 kB)\n",
66
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.6/70.6 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
67
+ "\u001b[?25hInstalling collected packages: langchain_openai\n",
68
+ "Successfully installed langchain_openai-0.3.28\n"
69
+ ]
70
+ }
71
+ ],
72
+ "source": [
73
+ "!pip install langchain_openai pydantic"
74
+ ]
75
+ },
76
+ {
77
+ "cell_type": "code",
78
+ "source": [
79
+ "\n",
80
+ "import os\n",
81
+ "from google.colab import userdata\n",
82
+ "os.environ['OPENAI_API_KEY'] = userdata.get('OPENAI_API_KEY')"
83
+ ],
84
+ "metadata": {
85
+ "id": "99ziBbITkabx"
86
+ },
87
+ "execution_count": 3,
88
+ "outputs": []
89
+ },
90
+ {
91
+ "cell_type": "code",
92
+ "source": [
93
+ "from langchain_openai import ChatOpenAI\n",
94
+ "llm_gpt = ChatOpenAI(\n",
95
+ " model=\"gpt-4o-mini\",\n",
96
+ " temperature=0,\n",
97
+ ")"
98
+ ],
99
+ "metadata": {
100
+ "id": "-IVrdTb2kPm3"
101
+ },
102
+ "execution_count": 4,
103
+ "outputs": []
104
+ },
105
+ {
106
+ "cell_type": "code",
107
+ "source": [
108
+ "import pandas as pd\n",
109
+ "df= pd.read_csv('/content/captioned_dataset_full.csv' )\n",
110
+ "df.drop(columns=['Unnamed: 0.1'],inplace=True)\n",
111
+ "df"
112
+ ],
113
+ "metadata": {
114
+ "colab": {
115
+ "base_uri": "https://localhost:8080/",
116
+ "height": 635
117
+ },
118
+ "id": "NEfkLf43kXcb",
119
+ "outputId": "7881edf9-d281-4087-ff95-57d2b85445ca"
120
+ },
121
+ "execution_count": 10,
122
+ "outputs": [
123
+ {
124
+ "output_type": "execute_result",
125
+ "data": {
126
+ "text/plain": [
127
+ " attached_carousel_media_urls attached_media_content \\\n",
128
+ "0 NaN NaN \n",
129
+ "1 NaN NaN \n",
130
+ "2 NaN NaN \n",
131
+ "3 NaN NaN \n",
132
+ "4 NaN NaN \n",
133
+ "... ... ... \n",
134
+ "3232 NaN NaN \n",
135
+ "3233 NaN NaN \n",
136
+ "3234 NaN NaN \n",
137
+ "3235 NaN NaN \n",
138
+ "3236 NaN NaN \n",
139
+ "\n",
140
+ " attached_media_display_url \\\n",
141
+ "0 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
142
+ "1 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
143
+ "2 https://instagram.fiev22-1.fna.fbcdn.net/v/t51... \n",
144
+ "3 https://instagram.fiev22-1.fna.fbcdn.net/v/t51... \n",
145
+ "4 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
146
+ "... ... \n",
147
+ "3232 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
148
+ "3233 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
149
+ "3234 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
150
+ "3235 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
151
+ "3236 https://instagram.fiev22-2.fna.fbcdn.net/v/t51... \n",
152
+ "\n",
153
+ " attached_media_display_url_s3 \\\n",
154
+ "0 NaN \n",
155
+ "1 NaN \n",
156
+ "2 NaN \n",
157
+ "3 NaN \n",
158
+ "4 NaN \n",
159
+ "... ... \n",
160
+ "3232 NaN \n",
161
+ "3233 NaN \n",
162
+ "3234 NaN \n",
163
+ "3235 NaN \n",
164
+ "3236 NaN \n",
165
+ "\n",
166
+ " attached_media_tagged_users \\\n",
167
+ "0 ['adars_fpv', 'dino.exc', 'itsmegauchan', 'pah... \n",
168
+ "1 ['ilive_nepal'] \n",
169
+ "2 NaN \n",
170
+ "3 NaN \n",
171
+ "4 NaN \n",
172
+ "... ... \n",
173
+ "3232 NaN \n",
174
+ "3233 NaN \n",
175
+ "3234 NaN \n",
176
+ "3235 ['citroennepal', 'leapmotor_nepal', 'nissan_ne... \n",
177
+ "3236 NaN \n",
178
+ "\n",
179
+ " attached_video_url clickable_urls \\\n",
180
+ "0 https://instagram.fiev22-1.fna.fbcdn.net/o1/v/... NaN \n",
181
+ "1 https://instagram.fiev22-2.fna.fbcdn.net/o1/v/... NaN \n",
182
+ "2 https://instagram.fiev22-2.fna.fbcdn.net/o1/v/... NaN \n",
183
+ "3 https://instagram.fiev22-2.fna.fbcdn.net/o1/v/... NaN \n",
184
+ "4 https://instagram.fiev22-1.fna.fbcdn.net/o1/v/... NaN \n",
185
+ "... ... ... \n",
186
+ "3232 https://instagram.fiev22-1.fna.fbcdn.net/o1/v/... NaN \n",
187
+ "3233 https://instagram.fiev22-2.fna.fbcdn.net/o1/v/... NaN \n",
188
+ "3234 https://instagram.fiev22-2.fna.fbcdn.net/o1/v/... NaN \n",
189
+ "3235 https://instagram.fiev22-1.fna.fbcdn.net/o1/v/... NaN \n",
190
+ "3236 https://instagram.fiev22-1.fna.fbcdn.net/o1/v/... NaN \n",
191
+ "\n",
192
+ " coauthor_producers comments_count \\\n",
193
+ "0 ['adars_fpv'] 5.0 \n",
194
+ "1 NaN 14.0 \n",
195
+ "2 NaN 3.0 \n",
196
+ "3 NaN 34.0 \n",
197
+ "4 NaN 12.0 \n",
198
+ "... ... ... \n",
199
+ "3232 NaN 4.0 \n",
200
+ "3233 NaN 4.0 \n",
201
+ "3234 NaN 4.0 \n",
202
+ "3235 ['omodajaecoo.nepal', 'zeekrnepal', 'leapmotor... NaN \n",
203
+ "3236 NaN 2.0 \n",
204
+ "\n",
205
+ " created_time ... region_location place_name.1 street.1 \\\n",
206
+ "0 2023-01-28 18:06:17 ... NaN NaN NaN NaN \n",
207
+ "1 2023-03-04 12:00:32 ... NaN NaN NaN NaN \n",
208
+ "2 2023-03-06 12:39:36 ... NaN NaN NaN NaN \n",
209
+ "3 2023-03-25 22:56:36 ... NaN NaN NaN NaN \n",
210
+ "4 2023-04-03 17:53:20 ... NaN NaN NaN NaN \n",
211
+ "... ... ... ... .. ... ... \n",
212
+ "3232 2025-02-02 17:00:00 ... NaN NaN NaN NaN \n",
213
+ "3233 2025-02-16 17:00:55 ... NaN NaN NaN NaN \n",
214
+ "3234 2025-02-24 17:13:55 ... NaN NaN NaN NaN \n",
215
+ "3235 2025-03-28 08:28:26 ... NaN NaN NaN NaN \n",
216
+ "3236 2025-03-30 17:00:00 ... NaN NaN NaN NaN \n",
217
+ "\n",
218
+ " ward.1 city.1 district.1 province.1 country.1 country_code.1 \n",
219
+ "0 NaN NaN NaN NaN NaN NaN \n",
220
+ "1 NaN NaN NaN NaN NaN NaN \n",
221
+ "2 NaN NaN NaN NaN NaN NaN \n",
222
+ "3 NaN NaN NaN NaN NaN NaN \n",
223
+ "4 NaN NaN NaN NaN NaN NaN \n",
224
+ "... ... ... ... ... ... ... \n",
225
+ "3232 NaN NaN NaN NaN NaN NaN \n",
226
+ "3233 NaN NaN NaN NaN NaN NaN \n",
227
+ "3234 NaN NaN NaN NaN NaN NaN \n",
228
+ "3235 NaN NaN NaN NaN NaN NaN \n",
229
+ "3236 NaN NaN NaN NaN NaN NaN \n",
230
+ "\n",
231
+ "[3237 rows x 79 columns]"
232
+ ],
233
+ "text/html": [
234
+ "\n",
235
+ " <div id=\"df-e5e7e153-36e2-40db-ba9d-8ae9683b9dd9\" class=\"colab-df-container\">\n",
236
+ " <div>\n",
237
+ "<style scoped>\n",
238
+ " .dataframe tbody tr th:only-of-type {\n",
239
+ " vertical-align: middle;\n",
240
+ " }\n",
241
+ "\n",
242
+ " .dataframe tbody tr th {\n",
243
+ " vertical-align: top;\n",
244
+ " }\n",
245
+ "\n",
246
+ " .dataframe thead th {\n",
247
+ " text-align: right;\n",
248
+ " }\n",
249
+ "</style>\n",
250
+ "<table border=\"1\" class=\"dataframe\">\n",
251
+ " <thead>\n",
252
+ " <tr style=\"text-align: right;\">\n",
253
+ " <th></th>\n",
254
+ " <th>attached_carousel_media_urls</th>\n",
255
+ " <th>attached_media_content</th>\n",
256
+ " <th>attached_media_display_url</th>\n",
257
+ " <th>attached_media_display_url_s3</th>\n",
258
+ " <th>attached_media_tagged_users</th>\n",
259
+ " <th>attached_video_url</th>\n",
260
+ " <th>clickable_urls</th>\n",
261
+ " <th>coauthor_producers</th>\n",
262
+ " <th>comments_count</th>\n",
263
+ " <th>created_time</th>\n",
264
+ " <th>...</th>\n",
265
+ " <th>region_location</th>\n",
266
+ " <th></th>\n",
267
+ " <th>place_name.1</th>\n",
268
+ " <th>street.1</th>\n",
269
+ " <th>ward.1</th>\n",
270
+ " <th>city.1</th>\n",
271
+ " <th>district.1</th>\n",
272
+ " <th>province.1</th>\n",
273
+ " <th>country.1</th>\n",
274
+ " <th>country_code.1</th>\n",
275
+ " </tr>\n",
276
+ " </thead>\n",
277
+ " <tbody>\n",
278
+ " <tr>\n",
279
+ " <th>0</th>\n",
280
+ " <td>NaN</td>\n",
281
+ " <td>NaN</td>\n",
282
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
283
+ " <td>NaN</td>\n",
284
+ " <td>['adars_fpv', 'dino.exc', 'itsmegauchan', 'pah...</td>\n",
285
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/o1/v/...</td>\n",
286
+ " <td>NaN</td>\n",
287
+ " <td>['adars_fpv']</td>\n",
288
+ " <td>5.0</td>\n",
289
+ " <td>2023-01-28 18:06:17</td>\n",
290
+ " <td>...</td>\n",
291
+ " <td>NaN</td>\n",
292
+ " <td>NaN</td>\n",
293
+ " <td>NaN</td>\n",
294
+ " <td>NaN</td>\n",
295
+ " <td>NaN</td>\n",
296
+ " <td>NaN</td>\n",
297
+ " <td>NaN</td>\n",
298
+ " <td>NaN</td>\n",
299
+ " <td>NaN</td>\n",
300
+ " <td>NaN</td>\n",
301
+ " </tr>\n",
302
+ " <tr>\n",
303
+ " <th>1</th>\n",
304
+ " <td>NaN</td>\n",
305
+ " <td>NaN</td>\n",
306
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
307
+ " <td>NaN</td>\n",
308
+ " <td>['ilive_nepal']</td>\n",
309
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/o1/v/...</td>\n",
310
+ " <td>NaN</td>\n",
311
+ " <td>NaN</td>\n",
312
+ " <td>14.0</td>\n",
313
+ " <td>2023-03-04 12:00:32</td>\n",
314
+ " <td>...</td>\n",
315
+ " <td>NaN</td>\n",
316
+ " <td>NaN</td>\n",
317
+ " <td>NaN</td>\n",
318
+ " <td>NaN</td>\n",
319
+ " <td>NaN</td>\n",
320
+ " <td>NaN</td>\n",
321
+ " <td>NaN</td>\n",
322
+ " <td>NaN</td>\n",
323
+ " <td>NaN</td>\n",
324
+ " <td>NaN</td>\n",
325
+ " </tr>\n",
326
+ " <tr>\n",
327
+ " <th>2</th>\n",
328
+ " <td>NaN</td>\n",
329
+ " <td>NaN</td>\n",
330
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/v/t51...</td>\n",
331
+ " <td>NaN</td>\n",
332
+ " <td>NaN</td>\n",
333
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/o1/v/...</td>\n",
334
+ " <td>NaN</td>\n",
335
+ " <td>NaN</td>\n",
336
+ " <td>3.0</td>\n",
337
+ " <td>2023-03-06 12:39:36</td>\n",
338
+ " <td>...</td>\n",
339
+ " <td>NaN</td>\n",
340
+ " <td>NaN</td>\n",
341
+ " <td>NaN</td>\n",
342
+ " <td>NaN</td>\n",
343
+ " <td>NaN</td>\n",
344
+ " <td>NaN</td>\n",
345
+ " <td>NaN</td>\n",
346
+ " <td>NaN</td>\n",
347
+ " <td>NaN</td>\n",
348
+ " <td>NaN</td>\n",
349
+ " </tr>\n",
350
+ " <tr>\n",
351
+ " <th>3</th>\n",
352
+ " <td>NaN</td>\n",
353
+ " <td>NaN</td>\n",
354
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/v/t51...</td>\n",
355
+ " <td>NaN</td>\n",
356
+ " <td>NaN</td>\n",
357
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/o1/v/...</td>\n",
358
+ " <td>NaN</td>\n",
359
+ " <td>NaN</td>\n",
360
+ " <td>34.0</td>\n",
361
+ " <td>2023-03-25 22:56:36</td>\n",
362
+ " <td>...</td>\n",
363
+ " <td>NaN</td>\n",
364
+ " <td>NaN</td>\n",
365
+ " <td>NaN</td>\n",
366
+ " <td>NaN</td>\n",
367
+ " <td>NaN</td>\n",
368
+ " <td>NaN</td>\n",
369
+ " <td>NaN</td>\n",
370
+ " <td>NaN</td>\n",
371
+ " <td>NaN</td>\n",
372
+ " <td>NaN</td>\n",
373
+ " </tr>\n",
374
+ " <tr>\n",
375
+ " <th>4</th>\n",
376
+ " <td>NaN</td>\n",
377
+ " <td>NaN</td>\n",
378
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
379
+ " <td>NaN</td>\n",
380
+ " <td>NaN</td>\n",
381
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/o1/v/...</td>\n",
382
+ " <td>NaN</td>\n",
383
+ " <td>NaN</td>\n",
384
+ " <td>12.0</td>\n",
385
+ " <td>2023-04-03 17:53:20</td>\n",
386
+ " <td>...</td>\n",
387
+ " <td>NaN</td>\n",
388
+ " <td>NaN</td>\n",
389
+ " <td>NaN</td>\n",
390
+ " <td>NaN</td>\n",
391
+ " <td>NaN</td>\n",
392
+ " <td>NaN</td>\n",
393
+ " <td>NaN</td>\n",
394
+ " <td>NaN</td>\n",
395
+ " <td>NaN</td>\n",
396
+ " <td>NaN</td>\n",
397
+ " </tr>\n",
398
+ " <tr>\n",
399
+ " <th>...</th>\n",
400
+ " <td>...</td>\n",
401
+ " <td>...</td>\n",
402
+ " <td>...</td>\n",
403
+ " <td>...</td>\n",
404
+ " <td>...</td>\n",
405
+ " <td>...</td>\n",
406
+ " <td>...</td>\n",
407
+ " <td>...</td>\n",
408
+ " <td>...</td>\n",
409
+ " <td>...</td>\n",
410
+ " <td>...</td>\n",
411
+ " <td>...</td>\n",
412
+ " <td>...</td>\n",
413
+ " <td>...</td>\n",
414
+ " <td>...</td>\n",
415
+ " <td>...</td>\n",
416
+ " <td>...</td>\n",
417
+ " <td>...</td>\n",
418
+ " <td>...</td>\n",
419
+ " <td>...</td>\n",
420
+ " <td>...</td>\n",
421
+ " </tr>\n",
422
+ " <tr>\n",
423
+ " <th>3232</th>\n",
424
+ " <td>NaN</td>\n",
425
+ " <td>NaN</td>\n",
426
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
427
+ " <td>NaN</td>\n",
428
+ " <td>NaN</td>\n",
429
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/o1/v/...</td>\n",
430
+ " <td>NaN</td>\n",
431
+ " <td>NaN</td>\n",
432
+ " <td>4.0</td>\n",
433
+ " <td>2025-02-02 17:00:00</td>\n",
434
+ " <td>...</td>\n",
435
+ " <td>NaN</td>\n",
436
+ " <td>NaN</td>\n",
437
+ " <td>NaN</td>\n",
438
+ " <td>NaN</td>\n",
439
+ " <td>NaN</td>\n",
440
+ " <td>NaN</td>\n",
441
+ " <td>NaN</td>\n",
442
+ " <td>NaN</td>\n",
443
+ " <td>NaN</td>\n",
444
+ " <td>NaN</td>\n",
445
+ " </tr>\n",
446
+ " <tr>\n",
447
+ " <th>3233</th>\n",
448
+ " <td>NaN</td>\n",
449
+ " <td>NaN</td>\n",
450
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
451
+ " <td>NaN</td>\n",
452
+ " <td>NaN</td>\n",
453
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/o1/v/...</td>\n",
454
+ " <td>NaN</td>\n",
455
+ " <td>NaN</td>\n",
456
+ " <td>4.0</td>\n",
457
+ " <td>2025-02-16 17:00:55</td>\n",
458
+ " <td>...</td>\n",
459
+ " <td>NaN</td>\n",
460
+ " <td>NaN</td>\n",
461
+ " <td>NaN</td>\n",
462
+ " <td>NaN</td>\n",
463
+ " <td>NaN</td>\n",
464
+ " <td>NaN</td>\n",
465
+ " <td>NaN</td>\n",
466
+ " <td>NaN</td>\n",
467
+ " <td>NaN</td>\n",
468
+ " <td>NaN</td>\n",
469
+ " </tr>\n",
470
+ " <tr>\n",
471
+ " <th>3234</th>\n",
472
+ " <td>NaN</td>\n",
473
+ " <td>NaN</td>\n",
474
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
475
+ " <td>NaN</td>\n",
476
+ " <td>NaN</td>\n",
477
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/o1/v/...</td>\n",
478
+ " <td>NaN</td>\n",
479
+ " <td>NaN</td>\n",
480
+ " <td>4.0</td>\n",
481
+ " <td>2025-02-24 17:13:55</td>\n",
482
+ " <td>...</td>\n",
483
+ " <td>NaN</td>\n",
484
+ " <td>NaN</td>\n",
485
+ " <td>NaN</td>\n",
486
+ " <td>NaN</td>\n",
487
+ " <td>NaN</td>\n",
488
+ " <td>NaN</td>\n",
489
+ " <td>NaN</td>\n",
490
+ " <td>NaN</td>\n",
491
+ " <td>NaN</td>\n",
492
+ " <td>NaN</td>\n",
493
+ " </tr>\n",
494
+ " <tr>\n",
495
+ " <th>3235</th>\n",
496
+ " <td>NaN</td>\n",
497
+ " <td>NaN</td>\n",
498
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
499
+ " <td>NaN</td>\n",
500
+ " <td>['citroennepal', 'leapmotor_nepal', 'nissan_ne...</td>\n",
501
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/o1/v/...</td>\n",
502
+ " <td>NaN</td>\n",
503
+ " <td>['omodajaecoo.nepal', 'zeekrnepal', 'leapmotor...</td>\n",
504
+ " <td>NaN</td>\n",
505
+ " <td>2025-03-28 08:28:26</td>\n",
506
+ " <td>...</td>\n",
507
+ " <td>NaN</td>\n",
508
+ " <td>NaN</td>\n",
509
+ " <td>NaN</td>\n",
510
+ " <td>NaN</td>\n",
511
+ " <td>NaN</td>\n",
512
+ " <td>NaN</td>\n",
513
+ " <td>NaN</td>\n",
514
+ " <td>NaN</td>\n",
515
+ " <td>NaN</td>\n",
516
+ " <td>NaN</td>\n",
517
+ " </tr>\n",
518
+ " <tr>\n",
519
+ " <th>3236</th>\n",
520
+ " <td>NaN</td>\n",
521
+ " <td>NaN</td>\n",
522
+ " <td>https://instagram.fiev22-2.fna.fbcdn.net/v/t51...</td>\n",
523
+ " <td>NaN</td>\n",
524
+ " <td>NaN</td>\n",
525
+ " <td>https://instagram.fiev22-1.fna.fbcdn.net/o1/v/...</td>\n",
526
+ " <td>NaN</td>\n",
527
+ " <td>NaN</td>\n",
528
+ " <td>2.0</td>\n",
529
+ " <td>2025-03-30 17:00:00</td>\n",
530
+ " <td>...</td>\n",
531
+ " <td>NaN</td>\n",
532
+ " <td>NaN</td>\n",
533
+ " <td>NaN</td>\n",
534
+ " <td>NaN</td>\n",
535
+ " <td>NaN</td>\n",
536
+ " <td>NaN</td>\n",
537
+ " <td>NaN</td>\n",
538
+ " <td>NaN</td>\n",
539
+ " <td>NaN</td>\n",
540
+ " <td>NaN</td>\n",
541
+ " </tr>\n",
542
+ " </tbody>\n",
543
+ "</table>\n",
544
+ "<p>3237 rows × 79 columns</p>\n",
545
+ "</div>\n",
546
+ " <div class=\"colab-df-buttons\">\n",
547
+ "\n",
548
+ " <div class=\"colab-df-container\">\n",
549
+ " <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-e5e7e153-36e2-40db-ba9d-8ae9683b9dd9')\"\n",
550
+ " title=\"Convert this dataframe to an interactive table.\"\n",
551
+ " style=\"display:none;\">\n",
552
+ "\n",
553
+ " <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\" viewBox=\"0 -960 960 960\">\n",
554
+ " <path d=\"M120-120v-720h720v720H120Zm60-500h600v-160H180v160Zm220 220h160v-160H400v160Zm0 220h160v-160H400v160ZM180-400h160v-160H180v160Zm440 0h160v-160H620v160ZM180-180h160v-160H180v160Zm440 0h160v-160H620v160Z\"/>\n",
555
+ " </svg>\n",
556
+ " </button>\n",
557
+ "\n",
558
+ " <style>\n",
559
+ " .colab-df-container {\n",
560
+ " display:flex;\n",
561
+ " gap: 12px;\n",
562
+ " }\n",
563
+ "\n",
564
+ " .colab-df-convert {\n",
565
+ " background-color: #E8F0FE;\n",
566
+ " border: none;\n",
567
+ " border-radius: 50%;\n",
568
+ " cursor: pointer;\n",
569
+ " display: none;\n",
570
+ " fill: #1967D2;\n",
571
+ " height: 32px;\n",
572
+ " padding: 0 0 0 0;\n",
573
+ " width: 32px;\n",
574
+ " }\n",
575
+ "\n",
576
+ " .colab-df-convert:hover {\n",
577
+ " background-color: #E2EBFA;\n",
578
+ " box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
579
+ " fill: #174EA6;\n",
580
+ " }\n",
581
+ "\n",
582
+ " .colab-df-buttons div {\n",
583
+ " margin-bottom: 4px;\n",
584
+ " }\n",
585
+ "\n",
586
+ " [theme=dark] .colab-df-convert {\n",
587
+ " background-color: #3B4455;\n",
588
+ " fill: #D2E3FC;\n",
589
+ " }\n",
590
+ "\n",
591
+ " [theme=dark] .colab-df-convert:hover {\n",
592
+ " background-color: #434B5C;\n",
593
+ " box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
594
+ " filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
595
+ " fill: #FFFFFF;\n",
596
+ " }\n",
597
+ " </style>\n",
598
+ "\n",
599
+ " <script>\n",
600
+ " const buttonEl =\n",
601
+ " document.querySelector('#df-e5e7e153-36e2-40db-ba9d-8ae9683b9dd9 button.colab-df-convert');\n",
602
+ " buttonEl.style.display =\n",
603
+ " google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
604
+ "\n",
605
+ " async function convertToInteractive(key) {\n",
606
+ " const element = document.querySelector('#df-e5e7e153-36e2-40db-ba9d-8ae9683b9dd9');\n",
607
+ " const dataTable =\n",
608
+ " await google.colab.kernel.invokeFunction('convertToInteractive',\n",
609
+ " [key], {});\n",
610
+ " if (!dataTable) return;\n",
611
+ "\n",
612
+ " const docLinkHtml = 'Like what you see? Visit the ' +\n",
613
+ " '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n",
614
+ " + ' to learn more about interactive tables.';\n",
615
+ " element.innerHTML = '';\n",
616
+ " dataTable['output_type'] = 'display_data';\n",
617
+ " await google.colab.output.renderOutput(dataTable, element);\n",
618
+ " const docLink = document.createElement('div');\n",
619
+ " docLink.innerHTML = docLinkHtml;\n",
620
+ " element.appendChild(docLink);\n",
621
+ " }\n",
622
+ " </script>\n",
623
+ " </div>\n",
624
+ "\n",
625
+ "\n",
626
+ " <div id=\"df-b0e074ba-3e25-4dfb-848b-b195f64d6a08\">\n",
627
+ " <button class=\"colab-df-quickchart\" onclick=\"quickchart('df-b0e074ba-3e25-4dfb-848b-b195f64d6a08')\"\n",
628
+ " title=\"Suggest charts\"\n",
629
+ " style=\"display:none;\">\n",
630
+ "\n",
631
+ "<svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n",
632
+ " width=\"24px\">\n",
633
+ " <g>\n",
634
+ " <path d=\"M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9 17H7v-7h2v7zm4 0h-2V7h2v10zm4 0h-2v-4h2v4z\"/>\n",
635
+ " </g>\n",
636
+ "</svg>\n",
637
+ " </button>\n",
638
+ "\n",
639
+ "<style>\n",
640
+ " .colab-df-quickchart {\n",
641
+ " --bg-color: #E8F0FE;\n",
642
+ " --fill-color: #1967D2;\n",
643
+ " --hover-bg-color: #E2EBFA;\n",
644
+ " --hover-fill-color: #174EA6;\n",
645
+ " --disabled-fill-color: #AAA;\n",
646
+ " --disabled-bg-color: #DDD;\n",
647
+ " }\n",
648
+ "\n",
649
+ " [theme=dark] .colab-df-quickchart {\n",
650
+ " --bg-color: #3B4455;\n",
651
+ " --fill-color: #D2E3FC;\n",
652
+ " --hover-bg-color: #434B5C;\n",
653
+ " --hover-fill-color: #FFFFFF;\n",
654
+ " --disabled-bg-color: #3B4455;\n",
655
+ " --disabled-fill-color: #666;\n",
656
+ " }\n",
657
+ "\n",
658
+ " .colab-df-quickchart {\n",
659
+ " background-color: var(--bg-color);\n",
660
+ " border: none;\n",
661
+ " border-radius: 50%;\n",
662
+ " cursor: pointer;\n",
663
+ " display: none;\n",
664
+ " fill: var(--fill-color);\n",
665
+ " height: 32px;\n",
666
+ " padding: 0;\n",
667
+ " width: 32px;\n",
668
+ " }\n",
669
+ "\n",
670
+ " .colab-df-quickchart:hover {\n",
671
+ " background-color: var(--hover-bg-color);\n",
672
+ " box-shadow: 0 1px 2px rgba(60, 64, 67, 0.3), 0 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
673
+ " fill: var(--button-hover-fill-color);\n",
674
+ " }\n",
675
+ "\n",
676
+ " .colab-df-quickchart-complete:disabled,\n",
677
+ " .colab-df-quickchart-complete:disabled:hover {\n",
678
+ " background-color: var(--disabled-bg-color);\n",
679
+ " fill: var(--disabled-fill-color);\n",
680
+ " box-shadow: none;\n",
681
+ " }\n",
682
+ "\n",
683
+ " .colab-df-spinner {\n",
684
+ " border: 2px solid var(--fill-color);\n",
685
+ " border-color: transparent;\n",
686
+ " border-bottom-color: var(--fill-color);\n",
687
+ " animation:\n",
688
+ " spin 1s steps(1) infinite;\n",
689
+ " }\n",
690
+ "\n",
691
+ " @keyframes spin {\n",
692
+ " 0% {\n",
693
+ " border-color: transparent;\n",
694
+ " border-bottom-color: var(--fill-color);\n",
695
+ " border-left-color: var(--fill-color);\n",
696
+ " }\n",
697
+ " 20% {\n",
698
+ " border-color: transparent;\n",
699
+ " border-left-color: var(--fill-color);\n",
700
+ " border-top-color: var(--fill-color);\n",
701
+ " }\n",
702
+ " 30% {\n",
703
+ " border-color: transparent;\n",
704
+ " border-left-color: var(--fill-color);\n",
705
+ " border-top-color: var(--fill-color);\n",
706
+ " border-right-color: var(--fill-color);\n",
707
+ " }\n",
708
+ " 40% {\n",
709
+ " border-color: transparent;\n",
710
+ " border-right-color: var(--fill-color);\n",
711
+ " border-top-color: var(--fill-color);\n",
712
+ " }\n",
713
+ " 60% {\n",
714
+ " border-color: transparent;\n",
715
+ " border-right-color: var(--fill-color);\n",
716
+ " }\n",
717
+ " 80% {\n",
718
+ " border-color: transparent;\n",
719
+ " border-right-color: var(--fill-color);\n",
720
+ " border-bottom-color: var(--fill-color);\n",
721
+ " }\n",
722
+ " 90% {\n",
723
+ " border-color: transparent;\n",
724
+ " border-bottom-color: var(--fill-color);\n",
725
+ " }\n",
726
+ " }\n",
727
+ "</style>\n",
728
+ "\n",
729
+ " <script>\n",
730
+ " async function quickchart(key) {\n",
731
+ " const quickchartButtonEl =\n",
732
+ " document.querySelector('#' + key + ' button');\n",
733
+ " quickchartButtonEl.disabled = true; // To prevent multiple clicks.\n",
734
+ " quickchartButtonEl.classList.add('colab-df-spinner');\n",
735
+ " try {\n",
736
+ " const charts = await google.colab.kernel.invokeFunction(\n",
737
+ " 'suggestCharts', [key], {});\n",
738
+ " } catch (error) {\n",
739
+ " console.error('Error during call to suggestCharts:', error);\n",
740
+ " }\n",
741
+ " quickchartButtonEl.classList.remove('colab-df-spinner');\n",
742
+ " quickchartButtonEl.classList.add('colab-df-quickchart-complete');\n",
743
+ " }\n",
744
+ " (() => {\n",
745
+ " let quickchartButtonEl =\n",
746
+ " document.querySelector('#df-b0e074ba-3e25-4dfb-848b-b195f64d6a08 button');\n",
747
+ " quickchartButtonEl.style.display =\n",
748
+ " google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
749
+ " })();\n",
750
+ " </script>\n",
751
+ " </div>\n",
752
+ "\n",
753
+ " <div id=\"id_81d18ef7-d1e8-4a5e-b24e-77f5cc21a044\">\n",
754
+ " <style>\n",
755
+ " .colab-df-generate {\n",
756
+ " background-color: #E8F0FE;\n",
757
+ " border: none;\n",
758
+ " border-radius: 50%;\n",
759
+ " cursor: pointer;\n",
760
+ " display: none;\n",
761
+ " fill: #1967D2;\n",
762
+ " height: 32px;\n",
763
+ " padding: 0 0 0 0;\n",
764
+ " width: 32px;\n",
765
+ " }\n",
766
+ "\n",
767
+ " .colab-df-generate:hover {\n",
768
+ " background-color: #E2EBFA;\n",
769
+ " box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
770
+ " fill: #174EA6;\n",
771
+ " }\n",
772
+ "\n",
773
+ " [theme=dark] .colab-df-generate {\n",
774
+ " background-color: #3B4455;\n",
775
+ " fill: #D2E3FC;\n",
776
+ " }\n",
777
+ "\n",
778
+ " [theme=dark] .colab-df-generate:hover {\n",
779
+ " background-color: #434B5C;\n",
780
+ " box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
781
+ " filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
782
+ " fill: #FFFFFF;\n",
783
+ " }\n",
784
+ " </style>\n",
785
+ " <button class=\"colab-df-generate\" onclick=\"generateWithVariable('df')\"\n",
786
+ " title=\"Generate code using this dataframe.\"\n",
787
+ " style=\"display:none;\">\n",
788
+ "\n",
789
+ " <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n",
790
+ " width=\"24px\">\n",
791
+ " <path d=\"M7,19H8.4L18.45,9,17,7.55,7,17.6ZM5,21V16.75L18.45,3.32a2,2,0,0,1,2.83,0l1.4,1.43a1.91,1.91,0,0,1,.58,1.4,1.91,1.91,0,0,1-.58,1.4L9.25,21ZM18.45,9,17,7.55Zm-12,3A5.31,5.31,0,0,0,4.9,8.1,5.31,5.31,0,0,0,1,6.5,5.31,5.31,0,0,0,4.9,4.9,5.31,5.31,0,0,0,6.5,1,5.31,5.31,0,0,0,8.1,4.9,5.31,5.31,0,0,0,12,6.5,5.46,5.46,0,0,0,6.5,12Z\"/>\n",
792
+ " </svg>\n",
793
+ " </button>\n",
794
+ " <script>\n",
795
+ " (() => {\n",
796
+ " const buttonEl =\n",
797
+ " document.querySelector('#id_81d18ef7-d1e8-4a5e-b24e-77f5cc21a044 button.colab-df-generate');\n",
798
+ " buttonEl.style.display =\n",
799
+ " google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
800
+ "\n",
801
+ " buttonEl.onclick = () => {\n",
802
+ " google.colab.notebook.generateWithVariable('df');\n",
803
+ " }\n",
804
+ " })();\n",
805
+ " </script>\n",
806
+ " </div>\n",
807
+ "\n",
808
+ " </div>\n",
809
+ " </div>\n"
810
+ ],
811
+ "application/vnd.google.colaboratory.intrinsic+json": {
812
+ "type": "dataframe",
813
+ "variable_name": "df"
814
+ }
815
+ },
816
+ "metadata": {},
817
+ "execution_count": 10
818
+ }
819
+ ]
820
+ },
821
+ {
822
+ "cell_type": "code",
823
+ "source": [
824
+ "list(df['owner_username'].unique())"
825
+ ],
826
+ "metadata": {
827
+ "colab": {
828
+ "base_uri": "https://localhost:8080/"
829
+ },
830
+ "id": "1rEZH0RktLr4",
831
+ "outputId": "2247731e-9992-457c-a8a3-958405cbc201"
832
+ },
833
+ "execution_count": 45,
834
+ "outputs": [
835
+ {
836
+ "output_type": "execute_result",
837
+ "data": {
838
+ "text/plain": [
839
+ "['brocadeofficial',\n",
840
+ " 'adars_fpv',\n",
841
+ " 'the_levitator',\n",
842
+ " 'garudx.fpv',\n",
843
+ " 'lalitpurcityfootballclub',\n",
844
+ " 'shrijanshresthaa',\n",
845
+ " 'blacksheepfpv',\n",
846
+ " 'ur_shoaib',\n",
847
+ " 'ajaytm43',\n",
848
+ " 'thehybriddesigner.np',\n",
849
+ " 'anishakafle',\n",
850
+ " 'divyadhakal_',\n",
851
+ " 'roshaan.g',\n",
852
+ " 'unicefsouthasia',\n",
853
+ " 'bhawanaraut1',\n",
854
+ " 'get_nepal',\n",
855
+ " 'columbianepal',\n",
856
+ " 'withbibek',\n",
857
+ " 'diwasg',\n",
858
+ " 'haitentertainment',\n",
859
+ " 'octaveeventss',\n",
860
+ " 'rvl.tv',\n",
861
+ " 'ruslanstudionepal',\n",
862
+ " 'ashishflute_official',\n",
863
+ " 'michaelwumusic',\n",
864
+ " 'mandygamma',\n",
865
+ " 'enspace.np',\n",
866
+ " 'digixel.inc',\n",
867
+ " 'underadar_official',\n",
868
+ " 'sahatkasa',\n",
869
+ " 'eatgrubfood',\n",
870
+ " 'food_explorer_life',\n",
871
+ " 'kaffacafe_by_sukunda',\n",
872
+ " 'ggkaam',\n",
873
+ " 'gracebhattarai',\n",
874
+ " 'ufo_nepal',\n",
875
+ " 'iam_s.i.d.d_',\n",
876
+ " 'sumitraut._',\n",
877
+ " 'lowenbeauty',\n",
878
+ " 'lamuse.np',\n",
879
+ " 'phoebe_bhattarai',\n",
880
+ " 'imsurakshyakc',\n",
881
+ " 'vaishaali_',\n",
882
+ " 'chamsbro_erawan',\n",
883
+ " 'istu_karki',\n",
884
+ " 'brillarenepal',\n",
885
+ " 'team.nftp',\n",
886
+ " 'jholeyism',\n",
887
+ " 'youngbluesss',\n",
888
+ " 'younglion888',\n",
889
+ " 'ashimneu',\n",
890
+ " 'newjalshrestha',\n",
891
+ " 'sunflower.weekender',\n",
892
+ " '_jenishm',\n",
893
+ " 'swtalmbu',\n",
894
+ " 'mrbvlog2.0',\n",
895
+ " 'motoworld.np',\n",
896
+ " 'bijyayy',\n",
897
+ " 'newstyle_fashionwear_official',\n",
898
+ " 'munachiya',\n",
899
+ " 'flightsgyaninepal',\n",
900
+ " 'buddhaland_co',\n",
901
+ " 'highgroundnepal',\n",
902
+ " 's.traveller_nextdoor',\n",
903
+ " nan,\n",
904
+ " 'mydarlingfood',\n",
905
+ " 'riyasthaa',\n",
906
+ " 'uptrendly.creators',\n",
907
+ " 'nepal.food',\n",
908
+ " 'mahindranepal_agnigroup',\n",
909
+ " '_thehappyhikers_',\n",
910
+ " 'hyderabadidumbiryani.nepal',\n",
911
+ " 'nnzeella_shrestha',\n",
912
+ " 'manify.np',\n",
913
+ " 'sareeshashrestha',\n",
914
+ " 'aryan_g.r.g',\n",
915
+ " 'rtw.np',\n",
916
+ " '_prabha__gurung_',\n",
917
+ " 'nepartist',\n",
918
+ " 'mamitapun',\n",
919
+ " '_sunny_magar_',\n",
920
+ " 'pramuditaaudas',\n",
921
+ " 'orabelle.np',\n",
922
+ " 'mishisacosmetics',\n",
923
+ " 'hree_store',\n",
924
+ " 'cliaraessentialoil_np',\n",
925
+ " 'muscles_lab',\n",
926
+ " 'rabindra_dhant1',\n",
927
+ " 'teks.fitness',\n",
928
+ " 'diwizpiyalama',\n",
929
+ " 'locknroll.mma',\n",
930
+ " 'nepalwarriorschampionship',\n",
931
+ " 'bjj_asia',\n",
932
+ " 'onechampionship',\n",
933
+ " 'paradygmsportsnetwork',\n",
934
+ " 'latidonepal',\n",
935
+ " 'grappleasiapod',\n",
936
+ " 'somafightclub',\n",
937
+ " 'razee.maharjan',\n",
938
+ " 'razeechan',\n",
939
+ " '_anupchau',\n",
940
+ " 'shrishes_mdr',\n",
941
+ " 'ideapreneurnepal',\n",
942
+ " 'sanjogkoirala_',\n",
943
+ " 'shreepaisaofficial',\n",
944
+ " 'sushant_pradhan_',\n",
945
+ " 'hemantabhandari_',\n",
946
+ " 'usembassynepal',\n",
947
+ " 'foodmandu',\n",
948
+ " 'thismorninglive',\n",
949
+ " 'asifshah_nepal',\n",
950
+ " 'sahikura_',\n",
951
+ " 'entrepreneur_nepal',\n",
952
+ " 'kathmandu_locals',\n",
953
+ " 'colgatenepal',\n",
954
+ " 'siddinathapa',\n",
955
+ " 'sjpoon_official',\n",
956
+ " '_me_supriya',\n",
957
+ " '_its.me.muskan_',\n",
958
+ " 'tashyilha_g',\n",
959
+ " 'instaa.goddess',\n",
960
+ " 'cubex.clo',\n",
961
+ " 'newmew.nepal',\n",
962
+ " 'koreankini',\n",
963
+ " 'samirstha319',\n",
964
+ " 'pubgm.np.official',\n",
965
+ " '_richa.s',\n",
966
+ " 'buwb.pod',\n",
967
+ " 'chankheyproduction',\n",
968
+ " 'openhousepkr',\n",
969
+ " '_contentsby.richa']"
970
+ ]
971
+ },
972
+ "metadata": {},
973
+ "execution_count": 45
974
+ }
975
+ ]
976
+ },
977
+ {
978
+ "cell_type": "code",
979
+ "source": [
980
+ " query = 'How many likes have divya dhakal got while promoting BYD'\n",
981
+ " prompt = f'''Given a pandas DataFrame with columns:{list(df.columns)}, usernames:{list(df['owner_username'].unique())}, write pandas code to {query}. Return only valid Python code. Don't return the imports too. Don't even include the **python** keyword in the beginning. Just give me one liner runnable code'''"
982
+ ],
983
+ "metadata": {
984
+ "id": "WZz94osZk9DS"
985
+ },
986
+ "execution_count": 80,
987
+ "outputs": []
988
+ },
989
+ {
990
+ "cell_type": "code",
991
+ "source": [
992
+ "code_response=llm_gpt.invoke(prompt).content\n",
993
+ "code_response"
994
+ ],
995
+ "metadata": {
996
+ "colab": {
997
+ "base_uri": "https://localhost:8080/",
998
+ "height": 35
999
+ },
1000
+ "id": "7-xfQ7IvlhR9",
1001
+ "outputId": "145ffaf2-024f-463a-b700-df8e003ef6fe"
1002
+ },
1003
+ "execution_count": 81,
1004
+ "outputs": [
1005
+ {
1006
+ "output_type": "execute_result",
1007
+ "data": {
1008
+ "text/plain": [
1009
+ "\"df[(df['owner_username'] == 'divyadhakal_') & (df['brand_promoted'] == 'BYD')]['likes_count'].sum()\""
1010
+ ],
1011
+ "application/vnd.google.colaboratory.intrinsic+json": {
1012
+ "type": "string"
1013
+ }
1014
+ },
1015
+ "metadata": {},
1016
+ "execution_count": 81
1017
+ }
1018
+ ]
1019
+ },
1020
+ {
1021
+ "cell_type": "code",
1022
+ "source": [
1023
+ "def execute_pandas_query(code_str: str, df: pd.DataFrame):\n",
1024
+ " code = code_str.strip()\n",
1025
+ " print('The code is:', code)\n",
1026
+ " try:\n",
1027
+ " local_vars = {\"df\": df} # Pass your existing DataFrame\n",
1028
+ " result = eval(code_str, {}, local_vars)\n",
1029
+ " print('The result is:', result)\n",
1030
+ " return result\n",
1031
+ "\n",
1032
+ " except Exception as e:\n",
1033
+ " raise RuntimeError(f\"Execution error: {e}\")"
1034
+ ],
1035
+ "metadata": {
1036
+ "id": "YwfKchhbqU_1"
1037
+ },
1038
+ "execution_count": 82,
1039
+ "outputs": []
1040
+ },
1041
+ {
1042
+ "cell_type": "code",
1043
+ "source": [
1044
+ "# Example use:\n",
1045
+ "count = execute_pandas_query(\n",
1046
+ " code_response,\n",
1047
+ " df\n",
1048
+ ")\n",
1049
+ "print(\"Result:\", count)"
1050
+ ],
1051
+ "metadata": {
1052
+ "colab": {
1053
+ "base_uri": "https://localhost:8080/"
1054
+ },
1055
+ "id": "OybMPTs2sEKh",
1056
+ "outputId": "62dd72f5-b39b-460b-a9a9-a21f60bb162c"
1057
+ },
1058
+ "execution_count": 83,
1059
+ "outputs": [
1060
+ {
1061
+ "output_type": "stream",
1062
+ "name": "stdout",
1063
+ "text": [
1064
+ "The code is: df[(df['owner_username'] == 'divyadhakal_') & (df['brand_promoted'] == 'BYD')]['likes_count'].sum()\n",
1065
+ "The result is: 0.0\n",
1066
+ "Result: 0.0\n"
1067
+ ]
1068
+ }
1069
+ ]
1070
+ },
1071
+ {
1072
+ "cell_type": "code",
1073
+ "source": [
1074
+ "df[(df['owner_username'] == 'divyadhakal_') & (df['likes_count'] > 1000)].shape[0]"
1075
+ ],
1076
+ "metadata": {
1077
+ "colab": {
1078
+ "base_uri": "https://localhost:8080/"
1079
+ },
1080
+ "id": "FRVpCf2isHqn",
1081
+ "outputId": "845b25a1-7a9c-471d-9bd9-29a9bdd06897"
1082
+ },
1083
+ "execution_count": 50,
1084
+ "outputs": [
1085
+ {
1086
+ "output_type": "execute_result",
1087
+ "data": {
1088
+ "text/plain": [
1089
+ "159"
1090
+ ]
1091
+ },
1092
+ "metadata": {},
1093
+ "execution_count": 50
1094
+ }
1095
+ ]
1096
+ },
1097
+ {
1098
+ "cell_type": "code",
1099
+ "source": [
1100
+ "# Example DataFrame (only if you haven't defined it already)\n",
1101
+ "import pandas as pd\n",
1102
+ "\n",
1103
+ "# Assuming 'df' already exists with correct columns\n",
1104
+ "# If not, here's a dummy example:\n",
1105
+ "# df = pd.DataFrame({'owner_username': ['divyadhakal_'], 'likes_count': [1500]})\n",
1106
+ "\n",
1107
+ "code_str = \"df[(df['owner_username'] == 'divyadhakal_') & (df['likes_count'] > 1000)].shape[0]\"\n",
1108
+ "local_vars = {\"df\": df} # Pass your existing DataFrame\n",
1109
+ "\n",
1110
+ "result = eval(code_str, {}, local_vars)\n",
1111
+ "print(\"Result:\", result)\n"
1112
+ ],
1113
+ "metadata": {
1114
+ "colab": {
1115
+ "base_uri": "https://localhost:8080/"
1116
+ },
1117
+ "id": "Pqn5OHRUsT7P",
1118
+ "outputId": "bf75f028-c567-4824-bb95-293aa1485a10"
1119
+ },
1120
+ "execution_count": 51,
1121
+ "outputs": [
1122
+ {
1123
+ "output_type": "stream",
1124
+ "name": "stdout",
1125
+ "text": [
1126
+ "Result: 159\n"
1127
+ ]
1128
+ }
1129
+ ]
1130
+ },
1131
+ {
1132
+ "cell_type": "code",
1133
+ "source": [],
1134
+ "metadata": {
1135
+ "id": "mf2ttVert_qz"
1136
+ },
1137
+ "execution_count": null,
1138
+ "outputs": []
1139
+ }
1140
+ ]
1141
+ }
src/genai/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (176 Bytes). View file
 
src/genai/brainstroming_agent/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (176 Bytes). View file
 
src/genai/brainstroming_agent/__pycache__/agent.cpython-312.pyc ADDED
Binary file (1.38 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (182 Bytes). View file
 
src/genai/brainstroming_agent/utils/__pycache__/business_interaction.cpython-312.pyc ADDED
Binary file (5.25 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/check.cpython-312.pyc ADDED
Binary file (4.69 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/data_loader.cpython-312.pyc ADDED
Binary file (637 Bytes). View file
 
src/genai/brainstroming_agent/utils/__pycache__/initial_interaction.cpython-312.pyc ADDED
Binary file (4.17 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/models.cpython-312.pyc ADDED
Binary file (933 Bytes). View file
 
src/genai/brainstroming_agent/utils/__pycache__/models_loader.cpython-312.pyc ADDED
Binary file (2.35 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/nodes.cpython-312.pyc ADDED
Binary file (6.1 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/prompts.cpython-312.pyc ADDED
Binary file (8.01 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/state.cpython-312.pyc ADDED
Binary file (2.14 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/tools.cpython-312.pyc ADDED
Binary file (2.78 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/utils.cpython-312.pyc ADDED
Binary file (6.47 kB). View file
 
src/genai/brainstroming_agent/utils/__pycache__/validators.cpython-312.pyc ADDED
Binary file (698 Bytes). View file
 
src/genai/business_interaction_agent/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (194 Bytes). View file
 
src/genai/business_interaction_agent/__pycache__/agent.cpython-312.pyc ADDED
Binary file (6.32 kB). View file
 
src/genai/business_interaction_agent/utils/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (200 Bytes). View file
 
src/genai/business_interaction_agent/utils/__pycache__/nodes.cpython-312.pyc ADDED
Binary file (1.43 kB). View file
 
src/genai/business_interaction_agent/utils/__pycache__/prompts.cpython-312.pyc ADDED
Binary file (4.7 kB). View file
 
src/genai/business_interaction_agent/utils/__pycache__/state.cpython-312.pyc ADDED
Binary file (1.66 kB). View file
 
src/genai/business_interaction_agent/utils/__pycache__/utils.cpython-312.pyc ADDED
Binary file (3.02 kB). View file
 
src/genai/context_analysis_agent/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (190 Bytes). View file
 
src/genai/context_analysis_agent/__pycache__/agent.cpython-312.pyc ADDED
Binary file (3.63 kB). View file
 
src/genai/context_analysis_agent/utils/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (196 Bytes). View file
 
src/genai/context_analysis_agent/utils/__pycache__/nodes.cpython-312.pyc ADDED
Binary file (1.19 kB). View file
 
src/genai/context_analysis_agent/utils/__pycache__/prompts.cpython-312.pyc ADDED
Binary file (2.31 kB). View file
 
src/genai/context_analysis_agent/utils/__pycache__/state.cpython-312.pyc ADDED
Binary file (1.53 kB). View file
 
src/genai/context_analysis_agent/utils/__pycache__/utils.cpython-312.pyc ADDED
Binary file (1.83 kB). View file
 
src/genai/human_refined_ideation/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (190 Bytes). View file
 
src/genai/human_refined_ideation/__pycache__/agent.cpython-312.pyc ADDED
Binary file (963 Bytes). View file