7 files uploaded
Browse files- .gitignore +207 -0
- README.md +143 -14
- app.py +115 -64
- logger.py +20 -0
- main.py +48 -0
- requirements.txt +6 -1
- template.py +40 -0
.gitignore
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[codz]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
share/python-wheels/
|
| 24 |
+
*.egg-info/
|
| 25 |
+
.installed.cfg
|
| 26 |
+
*.egg
|
| 27 |
+
MANIFEST
|
| 28 |
+
|
| 29 |
+
# PyInstaller
|
| 30 |
+
# Usually these files are written by a python script from a template
|
| 31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 32 |
+
*.manifest
|
| 33 |
+
*.spec
|
| 34 |
+
|
| 35 |
+
# Installer logs
|
| 36 |
+
pip-log.txt
|
| 37 |
+
pip-delete-this-directory.txt
|
| 38 |
+
|
| 39 |
+
# Unit test / coverage reports
|
| 40 |
+
htmlcov/
|
| 41 |
+
.tox/
|
| 42 |
+
.nox/
|
| 43 |
+
.coverage
|
| 44 |
+
.coverage.*
|
| 45 |
+
.cache
|
| 46 |
+
nosetests.xml
|
| 47 |
+
coverage.xml
|
| 48 |
+
*.cover
|
| 49 |
+
*.py.cover
|
| 50 |
+
.hypothesis/
|
| 51 |
+
.pytest_cache/
|
| 52 |
+
cover/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
.pybuilder/
|
| 76 |
+
target/
|
| 77 |
+
|
| 78 |
+
# Jupyter Notebook
|
| 79 |
+
.ipynb_checkpoints
|
| 80 |
+
|
| 81 |
+
# IPython
|
| 82 |
+
profile_default/
|
| 83 |
+
ipython_config.py
|
| 84 |
+
|
| 85 |
+
# pyenv
|
| 86 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 88 |
+
# .python-version
|
| 89 |
+
|
| 90 |
+
# pipenv
|
| 91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 94 |
+
# install all needed dependencies.
|
| 95 |
+
#Pipfile.lock
|
| 96 |
+
|
| 97 |
+
# UV
|
| 98 |
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
| 99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 100 |
+
# commonly ignored for libraries.
|
| 101 |
+
#uv.lock
|
| 102 |
+
|
| 103 |
+
# poetry
|
| 104 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 105 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 106 |
+
# commonly ignored for libraries.
|
| 107 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 108 |
+
#poetry.lock
|
| 109 |
+
#poetry.toml
|
| 110 |
+
|
| 111 |
+
# pdm
|
| 112 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 113 |
+
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
| 114 |
+
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
| 115 |
+
#pdm.lock
|
| 116 |
+
#pdm.toml
|
| 117 |
+
.pdm-python
|
| 118 |
+
.pdm-build/
|
| 119 |
+
|
| 120 |
+
# pixi
|
| 121 |
+
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
| 122 |
+
#pixi.lock
|
| 123 |
+
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
| 124 |
+
# in the .venv directory. It is recommended not to include this directory in version control.
|
| 125 |
+
.pixi
|
| 126 |
+
|
| 127 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 128 |
+
__pypackages__/
|
| 129 |
+
|
| 130 |
+
# Celery stuff
|
| 131 |
+
celerybeat-schedule
|
| 132 |
+
celerybeat.pid
|
| 133 |
+
|
| 134 |
+
# SageMath parsed files
|
| 135 |
+
*.sage.py
|
| 136 |
+
|
| 137 |
+
# Environments
|
| 138 |
+
.env
|
| 139 |
+
.envrc
|
| 140 |
+
.venv
|
| 141 |
+
env/
|
| 142 |
+
venv/
|
| 143 |
+
ENV/
|
| 144 |
+
env.bak/
|
| 145 |
+
venv.bak/
|
| 146 |
+
|
| 147 |
+
# Spyder project settings
|
| 148 |
+
.spyderproject
|
| 149 |
+
.spyproject
|
| 150 |
+
|
| 151 |
+
# Rope project settings
|
| 152 |
+
.ropeproject
|
| 153 |
+
|
| 154 |
+
# mkdocs documentation
|
| 155 |
+
/site
|
| 156 |
+
|
| 157 |
+
# mypy
|
| 158 |
+
.mypy_cache/
|
| 159 |
+
.dmypy.json
|
| 160 |
+
dmypy.json
|
| 161 |
+
|
| 162 |
+
# Pyre type checker
|
| 163 |
+
.pyre/
|
| 164 |
+
|
| 165 |
+
# pytype static type analyzer
|
| 166 |
+
.pytype/
|
| 167 |
+
|
| 168 |
+
# Cython debug symbols
|
| 169 |
+
cython_debug/
|
| 170 |
+
|
| 171 |
+
# PyCharm
|
| 172 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 173 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 174 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 175 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 176 |
+
#.idea/
|
| 177 |
+
|
| 178 |
+
# Abstra
|
| 179 |
+
# Abstra is an AI-powered process automation framework.
|
| 180 |
+
# Ignore directories containing user credentials, local state, and settings.
|
| 181 |
+
# Learn more at https://abstra.io/docs
|
| 182 |
+
.abstra/
|
| 183 |
+
|
| 184 |
+
# Visual Studio Code
|
| 185 |
+
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
| 186 |
+
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
| 187 |
+
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
| 188 |
+
# you could uncomment the following to ignore the entire vscode folder
|
| 189 |
+
# .vscode/
|
| 190 |
+
|
| 191 |
+
# Ruff stuff:
|
| 192 |
+
.ruff_cache/
|
| 193 |
+
|
| 194 |
+
# PyPI configuration file
|
| 195 |
+
.pypirc
|
| 196 |
+
|
| 197 |
+
# Cursor
|
| 198 |
+
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
|
| 199 |
+
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
|
| 200 |
+
# refer to https://docs.cursor.com/context/ignore-files
|
| 201 |
+
.cursorignore
|
| 202 |
+
.cursorindexingignore
|
| 203 |
+
|
| 204 |
+
# Marimo
|
| 205 |
+
marimo/_static/
|
| 206 |
+
marimo/_lsp/
|
| 207 |
+
__marimo__/
|
README.md
CHANGED
|
@@ -1,14 +1,143 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# 🌾 AgriEdge: Smart Farm Assistant
|
| 2 |
+
|
| 3 |
+
An AI-powered assistant that uses real-time sensor data and textbook-based agricultural knowledge to provide insights, analysis, and actionable suggestions for small to medium-scale farms. Comes with both command-line and web interfaces.
|
| 4 |
+
|
| 5 |
+
---
|
| 6 |
+
|
| 7 |
+
## 🚀 Features
|
| 8 |
+
|
| 9 |
+
- 📡 Analyzes **real-time farm sensor data** (soil, water, environment)
|
| 10 |
+
- 📚 Retrieves context from **agricultural PDF documents**
|
| 11 |
+
- 🤖 Uses **retrieval-augmented generation (RAG)** for grounded reasoning
|
| 12 |
+
- 🧠 Powered by **Ollama + LLaMA 3**
|
| 13 |
+
- 📝 Generates **natural language summaries and actionable insights**
|
| 14 |
+
- 🔒 Runs **fully local** — no cloud, no data sharing
|
| 15 |
+
- 🌐 Supports **Gradio-based dashboard** for non-technical users
|
| 16 |
+
|
| 17 |
+
---
|
| 18 |
+
|
| 19 |
+
## 📁 Project Structure
|
| 20 |
+
|
| 21 |
+
```bash
|
| 22 |
+
smartfarm/
|
| 23 |
+
├── main.py # Command-line interface
|
| 24 |
+
├── app.py              # Gradio web app
|
| 25 |
+
├── llm/
|
| 26 |
+
│ ├── ollama_llm.py # Query handler using LLM + sensor data + RAG
|
| 27 |
+
│ └── rag_pipeline.py # PDF retrieval pipeline using FAISS
|
| 28 |
+
├── logger.py # Logging setup
|
| 29 |
+
├── prompt.txt # Prompt template for LLM
|
| 30 |
+
├── data/
|
| 31 |
+
│ ├── farm_data_log.json # JSON file logging sensor readings
|
| 32 |
+
│ ├── docs/ # Agricultural PDFs for knowledge retrieval
|
| 33 |
+
│ └── faiss_index/ # Auto-generated FAISS vector index
|
| 34 |
+
```
|
| 35 |
+
|
| 36 |
+
---
|
| 37 |
+
|
| 38 |
+
## 🛠️ Installation & Setup
|
| 39 |
+
|
| 40 |
+
### 1. Clone the Repository
|
| 41 |
+
|
| 42 |
+
```bash
|
| 43 |
+
git clone https://github.com/your-username/smartfarm.git
|
| 44 |
+
cd smartfarm
|
| 45 |
+
```
|
| 46 |
+
|
| 47 |
+
### 2. Install Python Dependencies
|
| 48 |
+
|
| 49 |
+
```bash
|
| 50 |
+
pip install -r requirements.txt
|
| 51 |
+
```
|
| 52 |
+
|
| 53 |
+
### 3. Set Up Ollama
|
| 54 |
+
|
| 55 |
+
Make sure you have Ollama installed and running.
|
| 56 |
+
|
| 57 |
+
Download the LLaMA 3 model:
|
| 58 |
+
|
| 59 |
+
```bash
|
| 60 |
+
ollama run llama3
|
| 61 |
+
```
|
| 62 |
+
|
| 63 |
+
Make sure Ollama is running in the background before using the assistant.
|
| 64 |
+
|
| 65 |
+
### 4. Add Sensor Data
|
| 66 |
+
|
| 67 |
+
Append new entries to `data/farm_data_log.json`. Example format:
|
| 68 |
+
|
| 69 |
+
```json
|
| 70 |
+
{
|
| 71 |
+
"timestamp": "2025-07-22T21:00:00+01:00",
|
| 72 |
+
"soil": {"moisture": "High", "pH": 6.8, "temperature": 24.9},
|
| 73 |
+
"water": {"pH": 7.2, "turbidity": "8 NTU", "temperature": 23.3},
|
| 74 |
+
"environment": {"humidity": "85%", "temperature": 26.0, "rainfall": "Moderate"}
|
| 75 |
+
}
|
| 76 |
+
```
|
| 77 |
+
|
| 78 |
+
### 5. Add Agricultural Documents (Optional)
|
| 79 |
+
|
| 80 |
+
Place your farming-related PDFs inside:
|
| 81 |
+
|
| 82 |
+
```bash
|
| 83 |
+
data/docs/
|
| 84 |
+
```
|
| 85 |
+
|
| 86 |
+
The system will automatically build a searchable vector index.
|
| 87 |
+
|
| 88 |
+
---
|
| 89 |
+
|
| 90 |
+
## ▶️ Usage
|
| 91 |
+
|
| 92 |
+
### 📟 Command-Line Mode
|
| 93 |
+
|
| 94 |
+
```bash
|
| 95 |
+
python main.py
|
| 96 |
+
```
|
| 97 |
+
|
| 98 |
+
You’ll be prompted to enter queries like:
|
| 99 |
+
|
| 100 |
+
```markdown
|
| 101 |
+
> Is the soil suitable for planting now?
|
| 102 |
+
> Has the turbidity improved compared to earlier?
|
| 103 |
+
```
|
| 104 |
+
|
| 105 |
+
Type `exit` to quit.
|
| 106 |
+
|
| 107 |
+
### 🌐 Gradio Web Interface
|
| 108 |
+
|
| 109 |
+
Launch the UI with:
|
| 110 |
+
|
| 111 |
+
```bash
|
| 112 |
+
python app.py
|
| 113 |
+
```
|
| 114 |
+
|
| 115 |
+
What you can do:
|
| 116 |
+
|
| 117 |
+
- View the most recent sensor snapshot
|
| 118 |
+
- Ask farm-related questions like:
|
| 119 |
+
- "What is the current soil condition?"
|
| 120 |
+
- "Is it safe to irrigate now?"
|
| 121 |
+
- "Has rainfall increased compared to earlier?"
|
| 122 |
+
|
| 123 |
+
---
|
| 124 |
+
|
| 125 |
+
## 💡 Notes
|
| 126 |
+
|
| 127 |
+
- The system analyzes only the most recent sensor reading but uses the previous 2 for historical comparison (internally).
|
| 128 |
+
- No internet connection is required once the vector store and model are set up.
|
| 129 |
+
- Logs are written to the console (no log files are produced).
|
| 130 |
+
|
| 131 |
+
---
|
| 132 |
+
|
| 133 |
+
## 🧪 Example Questions
|
| 134 |
+
|
| 135 |
+
- "Is the soil moisture improving?"
|
| 136 |
+
- "What is the overall environmental condition right now?"
|
| 137 |
+
- "Is the water quality good for irrigation?"
|
| 138 |
+
|
| 139 |
+
---
|
| 140 |
+
|
| 141 |
+
## 📄 License
|
| 142 |
+
|
| 143 |
+
MIT License
|
app.py
CHANGED
|
@@ -1,64 +1,115 @@
|
|
| 1 |
-
import gradio as gr
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
def
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
""
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
)
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import json
|
| 3 |
+
from llm.ollama_llm import query_ollama
|
| 4 |
+
from llm.rag_pipeline import retrieve_context
|
| 5 |
+
from logger import get_logger
|
| 6 |
+
|
| 7 |
+
logger = get_logger(__name__)
|
| 8 |
+
|
| 9 |
+
# Loads the tail of the sensor log for use in prompts and the dashboard.
def get_latest_sensor_data(path="data/farm_data_log.json", num_entries=3):
    """Return up to the last *num_entries* readings from the sensor log.

    Gives back an empty list when the file is missing, contains invalid
    JSON, or holds no entries, logging the problem in the first two cases.
    """
    try:
        with open(path, "r") as f:
            readings = json.load(f)
    except FileNotFoundError:
        logger.error(f"Sensor data file {path} not found.")
        return []
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in {path}: {e}")
        return []
    return readings[-num_entries:] if readings else []
|
| 21 |
+
|
| 22 |
+
# Global query history: (user_query, response) tuples for this process.
# Shared by the Gradio callbacks below; format_history() shows the last 5.
query_history = []
|
| 24 |
+
|
| 25 |
+
def process_query(user_query):
    """Handle a user query and return ``(response, formatted_history)``.

    Bug fix: the empty-query and exception paths previously returned
    ``"\\n".join(format_history())`` — since ``format_history()`` already
    returns a single string, that joined its individual *characters* with
    newlines. Both paths now return the history string directly, matching
    the success path.
    """
    if not user_query.strip():
        return "Please enter a question.", format_history()

    logger.info("User query: %s", user_query)
    try:
        # Re-key the latest sensor entries by timestamp for the LLM prompt.
        sensor_data_entries = get_latest_sensor_data()
        combined_sensor_data = {
            entry["timestamp"]: {
                "soil": entry["soil"],
                "water": entry["water"],
                "environment": entry["environment"]
            }
            for entry in sensor_data_entries
        }
        # Retrieve document context and query the local LLM.
        rag_context = retrieve_context(user_query)
        response = query_ollama(user_query, combined_sensor_data, rag_context)
        logger.info("--- FARM ASSISTANT RESPONSE ---")

        # Record the exchange so it appears in the history panel.
        query_history.append((user_query, response))

        return response, format_history()
    except Exception as e:
        logger.error(f"Query processing failed: {e}")
        return "Error: Could not process query. Please try again.", format_history()
|
| 54 |
+
|
| 55 |
+
def format_history():
    """Render the five most recent (query, response) pairs as markdown."""
    recent = query_history[-5:]
    return "\n\n".join(
        f"### Query {idx}\n**Q:** {question}\n**A:** {answer}\n"
        for idx, (question, answer) in enumerate(recent, start=1)
    )
|
| 61 |
+
|
| 62 |
+
def clear_history():
    """Drop all stored history and blank both output panels."""
    del query_history[:]
    return "", ""
|
| 65 |
+
|
| 66 |
+
# Show latest sensor data as markdown
def display_sensor_data():
    """Render the most recent sensor log entry as a markdown summary.

    Returns a fallback message when no sensor data is available.
    """
    sensor_data_entries = get_latest_sensor_data()
    if not sensor_data_entries:
        return "No sensor data available."

    # Only the newest reading is displayed; earlier entries are retained
    # elsewhere for historical comparison.
    latest_entry = sensor_data_entries[-1]
    # NOTE(review): assumes each entry carries the full soil/water/environment
    # schema — a partial entry would raise KeyError here; confirm the logger
    # that writes farm_data_log.json guarantees this.
    text = f"""
**Latest Reading: {latest_entry['timestamp']}**

### Soil
- Moisture: {latest_entry['soil']['moisture']}
- pH: {latest_entry['soil']['pH']}
- Temperature: {latest_entry['soil']['temperature']}

### Water
- pH: {latest_entry['water']['pH']}
- Turbidity: {latest_entry['water']['turbidity']}
- Temperature: {latest_entry['water']['temperature']}

### Environment
- Humidity: {latest_entry['environment']['humidity']}
- Temperature: {latest_entry['environment']['temperature']}
- Rainfall: {latest_entry['environment']['rainfall']}
"""
    return text
|
| 92 |
+
|
| 93 |
+
# Gradio UI: two tabs — an assistant chat panel and a sensor-data snapshot.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="green")) as demo:
    gr.Markdown("# 🌾 AgriEdge: Smart Farm Assistant")
    gr.Markdown("Ask about your farm's conditions and get tailored advice based on sensor data.")

    with gr.Tab("Ask Assistant"):
        # Question input plus submit/clear controls.
        query = gr.Textbox(
            label="Enter your farm-related question",
            placeholder="e.g., What should I do about soil moisture?"
        )
        submit_btn = gr.Button("Submit Query")
        clear_btn = gr.Button("Clear History")

        # response_box shows the latest answer; history_box the recent Q/A log.
        response_box = gr.Markdown()
        history_box = gr.Markdown()

        # Both callbacks return a (response, history) pair matching the outputs.
        submit_btn.click(process_query, inputs=query, outputs=[response_box, history_box])
        clear_btn.click(clear_history, inputs=None, outputs=[response_box, history_box])

    with gr.Tab("Recent Sensor Data"):
        # NOTE(review): rendered once at app start — the snapshot does not
        # refresh until the process restarts; confirm this is intended.
        sensor_md = gr.Markdown(display_sensor_data())

demo.launch()
|
logger.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# logger.py
|
| 2 |
+
import logging
|
| 3 |
+
|
| 4 |
+
def get_logger(name: str) -> logging.Logger:
    """Return a console logger for *name*, configured at INFO level.

    Safe to call repeatedly for the same name: the stream handler is
    attached only on the first call, so later calls neither duplicate
    console output nor (fix) build a handler that is thrown away.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # Build and attach the console handler only when the logger has none;
    # the original constructed a handler on every call and discarded it.
    if not logger.handlers:
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        ch.setFormatter(
            logging.Formatter('[%(levelname)s] %(asctime)s - %(name)s: %(message)s', "%H:%M:%S")
        )
        logger.addHandler(ch)
        # NOTE(review): messages still propagate to the root logger; if the
        # app ever configures root handlers, set logger.propagate = False
        # here so "avoid duplicate logs" stays true.

    return logger
|
main.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from llm.ollama_llm import query_ollama
|
| 3 |
+
from llm.rag_pipeline import retrieve_context
|
| 4 |
+
from logger import get_logger
|
| 5 |
+
|
| 6 |
+
logger = get_logger(__name__)
|
| 7 |
+
|
| 8 |
+
def get_latest_sensor_data(path="data/farm_data_log.json", num_entries=3):
    """Load the sensor log at *path* and return its trailing *num_entries* records.

    Missing files, malformed JSON, and empty logs all yield an empty list;
    the first two are also logged as errors.
    """
    try:
        with open(path, "r") as log_file:
            records = json.load(log_file)
            if not records:
                return []
            return records[-num_entries:]
    except FileNotFoundError:
        logger.error(f"Sensor data file {path} not found.")
        return []
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in {path}: {e}")
        return []
|
| 19 |
+
|
| 20 |
+
def main():
    """Interactive CLI loop: answer farm questions using sensor data + RAG + LLM.

    Robustness fixes: input is stripped so stray whitespace around `exit`
    still quits; blank lines are ignored instead of being sent to the LLM;
    EOF/Ctrl-C end the loop cleanly instead of raising.
    """
    logger.info("Smart Farm Assistant started.")
    while True:
        try:
            user_query = input("> ").strip()
        except (EOFError, KeyboardInterrupt):
            # Treat end-of-input / Ctrl-C like an explicit exit.
            logger.info("Exiting Smart Farm Assistant.")
            break
        if user_query.lower() == 'exit':
            logger.info("Exiting Smart Farm Assistant.")
            break
        if not user_query:
            # Ignore blank lines rather than querying the LLM with nothing.
            continue
        logger.info("User query: %s", user_query)
        logger.info("Retrieving latest sensor data...")
        # Re-key the latest entries by timestamp for the LLM prompt.
        sensor_data_entries = get_latest_sensor_data()
        combined_sensor_data = {
            entry["timestamp"]: {
                "soil": entry["soil"],
                "water": entry["water"],
                "environment": entry["environment"]
            }
            for entry in sensor_data_entries
        }
        try:
            rag_context = retrieve_context(user_query)
            response = query_ollama(user_query, combined_sensor_data, rag_context)
            logger.info("\n--- FARM ASSISTANT RESPONSE ---\n")
            print(response)
        except Exception as e:
            logger.error(f"Query processing failed: {e}")
            print("Error: Could not process query. Please try again.")

if __name__ == "__main__":
    main()
|
requirements.txt
CHANGED
|
@@ -1 +1,6 @@
|
|
| 1 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
numpy
|
| 2 |
+
faiss-cpu
|
| 3 |
+
tqdm
|
| 4 |
+
PyPDF2
|
| 5 |
+
streamlit
|
| 6 |
+
requests
gradio
|
template.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os

# Project scaffold: maps each directory to the files/folders it should hold.
# A trailing "/" marks an entry as a sub-folder rather than a file, and "."
# stands for the project root itself.
project_structure = {
    "sensors": [
        "sensor_collector.py"
    ],
    "llm": [
        "ollama_llm.py",
        "rag_pipeline.py"
    ],
    "data": [
        "farm_data_log.json",
        "docs/",
        "faiss_index/"
    ],
    ".": [
        "main.py",
        "requirements.txt"
    ]
}

def create_structure(base_path="."):
    """Materialise *project_structure* under *base_path*.

    Directories are created idempotently; files are created empty and only
    when they do not already exist, so reruns never clobber content.
    """
    for folder, entries in project_structure.items():
        root = base_path if folder == "." else os.path.join(base_path, folder)
        os.makedirs(root, exist_ok=True)

        for entry in entries:
            target = os.path.join(root, entry)
            if entry.endswith("/"):
                # Sub-folder entry.
                os.makedirs(target, exist_ok=True)
                print(f"Created folder: {target}")
            elif not os.path.exists(target):
                # Touch the file empty; skip it entirely when already present.
                with open(target, "w"):
                    pass
                print(f"Created file: {target}")

if __name__ == "__main__":
    create_structure()
|