Upload 520 files
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +35 -0
- pyspur/.cursor/rules/frontend-api-calls.mdc +6 -0
- pyspur/.devcontainer/.bashrc +21 -0
- pyspur/.devcontainer/Dockerfile +28 -0
- pyspur/.devcontainer/README.md +130 -0
- pyspur/.devcontainer/devcontainer.json +146 -0
- pyspur/.devcontainer/docker-compose.yml +14 -0
- pyspur/.devcontainer/post-create.sh +18 -0
- pyspur/.dockerignore +88 -0
- pyspur/.env.example +127 -0
- pyspur/.github/dependabot.yml +12 -0
- pyspur/.github/workflows/release.yml +72 -0
- pyspur/.gitignore +178 -0
- pyspur/.pre-commit-config.yaml +26 -0
- pyspur/Dockerfile.backend +38 -0
- pyspur/Dockerfile.frontend +15 -0
- pyspur/LICENSE +201 -0
- pyspur/README.md +187 -0
- pyspur/README_CN.md +156 -0
- pyspur/README_DE.md +146 -0
- pyspur/README_ES.md +148 -0
- pyspur/README_FR.md +148 -0
- pyspur/README_JA.md +145 -0
- pyspur/README_KR.md +146 -0
- pyspur/__init__.py +0 -0
- pyspur/__pycache__/__init__.cpython-312.pyc +0 -0
- pyspur/backend/.gitignore +7 -0
- pyspur/backend/.pre-commit-config.yaml +18 -0
- pyspur/backend/__init__.py +0 -0
- pyspur/backend/__pycache__/__init__.cpython-312.pyc +0 -0
- pyspur/backend/alembic.ini +117 -0
- pyspur/backend/entrypoint.sh +21 -0
- pyspur/backend/llms-ctx.txt +0 -0
- pyspur/backend/log_conf.yaml +54 -0
- pyspur/backend/output_files/.gitignore +2 -0
- pyspur/backend/pyproject.toml +142 -0
- pyspur/backend/pyspur/__init__.py +0 -0
- pyspur/backend/pyspur/__pycache__/__init__.cpython-312.pyc +0 -0
- pyspur/backend/pyspur/api/__init__.py +0 -0
- pyspur/backend/pyspur/api/ai_management.py +352 -0
- pyspur/backend/pyspur/api/api_app.py +53 -0
- pyspur/backend/pyspur/api/dataset_management.py +121 -0
- pyspur/backend/pyspur/api/evals_management.py +197 -0
- pyspur/backend/pyspur/api/file_management.py +144 -0
- pyspur/backend/pyspur/api/key_management.py +477 -0
- pyspur/backend/pyspur/api/main.py +128 -0
- pyspur/backend/pyspur/api/node_management.py +69 -0
- pyspur/backend/pyspur/api/openai_compatible_api.py +107 -0
- pyspur/backend/pyspur/api/openapi_management.py +180 -0
- pyspur/backend/pyspur/api/output_file_management.py +92 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,38 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
pyspur/docs/images/checks-passed.png filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
pyspur/docs/images/deploy/dark_deploy_python.png filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
pyspur/docs/images/deploy/dark_deploy_ts.png filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
pyspur/docs/images/deploy/light_deploy_python.png filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
pyspur/docs/images/deploy/light_deploy_ts.png filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
pyspur/docs/images/evals/evals.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
pyspur/docs/images/example_walkthrough/0_dark.png filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
pyspur/docs/images/example_walkthrough/0_light.png filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
pyspur/docs/images/example_walkthrough/1_dark.png filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
pyspur/docs/images/example_walkthrough/1_light.png filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
pyspur/docs/images/example_walkthrough/3_dark.png filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
pyspur/docs/images/example_walkthrough/3_light.png filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
pyspur/docs/images/example_walkthrough/4_dark.png filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
pyspur/docs/images/example_walkthrough/4_light.png filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
pyspur/docs/images/example_walkthrough/5_dark.gif filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
pyspur/docs/images/example_walkthrough/5_light.gif filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
pyspur/docs/images/example_walkthrough/6_dark.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
pyspur/docs/images/example_walkthrough/6_light.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 54 |
+
pyspur/docs/images/example_walkthrough/7_dark.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
pyspur/docs/images/example_walkthrough/7_light.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
pyspur/docs/images/example_walkthrough/8_dark.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
pyspur/docs/images/example_walkthrough/8_light.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 58 |
+
pyspur/docs/images/example_walkthrough/9_dark.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
+
pyspur/docs/images/example_walkthrough/9_light.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
pyspur/docs/images/hero-dark.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
pyspur/docs/images/hero-light.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
pyspur/docs/images/hero.png filter=lfs diff=lfs merge=lfs -text
|
| 63 |
+
pyspur/docs/images/rag/rag1.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 64 |
+
pyspur/docs/images/rag/rag2.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 65 |
+
pyspur/docs/images/rag/rag3.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 66 |
+
pyspur/frontend/public/images/firecrawl.png filter=lfs diff=lfs merge=lfs -text
|
| 67 |
+
pyspur/frontend/public/images/google_sheets.png filter=lfs diff=lfs merge=lfs -text
|
| 68 |
+
pyspur/frontend/public/images/meta.png filter=lfs diff=lfs merge=lfs -text
|
| 69 |
+
pyspur/frontend/public/images/slack.png filter=lfs diff=lfs merge=lfs -text
|
| 70 |
+
pyspur/frontend/public/pyspur-black.png filter=lfs diff=lfs merge=lfs -text
|
pyspur/.cursor/rules/frontend-api-calls.mdc
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
description: API calls in frontend
|
| 3 |
+
globs:
|
| 4 |
+
alwaysApply: false
|
| 5 |
+
---
|
| 6 |
+
API calls inside the frontend should always be stored inside [api.ts](mdc:frontend/src/utils/api.ts) and use the API_BASE_URL defined there
|
pyspur/.devcontainer/.bashrc
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Enable bash completion
|
| 2 |
+
if [ -f /etc/bash_completion ]; then
|
| 3 |
+
. /etc/bash_completion
|
| 4 |
+
fi
|
| 5 |
+
|
| 6 |
+
# Docker compose aliases
|
| 7 |
+
alias dcup='docker compose -f docker-compose.dev.yml up --build -d'
|
| 8 |
+
alias dlogb='docker logs -f pyspur-backend-1 --since 5m'
|
| 9 |
+
alias dlogf='docker logs -f pyspur-frontend-1 --since 5m'
|
| 10 |
+
alias dlogn='docker logs -f pyspur-nginx-1 --since 5m'
|
| 11 |
+
alias dlogs='docker compose logs -f --since 5m'
|
| 12 |
+
|
| 13 |
+
# Test frontend build in temporary container
|
| 14 |
+
alias tfeb='docker build --target production -f Dockerfile.frontend \
|
| 15 |
+
--no-cache -t temp-frontend-build . && \
|
| 16 |
+
echo "✅ Frontend build successful!" && \
|
| 17 |
+
docker rmi temp-frontend-build || \
|
| 18 |
+
echo "❌ Frontend build failed!"'
|
| 19 |
+
|
| 20 |
+
# Add color to the terminal
|
| 21 |
+
export PS1='\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
|
pyspur/.devcontainer/Dockerfile
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Base stage
|
| 2 |
+
FROM python:3.12 as base
|
| 3 |
+
WORKDIR /pyspur
|
| 4 |
+
|
| 5 |
+
# Install bash completion
|
| 6 |
+
RUN apt-get update && apt-get install -y \
|
| 7 |
+
bash-completion \
|
| 8 |
+
nano \
|
| 9 |
+
vim \
|
| 10 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 11 |
+
|
| 12 |
+
RUN pip install uv
|
| 13 |
+
|
| 14 |
+
COPY backend/ backend/
|
| 15 |
+
RUN uv pip install --system -e "/pyspur/backend/[dev]"
|
| 16 |
+
|
| 17 |
+
# Install Node.js for frontend development
|
| 18 |
+
RUN curl -fsSL https://deb.nodesource.com/setup_23.x | bash - \
|
| 19 |
+
&& apt-get install -y nodejs \
|
| 20 |
+
&& npm install -g npm@latest
|
| 21 |
+
|
| 22 |
+
# Development stage
|
| 23 |
+
FROM base as development
|
| 24 |
+
WORKDIR /pyspur/frontend
|
| 25 |
+
COPY frontend/package*.json ./
|
| 26 |
+
RUN npm install
|
| 27 |
+
|
| 28 |
+
WORKDIR /pyspur
|
pyspur/.devcontainer/README.md
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Development Container Configuration
|
| 2 |
+
|
| 3 |
+
[](https://codespaces.new/pyspur-dev/pyspur)
|
| 4 |
+
|
| 5 |
+
This directory contains configuration files for Visual Studio Code Dev Containers / GitHub Codespaces. Dev containers provide a consistent, isolated development environment for this project.
|
| 6 |
+
|
| 7 |
+
## Contents
|
| 8 |
+
|
| 9 |
+
- `devcontainer.json` - The main configuration file that defines the development container settings
|
| 10 |
+
- `Dockerfile` - Defines the container image and development environment
|
| 11 |
+
|
| 12 |
+
## Usage
|
| 13 |
+
|
| 14 |
+
### Prerequisites
|
| 15 |
+
|
| 16 |
+
- Visual Studio Code
|
| 17 |
+
- Docker installation:
|
| 18 |
+
- Docker Desktop (Windows/macOS)
|
| 19 |
+
- Docker Engine (Linux)
|
| 20 |
+
- [Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension for VS Code
|
| 21 |
+
|
| 22 |
+
### Getting Started
|
| 23 |
+
|
| 24 |
+
1. Open this project in Visual Studio Code
|
| 25 |
+
2. When prompted, click "Reopen in Container"
|
| 26 |
+
- Alternatively, press `F1` and select "Remote-Containers: Reopen in Container"
|
| 27 |
+
3. Wait for the container to build and initialize
|
| 28 |
+
4. Launch the application using:
|
| 29 |
+
```bash
|
| 30 |
+
dcup
|
| 31 |
+
```
|
| 32 |
+
5. Access the application (assuming the ports are forwarded as is to the host machine)
|
| 33 |
+
- Main application: http://localhost:6080
|
| 34 |
+
- Frontend development server: http://localhost:3000
|
| 35 |
+
- Backend API: http://localhost:8000
|
| 36 |
+
|
| 37 |
+
The development environment will be automatically configured with all necessary tools and extensions.
|
| 38 |
+
|
| 39 |
+
### Viewing Logs
|
| 40 |
+
|
| 41 |
+
You can monitor the application logs using these commands:
|
| 42 |
+
|
| 43 |
+
- View all container logs:
|
| 44 |
+
```bash
|
| 45 |
+
dlogs
|
| 46 |
+
```
|
| 47 |
+
- View backend logs only:
|
| 48 |
+
```bash
|
| 49 |
+
dlogb
|
| 50 |
+
```
|
| 51 |
+
- View frontend logs only:
|
| 52 |
+
```bash
|
| 53 |
+
dlogf
|
| 54 |
+
```
|
| 55 |
+
- View nginx logs only:
|
| 56 |
+
```bash
|
| 57 |
+
dlogn
|
| 58 |
+
```
|
| 59 |
+
|
| 60 |
+
All log commands show the last 5 minutes of logs and continue to tail new entries.
|
| 61 |
+
|
| 62 |
+
### Modifying the database schemas
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
1. **Stop Containers**
|
| 66 |
+
```bash
|
| 67 |
+
docker compose down
|
| 68 |
+
```
|
| 69 |
+
|
| 70 |
+
2. **Generate a Migration**
|
| 71 |
+
```bash
|
| 72 |
+
./generate_migrations.sh 002 <short_description_in_snake_case>
|
| 73 |
+
```
|
| 74 |
+
- Migration file appears in `./backend/app/models/management/alembic/versions/` with prefix `002_...`.
|
| 75 |
+
|
| 76 |
+
3. **Review the Generated Script**
|
| 77 |
+
- Open the file to ensure it has the intended changes.
|
| 78 |
+
|
| 79 |
+
4. **Apply the Migration**
|
| 80 |
+
```bash
|
| 81 |
+
docker compose down
|
| 82 |
+
docker compose up --build
|
| 83 |
+
```
|
| 84 |
+
- Alembic applies the new migration automatically on startup.
|
| 85 |
+
|
| 86 |
+
5. **Test the App**
|
| 87 |
+
- Confirm new tables/columns work as expected.
|
| 88 |
+
|
| 89 |
+
6. **Commit & Push**
|
| 90 |
+
```bash
|
| 91 |
+
git add .
|
| 92 |
+
git commit -m "Add migration 002 <description>"
|
| 93 |
+
git push origin <branch>
|
| 94 |
+
```
|
| 95 |
+
|
| 96 |
+
### Troubleshooting DBs issues
|
| 97 |
+
|
| 98 |
+
When modifying the DB models, one needs to be careful to not destroy the local DB due to lacking migrations.
|
| 99 |
+
|
| 100 |
+
Sometimes the local dev DB gets corrupted. In such cases, assuming it does not contain production data, the quickest fix is to simply delete it and let the backend rebuild it the next time you run `docker compose up` (or `dcup`).
|
| 101 |
+
|
| 102 |
+
You can do so via running
|
| 103 |
+
|
| 104 |
+
```bash
|
| 105 |
+
docker volume rm pyspur_postgres_data
|
| 106 |
+
```
|
| 107 |
+
|
| 108 |
+
## Customization
|
| 109 |
+
|
| 110 |
+
You can customize the development environment by:
|
| 111 |
+
|
| 112 |
+
- Modifying `devcontainer.json` to:
|
| 113 |
+
- Add VS Code extensions
|
| 114 |
+
- Set container-specific settings
|
| 115 |
+
- Configure environment variables
|
| 116 |
+
- Updating the `Dockerfile` to:
|
| 117 |
+
- Install additional packages
|
| 118 |
+
- Configure system settings
|
| 119 |
+
- Add development tools
|
| 120 |
+
|
| 121 |
+
## Troubleshooting
|
| 122 |
+
|
| 123 |
+
If you encounter issues:
|
| 124 |
+
|
| 125 |
+
1. Rebuild the container: `F1` → "Remote-Containers: Rebuild Container"
|
| 126 |
+
2. Check Docker logs for build errors
|
| 127 |
+
3. Verify Docker Desktop is running
|
| 128 |
+
4. Ensure all prerequisites are installed
|
| 129 |
+
|
| 130 |
+
For more information, see the [VS Code Remote Development documentation](https://code.visualstudio.com/docs/remote/containers).
|
pyspur/.devcontainer/devcontainer.json
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"name": "PySpur Development",
|
| 3 |
+
|
| 4 |
+
"dockerComposeFile": [
|
| 5 |
+
"./docker-compose.yml"
|
| 6 |
+
],
|
| 7 |
+
|
| 8 |
+
"service": "devdocker",
|
| 9 |
+
|
| 10 |
+
"runServices": ["devdocker"],
|
| 11 |
+
|
| 12 |
+
"workspaceFolder": "/pyspur",
|
| 13 |
+
|
| 14 |
+
"features": {
|
| 15 |
+
"ghcr.io/devcontainers/features/docker-in-docker:2": {
|
| 16 |
+
"version": "latest",
|
| 17 |
+
"moby": true
|
| 18 |
+
}
|
| 19 |
+
},
|
| 20 |
+
|
| 21 |
+
"customizations": {
|
| 22 |
+
"vscode": {
|
| 23 |
+
"extensions": [
|
| 24 |
+
"github.copilot",
|
| 25 |
+
"github.copilot-chat",
|
| 26 |
+
// Backend extensions
|
| 27 |
+
"ms-python.python",
|
| 28 |
+
"charliermarsh.ruff",
|
| 29 |
+
"tamasfe.even-better-toml",
|
| 30 |
+
// Frontend extensions
|
| 31 |
+
"dbaeumer.vscode-eslint",
|
| 32 |
+
"esbenp.prettier-vscode",
|
| 33 |
+
"ms-vscode.vscode-typescript-next"
|
| 34 |
+
],
|
| 35 |
+
"settings": {
|
| 36 |
+
// Git settings
|
| 37 |
+
// bypass pre-commit hooks not allowed
|
| 38 |
+
"git.allowNoVerifyCommit": false,
|
| 39 |
+
|
| 40 |
+
// Python analysis settings
|
| 41 |
+
"python.analysis.autoImportCompletions": true,
|
| 42 |
+
"python.analysis.autoImportUserSymbols": true,
|
| 43 |
+
"python.analysis.importFormat": "relative",
|
| 44 |
+
"python.analysis.typeCheckingMode": "strict",
|
| 45 |
+
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
| 46 |
+
|
| 47 |
+
// Python linting and formatting
|
| 48 |
+
"python.linting.enabled": true,
|
| 49 |
+
"python.linting.mypyEnabled": false,
|
| 50 |
+
"python.linting.ruffEnabled": true,
|
| 51 |
+
|
| 52 |
+
// TypeScript settings
|
| 53 |
+
"typescript.tsdk": "/pyspur/frontend/node_modules/typescript/lib",
|
| 54 |
+
"typescript.preferences.importModuleSpecifier": "non-relative",
|
| 55 |
+
"typescript.preferences.projectRoot": "/pyspur/frontend",
|
| 56 |
+
"npm.packageManager": "npm",
|
| 57 |
+
|
| 58 |
+
// Editor formatting settings
|
| 59 |
+
"editor.formatOnSave": true,
|
| 60 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
| 61 |
+
|
| 62 |
+
// Language specific editor settings
|
| 63 |
+
"[python]": {
|
| 64 |
+
"editor.formatOnType": true,
|
| 65 |
+
"editor.formatOnSave": true,
|
| 66 |
+
"editor.defaultFormatter": "charliermarsh.ruff",
|
| 67 |
+
"editor.codeActionsOnSave": {
|
| 68 |
+
"source.organizeImports": "always",
|
| 69 |
+
"source.fixAll.ruff": "always"
|
| 70 |
+
}
|
| 71 |
+
},
|
| 72 |
+
"[typescript]": {
|
| 73 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
| 74 |
+
"editor.formatOnSave": true,
|
| 75 |
+
"editor.codeActionsOnSave": {
|
| 76 |
+
"source.fixAll.eslint": "explicit",
|
| 77 |
+
"source.organizeImports": "explicit"
|
| 78 |
+
}
|
| 79 |
+
},
|
| 80 |
+
"[typescriptreact]": {
|
| 81 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
| 82 |
+
"editor.formatOnSave": true,
|
| 83 |
+
"editor.codeActionsOnSave": {
|
| 84 |
+
"source.fixAll.eslint": "explicit",
|
| 85 |
+
"source.organizeImports": "explicit"
|
| 86 |
+
}
|
| 87 |
+
},
|
| 88 |
+
"[javascript]": {
|
| 89 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
| 90 |
+
"editor.formatOnSave": true,
|
| 91 |
+
"editor.codeActionsOnSave": {
|
| 92 |
+
"source.fixAll.eslint": "explicit",
|
| 93 |
+
"source.organizeImports": "explicit"
|
| 94 |
+
}
|
| 95 |
+
},
|
| 96 |
+
"[javascriptreact]": {
|
| 97 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
| 98 |
+
"editor.formatOnSave": true,
|
| 99 |
+
"editor.codeActionsOnSave": {
|
| 100 |
+
"source.fixAll.eslint": "explicit",
|
| 101 |
+
"source.organizeImports": "explicit"
|
| 102 |
+
}
|
| 103 |
+
},
|
| 104 |
+
"[json]": {
|
| 105 |
+
"editor.quickSuggestions": {
|
| 106 |
+
"strings": true
|
| 107 |
+
},
|
| 108 |
+
"editor.suggest.insertMode": "replace",
|
| 109 |
+
"editor.formatOnSave": true,
|
| 110 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
| 111 |
+
},
|
| 112 |
+
"[shellscript]": {
|
| 113 |
+
"editor.formatOnSave": true,
|
| 114 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
| 115 |
+
},
|
| 116 |
+
"[yaml]": {
|
| 117 |
+
"editor.insertSpaces": true,
|
| 118 |
+
"editor.tabSize": 2,
|
| 119 |
+
"editor.autoIndent": "advanced",
|
| 120 |
+
"diffEditor.ignoreTrimWhitespace": false,
|
| 121 |
+
"editor.formatOnSave": true,
|
| 122 |
+
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
| 123 |
+
},
|
| 124 |
+
"prettier.configPath": "/pyspur/frontend/.prettierrc"
|
| 125 |
+
}
|
| 126 |
+
}
|
| 127 |
+
},
|
| 128 |
+
"remoteUser": "root",
|
| 129 |
+
"shutdownAction": "none",
|
| 130 |
+
"forwardPorts": [6080, "backend:8000", "frontend:3000"],
|
| 131 |
+
"portsAttributes": {
|
| 132 |
+
"frontend:3000" :{
|
| 133 |
+
"label": "frontend",
|
| 134 |
+
"onAutoForward": "silent"
|
| 135 |
+
},
|
| 136 |
+
"backend:8000" :{
|
| 137 |
+
"label": "backend",
|
| 138 |
+
"onAutoForward": "silent"
|
| 139 |
+
},
|
| 140 |
+
"6080" :{
|
| 141 |
+
"label": "app",
|
| 142 |
+
"onAutoForward": "silent"
|
| 143 |
+
}
|
| 144 |
+
},
|
| 145 |
+
"postCreateCommand": "chmod +x .devcontainer/post-create.sh && .devcontainer/post-create.sh"
|
| 146 |
+
}
|
pyspur/.devcontainer/docker-compose.yml
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
services:
|
| 2 |
+
devdocker:
|
| 3 |
+
build:
|
| 4 |
+
context: ..
|
| 5 |
+
dockerfile: .devcontainer/Dockerfile
|
| 6 |
+
target: development
|
| 7 |
+
volumes:
|
| 8 |
+
# Project files
|
| 9 |
+
- ../:/pyspur:cached
|
| 10 |
+
- ../.env:/pyspur/backend/.env:cached
|
| 11 |
+
- /pyspur/frontend/node_modules
|
| 12 |
+
environment:
|
| 13 |
+
- PYTHONPATH=/pyspur/backend
|
| 14 |
+
command: sleep infinity
|
pyspur/.devcontainer/post-create.sh
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
|
| 3 |
+
# Install pre-commit hooks
|
| 4 |
+
uv pip install --system pre-commit==4.1.0
|
| 5 |
+
pre-commit install
|
| 6 |
+
|
| 7 |
+
# Check if package.json has changed and reinstall if needed
|
| 8 |
+
if [ -f /pyspur/frontend/package.json ]; then
|
| 9 |
+
cd /pyspur/frontend && npm install
|
| 10 |
+
fi
|
| 11 |
+
|
| 12 |
+
# Add source command to main bashrc
|
| 13 |
+
echo '
|
| 14 |
+
# Source custom settings
|
| 15 |
+
# Source custom bashrc settings if the file exists
|
| 16 |
+
if [ -f /pyspur/.devcontainer/.bashrc ]; then
|
| 17 |
+
source /pyspur/.devcontainer/.bashrc
|
| 18 |
+
fi' >> ~/.bashrc
|
pyspur/.dockerignore
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Version control
|
| 2 |
+
.git
|
| 3 |
+
.gitignore
|
| 4 |
+
|
| 5 |
+
# Dependencies
|
| 6 |
+
**/node_modules
|
| 7 |
+
**/__pycache__
|
| 8 |
+
**/*.pyc
|
| 9 |
+
**/*.pyo
|
| 10 |
+
**/*.pyd
|
| 11 |
+
**/*.so
|
| 12 |
+
**/.Python
|
| 13 |
+
**/env
|
| 14 |
+
**/venv
|
| 15 |
+
**/.env
|
| 16 |
+
**/.env.local
|
| 17 |
+
**/.env.development.local
|
| 18 |
+
**/.env.test.local
|
| 19 |
+
**/.env.production.local
|
| 20 |
+
|
| 21 |
+
# Python specific
|
| 22 |
+
**/develop-eggs
|
| 23 |
+
**/eggs
|
| 24 |
+
**/.eggs
|
| 25 |
+
**/parts
|
| 26 |
+
**/sdist
|
| 27 |
+
**/var
|
| 28 |
+
**/wheels
|
| 29 |
+
**/*.egg-info
|
| 30 |
+
**/.installed.cfg
|
| 31 |
+
**/*.egg
|
| 32 |
+
|
| 33 |
+
# Build outputs
|
| 34 |
+
**/dist
|
| 35 |
+
**/build
|
| 36 |
+
**/.next
|
| 37 |
+
**/out
|
| 38 |
+
**/*.egg-info
|
| 39 |
+
|
| 40 |
+
# Development/IDE files
|
| 41 |
+
**/.idea
|
| 42 |
+
**/.vscode
|
| 43 |
+
**/.DS_Store
|
| 44 |
+
**/*.swp
|
| 45 |
+
**/*.swo
|
| 46 |
+
|
| 47 |
+
# Docker files
|
| 48 |
+
**/Dockerfile*
|
| 49 |
+
**/.dockerignore
|
| 50 |
+
docker-compose*.yml
|
| 51 |
+
|
| 52 |
+
# Test files
|
| 53 |
+
**/__tests__
|
| 54 |
+
**/test
|
| 55 |
+
**/*.test.js
|
| 56 |
+
**/*.spec.js
|
| 57 |
+
**/*.test.py
|
| 58 |
+
**/*.spec.py
|
| 59 |
+
**/coverage
|
| 60 |
+
**/htmlcov
|
| 61 |
+
|
| 62 |
+
# Documentation
|
| 63 |
+
**/*.md
|
| 64 |
+
**/docs
|
| 65 |
+
|
| 66 |
+
# Logs
|
| 67 |
+
**/logs
|
| 68 |
+
**/*.log
|
| 69 |
+
**/npm-debug.log*
|
| 70 |
+
**/yarn-debug.log*
|
| 71 |
+
**/yarn-error.log*
|
| 72 |
+
|
| 73 |
+
# Cache
|
| 74 |
+
**/.cache
|
| 75 |
+
**/.npm
|
| 76 |
+
**/.eslintcache
|
| 77 |
+
**/.pytest_cache
|
| 78 |
+
**/__pycache__
|
| 79 |
+
**/.coverage
|
| 80 |
+
|
| 81 |
+
# Data directories
|
| 82 |
+
**/data
|
| 83 |
+
**/uploads
|
| 84 |
+
**/downloads
|
| 85 |
+
|
| 86 |
+
# Databases
|
| 87 |
+
**/*.db
|
| 88 |
+
**/sqlite/*.db
|
pyspur/.env.example
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ======================
|
| 2 |
+
# Core Configuration
|
| 3 |
+
# ======================
|
| 4 |
+
|
| 5 |
+
# Environment
|
| 6 |
+
# ENVIRONMENT=development
|
| 7 |
+
ENVIRONMENT=production
|
| 8 |
+
PYTHONUNBUFFERED=1 # This is to prevent Python from buffering stdout and stderr
|
| 9 |
+
OAUTHLIB_INSECURE_TRANSPORT=1 # This is to allow OAuth2 to work with http
|
| 10 |
+
|
| 11 |
+
# Version tag for Docker images in production
|
| 12 |
+
VERSION=latest
|
| 13 |
+
|
| 14 |
+
# GitHub repository (username/repo-name)
|
| 15 |
+
GITHUB_REPOSITORY=pyspur-dev/pyspur
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# ======================
|
| 19 |
+
# Application Configuration
|
| 20 |
+
# ======================
|
| 21 |
+
|
| 22 |
+
# Application Host Configuration
|
| 23 |
+
# This is the host that the application will be running on
|
| 24 |
+
# By default, the application will be running on
|
| 25 |
+
|
| 26 |
+
PYSPUR_HOST=0.0.0.0
|
| 27 |
+
PYSPUR_PORT=6080
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Backend Configuration
|
| 31 |
+
DEBUG=False
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# ======================
|
| 35 |
+
# Database Settings
|
| 36 |
+
# ======================
|
| 37 |
+
# PySpur uses PostgreSQL as the database. By default, the database is hosted in a separate container.
|
| 38 |
+
# If you want to use an external database, you can provide the connection details here.
|
| 39 |
+
# PostgreSQL Configuration
|
| 40 |
+
POSTGRES_DB=pyspur
|
| 41 |
+
POSTGRES_USER=pyspur
|
| 42 |
+
POSTGRES_PASSWORD=pyspur
|
| 43 |
+
POSTGRES_HOST=db
|
| 44 |
+
POSTGRES_PORT=5432
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# ======================
|
| 48 |
+
# Model Provider API Keys
|
| 49 |
+
# ======================
|
| 50 |
+
|
| 51 |
+
# OPENAI_API_KEY=your_openai_api_key
|
| 52 |
+
# GEMINI_API_KEY=your_gemini_api_key
|
| 53 |
+
# ANTHROPIC_API_KEY=your_anthropic_api_key
|
| 54 |
+
|
| 55 |
+
# ======================
|
| 56 |
+
# OpenAI API URL Configuration
|
| 57 |
+
# ======================
|
| 58 |
+
# In case you are using OpenAI-compatible API service, you can specify the base URL of the API here
|
| 59 |
+
# OPENAI_API_BASE=https://api.openai.com/v1
|
| 60 |
+
|
| 61 |
+
# ======================
|
| 62 |
+
# Ollama Configuration
|
| 63 |
+
# ======================
|
| 64 |
+
|
| 65 |
+
# NOTE:
|
| 66 |
+
# if the ollama service is running on port 11434 of the host machine,
|
| 67 |
+
# then use http://host.docker.internal:11434 as the base url
|
| 68 |
+
# if the ollama service is running on a different host, use the ip address or domain name of the host
|
| 69 |
+
|
| 70 |
+
# Also make sure the ollama service is configured to accept requests.
|
| 71 |
+
# This can be done setting OLLAMA_HOST=0.0.0.0 environment variable before launching the ollama service.
|
| 72 |
+
|
| 73 |
+
# OLLAMA_BASE_URL=http://host.docker.internal:11434
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# ======================
|
| 77 |
+
# Azure OpenAI Configuration
|
| 78 |
+
# ======================
|
| 79 |
+
|
| 80 |
+
# AZURE_OPENAI_API_KEY=your_azure_openai_api_key
|
| 81 |
+
# AZURE_OPENAI_API_BASE=https://your-resource-name.openai.azure.com
|
| 82 |
+
# AZURE_OPENAI_API_VERSION=your_azure_openai_api_version
|
| 83 |
+
# AZURE_OPENAI_DEPLOYMENT_NAME=your_azure_openai_deployment_name
|
| 84 |
+
# ======================
|
| 85 |
+
|
| 86 |
+
# ======================
|
| 87 |
+
# Google configuration
|
| 88 |
+
# ======================
|
| 89 |
+
|
| 90 |
+
# NEXT_PUBLIC_GOOGLE_CLIENT_ID=your_google_client_id # Google OAuth Client ID
|
| 91 |
+
# # This environment variable is used to configure Google OAuth for your application.
|
| 92 |
+
# # It should be set to the client id obtained from the Google Developer Console.
|
| 93 |
+
# # The prefix 'NEXT_PUBLIC_' is used to expose this variable to the frontend,
|
| 94 |
+
# # allowing client-side code to access it.
|
| 95 |
+
|
| 96 |
+
# ======================
|
| 97 |
+
|
| 98 |
+
# ======================
|
| 99 |
+
# GitHub configuration
|
| 100 |
+
# ======================
|
| 101 |
+
|
| 102 |
+
# GITHUB_ACCESS_TOKEN=your_github_access_token # GitHub Personal Access Token
|
| 103 |
+
# # This environment variable is used to configure GitHub OAuth for your application.
|
| 104 |
+
# # It should be set to the personal access token obtained from the GitHub Developer Settings.
|
| 105 |
+
|
| 106 |
+
# ======================
|
| 107 |
+
|
| 108 |
+
# ======================
|
| 109 |
+
# Firecrawl configuration
|
| 110 |
+
# ======================
|
| 111 |
+
|
| 112 |
+
# FIRECRAWL_API_KEY=your_firecrawl_api_key # Firecrawl API Key
|
| 113 |
+
# # This environment variable is used to configure Firecrawl API for your application.
|
| 114 |
+
# # It should be set to the API key obtained from the Firecrawl Developer Console.
|
| 115 |
+
|
| 116 |
+
# ======================
|
| 117 |
+
|
| 118 |
+
# Frontend Configuration
|
| 119 |
+
# ======================
|
| 120 |
+
# Usage Data
|
| 121 |
+
# ======================
|
| 122 |
+
# We use PostHog to collect anonymous usage data for the PySpur UI.
|
| 123 |
+
# This helps us understand how our users are interacting with the application
|
| 124 |
+
# and improve the user experience.
|
| 125 |
+
# If you want to disable usage data collection, uncomment the following line:
|
| 126 |
+
# DISABLE_ANONYMOUS_TELEMETRY=true
|
| 127 |
+
# ======================
|
pyspur/.github/dependabot.yml
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# To get started with Dependabot version updates, you'll need to specify which
|
| 2 |
+
# package ecosystems to update and where the package manifests are located.
|
| 3 |
+
# Please see the documentation for more information:
|
| 4 |
+
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
| 5 |
+
# https://containers.dev/guide/dependabot
|
| 6 |
+
|
| 7 |
+
version: 2
|
| 8 |
+
updates:
|
| 9 |
+
- package-ecosystem: "devcontainers"
|
| 10 |
+
directory: "/"
|
| 11 |
+
schedule:
|
| 12 |
+
interval: weekly
|
pyspur/.github/workflows/release.yml
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Release
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
release:
|
| 5 |
+
types: [created]
|
| 6 |
+
|
| 7 |
+
env:
|
| 8 |
+
REGISTRY: ghcr.io
|
| 9 |
+
BACKEND_IMAGE_NAME: ${{ github.repository }}-backend
|
| 10 |
+
|
| 11 |
+
jobs:
|
| 12 |
+
build-and-push-docker:
|
| 13 |
+
runs-on: ubuntu-latest
|
| 14 |
+
permissions:
|
| 15 |
+
contents: read
|
| 16 |
+
packages: write
|
| 17 |
+
id-token: write # needed for PyPI publishing
|
| 18 |
+
outputs:
|
| 19 |
+
image_name: ${{ steps.meta-backend.outputs.tags }}
|
| 20 |
+
|
| 21 |
+
steps:
|
| 22 |
+
- name: Checkout repository
|
| 23 |
+
uses: actions/checkout@v4
|
| 24 |
+
with:
|
| 25 |
+
ref: ${{ github.event.release.tag_name }}
|
| 26 |
+
|
| 27 |
+
- name: Set up QEMU
|
| 28 |
+
uses: docker/setup-qemu-action@v3
|
| 29 |
+
|
| 30 |
+
- name: Set up Docker Buildx
|
| 31 |
+
uses: docker/setup-buildx-action@v3
|
| 32 |
+
|
| 33 |
+
- name: Log in to the Container registry
|
| 34 |
+
uses: docker/login-action@v3
|
| 35 |
+
with:
|
| 36 |
+
registry: ${{ env.REGISTRY }}
|
| 37 |
+
username: ${{ github.actor }}
|
| 38 |
+
password: ${{ secrets.GITHUB_TOKEN }}
|
| 39 |
+
|
| 40 |
+
- name: Extract metadata (tags, labels) for Backend
|
| 41 |
+
id: meta-backend
|
| 42 |
+
uses: docker/metadata-action@v5
|
| 43 |
+
with:
|
| 44 |
+
images: ${{ env.REGISTRY }}/${{ env.BACKEND_IMAGE_NAME }}
|
| 45 |
+
tags: |
|
| 46 |
+
type=semver,pattern={{version}}
|
| 47 |
+
type=semver,pattern={{major}}.{{minor}}
|
| 48 |
+
|
| 49 |
+
- name: Build and push Backend image
|
| 50 |
+
uses: docker/build-push-action@v6
|
| 51 |
+
with:
|
| 52 |
+
context: .
|
| 53 |
+
file: ./Dockerfile.backend
|
| 54 |
+
push: true
|
| 55 |
+
platforms: linux/amd64,linux/arm64
|
| 56 |
+
target: production
|
| 57 |
+
tags: ${{ steps.meta-backend.outputs.tags }}
|
| 58 |
+
labels: ${{ steps.meta-backend.outputs.labels }}
|
| 59 |
+
|
| 60 |
+
- name: Build Python package
|
| 61 |
+
run: |
|
| 62 |
+
# Create dist directory
|
| 63 |
+
mkdir -p dist
|
| 64 |
+
|
| 65 |
+
# Build package using the container we just built - use first tag
|
| 66 |
+
DOCKER_TAG=$(echo "${{ steps.meta-backend.outputs.tags }}" | head -n1)
|
| 67 |
+
docker run --rm -v "$(pwd)/dist:/dist" "$DOCKER_TAG" sh -c "cd /pyspur/backend && uv build && cp dist/* /dist/"
|
| 68 |
+
|
| 69 |
+
- name: Publish package to PyPI
|
| 70 |
+
uses: pypa/gh-action-pypi-publish@release/v1
|
| 71 |
+
with:
|
| 72 |
+
packages-dir: dist/
|
pyspur/.gitignore
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
share/python-wheels/
|
| 24 |
+
*.egg-info/
|
| 25 |
+
.installed.cfg
|
| 26 |
+
*.egg
|
| 27 |
+
MANIFEST
|
| 28 |
+
|
| 29 |
+
# PyInstaller
|
| 30 |
+
# Usually these files are written by a python script from a template
|
| 31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 32 |
+
*.manifest
|
| 33 |
+
*.spec
|
| 34 |
+
|
| 35 |
+
# Installer logs
|
| 36 |
+
pip-log.txt
|
| 37 |
+
pip-delete-this-directory.txt
|
| 38 |
+
|
| 39 |
+
# Unit test / coverage reports
|
| 40 |
+
htmlcov/
|
| 41 |
+
.tox/
|
| 42 |
+
.nox/
|
| 43 |
+
.coverage
|
| 44 |
+
.coverage.*
|
| 45 |
+
.cache
|
| 46 |
+
nosetests.xml
|
| 47 |
+
coverage.xml
|
| 48 |
+
*.cover
|
| 49 |
+
*.py,cover
|
| 50 |
+
.hypothesis/
|
| 51 |
+
.pytest_cache/
|
| 52 |
+
cover/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
.pybuilder/
|
| 76 |
+
target/
|
| 77 |
+
|
| 78 |
+
# Jupyter Notebook
|
| 79 |
+
.ipynb_checkpoints
|
| 80 |
+
|
| 81 |
+
# IPython
|
| 82 |
+
profile_default/
|
| 83 |
+
ipython_config.py
|
| 84 |
+
|
| 85 |
+
# pyenv
|
| 86 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 88 |
+
# .python-version
|
| 89 |
+
|
| 90 |
+
# pipenv
|
| 91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 94 |
+
# install all needed dependencies.
|
| 95 |
+
#Pipfile.lock
|
| 96 |
+
|
| 97 |
+
# poetry
|
| 98 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 100 |
+
# commonly ignored for libraries.
|
| 101 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 102 |
+
#poetry.lock
|
| 103 |
+
|
| 104 |
+
# pdm
|
| 105 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 106 |
+
#pdm.lock
|
| 107 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
| 108 |
+
# in version control.
|
| 109 |
+
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
| 110 |
+
.pdm.toml
|
| 111 |
+
.pdm-python
|
| 112 |
+
.pdm-build/
|
| 113 |
+
|
| 114 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 115 |
+
__pypackages__/
|
| 116 |
+
|
| 117 |
+
# Celery stuff
|
| 118 |
+
celerybeat-schedule
|
| 119 |
+
celerybeat.pid
|
| 120 |
+
|
| 121 |
+
# SageMath parsed files
|
| 122 |
+
*.sage.py
|
| 123 |
+
|
| 124 |
+
# Environments
|
| 125 |
+
.env
|
| 126 |
+
.venv
|
| 127 |
+
env/
|
| 128 |
+
venv/
|
| 129 |
+
ENV/
|
| 130 |
+
env.bak/
|
| 131 |
+
venv.bak/
|
| 132 |
+
|
| 133 |
+
# Spyder project settings
|
| 134 |
+
.spyderproject
|
| 135 |
+
.spyproject
|
| 136 |
+
|
| 137 |
+
# Rope project settings
|
| 138 |
+
.ropeproject
|
| 139 |
+
|
| 140 |
+
# mkdocs documentation
|
| 141 |
+
/site
|
| 142 |
+
|
| 143 |
+
# mypy
|
| 144 |
+
.mypy_cache/
|
| 145 |
+
.dmypy.json
|
| 146 |
+
dmypy.json
|
| 147 |
+
|
| 148 |
+
# Pyre type checker
|
| 149 |
+
.pyre/
|
| 150 |
+
|
| 151 |
+
# pytype static type analyzer
|
| 152 |
+
.pytype/
|
| 153 |
+
|
| 154 |
+
# Cython debug symbols
|
| 155 |
+
cython_debug/
|
| 156 |
+
|
| 157 |
+
# PyCharm
|
| 158 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 159 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 160 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 161 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 162 |
+
#.idea/
|
| 163 |
+
|
| 164 |
+
.DS_Store
|
| 165 |
+
.vscode
|
| 166 |
+
|
| 167 |
+
# Ruff cache
|
| 168 |
+
**/.ruff_cache/
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
# node_modules
|
| 172 |
+
**/node_modules/
|
| 173 |
+
**/node_modules
|
| 174 |
+
|
| 175 |
+
prd/
|
| 176 |
+
|
| 177 |
+
# package* in docs
|
| 178 |
+
docs/package*
|
pyspur/.pre-commit-config.yaml
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
repos:
|
| 2 |
+
- repo: local
|
| 3 |
+
hooks:
|
| 4 |
+
- id: backend-hooks
|
| 5 |
+
name: Backend Hooks
|
| 6 |
+
entry: pre-commit run --config backend/.pre-commit-config.yaml
|
| 7 |
+
language: system
|
| 8 |
+
pass_filenames: false
|
| 9 |
+
always_run: true
|
| 10 |
+
files: ^backend/
|
| 11 |
+
|
| 12 |
+
- id: frontend-hooks
|
| 13 |
+
name: Frontend Hooks
|
| 14 |
+
entry: bash -c 'cd frontend && npx lint-staged'
|
| 15 |
+
language: system
|
| 16 |
+
pass_filenames: false
|
| 17 |
+
always_run: true
|
| 18 |
+
files: ^frontend/
|
| 19 |
+
|
| 20 |
+
- id: frontend-hooks-cleanup
|
| 21 |
+
name: Cleanup files created by frontend hooks
|
| 22 |
+
entry: bash -c 'cd frontend && rm -f tsconfig.*.tsbuildinfo'
|
| 23 |
+
language: system
|
| 24 |
+
pass_filenames: false
|
| 25 |
+
always_run: true
|
| 26 |
+
files: ^frontend/
|
pyspur/Dockerfile.backend
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.12-slim AS base
|
| 2 |
+
RUN apt-get update && apt-get install -y \
|
| 3 |
+
libpq-dev \
|
| 4 |
+
gcc \
|
| 5 |
+
curl \
|
| 6 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 7 |
+
|
| 8 |
+
RUN pip install uv
|
| 9 |
+
WORKDIR /pyspur/backend
|
| 10 |
+
COPY backend/pyproject.toml .
|
| 11 |
+
RUN uv pip compile pyproject.toml > requirements.txt && \
|
| 12 |
+
uv pip install --system --no-cache-dir -r requirements.txt && \
|
| 13 |
+
rm requirements.txt
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Development stage
|
| 17 |
+
FROM base AS development
|
| 18 |
+
ENV PYTHONPATH=/pyspur/backend
|
| 19 |
+
# Development-specific instructions here
|
| 20 |
+
|
| 21 |
+
# Frontend build stage
|
| 22 |
+
FROM node:23-slim AS frontend-builder
|
| 23 |
+
WORKDIR /pyspur/frontend
|
| 24 |
+
COPY frontend/package*.json ./
|
| 25 |
+
RUN npm ci
|
| 26 |
+
COPY frontend/ .
|
| 27 |
+
RUN npm run build
|
| 28 |
+
|
| 29 |
+
# Production stage
|
| 30 |
+
FROM base AS production
|
| 31 |
+
ENV PYTHONPATH=/pyspur/backend
|
| 32 |
+
COPY backend/ .
|
| 33 |
+
# Copy frontend static files from frontend build stage
|
| 34 |
+
RUN mkdir -p /pyspur/backend/pyspur/static
|
| 35 |
+
RUN rm -rf /pyspur/backend/pyspur/static/*
|
| 36 |
+
COPY --from=frontend-builder /pyspur/frontend/out/ /pyspur/backend/pyspur/static/
|
| 37 |
+
COPY .env.example /pyspur/backend/pyspur/templates/.env.example
|
| 38 |
+
# Production-specific instructions here
|
pyspur/Dockerfile.frontend
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM node:23-slim AS base
|
| 2 |
+
WORKDIR /pyspur/frontend
|
| 3 |
+
COPY frontend/package*.json ./
|
| 4 |
+
|
| 5 |
+
# Development stage
|
| 6 |
+
FROM base AS development
|
| 7 |
+
RUN npm install
|
| 8 |
+
# Development-specific instructions here
|
| 9 |
+
|
| 10 |
+
# Production stage
|
| 11 |
+
FROM base AS production
|
| 12 |
+
RUN npm ci --only=production
|
| 13 |
+
COPY frontend/ .
|
| 14 |
+
RUN npm run build
|
| 15 |
+
# Production-specific instructions here
|
pyspur/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright [yyyy] [name of copyright owner]
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
pyspur/README.md
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>Iterate over your agents 10x faster. AI engineers use PySpur to iterate over AI agents visually without reinventing the wheel.</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="Deutsche Version der README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="Version française du README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="Versión en español del README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="Docs" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="Meet us" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="Cloud" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Join Our Discord" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/54d0619f-22fd-476c-bf19-9be083d7e710
|
| 31 |
+
|
| 32 |
+
# 🕸️ Why PySpur?
|
| 33 |
+
|
| 34 |
+
## Problem: It takes a 1,000 tiny paper cuts to make AI reliable
|
| 35 |
+
|
| 36 |
+
AI engineers today face three problems of building agents:
|
| 37 |
+
|
| 38 |
+
* **Prompt Hell**: Hours of prompt tweaking and trial-and-error frustration.
|
| 39 |
+
* **Workflow Blindspots**: Lack of visibility into step interactions causing hidden failures and confusion.
|
| 40 |
+
* **Terminal Testing Nightmare** Squinting at raw outputs and manually parsing JSON.
|
| 41 |
+
|
| 42 |
+
We've been there ourselves, too. We launched a graphic design agent early 2024 and quickly reached thousands of users, yet, struggled with the lack of its reliability and existing debugging tools.
|
| 43 |
+
|
| 44 |
+
## Solution: A playground for agents that saves time
|
| 45 |
+
|
| 46 |
+
### Step 1: Define Test Cases
|
| 47 |
+
|
| 48 |
+
https://github.com/user-attachments/assets/ed9ca45f-7346-463f-b8a4-205bf2c4588f
|
| 49 |
+
|
| 50 |
+
### Step 2: Build the agent in Python code or via UI
|
| 51 |
+
|
| 52 |
+
https://github.com/user-attachments/assets/7043aae4-fad1-42bd-953a-80c94fce8253
|
| 53 |
+
|
| 54 |
+
### Step 3: Iterate obsessively
|
| 55 |
+
|
| 56 |
+
https://github.com/user-attachments/assets/72c9901d-a39c-4f80-85a5-f6f76e55f473
|
| 57 |
+
|
| 58 |
+
### Step 4: Deploy
|
| 59 |
+
|
| 60 |
+
https://github.com/user-attachments/assets/b14f34b2-9f16-4bd0-8a0f-1c26e690af93
|
| 61 |
+
|
| 62 |
+
# ✨ Core features:
|
| 63 |
+
|
| 64 |
+
- 👤 **Human in the Loop**: Persistent workflows that wait for human approval.
|
| 65 |
+
- 🔄 **Loops**: Iterative tool calling with memory.
|
| 66 |
+
- 📤 **File Upload**: Upload files or paste URLs to process documents.
|
| 67 |
+
- 📋 **Structured Outputs**: UI editor for JSON Schemas.
|
| 68 |
+
- 🗃️ **RAG**: Parse, Chunk, Embed, and Upsert Data into a Vector DB.
|
| 69 |
+
- 🖼️ **Multimodal**: Support for Video, Images, Audio, Texts, Code.
|
| 70 |
+
- 🧰 **Tools**: Slack, Firecrawl.dev, Google Sheets, GitHub, and more.
|
| 71 |
+
- 📊 **Traces**: Automatically capture execution traces of deployed agents.
|
| 72 |
+
- 🧪 **Evals**: Evaluate agents on real-world datasets.
|
| 73 |
+
- 🚀 **One-Click Deploy**: Publish as an API and integrate wherever you want.
|
| 74 |
+
- 🐍 **Python-Based**: Add new nodes by creating a single Python file.
|
| 75 |
+
- 🎛️ **Any-Vendor-Support**: >100 LLM providers, embedders, and vector DBs.
|
| 76 |
+
|
| 77 |
+
# ⚡ Quick start
|
| 78 |
+
|
| 79 |
+
This is the quickest way to get started. Python 3.11 or higher is required.
|
| 80 |
+
|
| 81 |
+
1. **Install PySpur:**
|
| 82 |
+
```sh
|
| 83 |
+
pip install pyspur
|
| 84 |
+
```
|
| 85 |
+
|
| 86 |
+
2. **Initialize a new project:**
|
| 87 |
+
```sh
|
| 88 |
+
pyspur init my-project
|
| 89 |
+
cd my-project
|
| 90 |
+
```
|
| 91 |
+
This will create a new directory with a `.env` file.
|
| 92 |
+
|
| 93 |
+
3. **Start the server:**
|
| 94 |
+
```sh
|
| 95 |
+
pyspur serve --sqlite
|
| 96 |
+
```
|
| 97 |
+
By default, this will start PySpur app at `http://localhost:6080` using a sqlite database.
|
| 98 |
+
We recommend you configure a postgres instance URL in the `.env` file to get a more stable experience.
|
| 99 |
+
|
| 100 |
+
4. **[Optional] Configure Your Environment and Add API Keys:**
|
| 101 |
+
- **App UI**: Navigate to API Keys tab to add provider keys (OpenAI, Anthropic, etc.)
|
| 102 |
+
- **Manual**: Edit `.env` file (recommended: configure postgres) and restart with `pyspur serve`
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# 😎 Feature Reel
|
| 106 |
+
|
| 107 |
+
## Human-in-the-loop breakpoints:
|
| 108 |
+
|
| 109 |
+
These breakpoints pause the workflow when reached and resume whenever a human approves it.
|
| 110 |
+
They enable human oversight for workflows that require quality assurance: verify critical outputs before the workflow proceeds.
|
| 111 |
+
|
| 112 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 113 |
+
|
| 114 |
+
## Debug at Node Level:
|
| 115 |
+
|
| 116 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 117 |
+
|
| 118 |
+
## Multimodal (Upload files or paste URLs)
|
| 119 |
+
|
| 120 |
+
PDFs, Videos, Audio, Images, ...
|
| 121 |
+
|
| 122 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 123 |
+
|
| 124 |
+
## Loops
|
| 125 |
+
|
| 126 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 127 |
+
|
| 128 |
+
## RAG
|
| 129 |
+
|
| 130 |
+
### Step 1) Create Document Collection (Chunking + Parsing)
|
| 131 |
+
|
| 132 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 133 |
+
|
| 134 |
+
### Step 2) Create Vector Index (Embedding + Vector DB Upsert)
|
| 135 |
+
|
| 136 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 137 |
+
|
| 138 |
+
## Modular Building Blocks
|
| 139 |
+
|
| 140 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 141 |
+
|
| 142 |
+
## Evaluate Final Performance
|
| 143 |
+
|
| 144 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 145 |
+
|
| 146 |
+
## Coming soon: Self-improvement
|
| 147 |
+
|
| 148 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 149 |
+
|
| 150 |
+
# 🛠️ PySpur Development Setup
|
| 151 |
+
#### [ Instructions for development on Unix-like systems. Development on Windows/PC not supported ]
|
| 152 |
+
|
| 153 |
+
We recommend using Cursor/VS Code with our dev container (`.devcontainer/devcontainer.json`) for:
|
| 154 |
+
- Consistent development environment with pre-configured tools and extensions
|
| 155 |
+
- Optimized settings for Python and TypeScript development
|
| 156 |
+
- Automatic hot-reloading and port forwarding
|
| 157 |
+
|
| 158 |
+
**Option 1: Cursor/VS Code Dev Container (Recommended)**
|
| 159 |
+
1. Install [Cursor](https://www.cursor.com/)/[VS Code](https://code.visualstudio.com/) and the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
|
| 160 |
+
2. Clone and open the repository
|
| 161 |
+
3. Click "Reopen in Container" when prompted
|
| 162 |
+
|
| 163 |
+
**Option 2: Manual Setup**
|
| 164 |
+
1. **Clone the repository:**
|
| 165 |
+
```sh
|
| 166 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 167 |
+
cd pyspur
|
| 168 |
+
```
|
| 169 |
+
|
| 170 |
+
2. **Launch using docker-compose.dev.yml:**
|
| 171 |
+
```sh
|
| 172 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 173 |
+
```
|
| 174 |
+
|
| 175 |
+
3. **Customize your setup:**
|
| 176 |
+
Edit `.env` to configure your environment (e.g., PostgreSQL settings).
|
| 177 |
+
|
| 178 |
+
Note: Manual setup requires additional configuration and may not include all dev container features.
|
| 179 |
+
|
| 180 |
+
# ⭐ Support us
|
| 181 |
+
|
| 182 |
+
You can support us in our work by leaving a star! Thank you!
|
| 183 |
+
|
| 184 |
+

|
| 185 |
+
|
| 186 |
+
Your feedback will be massively appreciated.
|
| 187 |
+
Please [tell us](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) which features on that list you'd like to see next, or request entirely new ones.
|
pyspur/README_CN.md
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpur 是一个基于 Python 编写的 AI 智能体构建器。AI 工程师使用它来构建智能体,逐步执行并检查过去的运行记录。</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="Deutsche Version der README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="Version française du README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="Versión en español del README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="Docs" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="Meet us" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="Cloud" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Join Our Discord" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ 为什么选择 PySpur?
|
| 33 |
+
|
| 34 |
+
- ✅ **测试驱动**:构建工作流,运行测试用例,并进行迭代。
|
| 35 |
+
- 👤 **人在环路中**:持久化工作流,等待人工批准或拒绝。
|
| 36 |
+
- 🔄 **循环**:具有记忆功能的迭代工具调用。
|
| 37 |
+
- 📤 **文件上传**:上传文件或粘贴 URL 来处理文档。
|
| 38 |
+
- 📋 **结构化输出**:JSON Schema UI 编辑器。
|
| 39 |
+
- 🗃️ **RAG**:解析、分块、嵌入并将数据更新到向量数据库。
|
| 40 |
+
- 🖼️ **多模态**:支持视频、图像、音频、文本、代码。
|
| 41 |
+
- 🧰 **工具**:Slack、Firecrawl.dev、Google Sheets、GitHub 等。
|
| 42 |
+
- 🧪 **评估**:在真实数据集上评估代理。
|
| 43 |
+
- 🚀 **一键部署**:发布为 API 并在任意地方集成。
|
| 44 |
+
- 🐍 **基于 Python**:通过创建单个 Python 文件来添加新节点。
|
| 45 |
+
- 🎛️ **供应商支持**:支持超过 100 个 LLM 供应商、嵌入器和向量数据库。
|
| 46 |
+
|
| 47 |
+
# ⚡ 快速开始
|
| 48 |
+
|
| 49 |
+
这是入门的最快方式。需要 Python 3.11 或更高版本。
|
| 50 |
+
|
| 51 |
+
1. **安装 PySpur:**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **初始化新项目:**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
这将创建一个包含 `.env` 文件的新目录。
|
| 62 |
+
|
| 63 |
+
3. **启动服务器:**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
默认情况下,这将使用 SQLite 数据库在 `http://localhost:6080` 启动 PySpur 应用。
|
| 68 |
+
我们建议你在 `.env` 文件中配置 Postgres 实例的 URL,以获得更稳定的体验。
|
| 69 |
+
|
| 70 |
+
4. **[可选] 配置环境和添加 API 密钥:**
|
| 71 |
+
- **应用界面**: 导航至 API 密钥标签页添加供应商密钥(OpenAI、Anthropic 等)
|
| 72 |
+
- **手动配置**: 编辑 `.env` 文件(推荐:配置 postgres)并使用 `pyspur serve` 重启
|
| 73 |
+
|
| 74 |
+
# ✨ 核心优势
|
| 75 |
+
|
| 76 |
+
## 人在环路中断点:
|
| 77 |
+
|
| 78 |
+
这些断点在达到时会暂停工作流,并在人工批准后恢复。
|
| 79 |
+
它们为需要质量保证的工作流提供人工监督:在工作流继续之前验证关键输出。
|
| 80 |
+
|
| 81 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 82 |
+
|
| 83 |
+
## 节点级调试:
|
| 84 |
+
|
| 85 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 86 |
+
|
| 87 |
+
## 多模态(上传文件或粘贴 URL)
|
| 88 |
+
|
| 89 |
+
支持 PDF、视频、音频、图像等……
|
| 90 |
+
|
| 91 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 92 |
+
|
| 93 |
+
## 循环
|
| 94 |
+
|
| 95 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 96 |
+
|
| 97 |
+
## RAG
|
| 98 |
+
|
| 99 |
+
### 步骤 1) 创建文档集合(分块 + 解析)
|
| 100 |
+
|
| 101 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 102 |
+
|
| 103 |
+
### 步骤 2) 创建向量索引(嵌入 + 向量数据库插入)
|
| 104 |
+
|
| 105 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 106 |
+
|
| 107 |
+
## 模块化构建块
|
| 108 |
+
|
| 109 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 110 |
+
|
| 111 |
+
## 评估最终性能
|
| 112 |
+
|
| 113 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 114 |
+
|
| 115 |
+
## 即将推出:自我提升
|
| 116 |
+
|
| 117 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 118 |
+
|
| 119 |
+
# 🛠️ PySpur 开发环境设置
|
| 120 |
+
#### [ Unix 类系统开发指南。Windows/PC 开发不支持。 ]
|
| 121 |
+
|
| 122 |
+
我们推荐使用 Cursor/VS Code 和我们的开发容器(`.devcontainer/devcontainer.json`),它提供:
|
| 123 |
+
- 预配置工具和扩展的一致开发环境
|
| 124 |
+
- 针对 Python 和 TypeScript 开发的优化设置
|
| 125 |
+
- 自动热重载和端口转发
|
| 126 |
+
|
| 127 |
+
**选项 1:Cursor/VS Code 开发容器(推荐)**
|
| 128 |
+
1. 安装 [Cursor](https://www.cursor.com/)/[VS Code](https://code.visualstudio.com/) 和 [Dev Containers 扩展](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
|
| 129 |
+
2. 克隆并打开仓库
|
| 130 |
+
3. 当提示时点击"在容器中重新打开"
|
| 131 |
+
|
| 132 |
+
**选项 2:手动设置**
|
| 133 |
+
1. **克隆仓库:**
|
| 134 |
+
```sh
|
| 135 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 136 |
+
cd pyspur
|
| 137 |
+
```
|
| 138 |
+
|
| 139 |
+
2. **使用 docker-compose.dev.yml 启动:**
|
| 140 |
+
```sh
|
| 141 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 142 |
+
```
|
| 143 |
+
|
| 144 |
+
3. **自定义设置:**
|
| 145 |
+
编辑 `.env` 配置环境(例如:PostgreSQL 设置)。
|
| 146 |
+
|
| 147 |
+
注意:手动设置需要额外配置,可能无法包含开发容器提供的所有功能。
|
| 148 |
+
|
| 149 |
+
# ⭐ 支持我们
|
| 150 |
+
|
| 151 |
+
你可以通过给我们项目 Star 来支持我们的工作!谢谢!
|
| 152 |
+
|
| 153 |
+

|
| 154 |
+
|
| 155 |
+
我们非常重视你的反馈。
|
| 156 |
+
请 [告诉我们](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) 你希望接下来看到列表中的哪些功能,或提出全新的功能需求。
|
pyspur/README_DE.md
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpur ist ein KI-Agenten-Builder in Python. KI-Entwickler nutzen ihn, um Agenten zu erstellen, sie Schritt für Schritt auszuführen und vergangene Durchläufe zu analysieren.</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="README auf Englisch" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="README auf vereinfachtem Chinesisch" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="README auf Japanisch" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="README auf Koreanisch" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="Deutsche Version der README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="README auf Französisch" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="README auf Spanisch" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="Dokumentation" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="Treffen Sie uns" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="Cloud" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Discord beitreten" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ Warum PySpur?
|
| 33 |
+
|
| 34 |
+
- ✅ **Testgetrieben**: Erstellen Sie Workflows, führen Sie Testfälle aus und iterieren Sie.
|
| 35 |
+
- 👤 **Human in the Loop**: Persistente Workflows, die auf Genehmigung oder Ablehnung des Users warten.
|
| 36 |
+
- 🔄 **Loops**: Wiederholte Toolaufrufe mit Zwischenspeicherung.
|
| 37 |
+
- 📤 **Datei-Upload**: Laden Sie Dateien hoch oder fügen Sie URLs ein, um Dokumente zu verarbeiten.
|
| 38 |
+
- 📋 **Strukturierte Outputs**: UI-Editor für JSON-Schemata.
|
| 39 |
+
- 🗃️ **RAG**: Daten parsen, in Abschnitte unterteilen, einbetten und in eine Vektor-Datenbank einfügen/aktualisieren.
|
| 40 |
+
- 🖼️ **Multimodal**: Unterstützung für Video, Bilder, Audio, Texte, Code.
|
| 41 |
+
- 🧰 **Tools**: Slack, Firecrawl.dev, Google Sheets, GitHub und mehr.
|
| 42 |
+
- 🧪 **Evaluierungen**: Bewerten Sie Agenten anhand von realen Datensätzen.
|
| 43 |
+
- 🚀 **One-Click Deploy**: Veröffentlichen Sie Ihre Lösung als API und integrieren Sie sie überall.
|
| 44 |
+
- 🐍 **Python-basiert**: Fügen Sie neue Knoten hinzu, indem Sie eine einzige Python-Datei erstellen.
|
| 45 |
+
- 🎛️ **Support für jeden Anbieter**: Über 100 LLM-Anbieter, Einbettungslösungen und Vektor-Datenbanken.
|
| 46 |
+
|
| 47 |
+
# ⚡ Schnellstart
|
| 48 |
+
|
| 49 |
+
Dies ist der schnellste Weg, um loszulegen. Python 3.11 oder höher wird benötigt.
|
| 50 |
+
|
| 51 |
+
1. **PySpur installieren:**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **Ein neues Projekt initialisieren:**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
Dadurch wird ein neues Verzeichnis mit einer `.env`-Datei erstellt.
|
| 62 |
+
|
| 63 |
+
3. **Den Server starten:**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
Standardmäßig startet dies die PySpur-App unter `http://localhost:6080` mit einer SQLite-Datenbank.
|
| 68 |
+
Wir empfehlen, in der `.env`-Datei eine PostgreSQL-Instanz-URL zu konfigurieren, um eine stabilere Erfahrung zu gewährleisten.
|
| 69 |
+
|
| 70 |
+
4. **[Optional] Umgebung konfigurieren und API-Schlüssel hinzufügen:**
|
| 71 |
+
- **App-Oberfläche**: Navigieren Sie zum Tab „API Keys", um Anbieter-Schlüssel hinzuzufügen (OpenAI, Anthropic usw.)
|
| 72 |
+
- **Manuelle Konfiguration**: Bearbeiten Sie die `.env`-Datei (empfohlen: PostgreSQL konfigurieren) und starten Sie mit `pyspur serve` neu
|
| 73 |
+
|
| 74 |
+
# ✨ Kernvorteile
|
| 75 |
+
|
| 76 |
+
## Mensch-im-Regelkreis-Haltepunkte:
|
| 77 |
+
|
| 78 |
+
Diese Haltepunkte pausieren den Workflow, wenn sie erreicht werden, und setzen ihn fort, sobald ein Mensch ihn genehmigt.
|
| 79 |
+
Sie ermöglichen menschliche Aufsicht für Workflows, die Qualitätssicherung erfordern: Überprüfen Sie kritische Ausgaben, bevor der Workflow fortgesetzt wird.
|
| 80 |
+
|
| 81 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 82 |
+
|
| 83 |
+
## Debuggen auf Node-Ebene:
|
| 84 |
+
|
| 85 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 86 |
+
|
| 87 |
+
## Multimodal (Dateien hochladen oder URLs einfügen)
|
| 88 |
+
|
| 89 |
+
PDFs, Videos, Audio, Bilder, ...
|
| 90 |
+
|
| 91 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 92 |
+
|
| 93 |
+
## Loops
|
| 94 |
+
|
| 95 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 96 |
+
|
| 97 |
+
## RAG
|
| 98 |
+
|
| 99 |
+
### Schritt 1) Erstellen einer Dokumentensammlung (Chunking + Parsing)
|
| 100 |
+
|
| 101 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 102 |
+
|
| 103 |
+
### Schritt 2) Erstellen eines Vektorindex (Einbettung + Einfügen/Aktualisieren in der Vektor-Datenbank)
|
| 104 |
+
|
| 105 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 106 |
+
|
| 107 |
+
## Modulare Bausteine
|
| 108 |
+
|
| 109 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 110 |
+
|
| 111 |
+
## Endgültige Leistung bewerten
|
| 112 |
+
|
| 113 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 114 |
+
|
| 115 |
+
## Demnächst: Selbstverbesserung
|
| 116 |
+
|
| 117 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 118 |
+
|
| 119 |
+
# 🛠️ PySpur Entwicklungs-Setup
|
| 120 |
+
#### [ Anweisungen für die Entwicklung auf Unix-ähnlichen Systemen. Entwicklung auf Windows/PC wird nicht unterstützt ]
|
| 121 |
+
|
| 122 |
+
Für die Entwicklung folgen Sie diesen Schritten:
|
| 123 |
+
|
| 124 |
+
1. **Das Repository klonen:**
|
| 125 |
+
```sh
|
| 126 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 127 |
+
cd pyspur
|
| 128 |
+
```
|
| 129 |
+
|
| 130 |
+
2. **Mit docker-compose.dev.yml starten:**
|
| 131 |
+
```sh
|
| 132 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 133 |
+
```
|
| 134 |
+
Dadurch wird eine lokale Instanz von PySpur mit aktiviertem Hot-Reloading für die Entwicklung gestartet.
|
| 135 |
+
|
| 136 |
+
3. **Ihre Einrichtung anpassen:**
|
| 137 |
+
Bearbeiten Sie die `.env`-Datei, um Ihre Umgebung zu konfigurieren. Standardmäßig verwendet PySpur eine lokale PostgreSQL-Datenbank. Um eine externe Datenbank zu nutzen, ändern Sie die `POSTGRES_*`-Variablen in der `.env`.
|
| 138 |
+
|
| 139 |
+
# ⭐ Unterstützen Sie uns
|
| 140 |
+
|
| 141 |
+
Sie können uns bei unserer Arbeit unterstützen, indem Sie einen Stern hinterlassen! Vielen Dank!
|
| 142 |
+
|
| 143 |
+

|
| 144 |
+
|
| 145 |
+
Ihr Feedback wird sehr geschätzt.
|
| 146 |
+
Bitte [sagen Sie uns](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai), welche Funktionen aus dieser Liste Sie als Nächstes sehen möchten oder schlagen Sie ganz neue vor.
|
pyspur/README_ES.md
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpur es un constructor de agentes de IA en Python. Los ingenieros de IA lo utilizan para crear agentes, ejecutarlos paso a paso e inspeccionar ejecuciones anteriores.</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="README en inglés" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="Versión en chino simplificado" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="README en japonés" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="README en coreano" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="Versión en alemán del README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="Versión en francés del README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="Versión en español del README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="Docs" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="Conócenos" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="Cloud" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Únete a nuestro Discord" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ ¿Por qué PySpur?
|
| 33 |
+
|
| 34 |
+
- ✅ **Desarrollo Guiado por Pruebas**: Construye flujos de trabajo, ejecuta casos de prueba e itera.
|
| 35 |
+
- 👤 **Humano en el Bucle**: Flujos de trabajo persistentes que esperan aprobación o rechazo humano.
|
| 36 |
+
- 🔄 **Bucles**: Llamadas iterativas a herramientas con memoria.
|
| 37 |
+
- 📤 **Carga de Archivos**: Sube archivos o pega URLs para procesar documentos.
|
| 38 |
+
- 📋 **Salidas Estructuradas**: Editor de interfaz para esquemas JSON.
|
| 39 |
+
- 🗃️ **RAG**: Analiza, segmenta, incrusta y actualiza datos en una base de datos vectorial.
|
| 40 |
+
- 🖼️ **Multimodal**: Soporte para video, imágenes, audio, textos y código.
|
| 41 |
+
- 🧰 **Herramientas**: Slack, Firecrawl.dev, Google Sheets, GitHub y más.
|
| 42 |
+
- 🧪 **Evaluaciones**: Evalúa agentes en conjuntos de datos del mundo real.
|
| 43 |
+
- 🚀 **Despliegue con un clic**: Publica como una API e intégrala donde desees.
|
| 44 |
+
- 🐍 **Basado en Python**: Agrega nuevos nodos creando un solo archivo Python.
|
| 45 |
+
- 🎛️ **Soporte para Cualquier Proveedor**: Más de 100 proveedores de LLM, embedders y bases de datos vectoriales.
|
| 46 |
+
|
| 47 |
+
# ⚡ Inicio Rápido
|
| 48 |
+
|
| 49 |
+
Esta es la forma más rápida de comenzar. Se requiere Python 3.11 o superior.
|
| 50 |
+
|
| 51 |
+
1. **Instala PySpur:**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **Inicializa un nuevo proyecto:**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
Esto creará un nuevo directorio con un archivo `.env`.
|
| 62 |
+
|
| 63 |
+
3. **Inicia el servidor:**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
Por defecto, esto iniciará la aplicación PySpur en `http://localhost:6080` utilizando una base de datos SQLite.
|
| 68 |
+
Se recomienda configurar una URL de instancia de Postgres en el archivo `.env` para obtener una experiencia más estable.
|
| 69 |
+
|
| 70 |
+
4. **[Opcional] Configura tu entorno y añade claves API:**
|
| 71 |
+
- **A través de la interfaz de la aplicación**: Navega a la pestaña de API Keys para añadir claves de proveedores (OpenAI, Anthropic, etc.)
|
| 72 |
+
- **Configuración manual**: Edita el archivo `.env` (recomendado: configura postgres) y reinicia con `pyspur serve`
|
| 73 |
+
|
| 74 |
+
¡Eso es todo! Haz clic en "New Spur" para crear un flujo de trabajo, o comienza con una de las plantillas predefinidas.
|
| 75 |
+
|
| 76 |
+
# ✨ Beneficios Principales
|
| 77 |
+
|
| 78 |
+
## Puntos de Interrupción con Humano en el Bucle:
|
| 79 |
+
|
| 80 |
+
Estos puntos de interrupción pausan el flujo de trabajo cuando se alcanzan y lo reanudan tan pronto como un humano lo aprueba.
|
| 81 |
+
Permiten la supervisión humana para flujos de trabajo que requieren garantía de calidad: verifique las salidas críticas antes de que el flujo de trabajo continúe.
|
| 82 |
+
|
| 83 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 84 |
+
|
| 85 |
+
## Depuración a Nivel de Nodo:
|
| 86 |
+
|
| 87 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 88 |
+
|
| 89 |
+
## Multimodal (Sube archivos o pega URLs)
|
| 90 |
+
|
| 91 |
+
PDFs, Videos, Audio, Imágenes, ...
|
| 92 |
+
|
| 93 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 94 |
+
|
| 95 |
+
## Bucles
|
| 96 |
+
|
| 97 |
+
<img width="1919" alt="Bucles" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 98 |
+
|
| 99 |
+
## RAG
|
| 100 |
+
|
| 101 |
+
### Paso 1) Crear Colección de Documentos (Segmentación + Análisis)
|
| 102 |
+
|
| 103 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 104 |
+
|
| 105 |
+
### Paso 2) Crear Índice Vectorial (Incrustación + Actualización en DB Vectorial)
|
| 106 |
+
|
| 107 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 108 |
+
|
| 109 |
+
## Bloques Modulares
|
| 110 |
+
|
| 111 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 112 |
+
|
| 113 |
+
## Evaluar el Rendimiento Final
|
| 114 |
+
|
| 115 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 116 |
+
|
| 117 |
+
## Próximamente: Auto-mejora
|
| 118 |
+
|
| 119 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 120 |
+
|
| 121 |
+
# 🛠️ Configuración de Desarrollo de PySpur
|
| 122 |
+
#### [ Instrucciones para el desarrollo en sistemas tipo Unix. Desarrollo en Windows/PC no es soportado ]
|
| 123 |
+
|
| 124 |
+
Para el desarrollo, sigue estos pasos:
|
| 125 |
+
|
| 126 |
+
1. **Clona el repositorio:**
|
| 127 |
+
```sh
|
| 128 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 129 |
+
cd pyspur
|
| 130 |
+
```
|
| 131 |
+
|
| 132 |
+
2. **Inicia utilizando docker-compose.dev.yml:**
|
| 133 |
+
```sh
|
| 134 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 135 |
+
```
|
| 136 |
+
Esto iniciará una instancia local de PySpur con recarga en caliente habilitada para el desarrollo.
|
| 137 |
+
|
| 138 |
+
3. **Personaliza tu configuración:**
|
| 139 |
+
Edita el archivo `.env` para configurar tu entorno. Por defecto, PySpur utiliza una base de datos PostgreSQL local. Para usar una base de datos externa, modifica las variables `POSTGRES_*` en el archivo `.env`.
|
| 140 |
+
|
| 141 |
+
# ⭐ Apóyanos
|
| 142 |
+
|
| 143 |
+
¡Puedes apoyarnos en nuestro trabajo dándonos una estrella! ¡Gracias!
|
| 144 |
+
|
| 145 |
+

|
| 146 |
+
|
| 147 |
+
Tu retroalimentación será enormemente apreciada.
|
| 148 |
+
Por favor [dinos](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) qué características de esa lista te gustaría ver a continuación o solicita nuevas funcionalidades.
|
pyspur/README_FR.md
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpur est un créateur d'agents d'IA en Python. Les ingénieurs en IA l'utilisent pour créer des agents, les exécuter étape par étape et inspecter les exécutions passées.</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="Deutsche Version der README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="Version française du README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="Versión en español del README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="Documentation" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="Rencontrez-nous" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="Cloud" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Rejoignez notre Discord" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ Pourquoi PySpur ?
|
| 33 |
+
|
| 34 |
+
- ✅ **Piloté par les tests** : Construisez des workflows, exécutez des cas de test et itérez.
|
| 35 |
+
- 👤 **Humain dans la boucle** : Workflows persistants qui attendent l'approbation ou le rejet humain.
|
| 36 |
+
- 🔄 **Boucles** : Appels d'outils itératifs avec mémoire.
|
| 37 |
+
- 📤 **Téléversement de fichiers** : Téléchargez des fichiers ou collez des URL pour traiter des documents.
|
| 38 |
+
- 📋 **Sorties structurées** : Éditeur d'interface utilisateur pour les schémas JSON.
|
| 39 |
+
- 🗃️ **RAG** : Analyser, découper, intégrer et insérer ou mettre à jour des données dans une base de données vectorielle.
|
| 40 |
+
- 🖼️ **Multimodal** : Support pour vidéos, images, audio, textes, code.
|
| 41 |
+
- 🧰 **Outils** : Slack, Firecrawl.dev, Google Sheets, GitHub, et plus encore.
|
| 42 |
+
- 🧪 **Évaluations** : Évaluez les agents sur des ensembles de données réelles.
|
| 43 |
+
- 🚀 **Déploiement en un clic** : Publiez en tant qu'API et intégrez-le où vous le souhaitez.
|
| 44 |
+
- 🐍 **Basé sur Python** : Ajoutez de nouveaux nœuds en créant un seul fichier Python.
|
| 45 |
+
- 🎛️ **Support multi-fournisseurs** : >100 fournisseurs de LLM, intégrateurs et bases de données vectorielles.
|
| 46 |
+
|
| 47 |
+
# ⚡ Démarrage rapide
|
| 48 |
+
|
| 49 |
+
C'est la manière la plus rapide de commencer. Python 3.11 ou une version supérieure est requis.
|
| 50 |
+
|
| 51 |
+
1. **Installer PySpur :**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **Initialiser un nouveau projet :**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
Cela va créer un nouveau répertoire avec un fichier `.env`.
|
| 62 |
+
|
| 63 |
+
3. **Démarrer le serveur :**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
Par défaut, cela démarrera l'application PySpur sur `http://localhost:6080` en utilisant une base de données SQLite.
|
| 68 |
+
Nous vous recommandons de configurer une URL d'instance Postgres dans le fichier `.env` pour une expérience plus stable.
|
| 69 |
+
|
| 70 |
+
4. **[Optionnel] Configurer votre environnement et ajouter des clés API :**
|
| 71 |
+
- **Via l'interface de l'application** : Naviguez vers l'onglet des clés API pour ajouter des clés de fournisseurs (OpenAI, Anthropic, etc.)
|
| 72 |
+
- **Configuration manuelle** : Éditez le fichier `.env` (recommandé : configurez postgres) et redémarrez avec `pyspur serve`
|
| 73 |
+
|
| 74 |
+
C'est tout ! Cliquez sur « New Spur » pour créer un workflow, ou commencez avec l'un des modèles de base.
|
| 75 |
+
|
| 76 |
+
# ✨ Avantages principaux
|
| 77 |
+
|
| 78 |
+
## Points d'arrêt avec humain dans la boucle :
|
| 79 |
+
|
| 80 |
+
Ces points d'arrêt mettent en pause le flux de travail lorsqu'ils sont atteints et le reprennent dès qu'un humain l'approuve.
|
| 81 |
+
Ils permettent une supervision humaine pour les flux de travail nécessitant une assurance qualité : vérifiez les sorties critiques avant que le flux de travail ne continue.
|
| 82 |
+
|
| 83 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 84 |
+
|
| 85 |
+
## Déboguer au niveau des nœuds :
|
| 86 |
+
|
| 87 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 88 |
+
|
| 89 |
+
## Multimodal (téléverser des fichiers ou coller des URL)
|
| 90 |
+
|
| 91 |
+
PDF, vidéos, audio, images, ...
|
| 92 |
+
|
| 93 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 94 |
+
|
| 95 |
+
## Boucles
|
| 96 |
+
|
| 97 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 98 |
+
|
| 99 |
+
## RAG
|
| 100 |
+
|
| 101 |
+
### Étape 1) Créer une collection de documents (découpage + analyse)
|
| 102 |
+
|
| 103 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 104 |
+
|
| 105 |
+
### Étape 2) Créer un index vectoriel (intégration + insertion/mise à jour dans la base de données vectorielle)
|
| 106 |
+
|
| 107 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 108 |
+
|
| 109 |
+
## Blocs modulaires
|
| 110 |
+
|
| 111 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 112 |
+
|
| 113 |
+
## Évaluer la performance finale
|
| 114 |
+
|
| 115 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 116 |
+
|
| 117 |
+
## Bientôt : Auto-amélioration
|
| 118 |
+
|
| 119 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 120 |
+
|
| 121 |
+
# 🛠️ Configuration de développement de PySpur
|
| 122 |
+
#### [ Instructions pour le développement sur des systèmes de type Unix. Le développement sur Windows/PC n'est pas supporté ]
|
| 123 |
+
|
| 124 |
+
Pour le développement, suivez ces étapes :
|
| 125 |
+
|
| 126 |
+
1. **Cloner le dépôt :**
|
| 127 |
+
```sh
|
| 128 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 129 |
+
cd pyspur
|
| 130 |
+
```
|
| 131 |
+
|
| 132 |
+
2. **Lancer en utilisant docker-compose.dev.yml :**
|
| 133 |
+
```sh
|
| 134 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 135 |
+
```
|
| 136 |
+
Cela démarrera une instance locale de PySpur avec le rechargement à chaud activé pour le développement.
|
| 137 |
+
|
| 138 |
+
3. **Personnaliser votre configuration :**
|
| 139 |
+
Modifiez le fichier `.env` pour configurer votre environnement. Par défaut, PySpur utilise une base de données PostgreSQL locale. Pour utiliser une base de données externe, modifiez les variables `POSTGRES_*` dans le fichier `.env`.
|
| 140 |
+
|
| 141 |
+
# ⭐ Soutenez-nous
|
| 142 |
+
|
| 143 |
+
Vous pouvez nous soutenir en laissant une étoile ! Merci !
|
| 144 |
+
|
| 145 |
+

|
| 146 |
+
|
| 147 |
+
Vos retours seront grandement appréciés.
|
| 148 |
+
Veuillez nous [faire part](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) des fonctionnalités de cette liste que vous souhaitez voir prochainement ou proposer de toutes nouvelles fonctionnalités.
|
pyspur/README_JA.md
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpurはPython製のAIエージェントビルダーです。AIエンジニアはこれを利用してエージェントを構築し、ステップバイステップで実行し、過去の実行結果を検証します。</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="英語版README" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="韓国語版README" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="ドイツ語版README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="フランス語版README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="スペイン語版README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="ドキュメント" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="お会いしましょう" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="クラウド" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="Discordに参加する" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ なぜ PySpur なのか?
|
| 33 |
+
|
| 34 |
+
- ✅ **テスト駆動型**: ワークフローを構築し、テストケースを実行し、反復します。
|
| 35 |
+
- 👤 **ヒューマンインザループ**: 人間の承認または拒否を待つ永続的なワークフロー。
|
| 36 |
+
- 🔄 **ループ**: メモリを活用した反復的なツール呼び出し。
|
| 37 |
+
- 📤 **ファイルアップロード**: ファイルのアップロードやURLの貼り付けによりドキュメントを処理します。
|
| 38 |
+
- 📋 **構造化された出力**: JSONスキーマ用のUIエディタ。
|
| 39 |
+
- 🗃️ **RAG**: データを解析、分割、埋め込み、そしてVector DBにアップサートします。
|
| 40 |
+
- 🖼️ **マルチモーダル**: ビデオ、画像、オーディオ、テキスト、コードに対応。
|
| 41 |
+
- 🧰 **ツール**: Slack、Firecrawl.dev、Google Sheets、GitHubなど多数。
|
| 42 |
+
- 🧪 **評価**: 実際のデータセットでエージェントを評価します。
|
| 43 |
+
- 🚀 **ワンクリックデプロイ**: APIとして公開し、どこにでも統合可能。
|
| 44 |
+
- 🐍 **Pythonベース**: 単一のPythonファイルを作成するだけで新しいノードを追加できます。
|
| 45 |
+
- 🎛️ **どのベンダーにも対応**: 100以上のLLMプロバイダー、エンベッダー、Vector DBに対応。
|
| 46 |
+
|
| 47 |
+
# ⚡ クイックスタート
|
| 48 |
+
|
| 49 |
+
これは最も迅速なスタート方法です。Python 3.11以上が必要です。
|
| 50 |
+
|
| 51 |
+
1. **PySpurのインストール:**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **新しいプロジェクトの初期化:**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
これにより、`.env`ファイルを含む新しいディレクトリが作成されます。
|
| 62 |
+
|
| 63 |
+
3. **サーバーの起動:**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
デフォルトでは、SQLiteデータベースを使用して `http://localhost:6080` でPySpurアプリが起動します。より安定した動作を求める場合は、`.env`ファイルにPostgresのインスタンスURLを設定することを推奨します。
|
| 68 |
+
|
| 69 |
+
4. **[オプション] 環境設定とAPIキーの追加:**
|
| 70 |
+
- **アプリUI**: APIキータブに移動して各プロバイダーのキー(OpenAI、Anthropicなど)を追加
|
| 71 |
+
- **手動設定**: `.env`ファイルを編集(推奨:postgresを設定)し、`pyspur serve`で再起動
|
| 72 |
+
|
| 73 |
+
# ✨ 主な利点
|
| 74 |
+
|
| 75 |
+
## ヒューマンインザループブレークポイント:
|
| 76 |
+
|
| 77 |
+
これらのブレークポイントは到達時にワークフローを一時停止し、人間が承認するとすぐに再開します。
|
| 78 |
+
品質保証が必要なワークフローに人間の監視を可能にします:ワークフローが進む前に重要な出力を検証します。
|
| 79 |
+
|
| 80 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 81 |
+
|
| 82 |
+
## ノードレベルでのデバッグ:
|
| 83 |
+
|
| 84 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 85 |
+
|
| 86 |
+
## マルチモーダル(ファイルアップロードまたはURL貼り付け)
|
| 87 |
+
|
| 88 |
+
PDF、ビデオ、オーディオ、画像、…
|
| 89 |
+
|
| 90 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 91 |
+
|
| 92 |
+
## ループ
|
| 93 |
+
|
| 94 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 95 |
+
|
| 96 |
+
## RAG
|
| 97 |
+
|
| 98 |
+
### ステップ 1) ドキュメントコレクションの作成(チャンク分割+解析)
|
| 99 |
+
|
| 100 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 101 |
+
|
| 102 |
+
### ステップ 2) ベクターインデックスの作成(埋め込み+Vector DBアップサート)
|
| 103 |
+
|
| 104 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 105 |
+
|
| 106 |
+
## モジュール式ビルディングブロック
|
| 107 |
+
|
| 108 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 109 |
+
|
| 110 |
+
## 最終パフォーマンスの評価
|
| 111 |
+
|
| 112 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 113 |
+
|
| 114 |
+
## 近日公開予定:自己改善
|
| 115 |
+
|
| 116 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 117 |
+
|
| 118 |
+
# 🛠️ PySpur 開発環境セットアップ
|
| 119 |
+
#### [ Unix系システムでの開発向けの手順です。Windows/PCでの開発はサポートされていません ]
|
| 120 |
+
|
| 121 |
+
開発のためには、以下の手順に従ってください:
|
| 122 |
+
|
| 123 |
+
1. **リポジトリのクローン:**
|
| 124 |
+
```sh
|
| 125 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 126 |
+
cd pyspur
|
| 127 |
+
```
|
| 128 |
+
|
| 129 |
+
2. **docker-compose.dev.ymlを使用して起動:**
|
| 130 |
+
```sh
|
| 131 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 132 |
+
```
|
| 133 |
+
これにより、開発用にホットリロードが有効なPySpurのローカルインスタンスが起動します。
|
| 134 |
+
|
| 135 |
+
3. **セットアップのカスタマイズ:**
|
| 136 |
+
環境設定のために `.env` ファイルを編集してください。デフォルトでは、PySpurはローカルのPostgreSQLデータベースを使用しています。外部データベースを使用する場合は、`.env` 内の `POSTGRES_*` 変数を変更してください。
|
| 137 |
+
|
| 138 |
+
# ⭐ サポート
|
| 139 |
+
|
| 140 |
+
スターを押していただくことで、私たちの活動をサポートしていただけます。ありがとうございます!
|
| 141 |
+
|
| 142 |
+

|
| 143 |
+
|
| 144 |
+
皆様のフィードバックを大変ありがたく思います。
|
| 145 |
+
次にどの機能を見たいか、または全く新しい機能のリクエストがあれば、ぜひ[お知らせください](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai)。
|
pyspur/README_KR.md
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+

|
| 2 |
+
|
| 3 |
+
<p align="center"><strong>PySpur은 파이썬 기반의 AI 에이전트 빌더입니다. AI 엔지니어들은 이를 사용해 에이전트를 구축하고, 단계별로 실행하며 과거 실행 기록을 검토합니다.</strong></p>
|
| 4 |
+
|
| 5 |
+
<p align="center">
|
| 6 |
+
<a href="./README.md"><img alt="영문 README" src="https://img.shields.io/badge/English-blue"></a>
|
| 7 |
+
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-blue"></a>
|
| 8 |
+
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-blue"></a>
|
| 9 |
+
<a href="./README_KR.md"><img alt="한국어 README" src="https://img.shields.io/badge/한국어-blue"></a>
|
| 10 |
+
<a href="./README_DE.md"><img alt="독일어 README" src="https://img.shields.io/badge/Deutsch-blue"></a>
|
| 11 |
+
<a href="./README_FR.md"><img alt="프랑스어 README" src="https://img.shields.io/badge/Français-blue"></a>
|
| 12 |
+
<a href="./README_ES.md"><img alt="스페인어 README" src="https://img.shields.io/badge/Español-blue"></a>
|
| 13 |
+
</p>
|
| 14 |
+
|
| 15 |
+
<p align="center">
|
| 16 |
+
<a href="https://docs.pyspur.dev/" target="_blank">
|
| 17 |
+
<img alt="문서" src="https://img.shields.io/badge/Docs-green.svg?style=for-the-badge&logo=readthedocs&logoColor=white">
|
| 18 |
+
</a>
|
| 19 |
+
<a href="https://calendly.com/d/cnf9-57m-bv3/pyspur-founders" target="_blank">
|
| 20 |
+
<img alt="만나기" src="https://img.shields.io/badge/Meet%20us-blue.svg?style=for-the-badge&logo=calendly&logoColor=white">
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://forms.gle/5wHRctedMpgfNGah7" target="_blank">
|
| 23 |
+
<img alt="클라우드" src="https://img.shields.io/badge/Cloud-orange.svg?style=for-the-badge&logo=cloud&logoColor=white">
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://discord.gg/7Spn7C8A5F">
|
| 26 |
+
<img alt="디스코드 참여" src="https://img.shields.io/badge/Discord-7289DA.svg?style=for-the-badge&logo=discord&logoColor=white">
|
| 27 |
+
</a>
|
| 28 |
+
</p>
|
| 29 |
+
|
| 30 |
+
https://github.com/user-attachments/assets/1ebf78c9-94b2-468d-bbbb-566311df16fe
|
| 31 |
+
|
| 32 |
+
# 🕸️ 왜 PySpur인가?
|
| 33 |
+
|
| 34 |
+
- ✅ **테스트 주도**: 워크플로우를 구축하고, 테스트 케이스를 실행하며, 반복합니다.
|
| 35 |
+
- 👤 **인간 참여 루프**: 인간의 승인 또는 거부를 기다리는 지속적인 워크플로우.
|
| 36 |
+
- 🔄 **루프**: 메모리를 활용한 반복적 도구 호출.
|
| 37 |
+
- 📤 **파일 업로드**: 파일을 업로드하거나 URL을 붙여넣어 문서를 처리.
|
| 38 |
+
- 📋 **구조화된 출력**: JSON 스키마용 UI 편집기.
|
| 39 |
+
- 🗃️ **RAG**: 데이터를 파싱, 청킹, 임베딩 및 벡터 DB에 업서트.
|
| 40 |
+
- 🖼️ **멀티모달**: 비디오, 이미지, 오디오, 텍스트, 코드 지원.
|
| 41 |
+
- 🧰 **도구**: Slack, Firecrawl.dev, Google Sheets, GitHub 등.
|
| 42 |
+
- 🧪 **평가**: 실제 데이터셋에서 에이전트 평가.
|
| 43 |
+
- 🚀 **원클릭 배포**: API로 발행하여 원하는 곳에 통합.
|
| 44 |
+
- 🐍 **파이썬 기반**: 단일 파이썬 파일 생성으로 새 노드 추가.
|
| 45 |
+
- 🎛️ **모든 벤더 지원**: 100개 이상의 LLM 제공업체, 임베더, 벡터 DB 지원.
|
| 46 |
+
|
| 47 |
+
# ⚡ 빠른 시작
|
| 48 |
+
|
| 49 |
+
시작하는 가장 빠른 방법입니다. 파이썬 3.11 이상이 필요합니다.
|
| 50 |
+
|
| 51 |
+
1. **PySpur 설치:**
|
| 52 |
+
```sh
|
| 53 |
+
pip install pyspur
|
| 54 |
+
```
|
| 55 |
+
|
| 56 |
+
2. **새 프로젝트 초기화:**
|
| 57 |
+
```sh
|
| 58 |
+
pyspur init my-project
|
| 59 |
+
cd my-project
|
| 60 |
+
```
|
| 61 |
+
새 디렉토리와 함께 `.env` 파일이 생성됩니다.
|
| 62 |
+
|
| 63 |
+
3. **서버 시작:**
|
| 64 |
+
```sh
|
| 65 |
+
pyspur serve --sqlite
|
| 66 |
+
```
|
| 67 |
+
기본적으로 SQLite 데이터베이스를 사용하여 `http://localhost:6080`에서 PySpur 앱이 시작됩니다.
|
| 68 |
+
보다 안정적인 사용을 위해 `.env` 파일에 PostgreSQL 인스턴스 URL을 설정하는 것을 권장합니다.
|
| 69 |
+
|
| 70 |
+
4. **[선택 사항] 환경 구성 및 API 키 추가:**
|
| 71 |
+
- **앱 UI**: API 키 탭으로 이동하여 공급자 키(OpenAI, Anthropic 등) 추가
|
| 72 |
+
- **수동 구성**: `.env` 파일 편집(권장: postgres 구성) 후 `pyspur serve`로 재시작
|
| 73 |
+
|
| 74 |
+
# ✨ 핵심 이점
|
| 75 |
+
|
| 76 |
+
## 인간 참여 중단점:
|
| 77 |
+
|
| 78 |
+
이러한 중단점은 도달했을 때 워크플로우를 일시 중지하고 인간이 승인하면 재개됩니다.
|
| 79 |
+
품질 보증이 필요한 워크플로우에 인간의 감독을 가능하게 합니다: 워크플로우가 진행되기 전에 중요한 출력을 검증합니다.
|
| 80 |
+
|
| 81 |
+
https://github.com/user-attachments/assets/98cb2b4e-207c-4d97-965b-4fee47c94ce8
|
| 82 |
+
|
| 83 |
+
## 노드 레벨에서 디버그:
|
| 84 |
+
|
| 85 |
+
https://github.com/user-attachments/assets/6e82ad25-2a46-4c50-b030-415ea9994690
|
| 86 |
+
|
| 87 |
+
## 멀티모달 (파일 업로드 또는 URL 붙여넣기)
|
| 88 |
+
|
| 89 |
+
PDF, 비디오, 오디오, 이미지, ...
|
| 90 |
+
|
| 91 |
+
https://github.com/user-attachments/assets/83ed9a22-1ec1-4d86-9dd6-5d945588fd0b
|
| 92 |
+
|
| 93 |
+
## 루프
|
| 94 |
+
|
| 95 |
+
<img width="1919" alt="Loops" src="https://github.com/user-attachments/assets/3aea63dc-f46f-46e9-bddd-e2af9c2a56bf" />
|
| 96 |
+
|
| 97 |
+
## RAG
|
| 98 |
+
|
| 99 |
+
### 1단계) 문서 컬렉션 생성 (청킹 + 파싱)
|
| 100 |
+
|
| 101 |
+
https://github.com/user-attachments/assets/c77723b1-c076-4a64-a01d-6d6677e9c60e
|
| 102 |
+
|
| 103 |
+
### 2단계) 벡터 인덱스 생성 (임베딩 + 벡터 DB 업서트)
|
| 104 |
+
|
| 105 |
+
https://github.com/user-attachments/assets/50e5c711-dd01-4d92-bb23-181a1c5bba25
|
| 106 |
+
|
| 107 |
+
## 모듈형 빌딩 블록
|
| 108 |
+
|
| 109 |
+
https://github.com/user-attachments/assets/6442f0ad-86d8-43d9-aa70-e5c01e55e876
|
| 110 |
+
|
| 111 |
+
## 최종 성능 평가
|
| 112 |
+
|
| 113 |
+
https://github.com/user-attachments/assets/4dc2abc3-c6e6-4d6d-a5c3-787d518de7ae
|
| 114 |
+
|
| 115 |
+
## 곧 추가될 기능: 자기 개선
|
| 116 |
+
|
| 117 |
+
https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
|
| 118 |
+
|
| 119 |
+
# 🛠️ PySpur 개발 환경 설정
|
| 120 |
+
#### [ 유닉스 계열 시스템 개발 지침. Windows/PC 개발은 지원되지 않음 ]
|
| 121 |
+
|
| 122 |
+
개발을 위해 아래 단계를 따르세요:
|
| 123 |
+
|
| 124 |
+
1. **리포지토리 클론:**
|
| 125 |
+
```sh
|
| 126 |
+
git clone https://github.com/PySpur-com/pyspur.git
|
| 127 |
+
cd pyspur
|
| 128 |
+
```
|
| 129 |
+
|
| 130 |
+
2. **docker-compose.dev.yml 사용하여 실행:**
|
| 131 |
+
```sh
|
| 132 |
+
docker compose -f docker-compose.dev.yml up --build -d
|
| 133 |
+
```
|
| 134 |
+
이 명령어는 개발용 핫 리로딩이 활성화된 로컬 PySpur 인스턴스를 시작합니다.
|
| 135 |
+
|
| 136 |
+
3. **환경 설정 맞춤:**
|
| 137 |
+
환경 구성을 위해 `.env` 파일을 수정합니다. 기본적으로 PySpur는 로컬 PostgreSQL 데이터베이스를 사용합니다. 외부 데이터베이스를 사용하려면 `.env` 파일의 `POSTGRES_*` 변수를 수정하세요.
|
| 138 |
+
|
| 139 |
+
# ⭐ 지원해 주세요
|
| 140 |
+
|
| 141 |
+
별을 남겨 주셔서 저희의 작업을 지원하실 수 있습니다! 감사합니다!
|
| 142 |
+
|
| 143 |
+

|
| 144 |
+
|
| 145 |
+
여러분의 피드백은 큰 힘이 됩니다.
|
| 146 |
+
다음에 보고 싶은 기능이나 완전히 새로운 기능 요청이 있다면 [알려주세요](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai).
|
pyspur/__init__.py
ADDED
|
File without changes
|
pyspur/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (191 Bytes). View file
|
|
|
pyspur/backend/.gitignore
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ignore the test database file
|
| 2 |
+
test.db
|
| 3 |
+
/app/integrations/google/token.json
|
| 4 |
+
data/
|
| 5 |
+
/secure_tokens/
|
| 6 |
+
/.bolt-app-installation/
|
| 7 |
+
pyspur/openapi_specs/
|
pyspur/backend/.pre-commit-config.yaml
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
repos:
|
| 2 |
+
- repo: https://github.com/astral-sh/ruff-pre-commit
|
| 3 |
+
rev: v0.9.10
|
| 4 |
+
hooks:
|
| 5 |
+
- id: ruff
|
| 6 |
+
name: ruff
|
| 7 |
+
entry: ruff check
|
| 8 |
+
args: [--fix, --exit-non-zero-on-fix, --quiet]
|
| 9 |
+
language: system
|
| 10 |
+
types_or: [python, pyi]
|
| 11 |
+
require_serial: true
|
| 12 |
+
- id: ruff-format
|
| 13 |
+
name: ruff-format
|
| 14 |
+
entry: ruff format
|
| 15 |
+
args: [--quiet]
|
| 16 |
+
language: system
|
| 17 |
+
types_or: [python, pyi]
|
| 18 |
+
require_serial: true
|
pyspur/backend/__init__.py
ADDED
|
File without changes
|
pyspur/backend/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (199 Bytes). View file
|
|
|
pyspur/backend/alembic.ini
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# A generic, single database configuration.
|
| 2 |
+
|
| 3 |
+
[alembic]
|
| 4 |
+
# path to migration scripts
|
| 5 |
+
# Use forward slashes (/) also on windows to provide an os agnostic path
|
| 6 |
+
script_location = pyspur/models/management/alembic/
|
| 7 |
+
|
| 8 |
+
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
| 9 |
+
# Uncomment the line below if you want the files to be prepended with date and time
|
| 10 |
+
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
| 11 |
+
# for all available tokens
|
| 12 |
+
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
| 13 |
+
|
| 14 |
+
# sys.path path, will be prepended to sys.path if present.
|
| 15 |
+
# defaults to the current working directory.
|
| 16 |
+
prepend_sys_path = .
|
| 17 |
+
|
| 18 |
+
# timezone to use when rendering the date within the migration file
|
| 19 |
+
# as well as the filename.
|
| 20 |
+
# If specified, requires the python>=3.9 or backports.zoneinfo library.
|
| 21 |
+
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
| 22 |
+
# string value is passed to ZoneInfo()
|
| 23 |
+
# leave blank for localtime
|
| 24 |
+
# timezone =
|
| 25 |
+
|
| 26 |
+
# max length of characters to apply to the "slug" field
|
| 27 |
+
# truncate_slug_length = 40
|
| 28 |
+
|
| 29 |
+
# set to 'true' to run the environment during
|
| 30 |
+
# the 'revision' command, regardless of autogenerate
|
| 31 |
+
# revision_environment = false
|
| 32 |
+
|
| 33 |
+
# set to 'true' to allow .pyc and .pyo files without
|
| 34 |
+
# a source .py file to be detected as revisions in the
|
| 35 |
+
# versions/ directory
|
| 36 |
+
# sourceless = false
|
| 37 |
+
|
| 38 |
+
# version location specification; This defaults
|
| 39 |
+
# to app/models/management/alembic//versions. When using multiple version
|
| 40 |
+
# directories, initial revisions must be specified with --version-path.
|
| 41 |
+
# The path separator used here should be the separator specified by "version_path_separator" below.
|
| 42 |
+
# version_locations = %(here)s/bar:%(here)s/bat:app/models/management/alembic//versions
|
| 43 |
+
|
| 44 |
+
# version path separator; As mentioned above, this is the character used to split
|
| 45 |
+
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
| 46 |
+
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
| 47 |
+
# Valid values for version_path_separator are:
|
| 48 |
+
#
|
| 49 |
+
# version_path_separator = :
|
| 50 |
+
# version_path_separator = ;
|
| 51 |
+
# version_path_separator = space
|
| 52 |
+
# version_path_separator = newline
|
| 53 |
+
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
| 54 |
+
|
| 55 |
+
# set to 'true' to search source files recursively
|
| 56 |
+
# in each "version_locations" directory
|
| 57 |
+
# new in Alembic version 1.10
|
| 58 |
+
# recursive_version_locations = false
|
| 59 |
+
|
| 60 |
+
# the output encoding used when revision files
|
| 61 |
+
# are written from script.py.mako
|
| 62 |
+
# output_encoding = utf-8
|
| 63 |
+
|
| 64 |
+
sqlalchemy.url = postgresql://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s@%(POSTGRES_HOST)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
[post_write_hooks]
|
| 68 |
+
# post_write_hooks defines scripts or Python functions that are run
|
| 69 |
+
# on newly generated revision scripts. See the documentation for further
|
| 70 |
+
# detail and examples
|
| 71 |
+
|
| 72 |
+
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
| 73 |
+
# hooks = black
|
| 74 |
+
# black.type = console_scripts
|
| 75 |
+
# black.entrypoint = black
|
| 76 |
+
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
| 77 |
+
|
| 78 |
+
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
| 79 |
+
# hooks = ruff
|
| 80 |
+
# ruff.type = exec
|
| 81 |
+
# ruff.executable = %(here)s/.venv/bin/ruff
|
| 82 |
+
# ruff.options = --fix REVISION_SCRIPT_FILENAME
|
| 83 |
+
|
| 84 |
+
# Logging configuration
|
| 85 |
+
[loggers]
|
| 86 |
+
keys = root,sqlalchemy,alembic
|
| 87 |
+
|
| 88 |
+
[handlers]
|
| 89 |
+
keys = console
|
| 90 |
+
|
| 91 |
+
[formatters]
|
| 92 |
+
keys = generic
|
| 93 |
+
|
| 94 |
+
[logger_root]
|
| 95 |
+
level = WARN
|
| 96 |
+
handlers = console
|
| 97 |
+
qualname =
|
| 98 |
+
|
| 99 |
+
[logger_sqlalchemy]
|
| 100 |
+
level = WARN
|
| 101 |
+
handlers =
|
| 102 |
+
qualname = sqlalchemy.engine
|
| 103 |
+
|
| 104 |
+
[logger_alembic]
|
| 105 |
+
level = INFO
|
| 106 |
+
handlers =
|
| 107 |
+
qualname = alembic
|
| 108 |
+
|
| 109 |
+
[handler_console]
|
| 110 |
+
class = StreamHandler
|
| 111 |
+
args = (sys.stderr,)
|
| 112 |
+
level = NOTSET
|
| 113 |
+
formatter = generic
|
| 114 |
+
|
| 115 |
+
[formatter_generic]
|
| 116 |
+
format = %(levelname)-5.5s [%(name)s] %(message)s
|
| 117 |
+
datefmt = %H:%M:%S
|
pyspur/backend/entrypoint.sh
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
|
| 3 |
+
# First test Ollama connection if URL is provided
|
| 4 |
+
if [ -f "test_ollama.sh" ]; then
|
| 5 |
+
chmod +x test_ollama.sh
|
| 6 |
+
./test_ollama.sh
|
| 7 |
+
fi
|
| 8 |
+
|
| 9 |
+
set -e
|
| 10 |
+
mkdir -p /pyspur/backend/pyspur/models/management/alembic/versions/
|
| 11 |
+
start_server() {
|
| 12 |
+
cd /pyspur/backend
|
| 13 |
+
uvicorn "pyspur.api.main:app" --reload --reload-include ./log_conf.yaml --reload-include "**/*.py" --log-config=log_conf.yaml --host 0.0.0.0 --port 8000
|
| 14 |
+
}
|
| 15 |
+
|
| 16 |
+
main() {
|
| 17 |
+
alembic upgrade head
|
| 18 |
+
start_server
|
| 19 |
+
}
|
| 20 |
+
|
| 21 |
+
main
|
pyspur/backend/llms-ctx.txt
ADDED
|
File without changes
|
pyspur/backend/log_conf.yaml
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Python dictConfig-style logging configuration, passed to uvicorn via
# `--log-config`. Errors go to stderr; HTTP access logs go to stdout.
version: 1
disable_existing_loggers: True
formatters:
  default:
    # "()": uvicorn.logging.DefaultFormatter
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
  access:
    # "()": uvicorn.logging.AccessFormatter
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
handlers:
  default:
    formatter: default
    class: logging.StreamHandler
    stream: ext://sys.stderr
  access:
    formatter: access
    class: logging.StreamHandler
    stream: ext://sys.stdout
loggers:
  uvicorn.error:
    level: INFO
    handlers:
      - default
    propagate: no
  uvicorn.access:
    level: INFO
    handlers:
      - access
    propagate: no
  # Quiet the chatty HTTP client libraries used by the LLM integrations.
  httpx:
    level: ERROR
    handlers:
      - default
  httpcore:
    level: ERROR
    handlers:
      - default
  watchfiles.main:
    level: INFO
    handlers:
      - default
  LiteLLM:
    level: INFO
    handlers:
      - default
  openai._base_client:
    level: INFO
    handlers:
      - default
root:
  level: DEBUG
  handlers:
    - default
  propagate: no
|
pyspur/backend/output_files/.gitignore
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Ignore all generated output files while keeping the directory itself
# tracked via this file.
*
!.gitignore
|
pyspur/backend/pyproject.toml
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "pyspur"
version = "0.1.18"
description = "PySpur is a Graph UI for building AI Agents in Python"
requires-python = ">=3.11"
license = "Apache-2.0"
classifiers = [
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: POSIX :: Linux",
    "Operating System :: Unix",
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
maintainers = [
    {name = "Srijan Patel", email = "srijan@pyspur.dev"},
    {name = "Jean Kaddour", email = "jean@pyspur.dev"},
    {name = "Parshva Bhadra", email = "parshva.bhadra@pyspur.dev"},
]
# Runtime dependencies are fully pinned for reproducible installs.
dependencies = [
    "alembic==1.14.0",
    "arrow==1.3.0",
    # NOTE(review): "asyncio" is a stdlib module; the PyPI package of this name
    # is an obsolete backport and likely should be removed — confirm.
    "asyncio==3.4.3",
    "attrs==24.3.0",
    # NOTE(review): "backend" is an unrelated PyPI package; this looks like an
    # accidental dependency — confirm before keeping.
    "backend==0.2.4.1",
    "chromadb==0.6.2",
    "datasets==3.2.0",
    "docx2txt==0.8",
    "docx2python==3.3.0",
    "exa-py==1.9.0",
    "fastapi==0.115.6",
    "genanki==0.13.1",
    "google-api-python-client==2.159.0",
    "grpcio==1.69.0",
    "Jinja2==3.1.6",
    "litellm==1.61.15",
    "loguru==0.7.3",
    "numpy==2.2.1",
    "ollama==0.4.5",
    "pandas==2.2.3",
    "pinecone==5.4.2",
    "praw==7.8.1",
    "psycopg2-binary==2.9.10",
    "pydantic==2.10.5",
    "pypdf==5.1.0",
    "python-dotenv==1.0.1",
    "python-multipart==0.0.20",
    "python-pptx==1.0.2",
    "PyYAML==6.0.2",
    "py-zerox==0.0.7",
    "qdrant_client==1.12.2",
    "redis==5.2.1",
    "regex==2024.11.6",
    "requests==2.32.3",
    "requests-file==2.1.0",
    "requests-oauthlib==1.3.1",
    "retrying==1.3.4",
    "slack_sdk==3.35.0",
    "slack_bolt==1.23.0",
    "SQLAlchemy==2.0.36",
    "supabase==2.11.0",
    "six==1.17.0",
    "tenacity==8.3.0",
    "tiktoken==0.7.0",
    "tqdm==4.67.1",
    "weaviate_client==4.10.2",
    "itsdangerous==2.2.0",
    "phidata==2.7.8",
    "youtube_transcript_api==0.6.3",
    "PyGithub==2.5.0",
    "firecrawl-py==1.10.2",
    "httpx[http2]==0.27.2",
    "sendgrid==6.11.0",
    "resend==2.6.0",
    "typer[all]==0.9.0",
    # Intentionally a floor, not a pin: only needs the modern psutil API.
    "psutil>=7.0.0",
]

[project.urls]
Repository = "https://github.com/pyspur-dev/pyspur"
Documentation = "https://docs.pyspur.dev"

[project.scripts]
pyspur = "pyspur.cli:main"

[project.optional-dependencies]
dev = [
    "pytest>=7.0",
    "pytest-cov>=4.0",
    "ruff>=0.1.0",
]

[tool.hatch.build.targets.wheel]
# NOTE(review): hatchling's wheel target does not document "universal" or
# "zip-safe" options; these keys may be silently ignored — confirm.
universal = false
packages = ["pyspur"]
zip-safe = false

# Ship non-Python assets (templates, prebuilt frontend) inside the wheel.
[tool.hatch.build.targets.wheel.force-include]
"pyspur/templates" = "pyspur/templates/"
"pyspur/static" = "pyspur/static/"

[tool.ruff]
line-length = 100
target-version = "py312"

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W", "B", "C", "D", "PYI"]
ignore = [
    "B006", # Do not use mutable default arguments
    "B008", # Do not perform function call `Depends` in argument defaults
    "C901", # Function is too complex
    "D100", # Missing docstring in public module
    "D101", # Missing docstring in public class
    "D102", # Missing docstring in public method
    "D103", # Missing docstring in public function
    "D104", # Missing docstring in public package
    "D105", # Missing docstring in magic method
    "D106", # Missing docstring in public nested class
    "D107", # Missing docstring in __init__
    "I001", # Import block is un-sorted or un-formatted
    "E402", # Module level import not at top of file
]

[tool.black]
line-length = 100
target-version = ["py312"]

[tool.mypy]
python_version = "3.12"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
check_untyped_defs = true

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
|
pyspur/backend/pyspur/__init__.py
ADDED
|
File without changes
|
pyspur/backend/pyspur/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (206 Bytes). View file
|
|
|
pyspur/backend/pyspur/api/__init__.py
ADDED
|
File without changes
|
pyspur/backend/pyspur/api/ai_management.py
ADDED
|
@@ -0,0 +1,352 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import re
|
| 3 |
+
from typing import Any, Dict, List, Literal, Optional, cast
|
| 4 |
+
|
| 5 |
+
from fastapi import APIRouter, HTTPException
|
| 6 |
+
from loguru import logger
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
|
| 9 |
+
from ..nodes.llm._utils import generate_text
|
| 10 |
+
|
| 11 |
+
router = APIRouter()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class SchemaGenerationRequest(BaseModel):
    """Request payload for AI-assisted JSON Schema generation."""

    # Natural-language description of the desired schema.
    description: str
    # Optional existing schema (JSON text) used as context; the LLM is asked to
    # modify it while preserving compatible parts.
    existing_schema: Optional[str] = None
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class MessageGenerationRequest(BaseModel):
    """Request payload for AI-assisted system/user prompt generation."""

    # Natural-language description of the message to generate.
    description: str
    # Which kind of prompt to generate.
    message_type: Literal["system", "user"]  # "system" or "user"
    # Optional existing message text used as a starting point.
    existing_message: Optional[str] = None
    # Optional free-text context appended to the generation request.
    context: Optional[str] = None
    # Template variable names the generated message may reference as
    # {{ variable_name }} placeholders.
    available_variables: Optional[List[str]] = None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@router.post("/generate_schema/")
|
| 28 |
+
async def generate_schema(request: SchemaGenerationRequest) -> Dict[str, Any]:
|
| 29 |
+
response: str = ""
|
| 30 |
+
try:
|
| 31 |
+
# Prepare the system message
|
| 32 |
+
system_message = """You are a JSON Schema expert. Your task is to generate a JSON Schema
|
| 33 |
+
based on a text description.
|
| 34 |
+
The schema should:
|
| 35 |
+
1. Follow JSON Schema standards
|
| 36 |
+
2. Include appropriate types, required fields, and descriptions
|
| 37 |
+
3. Be clear and well-structured
|
| 38 |
+
4. Include type: "object" at the root
|
| 39 |
+
5. Include a properties object
|
| 40 |
+
6. Set appropriate required fields
|
| 41 |
+
7. Include meaningful descriptions for each field
|
| 42 |
+
8. Return ONLY the JSON schema without any markdown formatting or explanation
|
| 43 |
+
|
| 44 |
+
Here are some examples:
|
| 45 |
+
|
| 46 |
+
<example>
|
| 47 |
+
Input: "Create a schema for a person with name, age and optional email"
|
| 48 |
+
Output: {
|
| 49 |
+
"type": "object",
|
| 50 |
+
"properties": {
|
| 51 |
+
"name": {
|
| 52 |
+
"type": "string",
|
| 53 |
+
"description": "The person's full name"
|
| 54 |
+
},
|
| 55 |
+
"age": {
|
| 56 |
+
"type": "integer",
|
| 57 |
+
"description": "The person's age in years",
|
| 58 |
+
"minimum": 0
|
| 59 |
+
},
|
| 60 |
+
"email": {
|
| 61 |
+
"type": "string",
|
| 62 |
+
"description": "The person's email address",
|
| 63 |
+
"format": "email"
|
| 64 |
+
}
|
| 65 |
+
},
|
| 66 |
+
"required": ["name", "age"]
|
| 67 |
+
}
|
| 68 |
+
</example>
|
| 69 |
+
|
| 70 |
+
<example>
|
| 71 |
+
Input: "Schema for a blog post with title, content, author details and tags"
|
| 72 |
+
Output: {
|
| 73 |
+
"type": "object",
|
| 74 |
+
"properties": {
|
| 75 |
+
"title": {
|
| 76 |
+
"type": "string",
|
| 77 |
+
"description": "The title of the blog post"
|
| 78 |
+
},
|
| 79 |
+
"content": {
|
| 80 |
+
"type": "string",
|
| 81 |
+
"description": "The main content of the blog post"
|
| 82 |
+
},
|
| 83 |
+
"author": {
|
| 84 |
+
"type": "object",
|
| 85 |
+
"description": "Details about the post author",
|
| 86 |
+
"properties": {
|
| 87 |
+
"name": {
|
| 88 |
+
"type": "string",
|
| 89 |
+
"description": "Author's full name"
|
| 90 |
+
},
|
| 91 |
+
"bio": {
|
| 92 |
+
"type": "string",
|
| 93 |
+
"description": "Short biography of the author"
|
| 94 |
+
}
|
| 95 |
+
},
|
| 96 |
+
"required": ["name"]
|
| 97 |
+
},
|
| 98 |
+
"tags": {
|
| 99 |
+
"type": "array",
|
| 100 |
+
"description": "List of tags associated with the post",
|
| 101 |
+
"items": {
|
| 102 |
+
"type": "string"
|
| 103 |
+
}
|
| 104 |
+
}
|
| 105 |
+
},
|
| 106 |
+
"required": ["title", "content", "author"]
|
| 107 |
+
}
|
| 108 |
+
</example>
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
# Prepare the user message
|
| 112 |
+
user_message = (
|
| 113 |
+
f"Generate a JSON Schema for the following description:\n{request.description}"
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
if request.existing_schema:
|
| 117 |
+
user_message += (
|
| 118 |
+
f"\n\nPlease consider this existing schema as context:\n{request.existing_schema}"
|
| 119 |
+
)
|
| 120 |
+
user_message += (
|
| 121 |
+
"\nModify it based on the description while preserving any compatible parts."
|
| 122 |
+
)
|
| 123 |
+
|
| 124 |
+
# Call the LLM
|
| 125 |
+
messages = [
|
| 126 |
+
{"role": "system", "content": system_message},
|
| 127 |
+
{"role": "user", "content": user_message},
|
| 128 |
+
]
|
| 129 |
+
|
| 130 |
+
message_response = await generate_text(
|
| 131 |
+
messages=messages, model_name="openai/o3-mini", json_mode=True
|
| 132 |
+
)
|
| 133 |
+
assert message_response.content, "No response from LLM"
|
| 134 |
+
response = message_response.content
|
| 135 |
+
|
| 136 |
+
# Try to parse the response in different ways
|
| 137 |
+
try:
|
| 138 |
+
# First try: direct JSON parse
|
| 139 |
+
schema = json.loads(response)
|
| 140 |
+
if isinstance(schema, dict) and "output" in schema:
|
| 141 |
+
# If we got a wrapper object with an "output" key, extract the schema from it
|
| 142 |
+
schema_str = cast(str, schema["output"])
|
| 143 |
+
# Extract JSON from potential markdown code blocks
|
| 144 |
+
json_match = re.search(r"```json\s*(.*?)\s*```", schema_str, re.DOTALL)
|
| 145 |
+
if json_match:
|
| 146 |
+
schema_str = json_match.group(1)
|
| 147 |
+
schema = json.loads(schema_str)
|
| 148 |
+
except json.JSONDecodeError as e:
|
| 149 |
+
# Second try: Look for JSON in markdown code blocks
|
| 150 |
+
json_match = re.search(r"```(?:json)?\s*(.*?)\s*```", response, re.DOTALL)
|
| 151 |
+
if json_match:
|
| 152 |
+
schema = json.loads(json_match.group(1))
|
| 153 |
+
else:
|
| 154 |
+
raise ValueError("Could not extract valid JSON schema from response") from e
|
| 155 |
+
|
| 156 |
+
# Validate the schema structure
|
| 157 |
+
if not isinstance(schema, dict) or "type" not in schema or "properties" not in schema:
|
| 158 |
+
raise ValueError("Generated schema is not valid - missing required fields")
|
| 159 |
+
|
| 160 |
+
return cast(Dict[str, Any], schema)
|
| 161 |
+
|
| 162 |
+
except Exception as e:
|
| 163 |
+
# Log the raw response if it exists and is not empty
|
| 164 |
+
if response:
|
| 165 |
+
truncated_response = response[:1000] + "..." if len(response) > 1000 else response
|
| 166 |
+
logger.error(f"Schema generation failed. response (truncated): {truncated_response}.")
|
| 167 |
+
raise HTTPException(status_code=400, detail=str(e)) from e
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@router.post("/generate_message/")
|
| 171 |
+
async def generate_message(request: MessageGenerationRequest) -> Dict[str, str]:
|
| 172 |
+
response: str = ""
|
| 173 |
+
try:
|
| 174 |
+
# Prepare the system message based on the message type
|
| 175 |
+
if request.message_type == "system":
|
| 176 |
+
system_message = """You are an expert at crafting effective \
|
| 177 |
+
system messages for AI assistants.
|
| 178 |
+
Your task is to generate a clear, concise, and effective system message based\
|
| 179 |
+
on the provided description.
|
| 180 |
+
|
| 181 |
+
# INSTRUCTIONS
|
| 182 |
+
A good system message should:
|
| 183 |
+
1. Clearly define the AI's role and purpose
|
| 184 |
+
2. Set appropriate boundaries and constraints
|
| 185 |
+
3. Provide necessary context and background information
|
| 186 |
+
4. Be concise but comprehensive
|
| 187 |
+
5. Use clear, unambiguous language
|
| 188 |
+
6. Use XML tags when appropriate to structure information:
|
| 189 |
+
e.g., <role>...</role>, <constraints>...</constraints>
|
| 190 |
+
|
| 191 |
+
# FORMAT REQUIREMENTS
|
| 192 |
+
Your generated system message MUST include:
|
| 193 |
+
1. An "# Instructions" section with clearly enumerated instructions (1., 2., 3., etc.)
|
| 194 |
+
2. Clear organization with appropriate headings and structure
|
| 195 |
+
|
| 196 |
+
# EXAMPLES
|
| 197 |
+
Example 1 (Simple role definition):
|
| 198 |
+
```
|
| 199 |
+
You are a helpful coding assistant that specializes in Python programming.
|
| 200 |
+
|
| 201 |
+
# Instructions
|
| 202 |
+
1. Provide accurate Python code examples when requested
|
| 203 |
+
2. Explain coding concepts clearly and concisely
|
| 204 |
+
3. Suggest best practices for Python development
|
| 205 |
+
```
|
| 206 |
+
|
| 207 |
+
Example 2 (With XML tags):
|
| 208 |
+
```
|
| 209 |
+
<role>You are a data analysis expert specialized in interpreting financial data.</role>
|
| 210 |
+
|
| 211 |
+
# Instructions
|
| 212 |
+
1. Only provide analysis based on the data provided
|
| 213 |
+
2. Present findings with supporting evidence
|
| 214 |
+
3. Identify trends and patterns in the data
|
| 215 |
+
4. Suggest actionable insights when appropriate
|
| 216 |
+
|
| 217 |
+
<constraints>Do not make assumptions about data you cannot see.</constraints>
|
| 218 |
+
<format>Present your analysis with clear sections for Summary, Details, \
|
| 219 |
+
and Recommendations.</format>
|
| 220 |
+
```
|
| 221 |
+
|
| 222 |
+
Return ONLY the system message text without any additional explanation or formatting.
|
| 223 |
+
"""
|
| 224 |
+
elif request.message_type == "user":
|
| 225 |
+
system_message = """You are an expert at crafting effective user prompts for AI \
|
| 226 |
+
assistants.
|
| 227 |
+
Your task is to generate a clear, specific, and effective user prompt based on the \
|
| 228 |
+
provided description.
|
| 229 |
+
|
| 230 |
+
# INSTRUCTIONS
|
| 231 |
+
A good user prompt should:
|
| 232 |
+
1. Clearly state what is being asked of the AI
|
| 233 |
+
2. Provide necessary context and specific details
|
| 234 |
+
3. Be structured in a way that guides the AI to produce the desired output
|
| 235 |
+
4. Use clear, unambiguous language
|
| 236 |
+
5. Include any relevant constraints or requirements
|
| 237 |
+
6. Use XML tags when appropriate to structure information \
|
| 238 |
+
(e.g., <context>...</context>, <request>...</request>)
|
| 239 |
+
|
| 240 |
+
# FORMAT REQUIREMENTS
|
| 241 |
+
Your generated user prompt MUST include:
|
| 242 |
+
1. An "# Instructions" section with clearly enumerated instructions (1., 2., 3., etc.)
|
| 243 |
+
2. Clear organization with appropriate headings and structure
|
| 244 |
+
|
| 245 |
+
# EXAMPLES
|
| 246 |
+
Example 1 (Simple request):
|
| 247 |
+
```
|
| 248 |
+
Explain how JavaScript promises work with code examples.
|
| 249 |
+
|
| 250 |
+
# Instructions
|
| 251 |
+
1. Explain the concept in simple terms first
|
| 252 |
+
2. Provide practical code examples
|
| 253 |
+
3. Include error handling patterns
|
| 254 |
+
```
|
| 255 |
+
|
| 256 |
+
Example 2 (With XML tags):
|
| 257 |
+
```
|
| 258 |
+
<context>I'm building a React application with a complex state management system.\
|
| 259 |
+
</context>
|
| 260 |
+
|
| 261 |
+
<request>Review the following code snippet and suggest improvements for performance \
|
| 262 |
+
and readability:</request>
|
| 263 |
+
|
| 264 |
+
<code>
|
| 265 |
+
// Code would go here
|
| 266 |
+
</code>
|
| 267 |
+
|
| 268 |
+
# Instructions
|
| 269 |
+
1. Identify performance bottlenecks in the code
|
| 270 |
+
2. Suggest specific refactoring approaches
|
| 271 |
+
3. Explain the reasoning behind each recommendation
|
| 272 |
+
4. Provide example code for key improvements
|
| 273 |
+
```
|
| 274 |
+
|
| 275 |
+
Return ONLY the user prompt text without any additional explanation or formatting.
|
| 276 |
+
"""
|
| 277 |
+
else:
|
| 278 |
+
raise ValueError(f"Unsupported message type: {request.message_type}")
|
| 279 |
+
|
| 280 |
+
# Prepare the user message
|
| 281 |
+
user_message = f"Generate a {request.message_type} message based on the following \
|
| 282 |
+
description:\n{request.description}"
|
| 283 |
+
|
| 284 |
+
if request.existing_message:
|
| 285 |
+
user_message += f"\n\nPlease consider this existing message as a starting \
|
| 286 |
+
point:\n{request.existing_message}"
|
| 287 |
+
|
| 288 |
+
# Add context if provided
|
| 289 |
+
if request.context:
|
| 290 |
+
user_message += f"\n\nAdditional context:\n{request.context}"
|
| 291 |
+
|
| 292 |
+
# Add information about available template variables if provided
|
| 293 |
+
if request.available_variables and len(request.available_variables) > 0:
|
| 294 |
+
variables_str = "\n".join([f"- {var}" for var in request.available_variables])
|
| 295 |
+
|
| 296 |
+
if request.message_type == "system":
|
| 297 |
+
user_message += f"\n\nThe message should appropriately incorporate the following \
|
| 298 |
+
template variables that the user has specifically selected for this message:\n{variables_str}\n\n\
|
| 299 |
+
These variables will be replaced with actual values at runtime. Use them in the appropriate places \
|
| 300 |
+
to make the message dynamic and context-aware."
|
| 301 |
+
else: # user message
|
| 302 |
+
user_message += f"\n\nThe prompt should appropriately incorporate the following \
|
| 303 |
+
template variables that the user has specifically selected for this message:\n{variables_str}\n\n\
|
| 304 |
+
These variables will be replaced with actual values at runtime. Use them in the appropriate places \
|
| 305 |
+
to make the prompt dynamic and personalized."
|
| 306 |
+
|
| 307 |
+
# Additional guidance on template variable usage
|
| 308 |
+
user_message += "\n\nUse the variables in the format {{ variable_name }}. Only use the \
|
| 309 |
+
variables listed above - do not invent new variables."
|
| 310 |
+
|
| 311 |
+
# Prepare messages for the LLM
|
| 312 |
+
messages = [
|
| 313 |
+
{"role": "system", "content": system_message},
|
| 314 |
+
{"role": "user", "content": user_message},
|
| 315 |
+
]
|
| 316 |
+
|
| 317 |
+
# Generate the message using OpenAI
|
| 318 |
+
message_response = await generate_text(
|
| 319 |
+
messages=messages,
|
| 320 |
+
model_name="openai/o3-mini",
|
| 321 |
+
temperature=0.7,
|
| 322 |
+
max_tokens=1000,
|
| 323 |
+
)
|
| 324 |
+
response = cast(str, message_response.content)
|
| 325 |
+
|
| 326 |
+
# Process the response to extract the message
|
| 327 |
+
message: str = ""
|
| 328 |
+
if response.strip().startswith("{") and response.strip().endswith("}"):
|
| 329 |
+
try:
|
| 330 |
+
parsed_response = json.loads(response)
|
| 331 |
+
if isinstance(parsed_response, dict) and "output" in parsed_response:
|
| 332 |
+
message = cast(str, parsed_response["output"])
|
| 333 |
+
else:
|
| 334 |
+
message = response
|
| 335 |
+
except json.JSONDecodeError:
|
| 336 |
+
message = response
|
| 337 |
+
else:
|
| 338 |
+
message = response
|
| 339 |
+
|
| 340 |
+
# Remove any markdown code blocks if present
|
| 341 |
+
if "```" in message:
|
| 342 |
+
message = re.sub(r"```.*?```", "", message, flags=re.DOTALL).strip()
|
| 343 |
+
else:
|
| 344 |
+
# Fallback if response is not a string (shouldn't happen)
|
| 345 |
+
message = str(response)
|
| 346 |
+
|
| 347 |
+
return {"message": message}
|
| 348 |
+
except Exception as e:
|
| 349 |
+
logger.error(f"Error generating message: {str(e)}")
|
| 350 |
+
if response:
|
| 351 |
+
logger.error(f"Raw response: {response}")
|
| 352 |
+
raise HTTPException(status_code=500) from e
|
pyspur/backend/pyspur/api/api_app.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI

from ..nodes.registry import NodeRegistry

# Populate the node registry BEFORE importing the router modules below —
# presumably some of them read the registry at import time (the project's ruff
# config ignores E402, which permits these mid-file imports). TODO confirm.
NodeRegistry.discover_nodes()

from ..integrations.google.auth import router as google_auth_router
from .ai_management import router as ai_management_router
from .dataset_management import router as dataset_management_router
from .evals_management import router as evals_management_router
from .file_management import router as file_management_router
from .key_management import router as key_management_router
from .node_management import router as node_management_router
from .openai_compatible_api import router as openai_compatible_api_router
from .openapi_management import router as openapi_router
from .output_file_management import router as output_file_management_router
from .rag_management import router as rag_management_router
from .run_management import router as run_management_router
from .session_management import router as session_management_router
from .slack_management import router as slack_management_router
from .template_management import router as template_management_router
from .user_management import router as user_management_router
from .workflow_code_convert import router as workflow_code_router
from .workflow_management import router as workflow_management_router
from .workflow_run import router as workflow_run_router

# Create a sub-application for API routes
api_app = FastAPI(
    docs_url="/docs",
    redoc_url="/redoc",
    title="PySpur API",
    version="1.0.0",
)

# Mount every feature router under its URL prefix. Note that "/wf" is shared
# by the workflow-management and workflow-run routers.
api_app.include_router(node_management_router, prefix="/node", tags=["nodes"])
api_app.include_router(workflow_management_router, prefix="/wf", tags=["workflows"])
api_app.include_router(workflow_run_router, prefix="/wf", tags=["workflow runs"])
api_app.include_router(workflow_code_router, prefix="/code_convert", tags=["workflow code (beta)"])
api_app.include_router(dataset_management_router, prefix="/ds", tags=["datasets"])
api_app.include_router(run_management_router, prefix="/run", tags=["runs"])
api_app.include_router(output_file_management_router, prefix="/of", tags=["output files"])
api_app.include_router(key_management_router, prefix="/env-mgmt", tags=["environment management"])
api_app.include_router(template_management_router, prefix="/templates", tags=["templates"])
api_app.include_router(openai_compatible_api_router, prefix="/api", tags=["openai compatible"])
api_app.include_router(evals_management_router, prefix="/evals", tags=["evaluations"])
api_app.include_router(google_auth_router, prefix="/google", tags=["google auth"])
api_app.include_router(rag_management_router, prefix="/rag", tags=["rag"])
api_app.include_router(file_management_router, prefix="/files", tags=["files"])
api_app.include_router(ai_management_router, prefix="/ai", tags=["ai"])
api_app.include_router(user_management_router, prefix="/user", tags=["users"])
api_app.include_router(session_management_router, prefix="/session", tags=["sessions"])
api_app.include_router(slack_management_router, prefix="/slack", tags=["slack integration"])
api_app.include_router(openapi_router, prefix="/openapi", tags=["openapi"])
|
pyspur/backend/pyspur/api/dataset_management.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from datetime import datetime, timezone
|
| 3 |
+
from typing import List
|
| 4 |
+
|
| 5 |
+
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
|
| 8 |
+
from ..database import get_db
|
| 9 |
+
from ..models.dataset_model import DatasetModel
|
| 10 |
+
from ..models.run_model import RunModel
|
| 11 |
+
from ..schemas.dataset_schemas import DatasetResponseSchema
|
| 12 |
+
from ..schemas.run_schemas import RunResponseSchema
|
| 13 |
+
|
| 14 |
+
router = APIRouter()
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def save_file(file: UploadFile) -> str:
    """Persist an uploaded dataset file into the local ``datasets`` directory.

    Args:
        file: The client-supplied multipart upload.

    Returns:
        Absolute-ish path of the stored file.

    Raises:
        ValueError: If the upload carries no filename.
    """
    filename = file.filename
    # Explicit check instead of `assert`: asserts are stripped under `python -O`.
    if filename is None:
        raise ValueError("Uploaded file has no filename")
    # SECURITY: keep only the basename — a client-controlled filename may
    # contain path separators ("../../x") enabling path traversal.
    safe_name = os.path.basename(filename)
    datasets_dir = os.path.join(os.path.dirname(__file__), "..", "..", "datasets")
    # Create the target directory on first use so a fresh checkout works.
    os.makedirs(datasets_dir, exist_ok=True)
    file_location = os.path.join(datasets_dir, safe_name)
    with open(file_location, "wb+") as file_object:
        file_object.write(file.file.read())
    return file_location
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@router.post("/", description="Upload a new dataset")
|
| 27 |
+
def upload_dataset(
|
| 28 |
+
name: str,
|
| 29 |
+
description: str = "",
|
| 30 |
+
file: UploadFile = File(...),
|
| 31 |
+
db: Session = Depends(get_db),
|
| 32 |
+
) -> DatasetResponseSchema:
|
| 33 |
+
file_location = save_file(file)
|
| 34 |
+
new_dataset = DatasetModel(
|
| 35 |
+
name=name,
|
| 36 |
+
description=description,
|
| 37 |
+
file_path=file_location,
|
| 38 |
+
uploaded_at=datetime.now(timezone.utc),
|
| 39 |
+
)
|
| 40 |
+
db.add(new_dataset)
|
| 41 |
+
db.commit()
|
| 42 |
+
db.refresh(new_dataset)
|
| 43 |
+
return DatasetResponseSchema(
|
| 44 |
+
id=new_dataset.id,
|
| 45 |
+
name=new_dataset.name,
|
| 46 |
+
description=new_dataset.description,
|
| 47 |
+
filename=new_dataset.file_path,
|
| 48 |
+
created_at=new_dataset.uploaded_at,
|
| 49 |
+
updated_at=new_dataset.uploaded_at,
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@router.get(
    "/",
    response_model=List[DatasetResponseSchema],
    description="List all datasets",
)
def list_datasets(db: Session = Depends(get_db)) -> List[DatasetResponseSchema]:
    """Return response metadata for every dataset in the database."""
    results: List[DatasetResponseSchema] = []
    for record in db.query(DatasetModel).all():
        results.append(
            DatasetResponseSchema(
                id=record.id,
                name=record.name,
                description=record.description,
                filename=record.file_path,
                created_at=record.uploaded_at,
                updated_at=record.uploaded_at,
            )
        )
    return results
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@router.get(
    "/{dataset_id}/",
    response_model=DatasetResponseSchema,
    description="Get a dataset by ID",
)
def get_dataset(dataset_id: str, db: Session = Depends(get_db)) -> DatasetResponseSchema:
    """Look up a single dataset by primary key; responds 404 when absent."""
    record = db.query(DatasetModel).filter(DatasetModel.id == dataset_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Dataset not found")
    return DatasetResponseSchema(
        id=record.id,
        name=record.name,
        description=record.description,
        filename=record.file_path,
        created_at=record.uploaded_at,
        updated_at=record.uploaded_at,
    )
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
@router.delete(
    "/{dataset_id}/",
    description="Delete a dataset by ID",
)
def delete_dataset(dataset_id: str, db: Session = Depends(get_db)):
    """Remove the dataset row; the stored file on disk is left untouched."""
    record = db.query(DatasetModel).filter(DatasetModel.id == dataset_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Dataset not found")
    db.delete(record)
    db.commit()
    return {"message": "Dataset deleted"}
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@router.get(
    "/{dataset_id}/list_runs/",
    description="List all runs that used this dataset",
    response_model=List[RunResponseSchema],
)
def list_dataset_runs(dataset_id: str, db: Session = Depends(get_db)):
    """Return runs that consumed this dataset, newest first; 404 if unknown."""
    if db.query(DatasetModel).filter(DatasetModel.id == dataset_id).first() is None:
        raise HTTPException(status_code=404, detail="Dataset not found")
    run_query = (
        db.query(RunModel)
        .filter(RunModel.input_dataset_id == dataset_id)
        .order_by(RunModel.created_at.desc())
    )
    return run_query.all()
|
pyspur/backend/pyspur/api/evals_management.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timezone
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from typing import Any, Dict, List
|
| 4 |
+
|
| 5 |
+
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
|
| 8 |
+
from ..database import get_db
|
| 9 |
+
from ..evals.evaluator import load_yaml_config, prepare_and_evaluate_dataset
|
| 10 |
+
from ..models.eval_run_model import EvalRunModel, EvalRunStatus
|
| 11 |
+
from ..models.workflow_model import WorkflowModel
|
| 12 |
+
from ..schemas.eval_schemas import (
|
| 13 |
+
EvalRunRequest,
|
| 14 |
+
EvalRunResponse,
|
| 15 |
+
EvalRunStatusEnum,
|
| 16 |
+
)
|
| 17 |
+
from ..schemas.workflow_schemas import WorkflowDefinitionSchema
|
| 18 |
+
from .workflow_management import get_workflow_output_variables
|
| 19 |
+
|
| 20 |
+
router = APIRouter()
|
| 21 |
+
|
| 22 |
+
EVALS_DIR = Path(__file__).parent.parent / "evals" / "tasks"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@router.get("/", description="List all available evals")
def list_evals() -> List[Dict[str, Any]]:
    """List all available evals by scanning the tasks directory for YAML files."""
    if not EVALS_DIR.exists():
        raise HTTPException(status_code=500, detail="Evals directory not found")
    catalog: List[Dict[str, Any]] = []
    for eval_file in EVALS_DIR.glob("*.yaml"):
        try:
            # Any parse/shape failure on a task file aborts the listing with
            # a 500 naming the offending file.
            config = load_yaml_config(yaml_path=eval_file)
            metadata = config.get("metadata", {})
            entry = {
                "name": metadata.get("name", eval_file.stem),
                "description": metadata.get("description", ""),
                "type": metadata.get("type", "Unknown"),
                "num_samples": metadata.get("num_samples", "N/A"),
                "paper_link": metadata.get("paper_link", ""),
                "file_name": eval_file.name,
            }
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Error parsing {eval_file.name}: {e}")
        catalog.append(entry)
    return catalog
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@router.post(
    "/launch/",
    response_model=EvalRunResponse,
    description="Launch an eval job with detailed validation and workflow integration",
)
async def launch_eval(
    request: EvalRunRequest,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
) -> EvalRunResponse:
    """
    Launch an eval job by triggering the evaluator with the specified eval configuration.

    Validates the workflow and the requested output variable, records a PENDING
    run, schedules the actual evaluation as a background task, and returns the
    run descriptor immediately.

    Raises:
        HTTPException: 404 for an unknown workflow or eval config, 400 for an
            invalid output variable, 500 for any other launch failure.
    """
    # Validate workflow ID
    workflow = db.query(WorkflowModel).filter(WorkflowModel.id == request.workflow_id).first()
    if not workflow:
        raise HTTPException(status_code=404, detail="Workflow not found")

    workflow_definition = WorkflowDefinitionSchema.model_validate(workflow.definition)

    eval_file = EVALS_DIR / f"{request.eval_name}.yaml"
    if not eval_file.exists():
        raise HTTPException(status_code=404, detail="Eval configuration not found")

    try:
        # Load the eval configuration
        eval_config = load_yaml_config(eval_file)

        # Validate the requested output variable against the workflow's
        # leaf-node output variables.
        leaf_node_output_variables = get_workflow_output_variables(
            workflow_id=request.workflow_id, db=db
        )

        print(f"Valid output variables: {leaf_node_output_variables}")

        # Extract the list of valid prefixed variables
        valid_prefixed_variables = [var["prefixed_variable"] for var in leaf_node_output_variables]

        if request.output_variable not in valid_prefixed_variables:
            raise HTTPException(
                status_code=400,
                detail=(
                    f"Invalid output variable '{request.output_variable}'. "
                    f"Must be one of: {leaf_node_output_variables}"
                ),
            )

        # Record the run as PENDING before scheduling the background task.
        new_eval_run = EvalRunModel(
            eval_name=request.eval_name,
            workflow_id=request.workflow_id,
            output_variable=request.output_variable,
            num_samples=request.num_samples,
            status=EvalRunStatus.PENDING,
            start_time=datetime.now(timezone.utc),
        )
        db.add(new_eval_run)
        db.commit()
        db.refresh(new_eval_run)

        async def run_eval_task(eval_run_id: str):
            # Use a fresh session: the request-scoped one is closed by the
            # time the background task actually runs.
            with next(get_db()) as session:
                eval_run = (
                    session.query(EvalRunModel).filter(EvalRunModel.id == eval_run_id).first()
                )
                if not eval_run:
                    session.close()
                    return

                eval_run.status = EvalRunStatus.RUNNING
                session.commit()

                try:
                    # Run the evaluation asynchronously
                    results = await prepare_and_evaluate_dataset(
                        eval_config,
                        workflow_definition=workflow_definition,
                        num_samples=eval_run.num_samples,
                        output_variable=eval_run.output_variable,
                    )
                    eval_run.results = results
                    eval_run.status = EvalRunStatus.COMPLETED
                    eval_run.end_time = datetime.now(timezone.utc)
                except Exception as e:
                    eval_run.status = EvalRunStatus.FAILED
                    eval_run.end_time = datetime.now(timezone.utc)
                    session.commit()
                    raise e
                finally:
                    session.commit()

        background_tasks.add_task(run_eval_task, new_eval_run.id)

        # Return all required parameters
        return EvalRunResponse(
            run_id=new_eval_run.id,
            eval_name=new_eval_run.eval_name,
            workflow_id=new_eval_run.workflow_id,
            status=EvalRunStatusEnum(new_eval_run.status.value),
            start_time=new_eval_run.start_time,
            end_time=new_eval_run.end_time,
        )
    except HTTPException:
        # Bug fix: re-raise deliberate HTTP errors (e.g. the 400 above) so the
        # generic handler below does not re-wrap them as a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error launching eval: {e}")
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
@router.get(
    "/runs/{eval_run_id}",
    response_model=EvalRunResponse,
    description="Get the status of an eval run",
)
async def get_eval_run_status(eval_run_id: str, db: Session = Depends(get_db)) -> EvalRunResponse:
    """Fetch one eval run and report its current status plus any results."""
    run = db.query(EvalRunModel).filter(EvalRunModel.id == eval_run_id).first()
    if run is None:
        raise HTTPException(status_code=404, detail="Eval run not found")
    return EvalRunResponse(
        run_id=run.id,
        eval_name=run.eval_name,
        workflow_id=run.workflow_id,
        status=EvalRunStatusEnum(run.status.value),
        start_time=run.start_time,
        end_time=run.end_time,
        results=run.results,
    )
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
@router.get(
    "/runs/",
    response_model=List[EvalRunResponse],
    description="List all eval runs",
)
async def list_eval_runs(
    db: Session = Depends(get_db),
) -> List[EvalRunResponse]:
    """Return every eval run, most recently started first (results omitted)."""
    responses: List[EvalRunResponse] = []
    for run in db.query(EvalRunModel).order_by(EvalRunModel.start_time.desc()).all():
        responses.append(
            EvalRunResponse(
                run_id=run.id,
                eval_name=run.eval_name,
                workflow_id=run.workflow_id,
                status=EvalRunStatusEnum(run.status.value),
                start_time=run.start_time,
                end_time=run.end_time,
            )
        )
    return responses
|
pyspur/backend/pyspur/api/file_management.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
from datetime import datetime, timezone
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from typing import List
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, HTTPException
|
| 8 |
+
from fastapi.responses import FileResponse
|
| 9 |
+
|
| 10 |
+
from ..schemas.file_schemas import FileResponseSchema
|
| 11 |
+
|
| 12 |
+
router = APIRouter()
|
| 13 |
+
|
| 14 |
+
# Define base data directory
|
| 15 |
+
DATA_DIR = Path("data")
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@router.get(
    "/{workflow_id}",
    response_model=List[FileResponseSchema],
    description="List all files for a specific workflow",
)
async def list_workflow_files(workflow_id: str) -> List[FileResponseSchema]:
    """
    List all files in the workflow's directory.
    Returns a list of dictionaries containing file information.
    """
    workflow_dir = DATA_DIR / "run_files" / workflow_id
    if not workflow_dir.exists():
        return []
    # Paths are reported relative to DATA_DIR; timestamps are UTC.
    return [
        FileResponseSchema(
            name=entry.name,
            path=str(entry.relative_to(DATA_DIR)),
            size=os.path.getsize(entry),
            created=datetime.fromtimestamp(os.path.getctime(entry), tz=timezone.utc),
            workflow_id=workflow_id,
        )
        for entry in workflow_dir.glob("*")
        if entry.is_file()
    ]
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@router.get(
    "/",
    response_model=List[FileResponseSchema],
    description="List all files across all workflows",
)
async def list_all_files() -> List[FileResponseSchema]:
    """
    List all files in the data directory across all workflows.
    Returns a list of dictionaries containing file information.
    """
    run_files_root = DATA_DIR / "run_files"
    if not run_files_root.exists():
        return []

    collected: List[FileResponseSchema] = []
    # One subdirectory per workflow; the directory name is the workflow id.
    for workflow_dir in run_files_root.glob("*"):
        if not workflow_dir.is_dir():
            continue
        for entry in workflow_dir.glob("*"):
            if not entry.is_file():
                continue
            collected.append(
                FileResponseSchema(
                    name=entry.name,
                    workflow_id=workflow_dir.name,
                    path=str(entry.relative_to(DATA_DIR)),
                    size=os.path.getsize(entry),
                    created=datetime.fromtimestamp(os.path.getctime(entry), tz=timezone.utc),
                )
            )
    return collected
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@router.delete("/{workflow_id}/{filename}", description="Delete a specific file")
async def delete_file(workflow_id: str, filename: str):
    """
    Delete a specific file from a workflow's directory.

    Bug fix: the route template previously did not declare a ``{filename}``
    path parameter, so FastAPI could never bind the ``filename`` argument.
    Also rejects path-traversal attempts in either path component.
    """
    # Reject traversal before touching the filesystem.
    if ".." in workflow_id or ".." in filename:
        raise HTTPException(status_code=400, detail="Invalid file path")

    file_path = DATA_DIR / "run_files" / workflow_id / filename

    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File not found")

    try:
        os.remove(file_path)
        return {"message": "File deleted successfully"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error deleting file: {str(e)}")
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
@router.delete("/{workflow_id}", description="Delete all files for a workflow")
async def delete_workflow_files(workflow_id: str):
    """
    Delete all files in a workflow's directory.
    """
    target_dir = DATA_DIR / "run_files" / workflow_id

    if not target_dir.exists():
        raise HTTPException(status_code=404, detail="Workflow directory not found")

    try:
        # Remove the whole directory tree, including the directory itself.
        shutil.rmtree(target_dir)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error deleting workflow files: {str(e)}")
    return {"message": "All workflow files deleted successfully"}
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
@router.get(
    "/{file_path:path}",
    description="Get a specific file",
    response_class=FileResponse,
)
async def get_file(file_path: str):
    """
    Get a specific file from the data directory.
    Validates file path to prevent path traversal attacks.

    Raises:
        HTTPException: 400 for a malformed path, 403 if the resolved path
            escapes the data directory, 404 if the file does not exist.
    """
    # Cheap textual screen for traversal patterns before touching the filesystem.
    if ".." in file_path or "~" in file_path:
        raise HTTPException(status_code=400, detail="Invalid file path")

    # Resolve the requested path; any resolution failure is an invalid path.
    try:
        full_path = (DATA_DIR / file_path).resolve()
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid file path")

    # Bug fix: the containment check used to live INSIDE the try block above,
    # so the deliberate 403 was caught by ``except Exception`` and re-raised
    # as a 400, masking the access-denied response. Also use is_relative_to
    # instead of a str.startswith prefix test, which wrongly accepted sibling
    # directories sharing the prefix (e.g. "data-other" vs "data").
    if not full_path.is_relative_to(DATA_DIR.resolve()):
        raise HTTPException(status_code=403, detail="Access denied")

    if not full_path.exists():
        raise HTTPException(status_code=404, detail="File not found")

    return FileResponse(str(full_path))
|
pyspur/backend/pyspur/api/key_management.py
ADDED
|
@@ -0,0 +1,477 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from typing import Dict, List, Optional
|
| 3 |
+
|
| 4 |
+
from dotenv import dotenv_values, load_dotenv, set_key, unset_key
|
| 5 |
+
from fastapi import APIRouter, HTTPException
|
| 6 |
+
from pydantic import BaseModel
|
| 7 |
+
|
| 8 |
+
from ..rag.datastore.factory import VectorStoreConfig, get_vector_stores
|
| 9 |
+
from ..rag.embedder import EmbeddingModelConfig, EmbeddingModels
|
| 10 |
+
|
| 11 |
+
# Load existing environment variables from the .env file
|
| 12 |
+
load_dotenv(".env")
|
| 13 |
+
|
| 14 |
+
router = APIRouter()
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ProviderParameter(BaseModel):
    """One configurable setting a provider needs (typically an API credential)."""

    name: str  # Environment-variable name the value is stored under.
    description: str  # Human-readable label shown in the settings UI.
    required: bool = True  # Whether the user must supply a value.
    type: str = "password"  # password, text, select
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class ProviderConfig(BaseModel):
    """Describes an external service integration and the settings it requires."""

    id: str  # Stable identifier used by the frontend to reference the provider.
    name: str  # Display name.
    description: str  # Short blurb shown in the settings UI.
    category: str  # 'llm', 'embedding', 'vectorstore'
    parameters: List[ProviderParameter]  # Settings the user can configure.
    icon: str = "database"  # Default icon for vector stores
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
PROVIDER_CONFIGS = [
|
| 34 |
+
# LLM Providers
|
| 35 |
+
ProviderConfig(
|
| 36 |
+
id="openai",
|
| 37 |
+
name="OpenAI",
|
| 38 |
+
description="OpenAI's GPT models",
|
| 39 |
+
category="llm",
|
| 40 |
+
icon="openai",
|
| 41 |
+
parameters=[
|
| 42 |
+
ProviderParameter(name="OPENAI_API_KEY", description="OpenAI API Key"),
|
| 43 |
+
],
|
| 44 |
+
),
|
| 45 |
+
ProviderConfig(
|
| 46 |
+
id="azure-openai",
|
| 47 |
+
name="Azure OpenAI",
|
| 48 |
+
description="Azure-hosted OpenAI models",
|
| 49 |
+
category="llm",
|
| 50 |
+
icon="azure",
|
| 51 |
+
parameters=[
|
| 52 |
+
ProviderParameter(name="AZURE_OPENAI_API_KEY", description="Azure OpenAI API Key"),
|
| 53 |
+
ProviderParameter(
|
| 54 |
+
name="AZURE_OPENAI_ENDPOINT",
|
| 55 |
+
description="Azure OpenAI Endpoint URL",
|
| 56 |
+
type="text",
|
| 57 |
+
),
|
| 58 |
+
ProviderParameter(
|
| 59 |
+
name="AZURE_OPENAI_API_VERSION",
|
| 60 |
+
description="API Version (e.g. 2023-05-15)",
|
| 61 |
+
type="text",
|
| 62 |
+
),
|
| 63 |
+
],
|
| 64 |
+
),
|
| 65 |
+
ProviderConfig(
|
| 66 |
+
id="anthropic",
|
| 67 |
+
name="Anthropic",
|
| 68 |
+
description="Anthropic's Claude models",
|
| 69 |
+
category="llm",
|
| 70 |
+
icon="anthropic",
|
| 71 |
+
parameters=[
|
| 72 |
+
ProviderParameter(name="ANTHROPIC_API_KEY", description="Anthropic API Key"),
|
| 73 |
+
],
|
| 74 |
+
),
|
| 75 |
+
ProviderConfig(
|
| 76 |
+
id="gemini",
|
| 77 |
+
name="Google Gemini",
|
| 78 |
+
description="Google's Gemini models",
|
| 79 |
+
category="llm",
|
| 80 |
+
icon="google",
|
| 81 |
+
parameters=[
|
| 82 |
+
ProviderParameter(name="GEMINI_API_KEY", description="Google AI API Key"),
|
| 83 |
+
],
|
| 84 |
+
),
|
| 85 |
+
ProviderConfig(
|
| 86 |
+
id="deepseek",
|
| 87 |
+
name="DeepSeek",
|
| 88 |
+
description="DeepSeek's code and chat models",
|
| 89 |
+
category="llm",
|
| 90 |
+
icon="deepseek",
|
| 91 |
+
parameters=[
|
| 92 |
+
ProviderParameter(name="DEEPSEEK_API_KEY", description="DeepSeek API Key"),
|
| 93 |
+
],
|
| 94 |
+
),
|
| 95 |
+
ProviderConfig(
|
| 96 |
+
id="cohere",
|
| 97 |
+
name="Cohere",
|
| 98 |
+
description="Cohere's language models",
|
| 99 |
+
category="llm",
|
| 100 |
+
icon="cohere",
|
| 101 |
+
parameters=[
|
| 102 |
+
ProviderParameter(name="COHERE_API_KEY", description="Cohere API Key"),
|
| 103 |
+
],
|
| 104 |
+
),
|
| 105 |
+
ProviderConfig(
|
| 106 |
+
id="voyage",
|
| 107 |
+
name="Voyage AI",
|
| 108 |
+
description="Voyage's language models",
|
| 109 |
+
category="llm",
|
| 110 |
+
icon="voyage",
|
| 111 |
+
parameters=[
|
| 112 |
+
ProviderParameter(name="VOYAGE_API_KEY", description="Voyage AI API Key"),
|
| 113 |
+
],
|
| 114 |
+
),
|
| 115 |
+
ProviderConfig(
|
| 116 |
+
id="mistral",
|
| 117 |
+
name="Mistral AI",
|
| 118 |
+
description="Mistral's language models",
|
| 119 |
+
category="llm",
|
| 120 |
+
icon="mistral",
|
| 121 |
+
parameters=[
|
| 122 |
+
ProviderParameter(name="MISTRAL_API_KEY", description="Mistral AI API Key"),
|
| 123 |
+
],
|
| 124 |
+
),
|
| 125 |
+
# Vector Store Providers
|
| 126 |
+
ProviderConfig(
|
| 127 |
+
id="pinecone",
|
| 128 |
+
name="Pinecone",
|
| 129 |
+
description="Production-ready vector database",
|
| 130 |
+
category="vectorstore",
|
| 131 |
+
icon="pinecone",
|
| 132 |
+
parameters=[
|
| 133 |
+
ProviderParameter(name="PINECONE_API_KEY", description="Pinecone API Key"),
|
| 134 |
+
ProviderParameter(
|
| 135 |
+
name="PINECONE_ENVIRONMENT",
|
| 136 |
+
description="Pinecone Environment",
|
| 137 |
+
type="text",
|
| 138 |
+
),
|
| 139 |
+
ProviderParameter(
|
| 140 |
+
name="PINECONE_INDEX",
|
| 141 |
+
description="Pinecone Index Name",
|
| 142 |
+
type="text",
|
| 143 |
+
),
|
| 144 |
+
],
|
| 145 |
+
),
|
| 146 |
+
ProviderConfig(
|
| 147 |
+
id="weaviate",
|
| 148 |
+
name="Weaviate",
|
| 149 |
+
description="Multi-modal vector search engine",
|
| 150 |
+
category="vectorstore",
|
| 151 |
+
icon="weaviate",
|
| 152 |
+
parameters=[
|
| 153 |
+
ProviderParameter(name="WEAVIATE_API_KEY", description="Weaviate API Key"),
|
| 154 |
+
ProviderParameter(
|
| 155 |
+
name="WEAVIATE_URL",
|
| 156 |
+
description="Weaviate Instance URL",
|
| 157 |
+
type="text",
|
| 158 |
+
),
|
| 159 |
+
],
|
| 160 |
+
),
|
| 161 |
+
ProviderConfig(
|
| 162 |
+
id="qdrant",
|
| 163 |
+
name="Qdrant",
|
| 164 |
+
description="Vector database for production",
|
| 165 |
+
category="vectorstore",
|
| 166 |
+
icon="qdrant",
|
| 167 |
+
parameters=[
|
| 168 |
+
ProviderParameter(name="QDRANT_API_KEY", description="Qdrant API Key"),
|
| 169 |
+
ProviderParameter(
|
| 170 |
+
name="QDRANT_URL",
|
| 171 |
+
description="Qdrant Instance URL",
|
| 172 |
+
type="text",
|
| 173 |
+
),
|
| 174 |
+
],
|
| 175 |
+
),
|
| 176 |
+
ProviderConfig(
|
| 177 |
+
id="chroma",
|
| 178 |
+
name="Chroma",
|
| 179 |
+
description="Open-source embedding database",
|
| 180 |
+
category="vectorstore",
|
| 181 |
+
icon="chroma",
|
| 182 |
+
parameters=[
|
| 183 |
+
ProviderParameter(
|
| 184 |
+
name="CHROMA_IN_MEMORY",
|
| 185 |
+
description="Run Chroma in memory",
|
| 186 |
+
type="text",
|
| 187 |
+
),
|
| 188 |
+
ProviderParameter(
|
| 189 |
+
name="CHROMA_PERSISTENCE_DIR",
|
| 190 |
+
description="Directory for Chroma persistence",
|
| 191 |
+
type="text",
|
| 192 |
+
),
|
| 193 |
+
ProviderParameter(
|
| 194 |
+
name="CHROMA_HOST",
|
| 195 |
+
description="Chroma server host",
|
| 196 |
+
type="text",
|
| 197 |
+
),
|
| 198 |
+
ProviderParameter(
|
| 199 |
+
name="CHROMA_PORT",
|
| 200 |
+
description="Chroma server port",
|
| 201 |
+
type="text",
|
| 202 |
+
),
|
| 203 |
+
ProviderParameter(
|
| 204 |
+
name="CHROMA_COLLECTION",
|
| 205 |
+
description="Chroma collection name",
|
| 206 |
+
type="text",
|
| 207 |
+
),
|
| 208 |
+
],
|
| 209 |
+
),
|
| 210 |
+
ProviderConfig(
|
| 211 |
+
id="supabase",
|
| 212 |
+
name="Supabase",
|
| 213 |
+
description="Open-source vector database",
|
| 214 |
+
category="vectorstore",
|
| 215 |
+
icon="supabase",
|
| 216 |
+
parameters=[
|
| 217 |
+
ProviderParameter(
|
| 218 |
+
name="SUPABASE_URL",
|
| 219 |
+
description="Supabase Project URL",
|
| 220 |
+
type="text",
|
| 221 |
+
),
|
| 222 |
+
ProviderParameter(
|
| 223 |
+
name="SUPABASE_ANON_KEY",
|
| 224 |
+
description="Supabase Anonymous Key",
|
| 225 |
+
type="password",
|
| 226 |
+
required=False,
|
| 227 |
+
),
|
| 228 |
+
ProviderParameter(
|
| 229 |
+
name="SUPABASE_SERVICE_ROLE_KEY",
|
| 230 |
+
description="Supabase Service Role Key",
|
| 231 |
+
type="password",
|
| 232 |
+
required=False,
|
| 233 |
+
),
|
| 234 |
+
],
|
| 235 |
+
),
|
| 236 |
+
# Add Reddit Provider
|
| 237 |
+
ProviderConfig(
|
| 238 |
+
id="reddit",
|
| 239 |
+
name="Reddit",
|
| 240 |
+
description="Reddit API integration",
|
| 241 |
+
category="social",
|
| 242 |
+
icon="logos:reddit-icon",
|
| 243 |
+
parameters=[
|
| 244 |
+
ProviderParameter(name="REDDIT_CLIENT_ID", description="Reddit API Client ID"),
|
| 245 |
+
ProviderParameter(name="REDDIT_CLIENT_SECRET", description="Reddit API Client Secret"),
|
| 246 |
+
ProviderParameter(
|
| 247 |
+
name="REDDIT_USERNAME", description="Reddit Username", type="text", required=False
|
| 248 |
+
),
|
| 249 |
+
ProviderParameter(
|
| 250 |
+
name="REDDIT_PASSWORD",
|
| 251 |
+
description="Reddit Password",
|
| 252 |
+
type="password",
|
| 253 |
+
required=False,
|
| 254 |
+
),
|
| 255 |
+
ProviderParameter(
|
| 256 |
+
name="REDDIT_USER_AGENT",
|
| 257 |
+
description="Reddit API User Agent",
|
| 258 |
+
type="text",
|
| 259 |
+
required=False,
|
| 260 |
+
),
|
| 261 |
+
],
|
| 262 |
+
),
|
| 263 |
+
# Add Firecrawl Provider
|
| 264 |
+
ProviderConfig(
|
| 265 |
+
id="firecrawl",
|
| 266 |
+
name="Firecrawl",
|
| 267 |
+
description="Web scraping and crawling service",
|
| 268 |
+
category="scraping",
|
| 269 |
+
icon="solar:spider-bold",
|
| 270 |
+
parameters=[
|
| 271 |
+
ProviderParameter(name="FIRECRAWL_API_KEY", description="Firecrawl API Key"),
|
| 272 |
+
],
|
| 273 |
+
),
|
| 274 |
+
# Add Slack Provider
|
| 275 |
+
ProviderConfig(
|
| 276 |
+
id="slack",
|
| 277 |
+
name="Slack",
|
| 278 |
+
description="Slack messaging and notification service",
|
| 279 |
+
category="messaging",
|
| 280 |
+
icon="logos:slack-icon",
|
| 281 |
+
parameters=[
|
| 282 |
+
ProviderParameter(
|
| 283 |
+
name="SLACK_BOT_TOKEN", description="Slack Bot User OAuth Token (starts with xoxb-)"
|
| 284 |
+
),
|
| 285 |
+
ProviderParameter(
|
| 286 |
+
name="SLACK_USER_TOKEN",
|
| 287 |
+
description="Slack User OAuth Token (starts with xoxp-)",
|
| 288 |
+
required=False,
|
| 289 |
+
),
|
| 290 |
+
],
|
| 291 |
+
),
|
| 292 |
+
# Add Exa Provider
|
| 293 |
+
ProviderConfig(
|
| 294 |
+
id="exa",
|
| 295 |
+
name="Exa",
|
| 296 |
+
description="Exa web search API",
|
| 297 |
+
category="search",
|
| 298 |
+
icon="solar:search-bold",
|
| 299 |
+
parameters=[
|
| 300 |
+
ProviderParameter(name="EXA_API_KEY", description="Exa API Key"),
|
| 301 |
+
],
|
| 302 |
+
),
|
| 303 |
+
]
|
| 304 |
+
|
| 305 |
+
# For backward compatibility, create a flat list of all parameter names
# (one {"name": <env var>, "value": ""} entry per parameter across every
# provider in PROVIDER_CONFIGS).
MODEL_PROVIDER_KEYS = [
    {"name": param.name, "value": ""} for config in PROVIDER_CONFIGS for param in config.parameters
]
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
class APIKey(BaseModel):
    """Request/response payload pairing a key name with its (optional) value."""

    name: str  # Environment-variable name of the key.
    value: Optional[str] = None  # Key value; None when unset.
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
def get_all_env_variables() -> Dict[str, str | None]:
    """Read every key/value pair currently stored in the .env file."""
    return dotenv_values(".env")
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def get_env_variable(name: str) -> Optional[str]:
    """Look up *name* in the process environment; returns None when absent."""
    return os.environ.get(name)
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def set_env_variable(name: str, value: str):
    """Set an environment variable both in the .env file and in the current process.

    Args:
        name: Environment variable name.
        value: Raw value to store; it is persisted and exported verbatim.

    Fix: the previous implementation wrapped values containing spaces or
    shell metacharacters in literal double quotes before persisting.
    ``set_key`` already quotes values for the .env format, so that produced
    double-quoted entries on disk and left literal quote characters in
    ``os.environ``. The raw value is now stored unmodified.
    """
    # set_key handles quoting/escaping required by the .env file format.
    set_key(".env", name, value)

    # Keep the current process in sync immediately.
    os.environ[name] = value

    # Re-read the file so anything derived from it sees the new value.
    load_dotenv(".env", override=True)
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def delete_env_variable(name: str):
    """Remove *name* from both the .env file and the current process env."""
    # Remove the key from the .env file
    unset_key(".env", name)
    # Remove the key from os.environ; no-op if it was never loaded
    os.environ.pop(name, None)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def mask_key_value(value: str, param_type: str = "password") -> str:
    """Return a display-safe version of *value*.

    Non-password parameters are shown in full. Password parameters keep only
    the first and last four characters visible; values too short to hide at
    least four characters that way are masked entirely.
    """
    if param_type != "password":
        return value

    shown = 4        # characters revealed at each end
    min_hidden = 4   # never hide fewer than this many characters
    if len(value) <= 2 * shown + min_hidden:
        return "*" * len(value)
    hidden = "*" * (len(value) - 2 * shown)
    return f"{value[:shown]}{hidden}{value[-shown:]}"
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
@router.get("/providers", description="Get all provider configurations")
|
| 370 |
+
async def get_providers():
|
| 371 |
+
"""Return all provider configurations."""
|
| 372 |
+
return PROVIDER_CONFIGS
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
@router.get("/", description="Get a list of all environment variable names")
|
| 376 |
+
async def list_api_keys():
|
| 377 |
+
"""Return a list of all model provider keys."""
|
| 378 |
+
return [k["name"] for k in MODEL_PROVIDER_KEYS]
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
@router.get(
    "/{name}",
    description="Get the masked value of a specific environment variable",
)
async def get_api_key(name: str):
    """Return the masked value of the specified environment variable.

    Requires authentication.

    Raises:
        HTTPException: 404 when *name* is not a known provider key.
    """
    if name not in [k["name"] for k in MODEL_PROVIDER_KEYS]:
        raise HTTPException(status_code=404, detail="Key not found")

    # Look up the parameter's declared type across all providers.
    # Fix: the original nested-loop `break` only exited the inner loop, so a
    # later provider with a same-named parameter could silently override an
    # earlier match; next() stops at the first match.
    param_type = next(
        (
            param.type
            for config in PROVIDER_CONFIGS
            for param in config.parameters
            if param.name == name
        ),
        "password",  # default: treat unknown parameters as secrets
    )

    value = get_env_variable(name)
    if value is None:
        value = ""
    return APIKey(name=name, value=mask_key_value(value, param_type))
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
@router.post("/", description="Add or update an environment variable")
|
| 408 |
+
async def set_api_key(api_key: APIKey):
|
| 409 |
+
"""Add a new environment variable or updates an existing one.
|
| 410 |
+
|
| 411 |
+
Requires authentication.
|
| 412 |
+
"""
|
| 413 |
+
if api_key.name not in [k["name"] for k in MODEL_PROVIDER_KEYS]:
|
| 414 |
+
raise HTTPException(status_code=404, detail="Key not found")
|
| 415 |
+
if not api_key.value:
|
| 416 |
+
raise HTTPException(status_code=400, detail="Value is required")
|
| 417 |
+
set_env_variable(api_key.name, api_key.value)
|
| 418 |
+
return {"message": f"Key '{api_key.name}' set successfully"}
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
@router.delete("/{name}", description="Delete an environment variable")
async def delete_api_key(name: str):
    """Remove the specified environment variable.

    Requires authentication.
    """
    is_known = any(entry["name"] == name for entry in MODEL_PROVIDER_KEYS)
    if not is_known:
        raise HTTPException(status_code=404, detail="Key not found")
    if get_env_variable(name) is None:
        raise HTTPException(status_code=404, detail="Key not found")
    delete_env_variable(name)
    return {"message": f"Key '{name}' deleted successfully"}
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
@router.get("/embedding-models/", response_model=Dict[str, EmbeddingModelConfig])
|
| 436 |
+
async def get_embedding_models() -> Dict[str, EmbeddingModelConfig]:
|
| 437 |
+
"""Get all available embedding models and their configurations."""
|
| 438 |
+
try:
|
| 439 |
+
models: Dict[str, EmbeddingModelConfig] = {}
|
| 440 |
+
for model in EmbeddingModels:
|
| 441 |
+
model_info = EmbeddingModels.get_model_info(model.value)
|
| 442 |
+
if model_info:
|
| 443 |
+
# Find the corresponding provider config
|
| 444 |
+
provider_config = next(
|
| 445 |
+
(p for p in PROVIDER_CONFIGS if p.id == model_info.provider.value.lower()),
|
| 446 |
+
None,
|
| 447 |
+
)
|
| 448 |
+
if provider_config:
|
| 449 |
+
# Add required environment variables from the provider config
|
| 450 |
+
model_info.required_env_vars = [
|
| 451 |
+
p.name for p in provider_config.parameters if p.required
|
| 452 |
+
]
|
| 453 |
+
models[model.value] = model_info
|
| 454 |
+
return models
|
| 455 |
+
except Exception as e:
|
| 456 |
+
raise HTTPException(status_code=500, detail=str(e)) from e
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
@router.get("/vector-stores/", response_model=Dict[str, VectorStoreConfig])
|
| 460 |
+
async def get_vector_stores_endpoint() -> Dict[str, VectorStoreConfig]:
|
| 461 |
+
"""Get all available vector stores and their configurations."""
|
| 462 |
+
try:
|
| 463 |
+
stores = get_vector_stores()
|
| 464 |
+
# Add required environment variables from provider configs
|
| 465 |
+
for store_id, store in stores.items():
|
| 466 |
+
provider_config = next((p for p in PROVIDER_CONFIGS if p.id == store_id), None)
|
| 467 |
+
if provider_config:
|
| 468 |
+
store.required_env_vars = [p.name for p in provider_config.parameters if p.required]
|
| 469 |
+
return stores
|
| 470 |
+
except Exception as e:
|
| 471 |
+
raise HTTPException(status_code=500, detail=str(e)) from e
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
@router.get("/anon-data/", description="Get the status of anonymous telemetry data")
|
| 475 |
+
async def get_anon_data_status() -> bool:
|
| 476 |
+
"""Get the status of anonymous telemetry data."""
|
| 477 |
+
return os.getenv("DISABLE_ANONYMOUS_TELEMETRY", "false").lower() == "true"
|
pyspur/backend/pyspur/api/main.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import tempfile
|
| 4 |
+
import threading
|
| 5 |
+
from contextlib import ExitStack, asynccontextmanager
|
| 6 |
+
from importlib.resources import as_file, files
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
from dotenv import load_dotenv
|
| 10 |
+
from fastapi import FastAPI
|
| 11 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 12 |
+
from fastapi.responses import FileResponse
|
| 13 |
+
from fastapi.staticfiles import StaticFiles
|
| 14 |
+
from loguru import logger
|
| 15 |
+
|
| 16 |
+
from .api_app import api_app
|
| 17 |
+
|
| 18 |
+
load_dotenv()

# Create an ExitStack to manage resources (packaged static files, see lifespan)
exit_stack = ExitStack()
# Assigned by lifespan() at application startup; None until then.
temporary_static_dir = None
# NOTE(review): socket_manager / socket_thread are never assigned anywhere in
# this module — presumably another module sets them; confirm, otherwise the
# shutdown branch in lifespan() is dead code.
socket_manager = None
socket_thread = None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan and cleanup.

    Startup: extracts the packaged "static" resources into a fresh temporary
    directory (module global ``temporary_static_dir``) so they can be served
    from a real filesystem path even when installed as a zipped package.
    Shutdown: signals the socket manager (if any) to stop, joins its thread,
    closes the resource stack, and deletes the temporary directory.
    """
    global temporary_static_dir, socket_manager, socket_thread

    # Setup: Create temporary directory and extract static files
    temporary_static_dir = Path(tempfile.mkdtemp())

    # as_file() keeps the packaged resources accessible for the life of
    # exit_stack (zip-safe access via importlib.resources).
    static_files = files("pyspur").joinpath("static")
    static_dir = exit_stack.enter_context(as_file(static_files))

    # Copy static files to temporary directory
    if static_dir.exists():
        shutil.copytree(static_dir, temporary_static_dir, dirs_exist_ok=True)

    yield

    # Cleanup: Stop socket manager and remove temporary directory.
    # NOTE(review): socket_manager/socket_thread are never assigned in this
    # module, so this branch only runs if another module sets them — confirm.
    if socket_manager:
        logger.info("Stopping socket manager...")
        # Cooperative stop flag checked by the manager's run loop (assumed).
        socket_manager.stopping = True
        if socket_thread and socket_thread.is_alive():
            try:
                # Give the thread a chance to stop gracefully
                socket_thread.join(timeout=5)
            except Exception as e:
                logger.error(f"Error stopping socket manager thread: {e}")

    exit_stack.close()
    shutil.rmtree(temporary_static_dir, ignore_errors=True)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
app = FastAPI(lifespan=lifespan)

# Add CORS middleware.
# NOTE(review): browsers reject allow_origins=["*"] combined with
# allow_credentials=True (the CORS spec forbids wildcard origins on
# credentialed requests) — confirm whether credentials are actually needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount the API routes under /api
app.mount("/api", api_app, name="api")
|
| 74 |
+
|
| 75 |
+
# Optionally, mount directories for assets that you want served directly:
# NOTE(review): this code runs at import time, before lifespan() has assigned
# temporary_static_dir (still None here), so both conditions are always False
# and these mounts never happen — the catch-all serve_frontend route ends up
# serving these paths instead. Confirm whether that is intended.
if temporary_static_dir and Path.joinpath(temporary_static_dir, "images").exists():
    app.mount(
        "/images",
        StaticFiles(directory=str(temporary_static_dir.joinpath("images"))),
        name="images",
    )
if temporary_static_dir and Path.joinpath(temporary_static_dir, "_next").exists():
    app.mount(
        "/_next", StaticFiles(directory=str(temporary_static_dir.joinpath("_next"))), name="_next"
    )
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
@app.get("/{full_path:path}", include_in_schema=False)
|
| 89 |
+
async def serve_frontend(full_path: str):
|
| 90 |
+
if not temporary_static_dir:
|
| 91 |
+
raise RuntimeError("Static directory not initialized")
|
| 92 |
+
|
| 93 |
+
# If the request is empty, serve index.html
|
| 94 |
+
if full_path == "":
|
| 95 |
+
return FileResponse(temporary_static_dir.joinpath("index.html"))
|
| 96 |
+
|
| 97 |
+
# remove trailing slash
|
| 98 |
+
if full_path[-1] == "/":
|
| 99 |
+
full_path = full_path[:-1]
|
| 100 |
+
|
| 101 |
+
# Build a candidate file path from the request.
|
| 102 |
+
candidate = temporary_static_dir.joinpath(full_path)
|
| 103 |
+
|
| 104 |
+
# If candidate is a directory, try its index.html.
|
| 105 |
+
if candidate.is_dir():
|
| 106 |
+
candidate_index = candidate.joinpath("index.html")
|
| 107 |
+
if candidate_index.exists():
|
| 108 |
+
return FileResponse(candidate_index)
|
| 109 |
+
|
| 110 |
+
# If no direct file, try appending ".html" (for files like dashboard.html)
|
| 111 |
+
candidate_html = temporary_static_dir.joinpath(full_path + ".html")
|
| 112 |
+
if candidate_html.exists():
|
| 113 |
+
return FileResponse(candidate_html)
|
| 114 |
+
|
| 115 |
+
# If a file exists at that candidate, serve it.
|
| 116 |
+
if candidate.exists():
|
| 117 |
+
return FileResponse(candidate)
|
| 118 |
+
|
| 119 |
+
# Check if the parent directory contains a file named "[id].html"
|
| 120 |
+
parts = full_path.split("/")
|
| 121 |
+
if len(parts) >= 2:
|
| 122 |
+
parent = temporary_static_dir.joinpath(*parts[:-1])
|
| 123 |
+
dynamic_file = parent.joinpath("[id].html")
|
| 124 |
+
if dynamic_file.exists():
|
| 125 |
+
return FileResponse(dynamic_file)
|
| 126 |
+
|
| 127 |
+
# Fallback: serve the main index.html for client‑side routing.
|
| 128 |
+
return FileResponse(temporary_static_dir.joinpath("index.html"))
|
pyspur/backend/pyspur/api/node_management.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Dict, List
|
| 2 |
+
|
| 3 |
+
from fastapi import APIRouter
|
| 4 |
+
|
| 5 |
+
from ..nodes.factory import NodeFactory
|
| 6 |
+
from ..nodes.llm._utils import LLMModels
|
| 7 |
+
|
| 8 |
+
router = APIRouter()
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@router.get(
    "/supported_types/",
    description="Get the schemas for all available node types",
)
async def get_node_types() -> Dict[str, List[Dict[str, Any]]]:
    """Return the schemas for all available node types.

    The result maps each node group name to a list of per-node schema dicts
    containing the JSON schemas for input/output/config models plus visual
    metadata (tag, logo, category) and, for LLM nodes, per-model constraints.
    """
    # get the schemas for each node class
    node_groups = NodeFactory.get_all_node_types()

    response: Dict[str, List[Dict[str, Any]]] = {}
    for group_name, node_types in node_groups.items():
        node_schemas: List[Dict[str, Any]] = []
        for node_type in node_types:
            node_class = node_type.node_class
            # Some node classes may not define input/output models; fall back
            # to an empty schema rather than failing the whole listing.
            try:
                input_schema = node_class.input_model.model_json_schema()
            except AttributeError:
                input_schema = {}
            try:
                output_schema = node_class.output_model.model_json_schema()
            except AttributeError:
                output_schema = {}

            # Get the config schema and update its title with the display name
            config_schema = node_class.config_model.model_json_schema()
            config_schema["title"] = node_type.display_name
            # Default of the config model's "has_fixed_output" field; assumed
            # present on every config model — a missing field would raise KeyError.
            has_fixed_output = node_class.config_model.model_fields["has_fixed_output"].default

            node_schema: Dict[str, Any] = {
                "name": node_type.node_type_name,
                "input": input_schema,
                "output": output_schema,
                "config": config_schema,
                "visual_tag": node_class.get_default_visual_tag().model_dump(),
                "has_fixed_output": has_fixed_output,
            }

            # Add model constraints if this is an LLM node
            if node_type.node_type_name in ["LLMNode", "SingleLLMCallNode"]:
                model_constraints = {}
                for model_enum in LLMModels:
                    model_info = LLMModels.get_model_info(model_enum.value)
                    if model_info:
                        model_constraints[model_enum.value] = model_info.constraints.model_dump()
                node_schema["model_constraints"] = model_constraints

            # Add the logo if available
            logo = node_type.logo
            if logo:
                node_schema["logo"] = logo

            # Optional UI category grouping.
            category = node_type.category
            if category:
                node_schema["category"] = category

            node_schemas.append(node_schema)
        response[group_name] = node_schemas

    return response
|
pyspur/backend/pyspur/api/openai_compatible_api.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timezone
|
| 2 |
+
from typing import Any, Dict, List, Optional, Union
|
| 3 |
+
|
| 4 |
+
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
|
| 8 |
+
from ..database import get_db
|
| 9 |
+
from ..models.workflow_model import WorkflowModel
|
| 10 |
+
from ..schemas.run_schemas import StartRunRequestSchema
|
| 11 |
+
from .workflow_run import run_workflow_blocking
|
| 12 |
+
|
| 13 |
+
router = APIRouter()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Define the request schema for OpenAI-compatible chat completions
class ChatCompletionRequest(BaseModel):
    """OpenAI-style chat-completion request.

    ``model`` carries a PySpur workflow id rather than an OpenAI model name.
    The remaining fields mirror OpenAI's API for client compatibility.
    """

    # Workflow id (stands in for the OpenAI model name).
    model: str
    # Chat history; each entry is expected to carry "role" and "content".
    messages: List[Dict[str, Any]]
    functions: Optional[List[Dict[str, Any]]] = None
    function_call: Optional[Union[Dict[str, Any], str]] = None
    temperature: float = 0.7
    top_p: float = 0.9
    n: int = 1
    stream: bool = False
    stop: Optional[Union[str, List[str]]] = None
    max_tokens: Optional[int] = None
    presence_penalty: float = 0.0
    frequency_penalty: float = 0.0
    logit_bias: Optional[Dict[str, float]] = None
    user: Optional[str] = None
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# Define the response schema for OpenAI-compatible chat completions
class ChatCompletionResponse(BaseModel):
    """OpenAI-style chat-completion response envelope."""

    # "chatcmpl-<timestamp>" identifier generated per request.
    id: str
    # Always "chat.completion" for this endpoint.
    object: str
    # Unix timestamp (seconds).
    created: int
    # Echoes the requested workflow id.
    model: str
    choices: List[Dict[str, Any]]
    # prompt_tokens / completion_tokens / total_tokens counters.
    usage: Dict[str, int]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@router.post(
    "/v1/chat/completions",
    response_model=ChatCompletionResponse,
    description="OpenAI-compatible chat completions endpoint",
)
async def chat_completions(
    request: ChatCompletionRequest,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
) -> ChatCompletionResponse:
    """
    Mimics OpenAI's /v1/chat/completions endpoint for chat-based workflows.

    The ``model`` field is interpreted as a PySpur workflow id; the latest
    "user" message is passed to the workflow as its ``message`` input and
    the run blocks until completion.

    NOTE(review): sampling fields (temperature, top_p, stream, ...) and the
    ``background_tasks`` parameter are accepted but unused here — confirm
    whether that is intentional API-surface compatibility.
    """
    # Fetch the workflow (model maps to workflow_id)
    workflow = db.query(WorkflowModel).filter(WorkflowModel.id == request.model).first()
    if not workflow:
        raise HTTPException(status_code=404, detail="Workflow not found")

    # Get the latest user message
    latest_user_message = next(
        (message["content"] for message in reversed(request.messages) if message["role"] == "user"),
        None,
    )
    if not latest_user_message:
        raise HTTPException(status_code=400, detail="No user message found in messages")

    # Prepare initial inputs with the latest user message
    initial_inputs = {"message": {"value": latest_user_message}}

    # Start a blocking workflow run with the initial inputs
    start_run_request = StartRunRequestSchema(
        initial_inputs=initial_inputs,
        parent_run_id=None,
    )
    outputs = await run_workflow_blocking(
        workflow_id=request.model,
        request=start_run_request,
        db=db,
        run_type="openai",
    )

    # Format the response with outputs from the workflow.
    # Token counts default to 0 when the workflow does not report usage.
    response = ChatCompletionResponse(
        id=f"chatcmpl-{datetime.now(timezone.utc).timestamp()}",
        object="chat.completion",
        created=int(datetime.now(timezone.utc).timestamp()),
        model=request.model,
        choices=[
            {
                "message": {
                    "role": "assistant",
                    "content": outputs.get("response", {}).get("value", ""),
                },
                "index": 0,
                "finish_reason": outputs.get("finish_reason", "stop"),
            }
        ],
        usage={
            "prompt_tokens": outputs.get("prompt_tokens", 0),
            "completion_tokens": outputs.get("completion_tokens", 0),
            "total_tokens": outputs.get("total_tokens", 0),
        },
    )
    return response
|
pyspur/backend/pyspur/api/openapi_management.py
ADDED
|
@@ -0,0 +1,180 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
from typing import Dict, List, Optional
|
| 4 |
+
from uuid import uuid4
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
|
| 9 |
+
router = APIRouter()
|
| 10 |
+
|
| 11 |
+
# Directory to store OpenAPI specs (relative to the process working directory)
OPENAPI_SPECS_DIR = "pyspur/openapi_specs"

# Ensure the directory exists
os.makedirs(OPENAPI_SPECS_DIR, exist_ok=True)
|
| 16 |
+
|
| 17 |
+
class OpenAPIEndpoint(BaseModel):
    """One operation (path + HTTP method) parsed out of an OpenAPI spec."""

    # e.g. "/users/{id}"
    path: str
    # Upper-cased HTTP verb.
    method: str
    summary: Optional[str] = None
    operationId: Optional[str] = None
    description: Optional[str] = None
    # Grouped path/query/header/body schemas built by create_openapi_spec.
    input_schema: Optional[Dict] = None
    # Response schemas keyed by status code.
    output_schema: Optional[Dict] = None
|
| 25 |
+
|
| 26 |
+
class OpenAPISpec(BaseModel):
    """A stored OpenAPI document plus the endpoint summaries parsed from it."""

    # UUID assigned at upload time; also the on-disk filename stem.
    id: str
    name: str
    description: str
    version: str
    endpoints: List[OpenAPIEndpoint]
    # The original uploaded spec, verbatim.
    raw_spec: Dict
|
| 33 |
+
|
| 34 |
+
class CreateSpecRequest(BaseModel):
    """Upload payload: the raw OpenAPI document as a JSON object."""

    spec: Dict
|
| 36 |
+
|
| 37 |
+
@router.post("/specs/", response_model=OpenAPISpec)
|
| 38 |
+
async def create_openapi_spec(request: CreateSpecRequest) -> OpenAPISpec:
|
| 39 |
+
"""Store an OpenAPI specification."""
|
| 40 |
+
try:
|
| 41 |
+
# Generate a unique ID for this spec
|
| 42 |
+
spec_id = str(uuid4())
|
| 43 |
+
|
| 44 |
+
# Extract basic info from the spec
|
| 45 |
+
info = request.spec.get("info", {})
|
| 46 |
+
|
| 47 |
+
# Parse all endpoints from the spec
|
| 48 |
+
endpoints: List[OpenAPIEndpoint] = []
|
| 49 |
+
for path, path_item in request.spec.get("paths", {}).items():
|
| 50 |
+
for method, operation in path_item.items():
|
| 51 |
+
# Extract input schema
|
| 52 |
+
input_schema: Dict = {"properties": {}}
|
| 53 |
+
|
| 54 |
+
# Path parameters
|
| 55 |
+
if operation.get("parameters"):
|
| 56 |
+
path_params = [p for p in operation["parameters"] if p.get("in") == "path"]
|
| 57 |
+
if path_params:
|
| 58 |
+
input_schema["properties"]["pathParameters"] = {
|
| 59 |
+
"type": "object",
|
| 60 |
+
"properties": {p["name"]: p.get("schema", {}) for p in path_params}
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
# Query parameters
|
| 64 |
+
if operation.get("parameters"):
|
| 65 |
+
query_params = [p for p in operation["parameters"] if p.get("in") == "query"]
|
| 66 |
+
if query_params:
|
| 67 |
+
input_schema["properties"]["queryParameters"] = {
|
| 68 |
+
"type": "object",
|
| 69 |
+
"properties": {p["name"]: p.get("schema", {}) for p in query_params}
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
# Header parameters
|
| 73 |
+
if operation.get("parameters"):
|
| 74 |
+
header_params = [p for p in operation["parameters"] if p.get("in") == "header"]
|
| 75 |
+
if header_params:
|
| 76 |
+
input_schema["properties"]["headerParameters"] = {
|
| 77 |
+
"type": "object",
|
| 78 |
+
"properties": {p["name"]: p.get("schema", {}) for p in header_params}
|
| 79 |
+
}
|
| 80 |
+
|
| 81 |
+
# Request body
|
| 82 |
+
if operation.get("requestBody"):
|
| 83 |
+
content = operation["requestBody"].get("content", {})
|
| 84 |
+
if content:
|
| 85 |
+
media_type = next(iter(content))
|
| 86 |
+
input_schema["properties"]["requestBody"] = {
|
| 87 |
+
"mediaType": media_type,
|
| 88 |
+
"schema": content[media_type].get("schema", {})
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
# Output schema
|
| 92 |
+
output_schema: Dict = {"properties": {}}
|
| 93 |
+
if operation.get("responses"):
|
| 94 |
+
for status_code, response in operation["responses"].items():
|
| 95 |
+
if response.get("content"):
|
| 96 |
+
media_type = next(iter(response["content"]))
|
| 97 |
+
output_schema["properties"][status_code] = {
|
| 98 |
+
"description": response.get("description", ""),
|
| 99 |
+
"mediaType": media_type,
|
| 100 |
+
"schema": response["content"][media_type].get("schema", {})
|
| 101 |
+
}
|
| 102 |
+
else:
|
| 103 |
+
output_schema["properties"][status_code] = {
|
| 104 |
+
"description": response.get("description", ""),
|
| 105 |
+
"mediaType": "application/json",
|
| 106 |
+
"schema": {}
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
endpoints.append(OpenAPIEndpoint(
|
| 110 |
+
path=path,
|
| 111 |
+
method=method.upper(),
|
| 112 |
+
summary=operation.get("summary"),
|
| 113 |
+
operationId=operation.get("operationId"),
|
| 114 |
+
description=operation.get("description"),
|
| 115 |
+
input_schema=input_schema,
|
| 116 |
+
output_schema=output_schema
|
| 117 |
+
))
|
| 118 |
+
|
| 119 |
+
spec_data = OpenAPISpec(
|
| 120 |
+
id=spec_id,
|
| 121 |
+
name=info.get("title", "Untitled API"),
|
| 122 |
+
description=info.get("description", ""),
|
| 123 |
+
version=info.get("version", "1.0.0"),
|
| 124 |
+
endpoints=endpoints,
|
| 125 |
+
raw_spec=request.spec
|
| 126 |
+
)
|
| 127 |
+
|
| 128 |
+
# Save the spec to a file
|
| 129 |
+
spec_path = os.path.join(OPENAPI_SPECS_DIR, f"{spec_id}.json")
|
| 130 |
+
with open(spec_path, "w") as f:
|
| 131 |
+
json.dump(spec_data.dict(), f, indent=2)
|
| 132 |
+
|
| 133 |
+
return spec_data
|
| 134 |
+
except Exception as e:
|
| 135 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 136 |
+
|
| 137 |
+
@router.get("/specs/", response_model=List[OpenAPISpec])
|
| 138 |
+
async def list_openapi_specs() -> List[OpenAPISpec]:
|
| 139 |
+
"""List all stored OpenAPI specifications."""
|
| 140 |
+
try:
|
| 141 |
+
specs = []
|
| 142 |
+
for filename in os.listdir(OPENAPI_SPECS_DIR):
|
| 143 |
+
if filename.endswith(".json"):
|
| 144 |
+
with open(os.path.join(OPENAPI_SPECS_DIR, filename)) as f:
|
| 145 |
+
spec_data = json.load(f)
|
| 146 |
+
specs.append(OpenAPISpec(**spec_data))
|
| 147 |
+
return specs
|
| 148 |
+
except Exception as e:
|
| 149 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 150 |
+
|
| 151 |
+
@router.get("/specs/{spec_id}", response_model=OpenAPISpec)
|
| 152 |
+
async def get_openapi_spec(spec_id: str) -> OpenAPISpec:
|
| 153 |
+
"""Get a specific OpenAPI specification by ID."""
|
| 154 |
+
try:
|
| 155 |
+
spec_path = os.path.join(OPENAPI_SPECS_DIR, f"{spec_id}.json")
|
| 156 |
+
if not os.path.exists(spec_path):
|
| 157 |
+
raise HTTPException(status_code=404, detail="Specification not found")
|
| 158 |
+
|
| 159 |
+
with open(spec_path) as f:
|
| 160 |
+
spec_data = json.load(f)
|
| 161 |
+
return OpenAPISpec(**spec_data)
|
| 162 |
+
except HTTPException:
|
| 163 |
+
raise
|
| 164 |
+
except Exception as e:
|
| 165 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 166 |
+
|
| 167 |
+
@router.delete("/specs/{spec_id}")
|
| 168 |
+
async def delete_openapi_spec(spec_id: str) -> Dict[str, str]:
|
| 169 |
+
"""Delete a specific OpenAPI specification by ID."""
|
| 170 |
+
try:
|
| 171 |
+
spec_path = os.path.join(OPENAPI_SPECS_DIR, f"{spec_id}.json")
|
| 172 |
+
if not os.path.exists(spec_path):
|
| 173 |
+
raise HTTPException(status_code=404, detail="Specification not found")
|
| 174 |
+
|
| 175 |
+
os.remove(spec_path)
|
| 176 |
+
return {"message": "Specification deleted successfully"}
|
| 177 |
+
except HTTPException:
|
| 178 |
+
raise
|
| 179 |
+
except Exception as e:
|
| 180 |
+
raise HTTPException(status_code=500, detail=str(e))
|
pyspur/backend/pyspur/api/output_file_management.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List
|
| 2 |
+
|
| 3 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 4 |
+
from fastapi.responses import FileResponse
|
| 5 |
+
from sqlalchemy.orm import Session
|
| 6 |
+
|
| 7 |
+
from ..database import get_db
|
| 8 |
+
from ..models.output_file_model import OutputFileModel
|
| 9 |
+
from ..schemas.output_file_schemas import OutputFileResponseSchema
|
| 10 |
+
|
| 11 |
+
router = APIRouter()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@router.get(
    "/",
    response_model=List[OutputFileResponseSchema],
    description="List all output files",
)
def list_output_files(
    db: Session = Depends(get_db),
) -> List[OutputFileResponseSchema]:
    """Return metadata for every stored output file."""
    records = db.query(OutputFileModel).all()
    responses: List[OutputFileResponseSchema] = []
    for record in records:
        responses.append(
            OutputFileResponseSchema(
                id=record.id,
                file_name=record.file_name,
                created_at=record.created_at,
                updated_at=record.updated_at,
            )
        )
    return responses
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
@router.get(
    "/{output_file_id}/",
    response_model=OutputFileResponseSchema,
    description="Get an output file by ID",
)
def get_output_file(output_file_id: str, db: Session = Depends(get_db)) -> OutputFileResponseSchema:
    """Fetch a single output file's metadata; 404 if it does not exist."""
    record = (
        db.query(OutputFileModel)
        .filter(OutputFileModel.id == output_file_id)
        .first()
    )
    if record is None:
        raise HTTPException(status_code=404, detail="Output file not found")
    return OutputFileResponseSchema(
        id=record.id,
        file_name=record.file_name,
        created_at=record.created_at,
        updated_at=record.updated_at,
    )
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@router.delete(
    "/{output_file_id}/",
    description="Delete an output file by ID",
)
def delete_output_file(output_file_id: str, db: Session = Depends(get_db)):
    """Delete the database record for an output file; 404 if absent."""
    record = (
        db.query(OutputFileModel)
        .filter(OutputFileModel.id == output_file_id)
        .first()
    )
    if record is None:
        raise HTTPException(status_code=404, detail="Output file not found")
    db.delete(record)
    db.commit()
    return {"message": "Output file deleted"}
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# download_output_file endpoint
@router.get(
    "/{output_file_id}/download/",
    description="Download an output file by ID",
)
def download_output_file(output_file_id: str, db: Session = Depends(get_db)):
    """Send the stored file to the client as an attachment.

    The media type is inferred from the file extension; unknown extensions
    fall back to a generic binary stream.

    Raises:
        HTTPException: 404 when the output file record does not exist.
    """
    output_file = db.query(OutputFileModel).filter(OutputFileModel.id == output_file_id).first()
    if not output_file:
        raise HTTPException(status_code=404, detail="Output file not found")

    # Extension -> media type lookup (replaces the endswith if/elif chain).
    media_types = {
        ".csv": "text/csv",
        ".json": "application/json",
        ".txt": "text/plain",
        ".jsonl": "application/x-ndjson",
    }
    media_type = "application/octet-stream"
    for suffix, candidate in media_types.items():
        if output_file.file_name.endswith(suffix):
            media_type = candidate
            break

    # Fix: let FileResponse build the Content-Disposition header itself.
    # Passing filename= with content_disposition_type="attachment" emits a
    # correctly escaped header, whereas the previous hand-built headers=
    # dict duplicated the header and was unquoted (broken for names with
    # spaces or special characters).
    return FileResponse(
        output_file.file_path,
        media_type=media_type,
        filename=output_file.file_name,
        content_disposition_type="attachment",
    )
|