isaaclk907 commited on
Commit
9da02e1
·
verified ·
1 Parent(s): 4fc8b2e

Update requirements.txt - Add Technology Startup specialization and multi-AI provider support

Browse files
Files changed (1) hide show
  1. requirements.txt +21 -19
requirements.txt CHANGED
@@ -1,19 +1,21 @@
1
- # BIOS-kernel-v1 — Python dependencies
2
- # pip install -r requirements.txt
3
-
4
- # ── Core ─────────────────────────────────────────────────────
5
- psycopg[binary]>=3.1.0 # NeonDB / PostgreSQL (psycopg v3)
6
-
7
- # ── LLM Backends (install whichever you use) ─────────────────
8
- groq>=0.9.0 # Groq API (llama-3.3-70b, fastest free option)
9
- huggingface_hub>=0.24.0 # HuggingFace Inference API + BIOS-Insight-v1
10
- anthropic>=0.34.0 # Anthropic Claude (fallback)
11
-
12
- # ── Local inference (optional, GPU required) ─────────────────
13
- # transformers>=4.44.0
14
- # torch>=2.3.0
15
- # accelerate>=0.31.0
16
- # bitsandbytes>=0.43.0 # for 4-bit quantisation
17
-
18
- # ── Utilities ─────────────────────────────────────────────────
19
- python-dotenv>=1.0.0 # load .env files
 
 
 
1
+ # BIOS-kernel-v1 — Python dependencies
2
+ # pip install -r requirements.txt
3
+
4
+ # ── Core ─────────────────────────────────────────────────────
5
+ psycopg[binary]>=3.1.0 # NeonDB / PostgreSQL (psycopg v3)
6
+
7
+ # ── LLM Backends (install whichever you use) ─────────────────
8
+ groq>=0.9.0 # Groq API (llama-3.3-70b, fastest free option)
9
+ huggingface_hub>=0.24.0 # HuggingFace Inference API + BIOS-Insight-v1
10
+ openai>=1.0.0 # OpenAI API (GPT-4, GPT-3.5)
11
+ google-generativeai>=0.8.0 # Gemini API (Gemini Pro)
12
+ anthropic>=0.34.0 # Anthropic Claude (fallback)
13
+
14
+ # ── Local inference (optional, GPU required) ─────────────────
15
+ # transformers>=4.44.0
16
+ # torch>=2.3.0
17
+ # accelerate>=0.31.0
18
+ # bitsandbytes>=0.43.0 # for 4-bit quantisation
19
+
20
+ # ── Utilities ─────────────────────────────────────────────────
21
+ python-dotenv>=1.0.0 # load .env files