pradipGiriHugging committed on
Commit
4c9238e
·
verified ·
1 Parent(s): 7975d75

Upload 7 files

Browse files
Files changed (7) hide show
  1. debug_credentials.js +38 -0
  2. index.js +102 -0
  3. model.js +12 -0
  4. package-lock.json +0 -0
  5. package.json +26 -0
  6. test_raw.js +26 -0
  7. workflow.js +52 -0
debug_credentials.js ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
const { QdrantClient } = require("@qdrant/js-client-rest");
require('dotenv').config({ path: './.env' });

/**
 * Attempts to connect to a Qdrant instance and list its collections.
 * @param {string} url - Base URL of the Qdrant server.
 * @param {string} key - API key for authentication.
 * @returns {Promise<boolean>} true if the connection succeeded.
 */
async function check(url, key) {
  console.log(`\nTesting URL: ${url}`);
  const client = new QdrantClient({
    url: url,
    apiKey: key,
    checkCompatibility: false
  });
  try {
    await client.getCollections();
    console.log("SUCCESS: Connected!");
    return true;
  } catch (e) {
    // ?? instead of ||: a (hypothetical) status of 0 should still print.
    console.log(`FAILED (${e.status ?? '?'}) : ${e.message}`);
    return false;
  }
}

/**
 * Diagnoses Qdrant connectivity problems: tests the configured URL as
 * given and, if it carries an explicit :6333 port, without the port
 * (managed/cloud endpoints often expect the portless form).
 */
async function debug() {
  const originalUrl = process.env.QDRANT_URL;
  const key = process.env.QDRANT_API_KEY;

  // Fail fast with a clear message instead of a TypeError on
  // .includes() when the .env file is missing or incomplete.
  if (!originalUrl) {
    console.error("QDRANT_URL is not set in .env — nothing to test.");
    return;
  }

  // Test 1: As provided
  await check(originalUrl, key);

  // Test 2: Without Port
  if (originalUrl.includes(':6333')) {
    const noPortUrl = originalUrl.replace(':6333', '');
    const success = await check(noPortUrl, key);
    if (success) {
      console.log("\nRECOMMENDATION: Remove ':6333' from your QDRANT_URL in .env");
    }
  }
}

debug();
index.js ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// ---- Server bootstrap -------------------------------------------------
const express = require('express');
const cors = require('cors');
const path = require('path');

// Load environment variables relative to this file, not the process CWD.
require('dotenv').config({ path: path.resolve(__dirname, '.env') });

const app = express();
const PORT = process.env.PORT || 3000;

// Global middleware: allow cross-origin requests, parse JSON bodies.
app.use(cors());
app.use(express.json());

// Compiled LangGraph workflow (exported as `app` from workflow.js).
const { app: workflow } = require('./workflow');

// RAG helpers: one store/chat pair per agent role.
const { storeResearch, chatResearch } = require('./rag/researchRag');
const { storeInsights, chatInsights } = require('./rag/synthesisRag');
const { storeSummary, chatExecutive } = require('./rag/executiveRag');
17
+
18
/**
 * GET /api/analyze?query=...
 *
 * Streams the multi-agent workflow over Server-Sent Events. Each graph
 * node update is forwarded to the client as a typed event and stored
 * (fire-and-forget) in the matching RAG collection.
 */
app.get('/api/analyze', async (req, res) => {
  const { query } = req.query;

  if (!query) {
    return res.status(400).json({ error: 'Query is required' });
  }

  // Set SSE headers and flush them immediately so the client (and any
  // intermediate proxy) starts treating this response as an event
  // stream before the first chunk is produced.
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  try {
    console.log(`Starting workflow stream for: ${query}`);

    // Reset or clear previous collections if needed?
    // For now we just append. In a real app we might use session IDs.

    const stream = await workflow.stream({ query });

    // LangGraph yields chunks shaped like { nodeName: { key: val } };
    // emit one typed SSE event per node update.
    for await (const chunk of stream) {
      if (chunk.researchNode) {
        const content = chunk.researchNode.research;
        res.write(`data: ${JSON.stringify({ type: 'research', content })}\n\n`);
        // Async store to RAG — intentionally not awaited so storage
        // latency never stalls the stream; failures are only logged.
        storeResearch(content).catch(err => console.error("RAG Store Error:", err));
      }
      if (chunk.synthesisNode) {
        const content = chunk.synthesisNode.insights;
        res.write(`data: ${JSON.stringify({ type: 'insights', content })}\n\n`);
        storeInsights(content).catch(err => console.error("RAG Store Error:", err));
      }
      if (chunk.executiveNode) {
        const content = chunk.executiveNode.executiveSummary;
        res.write(`data: ${JSON.stringify({ type: 'executiveSummary', content })}\n\n`);
        storeSummary(content).catch(err => console.error("RAG Store Error:", err));
      }
    }

    res.write('data: [DONE]\n\n');
    res.end();
  } catch (error) {
    // Headers are already sent, so report the failure in-stream rather
    // than with an HTTP status code.
    console.error('Error in workflow stream:', error);
    res.write(`data: ${JSON.stringify({ error: 'Internal server error' })}\n\n`);
    res.end();
  }
});
68
+
69
/**
 * POST /api/chat
 * Body: { agentRole: 'research'|'synthesis'|'executive', message, history }
 * Routes the message to the RAG chat handler for the requested agent.
 */
app.post('/api/chat', async (req, res) => {
  const { agentRole, message, history } = req.body;

  if (!message || !agentRole) {
    return res.status(400).json({ error: "Message and agentRole are required" });
  }

  // Dispatch table instead of a switch: each role maps to its handler.
  const handlers = {
    research: chatResearch,
    synthesis: chatInsights,
    executive: chatExecutive,
  };

  // Object.hasOwn guards against prototype keys (e.g. "toString") so
  // unknown roles get the same 400 as the original switch default.
  if (!Object.hasOwn(handlers, agentRole)) {
    return res.status(400).json({ error: "Invalid agent role" });
  }

  try {
    const reply = await handlers[agentRole](message, history);
    res.json({ reply });
  } catch (error) {
    console.error("Chat API Error:", error);
    res.status(500).json({ error: "Failed to process chat message" });
  }
});
98
+
99
// Start the HTTP server and log basic startup diagnostics (the key
// itself is never printed — only whether it is present).
app.listen(PORT, () => {
  console.log(`Server running on port ${PORT}`);
  console.log(`OpenAI API Key present: ${process.env.OPENAI_API_KEY ? 'Yes' : 'No'}`);
});
model.js ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
const { ChatOpenAI } = require("@langchain/openai");
const path = require('path');
require('dotenv').config({ path: path.resolve(__dirname, '.env') });

// Initialize the shared chat model.
// The model name can be overridden via OPENAI_MODEL in .env; it defaults
// to "gpt-4o" (use e.g. "gpt-3.5-turbo" to trade quality for cost).
const model = new ChatOpenAI({
  modelName: process.env.OPENAI_MODEL || "gpt-4o",
  temperature: 0.7,
});

module.exports = { model };
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "server",
3
+ "version": "1.0.0",
4
+ "description": "",
5
+ "main": "index.js",
6
+ "scripts": {
7
+ "test": "echo \"Error: no test specified\" && exit 1"
8
+ },
9
+ "keywords": [],
10
+ "author": "",
11
+ "license": "ISC",
12
+ "dependencies": {
13
+ "@langchain/community": "^1.1.5",
14
+ "@langchain/core": "^1.1.16",
15
+ "@langchain/langgraph": "^1.1.1",
16
+ "@langchain/openai": "^1.2.3",
17
+ "@langchain/qdrant": "^1.0.1",
18
+ "@langchain/textsplitters": "^1.0.1",
19
+ "@qdrant/js-client-rest": "^1.16.2",
20
+ "cors": "^2.8.5",
21
+ "duck-duck-scrape": "^2.2.7",
22
+ "express": "^5.2.1",
23
+ "express-sse": "^1.0.0",
24
+ "langchain": "^1.2.11"
25
+ }
26
+ }
test_raw.js ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
require('dotenv').config({ path: './.env' });

/**
 * Smoke-tests the Qdrant REST endpoint with a raw fetch, bypassing the
 * client library, to isolate credential/URL problems.
 */
async function testRaw() {
  const rawUrl = process.env.QDRANT_URL;
  const key = process.env.QDRANT_API_KEY;

  // Guard: .replace() on undefined throws a confusing TypeError when
  // the .env file is missing or incomplete — abort with a clear message.
  if (!rawUrl) {
    console.error("QDRANT_URL is not set in .env — aborting.");
    return;
  }

  const url = rawUrl.replace(/\/$/, ''); // remove trailing slash
  const cleanUrl = url.replace(':6333', ''); // Try without port for Cloud

  console.log(`Trying: ${cleanUrl}/collections`);

  try {
    const response = await fetch(`${cleanUrl}/collections`, {
      method: 'GET',
      headers: {
        'api-key': key
      }
    });

    console.log(`Status: ${response.status}`);
    const text = await response.text();
    console.log(`Body: ${text}`);
  } catch (e) {
    console.error("Fetch failed:", e);
  }
}

testRaw();
workflow.js ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
const { StateGraph, END } = require("@langchain/langgraph");
const { researchAgent } = require('./agents/researchAgent');
const { synthesisAgent } = require('./agents/synthesisAgent');
const { executiveAgent } = require('./agents/executiveAgent');

// Reducer shared by every state channel: keep the latest non-falsy
// update. NOTE: a falsy update (e.g. "") deliberately preserves the
// prior value rather than clearing it.
const keepLatest = (x, y) => y ? y : x;

// Define the graph state channels. Each agent appends/updates its own
// key as the workflow progresses. (The previously duplicated per-channel
// reducer lambdas and the unused `graphState` template were removed.)
const workflow = new StateGraph({
  channels: {
    query: { value: keepLatest, default: () => "" },
    research: { value: keepLatest, default: () => "" },
    insights: { value: keepLatest, default: () => "" },
    executiveSummary: { value: keepLatest, default: () => "" }
  }
});

// Add nodes — one per agent in the pipeline.
workflow.addNode("researchNode", researchAgent);
workflow.addNode("synthesisNode", synthesisAgent);
workflow.addNode("executiveNode", executiveAgent);

// Wire the linear pipeline: research → synthesis → executive → END.
workflow.setEntryPoint("researchNode");
workflow.addEdge("researchNode", "synthesisNode");
workflow.addEdge("synthesisNode", "executiveNode");
workflow.addEdge("executiveNode", END);

// Compile the graph
const app = workflow.compile();

// Export the app directly to allow streaming
module.exports = { app };