gauthamnairy committed on
Commit
8f30584
·
verified ·
1 Parent(s): acf6445
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ ai_assistant.db filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Container image for the Flask AI-assistant app (app.py).
FROM python:3.9

WORKDIR /code

# Copy requirements first so dependency installation is layer-cached
# independently of source changes.
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

COPY . /code

# Ensure the templates folder is copied
# NOTE(review): `COPY . /code` above already includes ./templates unless a
# .dockerignore excludes it — confirm whether this extra COPY is needed.
COPY ./templates /code/templates

# app.py binds 0.0.0.0 on $PORT (default 7860).
CMD ["python", "app.py"]
ai_assistant.db ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f886ff11abcf7f9b7c22c0c3bc5b657334a06e82607ef2103b3778c46f82f5eb
3
+ size 3715072
app.py ADDED
@@ -0,0 +1,472 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Standard library
import base64
import io
import json
import logging
import os
import sqlite3
import tempfile

# Third-party
import certifi
import docx
import google.generativeai as genai
import numpy as np
import pandas as pd
import plotly
import plotly.express as px  # For interactive charts
import PyPDF2
import requests
from flask import Flask, request, jsonify, render_template
from flask_cors import CORS
from langchain.chains.question_answering import load_qa_chain
from langchain.docstore.document import Document
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_experimental.agents import create_csv_agent  # Updated import statement
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
from langchain_google_genai.llms import GoogleGenerativeAI  # Updated import statement
from newsapi import NewsApiClient
from sklearn.metrics.pairwise import cosine_similarity
from werkzeug.utils import secure_filename
28
+
29
app = Flask(__name__)
CORS(app)

# Set up logging
logging.basicConfig(level=logging.INFO)

# SECURITY: API keys were previously hard-coded here and are now leaked in
# version control — rotate them.  The environment takes precedence; the old
# literals remain only as a fallback so existing deployments keep working.
os.environ.setdefault('GOOGLE_API_KEY', 'AIzaSyD7KJK0GDMh_R9GbE0j7FFyCZIl1BGrpgg')
genai.configure(api_key=os.environ['GOOGLE_API_KEY'])

NEWSAPI_KEY = os.environ.get('NEWSAPI_KEY', 'dd9befe361ba4df8bbbf84283db7a373')
# Route NewsAPI traffic through a session that verifies TLS via certifi's CA bundle.
session = requests.Session()
session.verify = certifi.where()
newsapi = NewsApiClient(api_key=NEWSAPI_KEY)
newsapi.session = session

# Initialize the generative model used for analyses, summaries and chat answers.
model = genai.GenerativeModel('gemini-pro')

UPLOAD_FOLDER = 'uploads'
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'docx', 'xlsx', 'csv'}

# exist_ok avoids the check-then-create race the original exists()/makedirs had.
os.makedirs(UPLOAD_FOLDER, exist_ok=True)

app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER

# Database setup
DATABASE = 'ai_assistant.db'
58
+
59
def get_db():
    """Open a connection to the app's SQLite database.

    Rows come back as sqlite3.Row so columns are addressable by name.
    """
    connection = sqlite3.connect(DATABASE)
    connection.row_factory = sqlite3.Row
    return connection
63
+
64
def init_db():
    """Create the ``files`` and ``chunks`` tables if they do not exist.

    ``files`` holds one row per upload (base64-encoded bytes plus the
    Gemini analysis text); ``chunks`` holds the split text chunks with
    their JSON-encoded embeddings, linked to ``files`` via ``file_id``.
    """
    logging.info("Initializing database...")
    with app.app_context():
        db = get_db()
        db.execute('''CREATE TABLE IF NOT EXISTS files (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            filename TEXT NOT NULL,
            file_data TEXT NOT NULL,
            analysis TEXT,
            timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
        )''')
        db.execute('''CREATE TABLE IF NOT EXISTS chunks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            file_id INTEGER,
            content TEXT NOT NULL,
            embedding TEXT NOT NULL,
            FOREIGN KEY (file_id) REFERENCES files (id)
        )''')
        db.commit()
    logging.info("Database initialized successfully.")
84
+
85
def allowed_file(filename):
    """Return True when *filename* has an extension in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rpartition('.')[2].lower()
    return extension in ALLOWED_EXTENSIONS
87
+
88
def convert_excel_to_csv(file_path):
    """Convert an Excel workbook to a sibling .csv file; return the CSV path."""
    logging.info(f"Converting Excel to CSV: {file_path}")
    csv_path = file_path.rsplit('.', 1)[0] + '.csv'
    pd.read_excel(file_path).to_csv(csv_path, index=False)
    return csv_path
94
+
95
def process_document(file_path):
    """Extract plain text from a document, dispatching on file extension.

    Supports .pdf, .docx, .xlsx (converted to CSV first) and .csv; any
    other extension is read as UTF-8 text.  Returns a single string.
    """
    logging.info(f"Processing document: {file_path}")
    if file_path.endswith('.pdf'):
        return extract_text_from_pdf(file_path)
    if file_path.endswith('.docx'):
        return extract_text_from_docx(file_path)
    if file_path.endswith('.xlsx'):
        # Excel is normalised to CSV so one extraction path handles both.
        csv_path = convert_excel_to_csv(file_path)
        return extract_text_from_csv(csv_path)
    if file_path.endswith('.csv'):
        return extract_text_from_csv(file_path)
    # Fallback: plain text.  Explicit encoding avoids the platform-dependent
    # locale default the original relied on; errors='replace' keeps a stray
    # non-UTF-8 byte from aborting the whole upload.
    with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
        return f.read()
109
+
110
def extract_text_from_pdf(file_path):
    """Return the concatenated text of every page in a PDF.

    ``page.extract_text()`` may return None (e.g. image-only pages); that
    is coerced to '' so concatenation cannot raise TypeError, which the
    original did.  Pages are joined with trailing newlines.
    """
    logging.info(f"Extracting text from PDF: {file_path}")
    parts = []
    with open(file_path, 'rb') as file:
        reader = PyPDF2.PdfReader(file)
        for page in reader.pages:
            parts.append((page.extract_text() or "") + "\n")
    # join() avoids the quadratic repeated "text +=" of the original.
    return "".join(parts)
118
+
119
def extract_text_from_docx(file_path):
    """Return the text of every paragraph in a .docx file, newline-joined."""
    logging.info(f"Extracting text from DOCX: {file_path}")
    document = docx.Document(file_path)
    return "\n".join(paragraph.text for paragraph in document.paragraphs)
123
+
124
def extract_text_from_csv(file_path):
    """Load a CSV with pandas and render it as a plain-text table."""
    logging.info(f"Extracting text from CSV: {file_path}")
    frame = pd.read_csv(file_path)
    return frame.to_string()
128
+
129
def get_text_chunks(text):
    """Split *text* into ~1000-character chunks with 200-character overlap."""
    logging.info("Chunking text for vector store...")
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    return splitter.split_text(text)
134
+
135
def create_vector_store(text_chunks, file_id):
    """Embed each chunk with Gemini and persist it in the ``chunks`` table.

    Embeddings are stored as JSON-encoded lists alongside the chunk text,
    keyed by *file_id*, so retrieval can re-load and compare them later.
    """
    logging.info("Creating vector store in SQLite...")
    embedder = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    db = get_db()
    cursor = db.cursor()

    for chunk in text_chunks:
        vector = embedder.embed_query(chunk)
        cursor.execute('INSERT INTO chunks (file_id, content, embedding) VALUES (?, ?, ?)',
                       (file_id, chunk, json.dumps(vector)))

    db.commit()
    logging.info("Vector store created in SQLite.")
148
+
149
def get_conversational_chain():
    """Build a 'stuff'-type QA chain over Gemini for grounded answering.

    The prompt instructs the model to answer from the supplied context,
    infer cautiously when the context only implies an answer, and admit
    uncertainty for unrelated questions.
    """
    prompt_template = """
    Answer the question as detailed as possible from the provided context. If the answer is not directly
    available in the provided context, use your knowledge to infer a reasonable answer based on the given information.
    If you're unsure or the question is completely unrelated to the context, state that you don't have enough information to answer accurately.

    Context:\n{context}\n
    Question:\n{question}\n
    Answer:
    """
    # NOTE: this local `model` intentionally shadows the module-level Gemini
    # client; the chain needs the LangChain chat wrapper, not the raw client.
    # temperature=0.3 keeps answers mostly deterministic.
    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    # chain_type="stuff" stuffs the whole context into a single prompt.
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
    return chain
163
+
164
def answer_query_from_document(user_question, file_id):
    """Answer *user_question* via RAG over the stored chunks of *file_id*.

    Embeds the question, ranks the file's chunks by cosine similarity,
    feeds the top 5 as context to the Gemini QA chain, and returns the
    chain's answer text.
    """
    logging.info("Answering query from SQLite...")
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    query_embedding = np.array(embeddings.embed_query(user_question))

    db = get_db()
    cursor = db.cursor()
    cursor.execute('SELECT id, content, embedding FROM chunks WHERE file_id = ?', (file_id,))
    chunks = cursor.fetchall()

    # Guard: cosine_similarity raises on an empty matrix, so the original
    # crashed for an unknown file_id or an un-indexed file.
    if not chunks:
        return "No indexed content found for this document."

    # Calculate cosine similarity between the question and every stored chunk.
    chunk_embeddings = [np.array(json.loads(chunk['embedding'])) for chunk in chunks]
    similarities = cosine_similarity([query_embedding], chunk_embeddings)[0]

    # Sort chunks by similarity and keep the 5 best as context.
    sorted_chunks = sorted(zip(chunks, similarities), key=lambda x: x[1], reverse=True)
    top_chunks = sorted_chunks[:5]
    context = ' '.join([chunk[0]['content'] for chunk in top_chunks])

    chain = get_conversational_chain()
    response = chain.invoke({"input_documents": [Document(page_content=context)], "question": user_question})
    return response["output_text"]
190
+
191
def analyze_document(text):
    """Ask Gemini for a key-point summary of *text* (first 4000 chars only)."""
    logging.info(f"Analyzing document with text: {text[:200]}...")  # Log first 200 chars
    prompt = f"Analyze the following document and provide a summary of its key points and any important insights:\n\n{text[:4000]}"
    result = model.generate_content(prompt)
    logging.info("Document analysis completed.")
    return result.text
197
+
198
def process_query(query, role=None, file_id=None):
    """Answer *query* from a stored document or directly through Gemini.

    When *file_id* is given the answer is grounded in that document's
    chunks; otherwise a role-flavoured free-form prompt is sent.
    """
    logging.info(f"Processing query: {query}, role: {role}, file_id: {file_id}")
    if file_id:
        return answer_query_from_document(query, file_id)

    # Free-form path: flavour the system prompt with the selected role.
    if role:
        system_prompt = f"You are an AI assistant specializing in {role}."
    else:
        system_prompt = "You are a helpful AI assistant."

    prompt = f'''
    {system_prompt}

    Query: "{query}"

    Requirements:
    - Use a friendly yet professional tone.
    - Ensure the response is accurate and directly addresses the question.
    - Include relevant examples, definitions, or comparisons to enhance clarity.
    - Format the response in well-structured paragraphs or bullet points with bold headings when appropriate.
    - Use markdown formatting for code snippets, emphasis, and structure.
    - Aim for a comprehensive response that fully addresses the query.
    '''

    logging.info("Generating content...")
    generated_text = model.generate_content(prompt).text
    logging.info("Content generated successfully.")
    return generated_text
225
+
226
def get_energy_news(query):
    """Fetch up to 10 recent English articles for *query* from NewsAPI.

    Returns the article list, or an empty list (after logging) on any error.
    """
    try:
        result = newsapi.get_everything(q=query, language='en', sort_by='publishedAt', page_size=10)
        return result['articles']
    except Exception as e:
        logging.error(f"Error fetching news: {e}")
        return []
233
+
234
def summarize_article(article):
    """Return a short Gemini summary of a NewsAPI article dict.

    Falls back to a fixed message when generation fails.
    """
    title = article.get('title', 'No title')
    content = article.get('description', '') or article.get('content', '') or ''
    prompt = f"""
    Summarize the following news article in 3-4 lines:

    Title: {title}
    Content: {content}
    """
    try:
        response = model.generate_content(prompt)
        return response.text.strip()
    except Exception as e:
        logging.error(f"Error summarizing article: {e}")
        return "Unable to generate summary."
249
+
250
def filter_and_analyze_news(query, articles):
    """Keep only articles Gemini judges relevant to *query*; attach analyses.

    Each kept item is a dict with 'title', 'link' and 'analysis' (the model
    output minus its leading YES marker).  The result is capped at 10.
    """
    relevant = []

    for article in articles:
        title = article.get('title', 'No title')
        content = article.get('description', '') or article.get('content', '') or ''

        prompt = f"""
        Analyze the following news article in the context of the energy market:

        Query: {query}
        Title: {title}
        Content: {content}

        Is this article directly relevant to "{query}" in the context of the energy market?
        Answer ONLY 'YES' or 'NO', followed by a brief explanation.

        If YES, provide:
        1. A concise 2-3 sentence summary of the news.
        2. Key points (up to 3 bullet points).
        3. Specific impact on the energy market related to {query} (1-2 sentences).
        """

        try:
            analysis = model.generate_content(prompt).text.strip()
            if analysis.startswith("YES"):
                relevant.append({
                    'title': title,
                    'link': article.get('url', '#'),
                    'analysis': analysis.split("YES", 1)[1].strip()
                })
                if len(relevant) >= 10:
                    break
        except Exception as e:
            logging.error(f"Error analyzing article: {e}")

    return relevant
290
+
291
def generate_market_summary(query, filtered_news):
    """Condense the per-article analyses into one market summary via Gemini.

    Returns a fixed no-news message for an empty list, and an error message
    when generation fails.
    """
    if not filtered_news:
        return f"No relevant news found for '{query}' in the energy market context."

    combined_summary = "\n\n".join(item.get('analysis', '') for item in filtered_news)

    prompt = f"""
    Based on the following summaries of recent news articles related to '{query}' in the energy market:

    {combined_summary}

    Provide a concise market summary that:
    1. Highlights the current trends and developments related to {query} in the energy market.
    2. Identifies any significant impacts or potential changes in the market.
    3. Mentions any notable events or decisions affecting this area.

    Keep the summary focused on factual information derived from the news articles, without adding speculation or personal opinions.
    """

    try:
        return model.generate_content(prompt).text.strip()
    except Exception as e:
        logging.error(f"Error generating market summary: {e}")
        return f"Unable to generate market summary for '{query}' due to an error."
317
+
318
@app.route('/')
def index():
    """Serve the single-page chat UI (templates/index.html)."""
    return render_template('index.html')
321
+
322
@app.route('/query', methods=['POST'])
def query():
    """Answer a user query, optionally grounded in client-supplied news.

    JSON body: {query, role?, file_id?, newsContext?}.  Returns
    {'response': ...}, or {'error': ...} with status 400 (missing query)
    or 500 (processing failure).
    """
    # silent=True yields None (not an abort) for malformed/absent JSON so we
    # can return a structured error instead of Flask's default 400 page.
    data = request.get_json(silent=True) or {}
    query = data.get('query')
    role = data.get('role')
    file_id = data.get('file_id')
    news_context = data.get('newsContext')
    if not query:
        # Fail fast rather than sending an empty prompt to the model.
        return jsonify({'error': 'Missing "query" in request body'}), 400
    try:
        logging.info(f"Received query: {query}, role: {role}, file_id: {file_id}")

        if role == 'AI News Analyst' and news_context:
            # Handle news-related queries with context
            prompt = f"""
            As an AI News Analyst specializing in the energy market, answer the following question based on the provided news context:

            News Context:
            {json.dumps(news_context, indent=2)}

            Question: {query}

            Provide a concise and informative response, using the provided news context to support your answer.
            """
            response = model.generate_content(prompt)
            return jsonify({'response': response.text})
        else:
            # Handle regular queries as before
            response = process_query(query, role, file_id)
            return jsonify({'response': response})
    except Exception as e:
        logging.error(f"Error in /query route: {str(e)}", exc_info=True)
        return jsonify({'error': str(e)}), 500
353
+
354
@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a document upload, index it, and return its Gemini analysis.

    Stores the raw file (base64) plus its analysis in ``files`` and the
    chunk embeddings in ``chunks``.  Responds with {'file_id', 'analysis'}
    on success, or {'error': ...} with 400/500 on failure.
    """
    if 'file' not in request.files:
        return jsonify({'error': 'No file part'}), 400
    file = request.files['file']
    if file.filename == '':
        return jsonify({'error': 'No selected file'}), 400
    if not (file and allowed_file(file.filename)):
        return jsonify({'error': 'Invalid file type'}), 400

    filename = secure_filename(file.filename)
    file_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
    file.save(file_path)

    try:
        # Original log said "(unknown)" literally; report the real filename.
        logging.info(f"File uploaded successfully: {filename}")
        extracted_text = process_document(file_path)
        text_chunks = get_text_chunks(extracted_text)
        analysis = analyze_document(extracted_text)

        db = get_db()
        with open(file_path, 'rb') as f:
            file_data = f.read()
        file_data_base64 = base64.b64encode(file_data).decode('utf-8')
        cursor = db.execute('INSERT INTO files (filename, file_data, analysis) VALUES (?, ?, ?)',
                            (filename, file_data_base64, analysis))
        file_id = cursor.lastrowid
        db.commit()

        create_vector_store(text_chunks, file_id)

        logging.info(f"File processing completed and saved to database with ID: {file_id}")
        return jsonify({'file_id': file_id, 'analysis': analysis})
    except Exception as e:
        logging.error(f'Error processing file: {str(e)}', exc_info=True)
        return jsonify({'error': str(e)}), 500
    finally:
        # Always remove the on-disk upload — the original leaked the file
        # whenever processing raised before os.remove().
        if os.path.exists(file_path):
            os.remove(file_path)
392
+
393
@app.route('/plot', methods=['POST'])
def plot():
    """Render the stored spreadsheet for *file_id* as a Plotly line chart.

    Returns {'graph': <plotly JSON>} or {'error': ...} with 404/500.
    """
    data = request.json
    file_id = data.get('file_id')
    try:
        db = get_db()
        cursor = db.execute('SELECT file_data FROM files WHERE id = ?', (file_id,))
        row = cursor.fetchone()
        if row is None:
            # Unknown id previously blew up with TypeError on row['file_data'].
            return jsonify({'error': 'File not found'}), 404
        file_data = base64.b64decode(row['file_data'])

        # io.BytesIO is the supported API; pd.io.common.BytesIO is a private
        # pandas re-export that has been removed in newer versions.
        df = pd.read_excel(io.BytesIO(file_data))

        # First column on the x axis, every remaining column as its own line.
        fig = px.line(df, x=df.columns[0], y=df.columns[1:])
        graph_json = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)

        return jsonify({'graph': graph_json})
    except Exception as e:
        logging.error(f'Error generating plot: {str(e)}', exc_info=True)
        return jsonify({'error': str(e)}), 500
412
+
413
@app.route('/process_csv_query', methods=['POST'])
def process_csv_query():
    """Run a natural-language query against a stored CSV via a LangChain agent.

    Returns {'response': ...} or {'error': ...} with 404/500.
    """
    data = request.json
    file_id = data.get('file_id')
    query = data.get('query')

    temp_csv_path = None
    try:
        db = get_db()
        cursor = db.execute('SELECT file_data FROM files WHERE id = ?', (file_id,))
        row = cursor.fetchone()
        if row is None:
            return jsonify({'error': 'File not found'}), 404
        file_data = base64.b64decode(row['file_data'])

        # Unique temp file instead of the original hard-coded /tmp/{id}.csv:
        # portable to non-POSIX hosts and safe under concurrent requests.
        fd, temp_csv_path = tempfile.mkstemp(suffix='.csv')
        with os.fdopen(fd, 'wb') as temp_csv:
            temp_csv.write(file_data)

        # Create a langchain agent using the gemini-pro model
        agent = create_csv_agent(GoogleGenerativeAI(model="gemini-pro"), temp_csv_path, verbose=True)

        # Run the query using the agent
        response = agent.run(query)

        return jsonify({'response': response})
    except Exception as e:
        logging.error(f'Error processing CSV query: {str(e)}', exc_info=True)
        return jsonify({'error': str(e)}), 500
    finally:
        # The original never deleted the temp CSV, leaking one per request.
        if temp_csv_path and os.path.exists(temp_csv_path):
            os.remove(temp_csv_path)
440
+
441
@app.route('/fetch_news', methods=['POST'])
def fetch_news():
    """Fetch, relevance-filter and summarise energy-market news for a topic.

    Returns {'top_articles', 'market_summary', 'full_analysis'} or
    {'error': ...} with status 500.
    """
    data = request.json
    query = data.get('query')
    try:
        all_articles = get_energy_news(query)
        filtered_news = filter_and_analyze_news(query, all_articles)
        market_summary = generate_market_summary(query, filtered_news)

        # The first paragraph of each analysis doubles as the article summary.
        top_articles = [
            {
                'title': article.get('title', 'No title'),
                'url': article.get('link', '#'),
                'summary': article.get('analysis', '').split('\n\n')[0],
            }
            for article in filtered_news[:10]
        ]

        return jsonify({
            'top_articles': top_articles,
            'market_summary': market_summary,
            'full_analysis': filtered_news
        })
    except Exception as e:
        logging.error(f"Error in fetch_news route: {str(e)}", exc_info=True)
        return jsonify({'error': str(e)}), 500
468
+
469
if __name__ == '__main__':
    # Create tables on first run, then serve on all interfaces.
    init_db()
    # PORT env var overrides the default; 7860 matches the Dockerfile's app.
    port = int(os.environ.get('PORT', 7860))
    app.run(host='0.0.0.0', port=port, debug=False)
requirements.txt ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Flask==2.1.0
2
+ flask-cors==3.0.10
3
+ google-generativeai==0.3.2
4
+ Werkzeug==2.2.3
5
+ PyPDF2==3.0.1
6
+ python-docx==0.8.11
7
+ pandas==1.5.3
8
+ openpyxl==3.1.2
9
+ numpy==1.24.3
10
+ scikit-learn==1.2.2
11
+ plotly==5.15.0
12
+ newsapi-python==0.2.7
13
+ requests==2.31.0
14
+ certifi==2023.5.7
15
+ langchain==0.0.350
16
+ langchain-google-genai==0.0.5
17
+ langchain-experimental==0.0.42
schema.sql ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
-- Documents and their chunked embeddings for retrieval.
CREATE TABLE IF NOT EXISTS documents (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    content TEXT NOT NULL,
    embedding BLOB NOT NULL
);

CREATE TABLE IF NOT EXISTS chunks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    document_id INTEGER,
    content TEXT NOT NULL,
    embedding BLOB NOT NULL,
    FOREIGN KEY (document_id) REFERENCES documents(id)
);

-- Index the foreign key used for per-document chunk lookups.  The original
-- indexed the embedding BLOB (which a B-tree index cannot help with for
-- similarity search) and omitted IF NOT EXISTS, so re-running this schema
-- failed with "index already exists".
CREATE INDEX IF NOT EXISTS idx_chunks_document_id ON chunks(document_id);
setup.sh ADDED
File without changes
static/images/AI-PNG-L.png ADDED
static/images/AI-PNG-R.png ADDED
static/images/app_icon.png ADDED
templates/index.html ADDED
@@ -0,0 +1,594 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Finder - Find Your Answers Here</title>
7
+ <style>
8
+ @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;600&display=swap');
9
+
10
+ body, html {
11
+ font-family: 'Poppins', Arial, sans-serif;
12
+ margin: 0;
13
+ padding: 0;
14
+ height: 100%;
15
+ background-color: #f0f2f5;
16
+ color: #333;
17
+ }
18
+ .landing-page {
19
+ display: flex;
20
+ flex-direction: column;
21
+ align-items: center;
22
+ justify-content: center;
23
+ height: 100vh;
24
+ background: linear-gradient(135deg, #e0f7fa, #fce4ec);
25
+ text-align: center;
26
+ position: relative;
27
+ overflow: hidden;
28
+ }
29
+ .landing-page img.logo {
30
+ width: 300px;
31
+ height: auto;
32
+ margin-bottom: 10px;
33
+ }
34
+ .landing-page h1 {
35
+ font-size: 3em;
36
+ margin: 10px 0;
37
+ background: linear-gradient(45deg, #2196f3, #e91e63);
38
+ background-clip: text;
39
+ -webkit-background-clip: text;
40
+ color: transparent;
41
+ -webkit-text-fill-color: transparent;
42
+ }
43
+ .landing-page .im-text {
44
+ font-size: 1.5em;
45
+ margin: 0 0 -10px;
46
+ background: linear-gradient(45deg, #2196f3, #e91e63);
47
+ background-clip: text;
48
+ -webkit-background-clip: text;
49
+ color: transparent;
50
+ -webkit-text-fill-color: transparent;
51
+ }
52
+ .landing-page p {
53
+ font-size: 1.2em;
54
+ max-width: 600px;
55
+ margin: 10px 0 30px;
56
+ color: #0277bd;
57
+ }
58
+ .start-chat-btn {
59
+ padding: 12px 24px;
60
+ font-size: 1.2em;
61
+ background: linear-gradient(45deg, #2196f3, #e91e63);
62
+ color: white;
63
+ border: none;
64
+ border-radius: 25px;
65
+ cursor: pointer;
66
+ transition: transform 0.3s, box-shadow 0.3s;
67
+ }
68
+ .start-chat-btn:hover {
69
+ transform: translateY(-3px);
70
+ box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
71
+ }
72
+ .ai-image {
73
+ position: absolute;
74
+ width: 150px;
75
+ height: auto;
76
+ opacity: 0.6;
77
+ }
78
+ .ai-image-left {
79
+ left: 60px;
80
+ top: 50%;
81
+ transform: translateY(-50%);
82
+ }
83
+ .ai-image-right {
84
+ right: 60px;
85
+ top: 50%;
86
+ transform: translateY(-50%);
87
+ }
88
+ .chat-interface {
89
+ display: none;
90
+ height: 100vh;
91
+ }
92
+ .sidebar {
93
+ position: fixed;
94
+ left: -300px;
95
+ top: 0;
96
+ width: 300px;
97
+ height: 100%;
98
+ background-color: #ffffff;
99
+ padding: 60px 20px 20px;
100
+ box-shadow: 2px 0 5px rgba(0,0,0,0.1);
101
+ transition: left 0.3s ease-in-out, visibility 0.3s ease-in-out;
102
+ z-index: 1000;
103
+ visibility: hidden;
104
+ }
105
+ .sidebar.open {
106
+ left: 0;
107
+ visibility: visible;
108
+ }
109
+ .main-content {
110
+ margin-left: 0;
111
+ transition: margin-left 0.3s ease-in-out;
112
+ flex-grow: 1;
113
+ display: flex;
114
+ flex-direction: column;
115
+ overflow-y: auto;
116
+ padding: 20px;
117
+ }
118
+ .main-content.sidebar-open {
119
+ margin-left: 300px;
120
+ }
121
+ .chat-container {
122
+ flex-grow: 1;
123
+ overflow-y: auto;
124
+ padding: 20px;
125
+ background-color: #ffffff;
126
+ border-radius: 10px 10px 0 0;
127
+ box-shadow: 0 1px 3px rgba(0,0,0,0.1);
128
+ margin-bottom: 0;
129
+ }
130
+ .message {
131
+ max-width: 80%;
132
+ margin-bottom: 20px;
133
+ line-height: 1.5;
134
+ padding: 15px 20px;
135
+ border-radius: 18px;
136
+ position: relative;
137
+ display: inline-block;
138
+ }
139
+ .user-message {
140
+ background-color: #e3f2fd;
141
+ float: right;
142
+ clear: both;
143
+ }
144
+ .assistant-message {
145
+ background-color: #fce4ec;
146
+ float: left;
147
+ clear: both;
148
+ }
149
+ .input-area {
150
+ display: flex;
151
+ padding: 20px;
152
+ background-color: #ffffff;
153
+ border-top: 1px solid #e1e4e8;
154
+ border-radius: 0 0 10px 10px;
155
+ box-shadow: 0 -1px 3px rgba(0,0,0,0.1);
156
+ }
157
+ #query {
158
+ flex-grow: 1;
159
+ padding: 10px;
160
+ border: 1px solid #d1d5da;
161
+ border-radius: 20px;
162
+ font-size: 16px;
163
+ }
164
+ .send-button, .file-upload-button {
165
+ background: linear-gradient(45deg, #2196f3, #e91e63);
166
+ color: white;
167
+ border: none;
168
+ padding: 10px 20px;
169
+ margin-left: 10px;
170
+ border-radius: 20px;
171
+ cursor: pointer;
172
+ }
173
+ .file-input {
174
+ display: none;
175
+ }
176
+ .new-chat {
177
+ background: linear-gradient(45deg, #2196f3, #e91e63);
178
+ color: white;
179
+ padding: 10px;
180
+ border: none;
181
+ border-radius: 5px;
182
+ cursor: pointer;
183
+ margin-bottom: 20px;
184
+ font-weight: bold;
185
+ }
186
+ .shortcuts {
187
+ display: grid;
188
+ grid-template-columns: repeat(2, 1fr);
189
+ gap: 10px;
190
+ }
191
+ .shortcut {
192
+ background-color: #e3f2fd;
193
+ border: none;
194
+ padding: 10px;
195
+ border-radius: 5px;
196
+ cursor: pointer;
197
+ font-size: 14px;
198
+ transition: background-color 0.3s;
199
+ }
200
+ .shortcut:hover {
201
+ background-color: #bbdefb;
202
+ }
203
+ .context-info {
204
+ background-color: #e3f2fd;
205
+ padding: 10px;
206
+ margin-bottom: 10px;
207
+ border-radius: 5px;
208
+ font-size: 14px;
209
+ }
210
+ .loading {
211
+ display: none;
212
+ text-align: center;
213
+ padding: 20px;
214
+ }
215
+ .menu-button {
216
+ position: fixed;
217
+ top: 20px;
218
+ left: 20px;
219
+ z-index: 1001;
220
+ background: none;
221
+ border: none;
222
+ font-size: 24px;
223
+ cursor: pointer;
224
+ }
225
+ .news-container {
226
+ display: none;
227
+ flex-direction: column;
228
+ height: 100%;
229
+ padding: 20px;
230
+ overflow-y: auto;
231
+ }
232
+ .news-form {
233
+ display: flex;
234
+ margin-bottom: 20px;
235
+ }
236
+ .news-form input {
237
+ flex-grow: 1;
238
+ padding: 10px;
239
+ border: 1px solid #d1d5da;
240
+ border-radius: 20px;
241
+ font-size: 16px;
242
+ margin-right: 10px;
243
+ }
244
+ .news-form button {
245
+ background: linear-gradient(45deg, #2196f3, #e91e63);
246
+ color: white;
247
+ border: none;
248
+ padding: 10px 20px;
249
+ border-radius: 20px;
250
+ cursor: pointer;
251
+ }
252
+ .market-summary {
253
+ background-color: #e3f2fd;
254
+ padding: 15px;
255
+ border-radius: 10px;
256
+ margin-bottom: 20px;
257
+ }
258
+ .news-item {
259
+ background-color: #ffffff;
260
+ padding: 15px;
261
+ border-radius: 10px;
262
+ margin-bottom: 15px;
263
+ box-shadow: 0 1px 3px rgba(0,0,0,0.1);
264
+ }
265
+ .news-item h3 {
266
+ margin-top: 0;
267
+ }
268
+ .news-item a {
269
+ color: #2196f3;
270
+ text-decoration: none;
271
+ }
272
+ .news-item a:hover {
273
+ text-decoration: underline;
274
+ }
275
+ .initial-articles {
276
+ margin-bottom: 20px;
277
+ }
278
+ .initial-article {
279
+ background-color: #f0f8ff;
280
+ padding: 10px;
281
+ margin-bottom: 10px;
282
+ border-radius: 5px;
283
+ }
284
+ .top-articles {
285
+ margin-bottom: 20px;
286
+ }
287
+ .article-item {
288
+ background-color: #f0f8ff;
289
+ padding: 15px;
290
+ margin-bottom: 15px;
291
+ border-radius: 10px;
292
+ box-shadow: 0 1px 3px rgba(0,0,0,0.1);
293
+ }
294
+ .article-item h3 {
295
+ margin-top: 0;
296
+ }
297
+ .article-item a {
298
+ color: #2196f3;
299
+ text-decoration: none;
300
+ }
301
+ .article-item a:hover {
302
+ text-decoration: underline;
303
+ }
304
+ </style>
305
+ </head>
306
+ <body>
307
+ <div class="landing-page" id="landing-page">
308
+ <img src="static/images/app_icon.png" alt="Finder Logo" class="logo">
309
+ <p class="im-text">I'm</p>
310
+ <h1>Finder</h1>
311
+ <p>Unlock the power of knowledge - Find Your Answers Here</p>
312
+ <button class="start-chat-btn" onclick="startChat()">Start Exploring</button>
313
+ <img src="static/images/AI-PNG-L.png" alt="AI Image Left" class="ai-image ai-image-left">
314
+ <img src="static/images/AI-PNG-R.png" alt="AI Image Right" class="ai-image ai-image-right">
315
+ </div>
316
+
317
+ <div class="chat-interface" id="chat-interface">
318
+ <button class="menu-button" onclick="toggleSidebar()">☰</button>
319
+ <div class="sidebar" id="sidebar">
320
+ <button class="new-chat" onclick="startNewChat()">New chat</button>
321
+ <div class="shortcuts">
322
+ <button class="shortcut" onclick="setRole('Python Teacher')">Python Teacher</button>
323
+ <button class="shortcut" onclick="setRole('Data Analyst')">Data Analyst</button>
324
+ <button class="shortcut" onclick="setRole('AI Expert')">AI Expert</button>
325
+ <button class="shortcut" onclick="setRole('Machine Learning Engineer')">ML Engineer</button>
326
+ <button class="shortcut" onclick="setRole('GenAI Specialist')">GenAI Specialist</button>
327
+ <button class="shortcut" onclick="setRole('Data Scientist')">Data Scientist</button>
328
+ <button class="shortcut" onclick="showNewsInterface()">AI News</button>
329
+ </div>
330
+ </div>
331
+ <div class="main-content" id="main-content">
332
+ <div class="context-info" id="context-info"></div>
333
+ <div class="chat-container" id="chat-container">
334
+ <!-- Chat messages will be dynamically inserted here -->
335
+ </div>
336
+ <div class="news-container" id="news-container">
337
+ <form class="news-form" id="news-form" onsubmit="fetchNews(event)">
338
+ <input type="text" id="news-query" placeholder="Enter energy market topic..." required>
339
+ <button type="submit">Search News</button>
340
+ </form>
341
+ <div id="market-summary" class="market-summary"></div>
342
+ <div id="news-results"></div>
343
+ </div>
344
+ <div class="loading" id="loading">Processing...</div>
345
+ <div class="input-area">
346
+ <input type="text" id="query" placeholder="Send a message..." />
347
+ <button class="send-button" onclick="sendMessage()">Send</button>
348
+ <input type="file" id="file-input" class="file-input" accept=".pdf,.txt,.docx,.xlsx,.csv" onchange="uploadFile()" />
349
+ <button class="file-upload-button" onclick="document.getElementById('file-input').click()">Upload File</button>
350
+ </div>
351
+ </div>
352
+ </div>
353
+ <div id="news-display" class="news-display" style="display: none;"></div>
354
+
355
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/marked/2.0.3/marked.min.js"></script>
356
+ <script>
357
+ let currentRole = null;
358
+ let currentFileId = null;
359
+ let newsData = null;
360
+
361
// Hide the landing page and reveal the chat interface.
function startChat() {
    const landing = document.getElementById('landing-page');
    const chat = document.getElementById('chat-interface');
    landing.style.display = 'none';
    chat.style.display = 'flex';
}
365
+
366
// Slide the sidebar in or out and shift the main content area to match.
function toggleSidebar() {
    document.getElementById('sidebar').classList.toggle('open');
    document.getElementById('main-content').classList.toggle('sidebar-open');
}
372
+
373
// Refresh the context banner with the active role and uploaded file id;
// hide the banner entirely when neither is set.
function updateContextInfo() {
    const parts = [];
    if (currentRole) {
        parts.push(`Current Role: ${currentRole}`);
    }
    if (currentFileId) {
        parts.push(`File ID: ${currentFileId}`);
    }
    const banner = document.getElementById('context-info');
    const text = parts.join(' | ');
    banner.textContent = text;
    banner.style.display = text ? 'block' : 'none';
}
386
+
387
// Record the selected assistant role and announce it in the chat log.
function setRole(role) {
    currentRole = role;
    updateContextInfo();
    const log = document.getElementById('chat-container');
    const notice = document.createElement('div');
    notice.classList.add('message', 'assistant-message');
    // Role names come only from the fixed sidebar buttons, so innerHTML is safe here.
    notice.innerHTML = `Role set to: <strong>${role}</strong>. How can I assist you today?`;
    log.appendChild(notice);
    log.scrollTop = log.scrollHeight;
}
397
+
398
// Swap the chat pane for the news-search pane and adopt the news-analyst role.
function showNewsInterface() {
    const chat = document.getElementById('chat-container');
    const news = document.getElementById('news-container');
    chat.style.display = 'none';
    news.style.display = 'flex';
    currentRole = 'AI News Analyst';
    updateContextInfo();
}
404
+
405
// Fetch and render energy-market news for the topic in #news-query.
// POSTs to /fetch_news, renders the market summary (backend markdown) and the
// top articles, then switches back to the chat pane with a confirmation message.
//
// Fixes: article titles/summaries and the user's query were interpolated raw
// into innerHTML — external news content could inject markup (XSS). They are
// now rendered via DOM APIs with textContent. Also fails fast on HTTP errors
// instead of parsing an error page as JSON.
function fetchNews(event) {
    event.preventDefault();
    const query = document.getElementById('news-query').value;
    document.getElementById('loading').style.display = 'block';

    fetch('/fetch_news', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({ query: query }),
    })
    .then(response => {
        if (!response.ok) {
            throw new Error(`News request failed: ${response.status}`);
        }
        return response.json();
    })
    .then(data => {
        newsData = data; // Keep the payload so follow-up chat questions can reference it.
        const marketSummary = document.getElementById('market-summary');
        const newsResults = document.getElementById('news-results');

        // The summary is markdown produced by our own backend; render via marked.
        marketSummary.innerHTML = `<h2>Market Summary</h2>${marked.parse(data.market_summary)}`;

        // Build article cards with createElement/textContent so titles and
        // summaries from external news sources cannot inject HTML (XSS).
        newsResults.innerHTML = '<h2>Top News Articles</h2>';
        data.top_articles.forEach(article => {
            const item = document.createElement('div');
            item.classList.add('article-item');

            const heading = document.createElement('h3');
            const link = document.createElement('a');
            link.href = article.url;
            link.target = '_blank';
            link.rel = 'noopener noreferrer'; // new tab must not reach window.opener
            link.textContent = article.title;
            heading.appendChild(link);

            const summary = document.createElement('p');
            summary.textContent = article.summary;

            item.appendChild(heading);
            item.appendChild(summary);
            newsResults.appendChild(item);
        });

        // Return to the chat pane so the user can ask about the results.
        document.getElementById('chat-container').style.display = 'block';
        document.getElementById('news-container').style.display = 'none';

        // System message confirming the fetch; textContent because the query
        // is user input and must not be treated as HTML.
        const chatContainer = document.getElementById('chat-container');
        const systemMessage = document.createElement('div');
        systemMessage.classList.add('message', 'assistant-message');
        systemMessage.textContent = `News articles related to "${query}" have been fetched and analyzed. You can now ask questions about them.`;
        chatContainer.appendChild(systemMessage);
        chatContainer.scrollTop = chatContainer.scrollHeight;
    })
    .catch(err => {
        console.error(err);
        document.getElementById('news-results').innerHTML = 'Error fetching news.';
    })
    .finally(() => {
        document.getElementById('loading').style.display = 'none';
    });
}
458
+
459
// Send the chat input to /query and render the assistant's markdown reply.
// Includes the active role, any uploaded file id, and — in news mode — the
// previously fetched news payload as extra context for the backend.
//
// Fix: check response.ok before response.json(), so HTTP error pages surface
// as the error bubble instead of a JSON parse failure.
function sendMessage() {
    const queryInput = document.getElementById('query');
    const messageText = queryInput.value.trim();
    if (messageText === '') {
        return; // ignore empty submissions
    }

    const chatContainer = document.getElementById('chat-container');

    // Echo the user's message (innerText so it is never parsed as HTML).
    const userMessage = document.createElement('div');
    userMessage.classList.add('message', 'user-message');
    userMessage.innerText = messageText;
    chatContainer.appendChild(userMessage);

    queryInput.value = '';

    // Placeholder bubble filled in when the response arrives.
    const assistantMessage = document.createElement('div');
    assistantMessage.classList.add('message', 'assistant-message');
    chatContainer.appendChild(assistantMessage);

    chatContainer.scrollTop = chatContainer.scrollHeight;

    document.getElementById('loading').style.display = 'block';

    let requestBody = {
        query: messageText,
        role: currentRole,
        file_id: currentFileId
    };

    // In news mode, forward the fetched articles so the backend can answer
    // questions about them.
    if (currentRole === 'AI News Analyst' && newsData) {
        requestBody.newsContext = newsData;
    }

    fetch('/query', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify(requestBody),
    })
    .then(response => {
        if (!response.ok) {
            throw new Error(`Query failed: ${response.status}`);
        }
        return response.json();
    })
    .then(data => {
        // data.response is markdown produced by our own backend; render it.
        const formattedResponse = marked.parse(data.response);
        assistantMessage.innerHTML = formattedResponse;
        chatContainer.scrollTop = chatContainer.scrollHeight;
    })
    .catch(err => {
        assistantMessage.innerText = 'Error: Unable to fetch response.';
        console.error(err);
    })
    .finally(() => {
        document.getElementById('loading').style.display = 'none';
    });
}
514
+
515
// NOTE(review): dead code — a second `startNewChat` declared later in this
// script shadows this one (the later function declaration wins), so only the
// later, more complete version ever runs. Consider deleting this definition.
function startNewChat() {
    currentRole = null;
    currentFileId = null;
    updateContextInfo();
    document.getElementById('chat-container').innerHTML = '';
}
522
+
523
// Upload the chosen file to /upload, show progress in the chat log, and
// remember the returned file_id so later queries can reference the file.
//
// Fixes: (1) the file input's value is now reset after each attempt, so
// re-selecting the same file fires the `change` event again (previously a
// second upload of the same file silently did nothing); (2) HTTP errors are
// surfaced via response.ok instead of attempting to JSON-parse an error page.
function uploadFile() {
    const fileInput = document.getElementById('file-input');
    const file = fileInput.files[0];
    if (!file) {
        return; // dialog was cancelled
    }

    const formData = new FormData();
    formData.append('file', file);

    const chatContainer = document.getElementById('chat-container');
    const userMessage = document.createElement('div');
    userMessage.classList.add('message', 'user-message');
    userMessage.innerText = `Uploaded file: ${file.name}`;
    chatContainer.appendChild(userMessage);

    const assistantMessage = document.createElement('div');
    assistantMessage.classList.add('message', 'assistant-message');
    assistantMessage.innerHTML = 'Processing file...';
    chatContainer.appendChild(assistantMessage);

    chatContainer.scrollTop = chatContainer.scrollHeight;

    document.getElementById('loading').style.display = 'block';

    fetch('/upload', {
        method: 'POST',
        body: formData
    })
    .then(response => {
        if (!response.ok) {
            throw new Error(`Upload failed: ${response.status}`);
        }
        return response.json();
    })
    .then(data => {
        if (data.error) {
            throw new Error(data.error);
        }
        currentFileId = data.file_id;
        updateContextInfo();
        // data.analysis is markdown produced by our own backend.
        const formattedAnalysis = marked.parse(data.analysis);
        assistantMessage.innerHTML = formattedAnalysis;
        chatContainer.scrollTop = chatContainer.scrollHeight;
    })
    .catch(err => {
        assistantMessage.innerText = 'Error: Unable to process file.';
        console.error(err);
    })
    .finally(() => {
        document.getElementById('loading').style.display = 'none';
        // Reset so choosing the same file again re-triggers `change`.
        fileInput.value = '';
    });
}
569
// Reset the whole UI to a fresh conversation: clear role/file/news state,
// wipe the chat log, and return from the news pane to the chat pane.
// (This definition shadows the earlier `startNewChat` and is the one that runs.)
function startNewChat() {
    currentRole = null;
    currentFileId = null;
    newsData = null;
    updateContextInfo();

    const chat = document.getElementById('chat-container');
    chat.innerHTML = '';
    chat.style.display = 'block';

    document.getElementById('news-container').style.display = 'none';
    document.getElementById('news-query').value = '';
    document.getElementById('market-summary').innerHTML = '';
    document.getElementById('news-results').innerHTML = '';
}
582
+
583
// Submit the chat input on Enter. `keydown` replaces the deprecated
// `keypress` event; the isComposing guard skips the Enter press that merely
// confirms an IME composition, matching the old keypress behaviour.
document.getElementById('query').addEventListener('keydown', function (e) {
    if (e.key === 'Enter' && !e.isComposing) {
        sendMessage();
    }
});

// Initialize the context banner on load.
updateContextInfo();
592
+ </script>
593
+ </body>
594
+ </html>
uploads/Check_Survey_Name_11.csv ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Country,ES_ID,Surveys,ADM_Survey Name,ADM_Alternate Name,Dummy,Year
2
+ United States,1000000000838,US_Seismic_2D,US-Seismic-2D,,N,2005
3
+ United States,1000000000839,US_Seismic_2D,US-Seismic-2D,,N,2006
4
+ United States,1000000000840,US_Seismic_2D,US-Seismic-2D,,N,2007
5
+ United States,1000000000841,US_Seismic_2D,US-Seismic-2D,,N,2005
6
+ United States,1000000000842,US_Seismic_2D,US-Seismic-2D,,N,2009
7
+ United States,1000000000843,US_Seismic_2D,US-Seismic-2D,,N,2010
8
+ United States,1000000000844,US_Seismic_2D,US-Seismic-2D,,N,2011
9
+ United States,1000000000845,US_Seismic_2D,US-Seismic-2D,,N,2010
10
+ United States,1000000000846,US_Seismic_2D,US-Seismic-2D,,N,2010
11
+ United States,1000000000847,US_Seismic_2D,US-Seismic-2D,,N,2014
12
+ United States,1000000000848,US_Seismic_2D,US-Seismic-2D,,N,2015
13
+ United States,1000000000849,US_Seismic_2D,US-Seismic-2D,,N,2019
14
+ United States,1000000000850,US_Seismic_2D,US-Seismic-2D,,N,2020
uploads/SP_Global_offer_letter_Gautham_V_Nairy_.pdf ADDED
The diff for this file is too large to render. See raw diff