honeybansal23 committed on
Commit
41e862d
·
1 Parent(s): bcd6e4b
Files changed (4) hide show
  1. apis/reddit_apis.py +42 -0
  2. app.py +2 -51
  3. requirements.txt +0 -0
  4. venv/pyvenv.cfg +1 -1
apis/reddit_apis.py CHANGED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import HTTPException

from models.reddit_models import AnalysisRequest
from reddit.reddit_functions import getRedditData
from reddit.reddit_gemini import getKeywords
from reddit.reddit_pain_point_analysis import pain_point_analysis
from reddit.reddit_search_scrapper import getCompetitorAnalysisData
from reddit.reddit_utils import services_names
10
# main method for user end
async def analyzeData(index: int, inputData: AnalysisRequest):
    """Run the full Reddit analysis pipeline for one user request.

    Extracts search keywords from the user's query, scrapes Reddit data
    for those keywords, then dispatches the requested analysis services.

    Args:
        index: Row index identifying this request (forwarded to getServices).
        inputData: Request payload carrying ``user_query`` and
            ``analysis_names``.

    Raises:
        HTTPException: 500 if any stage of the pipeline fails; HTTP errors
            raised downstream are re-raised unchanged.
    """
    try:
        keywords = getKeywords(user_query=inputData.user_query)

        result = await getRedditData(
            user_query=keywords['query'],
            search_keywords=keywords['top_3_combinations'],
        )

        getServices(
            index=index,
            analysis_list=inputData.analysis_names,
            user_query=keywords['query'],
            fileName=result['fileName'],
            uniqueFileId=result['fileUniqueId'],
        )
    except HTTPException:
        # Don't rewrap a deliberate HTTP error from a downstream call as a 500.
        raise
    except Exception as e:
        print("Failed to run analyzeData ", e)
        # NOTE: the original wrapped the f-string in str(), which is a no-op;
        # chain with `from e` so the original traceback is preserved.
        raise HTTPException(
            status_code=500,
            detail=f"Failed to run analyzeData : {e}",
        ) from e
28
def getServices(index, analysis_list, user_query=None, fileName=None, uniqueFileId=None):
    """Run each analysis service whose name appears in ``analysis_list``.

    ``services_names[0]`` selects pain point analysis and ``services_names[1]``
    selects competitor analysis; when both are requested, pain point analysis
    runs first (matching the original branch order).

    Args:
        index: Kept for interface compatibility with callers; not used here
            since the per-row result bookkeeping was removed.
        analysis_list: Service names requested by the user.
        user_query: Normalized query string passed to each service.
        fileName: Name of the scraped Reddit data file.
        uniqueFileId: Unique id of the scraped data file (pain point only).
    """
    # Two independent checks replace the original if/elif ladder, which
    # duplicated each service call in the "both requested" branch. The
    # return values were never used, so they are no longer bound.
    if services_names[0] in analysis_list:
        pain_point_analysis(user_query=user_query, fileName=fileName, uniqueFileId=uniqueFileId)

    if services_names[1] in analysis_list:
        getCompetitorAnalysisData(user_query=user_query, fileName=fileName)
app.py CHANGED
@@ -1,7 +1,7 @@
1
- from collections import deque
2
  import os
3
- from fastapi import APIRouter, FastAPI, HTTPException
4
  from fastapi.middleware.cors import CORSMiddleware
 
5
  from models.reddit_models import AnalysisRequest, RedditPostDataModel
6
  from reddit.reddit_functions import getRedditData
7
  from reddit.reddit_gemini import getKeywords
@@ -9,7 +9,6 @@ from reddit.load_env import api_key
9
  import google.generativeai as genai
10
  from datetime import datetime
11
  from reddit.reddit_pain_point_analysis import pain_point_analysis
12
- from reddit.reddit_utils import services_names
13
  from reddit.reddit_search_scrapper import getCompetitorAnalysisData
14
  from utils import time_execution
15
  import asyncio
@@ -202,55 +201,7 @@ def getCompetitorAnalysis(user_query: str, fileName: str,isSolo=True):
202
 
203
 
204
 
205
- # main method for user end
206
- async def analyzeData(index:int,inputData:AnalysisRequest):
207
- try:
208
- keywords = getKeywords(user_query=inputData.user_query)
209
- config.user_df.loc[index,'gemini_input'] = str({
210
- 'query': keywords['query'],
211
- 'top_3_combinations': keywords['top_3_combinations']
212
- })
213
- result = await getRedditData(user_query=keywords['query'], search_keywords=keywords['top_3_combinations'])
214
- config.user_df.loc[index,'file_with_sentiment'] = str({
215
- 'reddit_data': result['reddit_data'],
216
- 'sentiment_data':result['sentiment_data']
217
- })
218
- getServices(
219
- index=index,
220
- analysis_list=inputData.analysis_names,
221
- user_query=keywords['query'],
222
- fileName=result['fileName'],
223
- uniqueFileId=result['fileUniqueId']
224
- )
225
- except Exception as e:
226
- print("Failed to run analyzeData ", e)
227
- raise HTTPException(status_code=500, detail=str(f"Failed to run analyzeData : {e}"))
228
 
229
- def getServices(index, analysis_list, user_query=None, fileName=None, uniqueFileId=None):
230
- # Pain point + Competitor Analysis
231
- if services_names[0] in analysis_list and services_names[1] in analysis_list:
232
- result=pain_point_analysis(user_query=user_query,fileName=fileName,uniqueFileId=uniqueFileId)
233
- config.user_df.loc[index,'pain_point_analysis'] = str({
234
- 'e_time': result[2],
235
- })
236
- competitor_result = getCompetitorAnalysisData(user_query=user_query,fileName=fileName)
237
- config.user_df.loc[index,'competitor_analysis'] = str({
238
- "no_of_competitors": len(competitor_result['all_competitor_data']),
239
- 'e_time': competitor_result['e_time'],
240
- })
241
- # Pain point analysis only
242
- elif services_names[0] in analysis_list:
243
- result=pain_point_analysis(user_query=user_query,fileName=fileName,uniqueFileId=uniqueFileId)
244
- config.user_df.loc[index,'pain_point_analysis'] = str({
245
- 'e_time': result[2],
246
- })
247
- # Competitor analysis only
248
- elif services_names[1] in analysis_list:
249
- competitor_result = getCompetitorAnalysisData(user_query=user_query,fileName=fileName)
250
- config.user_df.loc[index,'competitor_analysis'] = str({
251
- "no_of_competitors": len(competitor_result['all_competitor_data']),
252
- 'e_time': competitor_result['e_time'],
253
- })
254
 
255
  # if __name__ == "__main__":
256
  # import uvicorn
 
 
1
  import os
2
+ from fastapi import FastAPI, HTTPException
3
  from fastapi.middleware.cors import CORSMiddleware
4
+ from apis.reddit_apis import analyzeData
5
  from models.reddit_models import AnalysisRequest, RedditPostDataModel
6
  from reddit.reddit_functions import getRedditData
7
  from reddit.reddit_gemini import getKeywords
 
9
  import google.generativeai as genai
10
  from datetime import datetime
11
  from reddit.reddit_pain_point_analysis import pain_point_analysis
 
12
  from reddit.reddit_search_scrapper import getCompetitorAnalysisData
13
  from utils import time_execution
14
  import asyncio
 
201
 
202
 
203
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
204
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
205
 
206
  # if __name__ == "__main__":
207
  # import uvicorn
requirements.txt CHANGED
Binary files a/requirements.txt and b/requirements.txt differ
 
venv/pyvenv.cfg CHANGED
@@ -2,4 +2,4 @@ home = C:\Users\HP\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.
2
  include-system-site-packages = false
3
  version = 3.11.9
4
  executable = C:\Users\HP\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\python.exe
5
- command = C:\Users\HP\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\python.exe -m venv D:\work2\nextAnalytics\venv
 
2
  include-system-site-packages = false
3
  version = 3.11.9
4
  executable = C:\Users\HP\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\python.exe
5
+ command = C:\Users\HP\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\python.exe -m venv D:\work\nextAnalytics\venv