vikramvasudevan commited on
Commit
ed3ba05
·
verified ·
1 Parent(s): 7ec751c

Upload folder using huggingface_hub

Browse files
Files changed (7) hide show
  1. .gitignore +11 -0
  2. .python-version +1 -0
  3. README.md +13 -12
  4. main.py +63 -0
  5. pyproject.toml +13 -0
  6. requirements.txt +232 -0
  7. uv.lock +0 -0
.gitignore ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+ .env
9
+
10
+ # Virtual environments
11
+ .venv
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.12
README.md CHANGED
@@ -1,12 +1,13 @@
1
- ---
2
- title: Deep Research
3
- emoji: 🚀
4
- colorFrom: purple
5
- colorTo: yellow
6
- sdk: gradio
7
- sdk_version: 5.36.2
8
- app_file: app.py
9
- pinned: false
10
- ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
1
+ ---
2
+ title: deep-research
3
+ python_version: 3.12
4
+ emoji: 🚀
5
+ colorFrom: purple
6
+ colorTo: pink
7
+ sdk: gradio
8
+ sdk_version: 5.36.2
9
+ app_file: main.py
10
+ pinned: false
11
+ ---
12
+
13
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
main.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from json import load
2
+ from openai import OpenAI
3
+ from dotenv import load_dotenv
4
+ from pydantic import BaseModel
5
+ import gradio as gr
6
+ import json
7
+
class SearchOutput(BaseModel):
    """Structured result produced by the search agent for one query."""

    # The user query that was searched (echoed back by the model).
    query: str
    # The model's answer text for that query.
    result: str
class ValidatorOutput(BaseModel):
    """Validation verdict for a SearchOutput produced by the search agent."""

    # The search output that was validated.
    # NOTE(review): camelCase field name kept as-is — it is part of the
    # structured-output schema sent to the model; renaming would change it.
    searchOutput: SearchOutput
    # True when the model judges the search result to be valid.
    is_valid: bool
def search_agent(query: str) -> SearchOutput | None:
    """Run a single search-style completion for *query*.

    Sends the query as one user message to the ``gpt-4o-mini`` chat model
    and parses the reply into a :class:`SearchOutput` structured object.

    Returns the parsed ``SearchOutput``, or ``None`` when the SDK could
    not parse the model reply.
    """
    api = OpenAI()
    completion = api.chat.completions.parse(
        model="gpt-4o-mini",
        response_format=SearchOutput,
        messages=[{"role": "user", "content": query}],
    )
    first_choice = completion.choices[0]
    return first_choice.message.parsed
def validate_search_results(
    search_results: str | SearchOutput | None,
) -> ValidatorOutput | None:
    """Ask the model whether *search_results* looks like a valid answer.

    Parameters:
        search_results: Output of ``search_agent`` — a ``SearchOutput``
            instance, its serialized/plain-text form, or ``None``/empty
            when the search produced nothing.

    Returns:
        The parsed ``ValidatorOutput`` verdict, or ``None`` when there is
        nothing to validate.
    """
    # Nothing to validate (None or empty text): bail out BEFORE building a
    # client, so this path makes no network call and needs no API key.
    # (The original constructed OpenAI() first, which raises when
    # OPENAI_API_KEY is unset even though there was nothing to do.)
    if not search_results:
        return None
    if isinstance(search_results, SearchOutput):
        content = search_results.model_dump_json()
    else:
        content = search_results
    client = OpenAI()
    response = client.chat.completions.parse(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": content}],
        response_format=ValidatorOutput,
    )
    return response.choices[0].message.parsed
def render_gradio_interface():
    """Build and launch the Gradio UI for search plus validation."""
    with gr.Blocks() as demo:
        gr.Markdown("# Deep Research")
        query = gr.Textbox(label="Query", value="What is the capital of France?")
        search_button = gr.Button("Search")
        search_results = gr.Textbox(label="Search Results")
        validate_button = gr.Button("Validate")
        validate_results = gr.Textbox(label="Validate Results")

        # Enter in the query box and the Search button both run the search
        # agent; validation fires automatically whenever the search results
        # change, and can also be triggered explicitly via its button.
        query.submit(fn=search_agent, inputs=query, outputs=search_results)
        search_button.click(fn=search_agent, inputs=query, outputs=search_results)
        search_results.change(
            fn=validate_search_results,
            inputs=search_results,
            outputs=validate_results,
        )
        validate_button.click(
            fn=validate_search_results,
            inputs=search_results,
            outputs=validate_results,
        )

    demo.launch()
def main():
    """Entry point: load environment configuration, then start the app.

    ``load_dotenv`` runs before the UI so OPENAI_API_KEY (and any other
    ``.env`` values) are in place before an agent call is made.
    """
    print("Hello from deep-research!")
    load_dotenv(override=True)
    render_gradio_interface()
# Run the app only when executed as a script (e.g. `python main.py`),
# not when imported as a module.
if __name__ == "__main__":
    main()
pyproject.toml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "deep-research"
3
+ version = "0.1.0"
4
+ description = "Deep-research demo: a Gradio UI that runs an OpenAI search agent and validates its results with structured outputs"
5
+ readme = "README.md"
6
+ requires-python = ">=3.12"
7
+ dependencies = [
8
+ "dotenv>=0.9.9",
9
+ "gradio>=5.36.2",
10
+ "openai>=1.95.1",
11
+ "openai-agents>=0.1.0",
12
+ "pydantic>=2.11.7",
13
+ ]
requirements.txt ADDED
@@ -0,0 +1,232 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file was autogenerated by uv via the following command:
2
+ # uv pip compile pyproject.toml -o requirements.txt
3
+ aiofiles==24.1.0
4
+ # via gradio
5
+ annotated-types==0.7.0
6
+ # via pydantic
7
+ anyio==4.9.0
8
+ # via
9
+ # gradio
10
+ # httpx
11
+ # mcp
12
+ # openai
13
+ # sse-starlette
14
+ # starlette
15
+ attrs==25.3.0
16
+ # via
17
+ # jsonschema
18
+ # referencing
19
+ brotli==1.1.0
20
+ # via gradio
21
+ certifi==2025.7.9
22
+ # via
23
+ # httpcore
24
+ # httpx
25
+ # requests
26
+ charset-normalizer==3.4.2
27
+ # via requests
28
+ click==8.2.1
29
+ # via
30
+ # typer
31
+ # uvicorn
32
+ colorama==0.4.6
33
+ # via
34
+ # click
35
+ # griffe
36
+ # tqdm
37
+ distro==1.9.0
38
+ # via openai
39
+ dotenv==0.9.9
40
+ # via deep-research (pyproject.toml)
41
+ fastapi==0.116.1
42
+ # via gradio
43
+ ffmpy==0.6.0
44
+ # via gradio
45
+ filelock==3.18.0
46
+ # via huggingface-hub
47
+ fsspec==2025.5.1
48
+ # via
49
+ # gradio-client
50
+ # huggingface-hub
51
+ gradio==5.36.2
52
+ # via deep-research (pyproject.toml)
53
+ gradio-client==1.10.4
54
+ # via gradio
55
+ griffe==1.7.3
56
+ # via openai-agents
57
+ groovy==0.1.2
58
+ # via gradio
59
+ h11==0.16.0
60
+ # via
61
+ # httpcore
62
+ # uvicorn
63
+ httpcore==1.0.9
64
+ # via httpx
65
+ httpx==0.28.1
66
+ # via
67
+ # gradio
68
+ # gradio-client
69
+ # mcp
70
+ # openai
71
+ # safehttpx
72
+ httpx-sse==0.4.1
73
+ # via mcp
74
+ huggingface-hub==0.33.4
75
+ # via
76
+ # gradio
77
+ # gradio-client
78
+ idna==3.10
79
+ # via
80
+ # anyio
81
+ # httpx
82
+ # requests
83
+ jinja2==3.1.6
84
+ # via gradio
85
+ jiter==0.10.0
86
+ # via openai
87
+ jsonschema==4.24.0
88
+ # via mcp
89
+ jsonschema-specifications==2025.4.1
90
+ # via jsonschema
91
+ markdown-it-py==3.0.0
92
+ # via rich
93
+ markupsafe==3.0.2
94
+ # via
95
+ # gradio
96
+ # jinja2
97
+ mcp==1.11.0
98
+ # via openai-agents
99
+ mdurl==0.1.2
100
+ # via markdown-it-py
101
+ numpy==2.3.1
102
+ # via
103
+ # gradio
104
+ # pandas
105
+ openai==1.95.1
106
+ # via
107
+ # deep-research (pyproject.toml)
108
+ # openai-agents
109
+ openai-agents==0.1.0
110
+ # via deep-research (pyproject.toml)
111
+ orjson==3.10.18
112
+ # via gradio
113
+ packaging==25.0
114
+ # via
115
+ # gradio
116
+ # gradio-client
117
+ # huggingface-hub
118
+ pandas==2.3.1
119
+ # via gradio
120
+ pillow==11.3.0
121
+ # via gradio
122
+ pydantic==2.11.7
123
+ # via
124
+ # deep-research (pyproject.toml)
125
+ # fastapi
126
+ # gradio
127
+ # mcp
128
+ # openai
129
+ # openai-agents
130
+ # pydantic-settings
131
+ pydantic-core==2.33.2
132
+ # via pydantic
133
+ pydantic-settings==2.10.1
134
+ # via mcp
135
+ pydub==0.25.1
136
+ # via gradio
137
+ pygments==2.19.2
138
+ # via rich
139
+ python-dateutil==2.9.0.post0
140
+ # via pandas
141
+ python-dotenv==1.1.1
142
+ # via
143
+ # dotenv
144
+ # pydantic-settings
145
+ python-multipart==0.0.20
146
+ # via
147
+ # gradio
148
+ # mcp
149
+ pytz==2025.2
150
+ # via pandas
151
+ pywin32==310
152
+ # via mcp
153
+ pyyaml==6.0.2
154
+ # via
155
+ # gradio
156
+ # huggingface-hub
157
+ referencing==0.36.2
158
+ # via
159
+ # jsonschema
160
+ # jsonschema-specifications
161
+ requests==2.32.4
162
+ # via
163
+ # huggingface-hub
164
+ # openai-agents
165
+ rich==14.0.0
166
+ # via typer
167
+ rpds-py==0.26.0
168
+ # via
169
+ # jsonschema
170
+ # referencing
171
+ ruff==0.12.3
172
+ # via gradio
173
+ safehttpx==0.1.6
174
+ # via gradio
175
+ semantic-version==2.10.0
176
+ # via gradio
177
+ shellingham==1.5.4
178
+ # via typer
179
+ six==1.17.0
180
+ # via python-dateutil
181
+ sniffio==1.3.1
182
+ # via
183
+ # anyio
184
+ # openai
185
+ sse-starlette==2.4.1
186
+ # via mcp
187
+ starlette==0.47.1
188
+ # via
189
+ # fastapi
190
+ # gradio
191
+ # mcp
192
+ tomlkit==0.13.3
193
+ # via gradio
194
+ tqdm==4.67.1
195
+ # via
196
+ # huggingface-hub
197
+ # openai
198
+ typer==0.16.0
199
+ # via gradio
200
+ types-requests==2.32.4.20250611
201
+ # via openai-agents
202
+ typing-extensions==4.14.1
203
+ # via
204
+ # anyio
205
+ # fastapi
206
+ # gradio
207
+ # gradio-client
208
+ # huggingface-hub
209
+ # openai
210
+ # openai-agents
211
+ # pydantic
212
+ # pydantic-core
213
+ # referencing
214
+ # starlette
215
+ # typer
216
+ # typing-inspection
217
+ typing-inspection==0.4.1
218
+ # via
219
+ # pydantic
220
+ # pydantic-settings
221
+ tzdata==2025.2
222
+ # via pandas
223
+ urllib3==2.5.0
224
+ # via
225
+ # requests
226
+ # types-requests
227
+ uvicorn==0.35.0
228
+ # via
229
+ # gradio
230
+ # mcp
231
+ websockets==15.0.1
232
+ # via gradio-client
uv.lock ADDED
The diff for this file is too large to render. See raw diff