theRealNG committed on
Commit
11686ca
·
1 Parent(s): c1fc3f4

Refactored fly config

Browse files
Files changed (6) hide show
  1. .dockerignore +21 -0
  2. Dockerfile +1 -1
  3. README.md +6 -0
  4. endpoints.py +25 -3
  5. fly.toml +17 -0
  6. requirements.txt +1 -0
.dockerignore ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # flyctl launch added from .gitignore
2
+ **/__pycache__
3
+ **/.env
4
+ **/.DS_Store
5
+ **/venv
6
+ **/article_suggestions.json
7
+ **/evaluated_articles.json
8
+ **/final_articles.json
9
+ **/learning_profile.json
10
+ **/pitched_articles.json
11
+
12
+ # flyctl launch added from venv/lib/python3.11/site-packages/crewai/cli/templates/.gitignore
13
+ venv/lib/python3.11/site-packages/crewai/cli/templates/**/.env
14
+ venv/lib/python3.11/site-packages/crewai/cli/templates/**/__pycache__
15
+
16
+ # flyctl launch added from venv/lib/python3.11/site-packages/embedchain/deployment/modal.com/.gitignore
17
+ venv/lib/python3.11/site-packages/embedchain/deployment/modal.com/**/.env
18
+
19
+ # flyctl launch added from venv/lib/python3.11/site-packages/embedchain/deployment/render.com/.gitignore
20
+ venv/lib/python3.11/site-packages/embedchain/deployment/render.com/**/.env
21
+ fly.toml
Dockerfile CHANGED
@@ -20,4 +20,4 @@ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
20
 
21
  COPY . ./
22
 
23
- CMD ["fastapi", "run", "endpoints.py", "--port", "80"]
 
20
 
21
  COPY . ./
22
 
23
+ CMD ["fastapi", "run", "endpoints.py", "--port", "8080"]
README.md CHANGED
@@ -17,7 +17,13 @@ python_version: 3.11.0
17
  ### Release process
18
  ##### API server
19
  * Install flyctl by following instructions mentioned here https://fly.io/docs/flyctl/install/
 
20
 
21
 
22
  ##### Streamlit UI
23
  * Push code to main branch on huggingspace https://huggingface.co/spaces/beautiful-code/ai_workflows
 
 
 
 
 
 
17
  ### Release process
18
  ##### API server
19
  * Install flyctl by following instructions mentioned here https://fly.io/docs/flyctl/install/
20
+ * Run `fly deploy` to deploy the app.
21
 
22
 
23
  ##### Streamlit UI
24
  * Push code to main branch on huggingspace https://huggingface.co/spaces/beautiful-code/ai_workflows
25
+
26
+
27
+ ### Running on local
28
+ * Streamlit app `streamlit run main.py`
29
+ * FastAPI app `uvicorn endpoints:app --port=8080`
endpoints.py CHANGED
@@ -3,7 +3,9 @@ import uvicorn
3
  from fastapi import FastAPI, Query
4
  from crew import til
5
  from fastapi.middleware.cors import CORSMiddleware
6
- from typing import List
 
 
7
  load_dotenv()
8
 
9
  description = """
@@ -37,16 +39,36 @@ app.add_middleware(
37
 
38
  @app.get("/til_feedback", tags=["til_feedback"])
39
  async def til_feedback_kickoff(content: List[str] = Query(...)) -> til.TilFeedbackResponse:
40
- separator = "\n"
 
41
  inputs = {"content": separator.join(content)}
42
  result = til.TilCrew().kickoff(inputs)
43
  return result
44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  @app.get("/healthcheck")
46
  async def read_root():
47
  return {"status": "ok"}
48
 
 
49
  if __name__ == "__main__":
50
- uvicorn.run(app, host="127.0.0.1", port=8000)
51
 
52
 
 
3
  from fastapi import FastAPI, Query
4
  from crew import til
5
  from fastapi.middleware.cors import CORSMiddleware
6
+ from langsmith import Client
7
+ from typing import List, Optional
8
+ from pydantic import UUID4, BaseModel
9
  load_dotenv()
10
 
11
  description = """
 
39
 
40
  @app.get("/til_feedback", tags=["til_feedback"])
41
  async def til_feedback_kickoff(content: List[str] = Query(...)) -> til.TilFeedbackResponse:
42
+ separator = "\n* "
43
+ content[0] = "* " + content[0]
44
  inputs = {"content": separator.join(content)}
45
  result = til.TilCrew().kickoff(inputs)
46
  return result
47
 
48
+ class Feedback(BaseModel):
49
+ helpful_score: Optional[float]
50
+ feedback_on: Optional[str]
51
+
52
+ @app.post("/til_feedback/{run_id}/feedback", tags=["til_feedback"])
53
+ async def capture_feedback(run_id: UUID4, feedback: Feedback) -> str:
54
+ print("Helful Score: ", feedback.helpful_score)
55
+ print("Feedback On: ", feedback.feedback_on)
56
+ client = Client()
57
+ client.create_feedback(
58
+ str(run_id),
59
+ key="helpful",
60
+ score=feedback.helpful_score,
61
+ source_info={"til": feedback.feedback_on},
62
+ type="api",
63
+ )
64
+ return "ok"
65
+
66
  @app.get("/healthcheck")
67
  async def read_root():
68
  return {"status": "ok"}
69
 
70
+
71
  if __name__ == "__main__":
72
+ uvicorn.run(app, host="127.0.0.1", port=8080)
73
 
74
 
fly.toml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # fly.toml app configuration file generated for growthy-workflows on 2024-06-28T08:59:43+05:30
2
+ #
3
+ # See https://fly.io/docs/reference/configuration/ for information about how to use this file.
4
+ #
5
+
6
+ app = "growthy-workflows"
7
+ primary_region = "sin"
8
+
9
+ [build]
10
+
11
+ [http_service]
12
+ internal_port = 8080
13
+ force_https = true
14
+ auto_stop_machines = true
15
+ auto_start_machines = true
16
+ min_machines_running = 0
17
+ processes = ["app"]
requirements.txt CHANGED
@@ -12,3 +12,4 @@ streamlit-extras
12
  fastapi
13
  uvicorn
14
  fastapi_cors
 
 
12
  fastapi
13
  uvicorn
14
  fastapi_cors
15
+ langsmith