curiouscurrent committed on
Commit
0c8d798
·
verified ·
1 Parent(s): 43e7d13

Delete chains

Browse files
chains/backend_chain.py DELETED
@@ -1,17 +0,0 @@
1
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from chains.model import get_local_model

# Single shared local LLM instance for this module's chain.
llm = get_local_model()

# Instructs the model to turn a JSON task list into FastAPI/MongoDB code.
_BACKEND_TEMPLATE = (
    "You are the Backend Agent. Based on these tasks, generate REST API endpoints and database schemas "
    "using FastAPI and MongoDB. Ensure proper routes and Pydantic models.\n\n"
    "Tasks:\n{tasks_json}\n\n"
    "Output only code."
)

prompt = PromptTemplate(
    template=_BACKEND_TEMPLATE,
    input_variables=["tasks_json"],
)

# Chain exposed to the orchestrator: tasks_json in, backend code out.
backend_chain = LLMChain(llm=llm, prompt=prompt)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chains/coordinator_chain.py DELETED
@@ -1,17 +0,0 @@
1
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from chains.model import get_local_model

# Single shared local LLM instance for this module's chain.
llm = get_local_model()

# Instructs the model to decompose a project brief into assigned JSON tasks.
_COORDINATOR_TEMPLATE = (
    "You are a Project Coordinator Agent. Break this project brief into structured technical tasks.\n"
    "Assign each task clearly to Frontend or Backend.\n\n"
    "Brief:\n{brief}\n\n"
    "Output in JSON list format with keys: task, description, assigned_to."
)

prompt = PromptTemplate(
    template=_COORDINATOR_TEMPLATE,
    input_variables=["brief"],
)

# Chain exposed to the orchestrator: brief in, JSON task list out.
coordinator_chain = LLMChain(llm=llm, prompt=prompt)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chains/frontend_chain.py DELETED
@@ -1,17 +0,0 @@
1
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from chains.model import get_local_model

# Single shared local LLM instance for this module's chain.
llm = get_local_model()

# Instructs the model to turn a JSON task list into React/Tailwind components.
_FRONTEND_TEMPLATE = (
    "You are the Frontend Agent. Based on these tasks, write React components with TailwindCSS.\n"
    "Ensure the UI is clean, responsive, and functional.\n\n"
    "Tasks:\n{tasks_json}\n\n"
    "Output only valid React code."
)

prompt = PromptTemplate(
    template=_FRONTEND_TEMPLATE,
    input_variables=["tasks_json"],
)

# Chain exposed to the orchestrator: tasks_json in, React code out.
frontend_chain = LLMChain(llm=llm, prompt=prompt)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chains/model.py DELETED
@@ -1,17 +0,0 @@
1
from langchain_community.chat_models import ChatHuggingFace
from langchain_community.llms import HuggingFacePipeline
from transformers import pipeline


def get_local_model():
    """Build a chat model backed by a local Hugging Face text-generation pipeline.

    Returns:
        ChatHuggingFace: a LangChain chat model wrapping the local pipeline,
        suitable for use as the ``llm`` of an ``LLMChain``.
    """
    model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"  # or "HuggingFaceH4/zephyr-7b-beta"

    pipe = pipeline(
        "text-generation",
        model=model_name,
        tokenizer=model_name,
        max_new_tokens=512,
        # Without do_sample=True the pipeline decodes greedily and silently
        # ignores temperature/top_p, which the values below clearly intend.
        do_sample=True,
        temperature=0.3,
        top_p=0.95,
    )

    # BUG FIX: ChatHuggingFace has no `pipeline` keyword. It expects an `llm`
    # argument that is a LangChain HuggingFacePipeline wrapping the raw
    # transformers pipeline; passing `pipeline=pipe` raised at construction.
    llm = HuggingFacePipeline(pipeline=pipe)
    return ChatHuggingFace(llm=llm)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chains/review_chain.py DELETED
@@ -1,16 +0,0 @@
1
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from chains.model import get_local_model

# Single shared local LLM instance for this module's chain.
llm = get_local_model()

# Instructs the model to cross-check tasks vs. frontend/backend outputs.
_REVIEW_TEMPLATE = (
    "You are the Review Agent. Check consistency between tasks, frontend, and backend outputs.\n"
    "Point out mismatches or missing features, and summarize overall quality.\n\n"
    "Tasks:\n{tasks}\n\nFrontend:\n{frontend}\n\nBackend:\n{backend}\n"
)

prompt = PromptTemplate(
    template=_REVIEW_TEMPLATE,
    input_variables=["tasks", "frontend", "backend"],
)

# Chain exposed to the orchestrator: three artifacts in, review summary out.
review_chain = LLMChain(llm=llm, prompt=prompt)