alaselababatunde committed on
Commit
15d6738
·
1 Parent(s): be70360
Files changed (2) hide show
  1. main.py +11 -31
  2. requirements.txt +1 -1
main.py CHANGED
@@ -1,5 +1,5 @@
1
  # ===============================================
2
- # Tech Disciples AI Backend — Main.py
3
  # ===============================================
4
 
5
  from fastapi import FastAPI, HTTPException, Header
@@ -7,12 +7,11 @@ from pydantic import BaseModel
7
  import torch
8
  import logging
9
 
10
- # LangChain 1.0 imports
11
- from langchain.prompts import PromptTemplate
12
- from langchain.memory import ConversationBufferMemory
13
  from langchain.chains import LLMChain
14
- from langchain.llms.base import LLM
15
- from typing import Optional, List
16
 
17
  # Transformers pipeline
18
  from transformers import pipeline
@@ -36,7 +35,7 @@ logger = logging.getLogger("TechDisciplesAI")
36
  app = FastAPI(title="Tech Disciples AI (LangChain Conversational)", version="3.0")
37
 
38
  # ===============================================
39
- # HUGGING FACE PIPELINE
40
  # ===============================================
41
  try:
42
  logger.info(f"🚀 Loading model: {MODEL_NAME}")
@@ -51,31 +50,12 @@ try:
51
  top_p=0.9
52
  )
53
 
54
- logger.info("✅ Hugging Face pipeline loaded successfully.")
 
55
 
56
  except Exception as e:
57
- logger.error(f"❌ Failed to load Hugging Face pipeline: {e}")
58
- hf_pipeline = None
59
-
60
- # ===============================================
61
- # HUGGING FACE LLM WRAPPER FOR LANGCHAIN
62
- # ===============================================
63
- class HFLLMWrapper(LLM):
64
- def __init__(self, pipeline):
65
- self.pipeline = pipeline
66
-
67
- @property
68
- def _llm_type(self) -> str:
69
- return "hf_pipeline"
70
-
71
- def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
72
- output = self.pipeline(prompt)
73
- if isinstance(output, list) and len(output) > 0:
74
- return output[0].get("generated_text", str(output[0]))
75
- return str(output)
76
-
77
- # Initialize LLM wrapper
78
- llm = HFLLMWrapper(hf_pipeline) if hf_pipeline else None
79
 
80
  # ===============================================
81
  # MEMORY SYSTEM
@@ -115,7 +95,7 @@ chain = LLMChain(
115
  # ===============================================
116
  class QueryInput(BaseModel):
117
  query: str
118
- session_id: str | None = "default"
119
 
120
  # ===============================================
121
  # ROUTES
 
1
  # ===============================================
2
+ # Tech Disciples AI Backend — Updated for LangChain ≥1.0
3
  # ===============================================
4
 
5
  from fastapi import FastAPI, HTTPException, Header
 
7
  import torch
8
  import logging
9
 
10
+ # LangChain imports (modern >=1.0)
11
+ from langchain.chat_models import HuggingFacePipeline
 
12
  from langchain.chains import LLMChain
13
+ from langchain.prompts.prompt import PromptTemplate
14
+ from langchain.memory import ConversationBufferMemory
15
 
16
  # Transformers pipeline
17
  from transformers import pipeline
 
35
  app = FastAPI(title="Tech Disciples AI (LangChain Conversational)", version="3.0")
36
 
37
  # ===============================================
38
+ # LOAD MODEL USING PIPELINE + LANGCHAIN
39
  # ===============================================
40
  try:
41
  logger.info(f"🚀 Loading model: {MODEL_NAME}")
 
50
  top_p=0.9
51
  )
52
 
53
+ llm = HuggingFacePipeline(pipeline=hf_pipeline)
54
+ logger.info("✅ Model loaded successfully.")
55
 
56
  except Exception as e:
57
+ logger.error(f"❌ Failed to load model: {e}")
58
+ llm = None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59
 
60
  # ===============================================
61
  # MEMORY SYSTEM
 
95
  # ===============================================
96
  class QueryInput(BaseModel):
97
  query: str
98
+ session_id: str | None = "default" # optional: could be user/session-based
99
 
100
  # ===============================================
101
  # ROUTES
requirements.txt CHANGED
@@ -3,6 +3,6 @@ uvicorn[standard]
3
  torch
4
  transformers
5
  accelerate
6
- langchain==0.0.258
7
  huggingface-hub
8
  pydantic
 
3
  torch
4
  transformers
5
  accelerate
6
+ langchain>=1.0
7
  huggingface-hub
8
  pydantic