HenryY2023 committed on
Commit
8dbfa9b
·
verified ·
1 Parent(s): 5afbf19

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -12
app.py CHANGED
@@ -50,6 +50,8 @@ class ZhipuAILLM(LLM):
50
  api_key: str
51
  model: str = "chatglm3-6b"
52
  temperature: float = 0.1
 
 
53
 
54
  def __init__(self, api_key: str, **kwargs: Any):
55
  # Pass api_key to parent class
@@ -57,14 +59,16 @@ class ZhipuAILLM(LLM):
57
  self.model = kwargs.get("model", self.model)
58
  self.temperature = kwargs.get("temperature", self.temperature)
59
  # Initialize client after setting attributes
60
- self._client = ZhipuAI(api_key=self.api_key)
61
 
62
  @property
63
  def _llm_type(self) -> str:
64
  return "zhipuai"
65
 
66
  def _call(self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any) -> str:
67
- response = self._client.chat.completions.create(
 
 
68
  model=self.model,
69
  messages=[{"role": "user", "content": prompt}],
70
  temperature=self.temperature
@@ -182,22 +186,19 @@ def initialize_system(pdf_path):
182
  return qa_chain
183
 
184
  # Initialize on startup
 
185
  try:
186
  qa_chain = initialize_system("Henry_Linkedin_Profile.pdf")
 
187
  except Exception as e:
188
  print(f"Error initializing system: {e}")
189
  # Create a dummy chain to allow the app to run
190
- from langchain.chains import LLMChain
191
- from langchain.llms import OpenAI # This is just a placeholder
 
 
192
 
193
- # Create a fallback chain that returns an error message
194
- qa_chain = LLMChain(
195
- llm=OpenAI(temperature=0),
196
- prompt=PromptTemplate(
197
- template="Error: {error}",
198
- input_variables=["error"]
199
- )
200
- )
201
 
202
  # Chat function
203
  def chat(message, history):
 
50
  api_key: str
51
  model: str = "chatglm3-6b"
52
  temperature: float = 0.1
53
+ # Declare client as a field to avoid Pydantic validation error
54
+ client: Optional[ZhipuAI] = None
55
 
56
  def __init__(self, api_key: str, **kwargs: Any):
57
  # Pass api_key to parent class
 
59
  self.model = kwargs.get("model", self.model)
60
  self.temperature = kwargs.get("temperature", self.temperature)
61
  # Initialize client after setting attributes
62
+ self.client = ZhipuAI(api_key=self.api_key)
63
 
64
  @property
65
  def _llm_type(self) -> str:
66
  return "zhipuai"
67
 
68
  def _call(self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any) -> str:
69
+ if self.client is None:
70
+ raise ValueError("ZhipuAI client not initialized")
71
+ response = self.client.chat.completions.create(
72
  model=self.model,
73
  messages=[{"role": "user", "content": prompt}],
74
  temperature=self.temperature
 
186
  return qa_chain
187
 
188
  # Initialize on startup
189
+ qa_chain = None
190
  try:
191
  qa_chain = initialize_system("Henry_Linkedin_Profile.pdf")
192
+ print("System initialized successfully")
193
  except Exception as e:
194
  print(f"Error initializing system: {e}")
195
  # Create a dummy chain to allow the app to run
196
+ # Instead of using OpenAI, we'll create a simple dummy chain
197
+ class DummyChain:
198
+ def __call__(self, inputs):
199
+ return {"result": f"System initialization failed: {str(e)}"}
200
 
201
+ qa_chain = DummyChain()
 
 
 
 
 
 
 
202
 
203
  # Chat function
204
  def chat(message, history):