Charles Grandjean committed on
Commit
d81d6f6
·
1 Parent(s): 4548589

oui cetait vraiment necessaire d avoir un client openai incompatible avec les autres

Browse files
Files changed (1) hide show
  1. utils/llm_wrapper.py +6 -2
utils/llm_wrapper.py CHANGED
@@ -31,12 +31,16 @@ class NormalizedLLM:
31
  return str(content)
32
 
33
  def invoke(self, input: Any, config: Any = None, **kwargs) -> AIMessage:
34
- response = self.llm.invoke(input, config=config, **kwargs)
 
 
35
  response.content = self._normalize_content(response.content)
36
  return response
37
 
38
  async def ainvoke(self, input: Any, config: Any = None, **kwargs) -> AIMessage:
39
- response = await self.llm.ainvoke(input, config=config, **kwargs)
 
 
40
  response.content = self._normalize_content(response.content)
41
  return response
42
 
 
31
  return str(content)
32
 
33
  def invoke(self, input: Any, config: Any = None, **kwargs) -> AIMessage:
34
+ # Filter out unsupported parameters
35
+ filtered_kwargs = {k: v for k, v in kwargs.items() if k != 'extra_body'}
36
+ response = self.llm.invoke(input, config=config, **filtered_kwargs)
37
  response.content = self._normalize_content(response.content)
38
  return response
39
 
40
  async def ainvoke(self, input: Any, config: Any = None, **kwargs) -> AIMessage:
41
+ # Filter out unsupported parameters
42
+ filtered_kwargs = {k: v for k, v in kwargs.items() if k != 'extra_body'}
43
+ response = await self.llm.ainvoke(input, config=config, **filtered_kwargs)
44
  response.content = self._normalize_content(response.content)
45
  return response
46