What `model_type` should I pass to CTransformers for a Mamba GGUF model?
from langchain_community.llms import CTransformers

# Generation settings for the local GGUF model.
# NOTE(review): '?????????' is a placeholder — ctransformers requires a
# model_type it recognizes (e.g. 'llama', 'gpt2'); confirm whether a mamba
# architecture is supported before running.
generation_config = {
    'max_new_tokens': 256,
    'repetition_penalty': 1.1,
}

llm = CTransformers(
    model='./mamba-2.8b-f32.gguf',
    model_type='?????????',
    config=generation_config,
)

# Run a single completion and print it.
print(llm.invoke('AI is going to'))
Sign up or log in to comment