# deneme/test.py — medical Q&A prompt demo
# Author: Tan Gezerman (commit 0262d87, "Upload 11 files")
#%%
from langchain_core.prompts import PromptTemplate
# ctransformers is no longer used
""" from langchain_community.llms import CTransformers
# Initialize the language model
llm = CTransformers(model='Model/llama-2-7b-chat.ggmlv3.q2_K.bin', # 2 bit quantized model
model_type='llama',
config={'max_new_tokens': 256, # max tokens in reply
'temperature': 0.01, } # randomness of the reply
)
"""
# Initialize the language model with LlamaCpp
#llm = LlamaCpp(model_path="Model/llama-2-7b-chat.Q4_K_M.gguf", # token streaming to terminal
#device="cuda",n_gpu_layers=-1,verbose = True, max_tokens = 4096, #offloads ALL layers to GPU, uses around 6 GB of Vram
#config={ # max tokens in reply
# 'temperature': 0.75} # randomness of the reply
#)
# System prompt: restricts the assistant to medical questions only.
# (Typos fixed: "AI", "helpful, clear", "medical-related", "Don't",
# "outside the medical domain" — a cleaner prompt yields better
# instruction-following.)
template = """
You are an AI that answers medical-related queries in a helpful, clear and concise manner.
Avoid giving misinformation, and provide details about your results. Don't answer any questions outside the medical domain.
Question: {query}
Answer:
"""

# Prompt wrapper with a single input variable, `query`, substituted
# into the {query} placeholder above.
prompt_template = PromptTemplate(
    input_variables=["query"],
    template=template
)

# Demo: render the prompt with a deliberately out-of-domain question
# (per the instructions, the model should decline it). Note that only
# string formatting happens here — no LLM is invoked in this cell.
print(
    prompt_template.format(
        query="how can I change a tyre on my car"
    )
)
# %%