"""Streamlit app that generates a poem on a user-supplied topic via Together AI."""

import os

import streamlit as st

import litellm
from litellm import completion

# SECURITY: an API key was previously hard-coded here and unconditionally
# overwrote any environment-provided value. Use setdefault so a real
# TOGETHERAI_API_KEY from the environment takes precedence. The committed
# key below should be treated as leaked — rotate it and remove this fallback.
os.environ.setdefault(
    "TOGETHERAI_API_KEY",
    "f722a9f6e3afd6b9999e6aee02aeac9e751ea3a67b124c3667ab50c85c7fa99e",  # TODO: rotate & delete
)

# Tell LiteLLM how to map OpenAI-style chat roles onto the ChatML-like
# prompt format this OpenHermes model expects.
litellm.register_prompt_template(
    model="teknium/OpenHermes-2p5-Mistral-7B",
    roles={
        "system": "<|im_start|>system",
        "assistant": "<|im_start|>assistant",
        "user": "<|im_start|>user",
    },
    pre_message_sep="\n",
    post_message_sep="\n",
)


def generate_response(user_input):
    """Send *user_input* as a single user message and return the model's reply text.

    Args:
        user_input: The topic (plain string) to base the poem on.

    Returns:
        The generated text from the first completion choice.
    """
    messages = [{"role": "user", "content": user_input}]
    response = completion(
        model="together_ai/teknium/OpenHermes-2p5-Mistral-7B",
        messages=messages,
    )
    # Extract just the generated text; previously the raw ModelResponse
    # object was returned and rendered verbatim in the UI.
    return response.choices[0].message.content


def main():
    """Render the Streamlit UI and display a generated poem on demand."""
    st.title("Poem Generator")
    user_input = st.text_input("Enter a topic for your poem:")
    if st.button("Generate Poem"):
        # Guard against an empty topic instead of sending a blank prompt.
        if not user_input.strip():
            st.warning("Please enter a topic first.")
            return
        response = generate_response(user_input)
        st.write("Here is your poem:")
        st.write(response)


if __name__ == "__main__":
    main()