# -*- coding: utf-8 -*-
"""Untitled25.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1i4BW958q6NUrUaJ2bAgNM8-N1pLSeP-Q
"""
# pip install openai streamlit

import openai
import streamlit as st

# Initialize OpenAI API key and the Gorilla server base URL
openai.api_key = "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"
openai.api_base = "http://zanino.millennium.berkeley.edu:8000/v1"

# Set Streamlit layout
st.set_page_config(layout="wide")
# Function to get a response from the Gorilla server
def get_gorilla_response(prompt, model):
    try:
        completion = openai.ChatCompletion.create(
            model=model,
            messages=[{"role": "user", "content": prompt}]
        )
        return completion.choices[0].message.content
    except Exception as e:
        print("An error occurred:", e)
def main():
    st.title("Gorilla LLM")
    input_prompt = st.text_area("Enter your prompt:")
    model_options = ('gorilla-7b-hf-v1', 'gorilla-mpt-7b-hf-v0')
    option = st.selectbox('Select a model:', model_options)

    if st.button("Create"):
        if len(input_prompt) > 0:
            result = get_gorilla_response(prompt=input_prompt, model=option)
            st.write(result)
    # ...
    # Note: col2, result, extract_code_from_output, file_path and
    # run_generated_code are referenced below but defined in code
    # elided from this listing.
    if st.button("Gorilla Magic"):
        if len(input_prompt) > 0:
            # ...
            with col2:
                code_result = extract_code_from_output(result)
                if option == "gorilla-7b-hf-v1":
                    st.subheader("Generated Output")
                    st.code(code_result, language='python')
                elif option == "gorilla-mpt-7b-hf-v0":
                    # Split on literal "\n" sequences in the returned text
                    lines = code_result.split('\\n')
                    for line in lines[:-1]:
                        st.code(line, language='python')
                    # ...
                    run_generated_code(file_path)

if __name__ == "__main__":
    main()
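For reference, the Streamlit UI above is a thin wrapper around a single chat-completion call. Below is a minimal sketch of querying the same Gorilla endpoint from a plain script, assuming the Berkeley-hosted server used above is reachable and using the pre-1.0 openai client interface the app relies on; the prompt text and the "EMPTY" key value are illustrative assumptions, not part of the original script.

import openai

# Same endpoint and model name as in the app above. The key value is assumed
# not to be checked by the Gorilla demo server, but the openai client
# requires one to be set.
openai.api_key = "EMPTY"
openai.api_base = "http://zanino.millennium.berkeley.edu:8000/v1"

prompt = "I want to translate English text to French."  # illustrative prompt
completion = openai.ChatCompletion.create(
    model="gorilla-7b-hf-v1",
    messages=[{"role": "user", "content": prompt}],
)
print(completion.choices[0].message.content)

The app itself would typically be launched with `streamlit run app.py`, where app.py is a hypothetical filename for the script above.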