# Hugging Face Space dump — Space status at capture time: Runtime error.
import datetime
import json
import os
import random
import urllib.request
#import subprocess

import bs4
import gradio as gr
import lxml
import requests
from huggingface_hub import InferenceClient, HfApi

import dl
#from query import tasks
from prompts import (
    FINDER,
    READ_FILE_CODE,
    COMPRESS_HISTORY_PROMPT,
    COMPRESS_DATA_PROMPT,
    COMPRESS_DATA_PROMPT_SMALL,
    LOG_PROMPT,
    LOG_RESPONSE,
    #PREFIX,
    TASK_PROMPT,
)
# Hub API handle (not referenced by the visible code — presumably kept for
# upload/listing helpers elsewhere; TODO confirm).
api = HfApi()
# Serverless inference client pinned to the Mixtral 8x7B instruct model.
client = InferenceClient(
    "mistralai/Mixtral-8x7B-Instruct-v0.1"
)
def parse_action(string: str):
    """Parse model output of the form ``action: NAME action_input=VALUE``.

    Parameters
    ----------
    string : str
        Raw model output; must start with ``"action:"``.

    Returns
    -------
    tuple[str, str | None]
        ``(action_name, action_input)``; ``action_input`` is ``None`` when
        no ``action_input=`` segment is present.

    Raises
    ------
    AssertionError
        If *string* does not start with ``"action:"`` (kept as ``assert``
        to preserve the original contract; note asserts vanish under -O).
    """
    print("PARSING:")
    print(string)
    assert string.startswith("action:")
    # Offset 8 == len("action:") + 1: skips the colon AND the single space
    # after it (the original hard-coded 8).
    name_start = 8
    marker = "action_input="
    idx = string.find(marker)
    print(idx)
    if idx == -1:
        # No explicit input: everything after "action: " is the name.
        print("idx == -1")
        print(string[name_start:])
        return string[name_start:], None
    print("last return:")
    # Name ends one char before the marker (drops the separating space);
    # value follows the marker, with surrounding quotes stripped.
    name = string[name_start : idx - 1]
    value = string[idx + len(marker) :].strip("'").strip('"')
    print(name)
    print(value)
    return name, value
VERBOSE = True      # debug-output flag (not consulted by the visible code — TODO confirm)
MAX_HISTORY = 100   # history cap (not referenced in the visible code)
MAX_DATA = 20000    # character budget per LLM chunk; drives chunking in read_code
def format_prompt(message, history):
    """Render chat *history* plus the new *message* as a Mixtral-style
    instruct prompt: ``<s>[INST] u [/INST] b</s> ... [INST] message [/INST]``.
    """
    past_turns = "".join(
        f"[INST] {user_turn} [/INST] {bot_turn}</s> "
        for user_turn, bot_turn in history
    )
    return "<s>" + past_turns + f"[INST] {message} [/INST]"
| PREFIX="""You are a Python Code writing Agent. | |
| Improve this Code: | |
| """ | |
def run_gpt(inp):
    """Stream a Mixtral completion for *inp*, prefixed with ``PREFIX``.

    Parameters
    ----------
    inp : str
        The code/prompt text to improve.

    Yields
    ------
    str
        The response accumulated so far after each streamed token —
        a generator suitable for streaming into a Gradio output.
    """
    # NOTE(review): read_code/compress_history call run_gpt with extra
    # keyword arguments (stop_tokens, seed, purpose, ...) that this
    # signature does not accept, and then treat the returned generator as
    # a string — those call sites will fail at runtime; confirm the
    # intended run_gpt API.
    print("inp:: " + inp)
    generate_kwargs = dict(
        temperature=0.9,
        max_new_tokens=4000,
        top_p=0.95,
        repetition_penalty=1.0,
        do_sample=True,
        seed=111111,  # fixed seed: nominally reproducible sampling
    )
    # The original scanned inp character-by-character to set a "trig" flag,
    # but the flag started True, so the prompt was always built; the dead
    # scan is removed.
    content = PREFIX + "\n" + inp
    stream = client.text_generation(
        content, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    resp = ""
    for response in stream:
        resp += response.token.text
        yield resp
def read_code(purpose, task, history, action_input, result, repo, space, file_name):
    """Fetch a file from a Space and feed it to the LLM in overlapping chunks.

    Parameters
    ----------
    purpose, task : str
        Agent goal strings interpolated into the chunk prompt.
    history : str
        Running observation log (appended to locally).
    action_input : str
        File name/path to fetch and rewrite.
    result : str
        Fallback return value if no chunk is processed.
    repo, space : str
        Location arguments forwarded to ``dl.show_file_content``.
    file_name : str
        Passed through to the prompt as the ``files`` field.

    Returns
    -------
    str
        The last LLM response, or *result* unchanged when the file is empty.
    """
    print("WORKING ON CODE")
    seed = random.randint(1, 1000000000)
    out = str(dl.show_file_content(repo, space, action_input))
    rl = len(out)
    print(f"rl:: {rl}")
    # Count separator characters to estimate the file's size for chunking.
    c = 0  # BUG FIX: `c` was never initialised, so `c += 1` raised NameError.
    for ch in out:
        if ch == " " or ch == "," or ch == "\n" or ch == "/" or ch == "." or ch == "<":
            c += 1
    print(c)
    divr = c / MAX_DATA
    divi = int(divr) + 1 if divr != int(divr) else int(divr)
    # Guard the empty-file case: divr == 0 previously raised ZeroDivisionError.
    chunk = int(c / divr) if divr else 0
    print(f"chunk:: {chunk}")
    print(f"divr:: {divr}")
    print(f"divi:: {divi}")
    s = 0
    e = chunk
    print(f"e:: {e}")
    new_history = ""
    task = f"Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n"
    for z in range(divi):
        print(f"s:e :: {s}:{e}")
        hist = out[s:e]
        # NOTE(review): run_gpt's visible signature accepts only one
        # positional argument and returns a generator, so this call (and
        # the .strip below) will fail as written — confirm the intended
        # run_gpt API before relying on this path.
        resp = run_gpt(
            READ_FILE_CODE,
            stop_tokens=["observation:", "task:", "action:", "thought:"],
            max_tokens=2048,
            seed=seed,
            purpose=purpose,
            files=file_name,
            task=task,
            file_name=action_input,
            file_contents=hist,
        ).strip("\n")
        new_history = resp
        print(resp)
        # NOTE(review): this appends responses to the very text being
        # chunked, so later windows may slice into LLM output — confirm
        # this is intentional.
        out += resp
        # Advance with a 1000-character overlap between consecutive windows.
        e = e + chunk - 1000
        s = s + chunk - 1000
        history += f"observation: the new code is: {resp}"
        result = resp
    return result
def compress_history(purpose, task, history, file_name):
    """Ask the LLM to summarize *history* and return it as a single
    ``observation:`` line (trailing newline included).
    """
    # NOTE(review): run_gpt's visible signature takes only one positional
    # argument — these keyword arguments suggest a different intended API;
    # confirm before relying on this path.
    summary = run_gpt(
        COMPRESS_HISTORY_PROMPT,
        stop_tokens=["observation:", "task:", "action:", "thought:"],
        max_tokens=1024,
        seed=random.randint(1, 1000000000),
        purpose=purpose,
        files=file_name,
        task=task,
        history=history,
    )
    return f"observation: {summary}\n"
| examples =[ | |
| "What is the current weather in Florida?", | |
| "Find breaking news about Texas", | |
| "Find the best deals on flippers for scuba diving", | |
| "Teach me to fly a helicopter" | |
| ] | |
def clear_fn():
    """Reset handler: blank out both bound components."""
    cleared = (None, None)
    return cleared
# Default seed value for a fresh session.
rand_val = random.randint(1, 99999999999)

def check_rand(inp, val):
    """Return a seed Slider: freshly randomized when *inp* (the
    "random seed" checkbox) is checked, otherwise fixed at int(*val*).
    """
    # Idiom fix: was `if inp == True:`; truthiness is the Pythonic check
    # (the checkbox supplies a bool, so behavior is unchanged).
    if inp:
        return gr.Slider(label="Seed", minimum=1, maximum=99999999999,
                         value=random.randint(1, 99999999999))
    return gr.Slider(label="Seed", minimum=1, maximum=99999999999, value=int(val))
# --- Gradio UI -----------------------------------------------------------
# Two large side-by-side textboxes; every edit to the left box streams the
# model's rewrite into the right box via run_gpt (a generator, so Gradio
# renders incremental output).
with gr.Blocks() as app:
    gr.HTML("""<center><h1>Mixtral 8x7B RPG</h1><h3>HF Co-pilot (development)</h3>""")
    with gr.Group():
        with gr.Row():
            in_text = gr.Textbox(lines=100)
            out_text = gr.Textbox(lines=100)
    # Fire on every change of the input box; run_gpt receives its value.
    in_text.change(run_gpt, in_text, out_text)

# Queue enables streaming generator outputs and concurrent sessions.
app.queue(default_concurrency_limit=20).launch(show_api=False)