File size: 1,552 Bytes
ced7a64
 
 
 
 
 
f0cf3d0
ced7a64
4197f84
 
 
ced7a64
4197f84
ced7a64
5707583
890b897
 
 
 
 
 
 
 
 
 
ced7a64
 
4197f84
 
 
14dd6d7
4197f84
14dd6d7
 
4197f84
 
ced7a64
4197f84
ced7a64
 
 
890b897
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
# Client for the hosted text-generation endpoint of the fine-tuned
# short-story model. All generation requests go through this object.
client = InferenceClient("gargabhi/shortstories20M")

# Markdown shown above the Gradio interface.
description = """
# Generate short stories using custom verb or noun or adjective
"""

# Default prompt pre-filled into the UI textbox; users may edit the
# verb/noun/adjective to steer the story.
prompt = 'Write a story. In the story, try to use the verb "fight", the noun "king" and the adjective "brave". Possible story:'

def generate_text(input_prompt="", max_len=200, top_k=10, temp=0.5, top_p=0.95):
    """Generate a short story from *input_prompt* via the Inference API.

    Args:
        input_prompt: Prompt text to send to the model.
        max_len: Maximum number of new tokens to generate.
        top_k: Top-k sampling cutoff (0 disables the filter).
        temp: Sampling temperature.
        top_p: Nucleus (top-p) sampling threshold.

    Returns:
        The generated text string returned by the endpoint.
    """
    print('inputs: ')
    # Log the prompt actually being sent, not the module-level default.
    print('prompt:', input_prompt)
    print('max_len:', max_len)
    print('top-k:', top_k)
    print('temp:', temp)
    print('top_p:', top_p)
    # Forward top_p too — previously it was accepted and logged but never
    # passed on, so the UI's top-p slider had no effect.
    response = client.text_generation(
        input_prompt,
        do_sample=True,
        max_new_tokens=max_len,
        temperature=temp,
        top_k=top_k,
        top_p=top_p,
    )
    print('response:')
    print(response)
    return response


# UI controls, in the same order as generate_text's parameters.
inputs = [
    gr.Textbox(prompt, label="Prompt text"),
    # Fixed label typo: "max-lenth" -> "max-length".
    gr.Slider(minimum=50, maximum=250, step=50, label="max-length generation", value=200),
    gr.Slider(minimum=0, maximum=20, step=1, label="top-k", value=10),
    gr.Slider(minimum=0.0, maximum=4.0, step=0.1, label="temperature", value=0.5),
    gr.Slider(minimum=0.0, maximum=1.0, label="top-p", value=0.95),
]
outputs = [gr.Textbox(label="Generated Text")]

# allow_flagging takes the strings "never"/"auto"/"manual"; the boolean
# False is not a documented value and is rejected by current Gradio.
demo = gr.Interface(fn=generate_text, inputs=inputs, outputs=outputs, allow_flagging="never", description=description)


if __name__ == "__main__":
    # debug=True surfaces tracebacks in the UI while developing.
    demo.launch(debug=True)