akash418 commited on
Commit
6972c11
·
1 Parent(s): c8e9460

first commit

Browse files
Files changed (3) hide show
  1. app.py +115 -0
  2. screenshot.py +56 -0
  3. spaces_info.py +76 -0
app.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import requests
3
+ import json
4
+ import os
5
+ from screenshot import (
6
+ before_prompt,
7
+ prompt_to_generation,
8
+ after_generation,
9
+ js_save,
10
+ js_load_script,
11
+ )
12
+ from spaces_info import description, examples, initial_prompt_value
13
+
14
# Endpoint configuration. Both values are read from the environment so that
# credentials never live in source control; the model URL has a public default
# for local development.
# SECURITY NOTE(review): a previous revision hard-coded an `hf_...` API token
# on this line. Any token that has ever been committed must be treated as
# leaked — revoke it in the Hugging Face account settings.
API_URL = os.getenv(
    "API_URL",
    "https://api-inference.huggingface.co/models/bigscience/bloom-560m",
)
HF_API_TOKEN = os.getenv("HF_API_TOKEN")
19
+
20
def query(payload):
    """POST *payload* to the inference API and return the decoded JSON reply.

    The API reports failures as a JSON body containing an ``"error"`` key
    (possibly with a non-200 status), so the body is decoded unconditionally
    and the caller is responsible for inspecting it.

    Args:
        payload: JSON-serializable request body (``inputs``, ``parameters``,
            ``options``) as built by ``inference``.

    Returns:
        The parsed JSON response — a list of generations on success, or a
        dict with an ``"error"`` message on failure.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    response = requests.post(
        API_URL,
        json=payload,
        headers={"Authorization": f"Bearer {HF_API_TOKEN}"},
        # Without a timeout a cold-starting model could hang the UI forever.
        timeout=60,
    )
    return response.json()
25
+
26
+
27
def inference(input_sentence, max_length, sample_or_greedy, seed=42):
    """Run one generation request and format the result for the Gradio UI.

    Args:
        input_sentence: Prompt text to complete.
        max_length: Number of new tokens to request (``max_new_tokens``).
        sample_or_greedy: ``"Sample"`` enables nucleus sampling (top_p=0.9);
            any other value requests greedy decoding.
        seed: Sampling seed. The UI wires the "Sample N" radio (``type="index"``)
            into this slot, so each radio choice yields a distinct generation.

    Returns:
        A 3-tuple ``(screenshot_html, raw_generated_text, error_markdown)``.
        On an API error the first two elements are ``None`` and the third
        carries a red error message.
    """
    # Settings common to both decoding modes; only the sampling switches differ.
    parameters = {
        "max_new_tokens": max_length,
        "seed": seed,
        "early_stopping": False,
        "length_penalty": 0.0,
        "eos_token_id": None,
    }
    if sample_or_greedy == "Sample":
        parameters["top_p"] = 0.9
        parameters["do_sample"] = True
    else:
        parameters["do_sample"] = False

    payload = {
        "inputs": input_sentence,
        "parameters": parameters,
        "options": {"use_cache": False},
    }

    data = query(payload)

    if "error" in data:
        return (None, None, f"<span style='color:red'>ERROR: {data['error']} </span>")

    # The API echoes the prompt before the continuation; keep only the text
    # after its first occurrence. str.partition never raises, unlike the
    # previous split(...)[1], which crashed with IndexError whenever the echo
    # did not contain the prompt verbatim.
    full_text = data[0]["generated_text"]
    generation = full_text.partition(input_sentence)[2]
    return (
        before_prompt
        + input_sentence
        + prompt_to_generation
        + generation
        + after_generation,
        full_text,
        "",
    )
65
+
66
+
67
if __name__ == "__main__":
    # Build the demo UI: prompt + decoding controls on the left, the log /
    # raw-text / screenshot outputs on the right, examples underneath.
    demo = gr.Blocks()
    with demo:
        with gr.Row():
            gr.Markdown(value=description)
        with gr.Row():
            with gr.Column():
                text = gr.Textbox(
                    label="Input",
                    value=" ",  # should be set to " " when plugged into a real API
                )
                tokens = gr.Slider(1, 64, value=32, step=1, label="Tokens to generate")
                sampling = gr.Radio(
                    ["Sample", "Greedy"], label="Sample or greedy", value="Sample"
                )
                # type="index" makes this radio emit 0-4, which flows into the
                # `seed` parameter of `inference` (4th input below), so each
                # slot produces a different sampled generation.
                sampling2 = gr.Radio(
                    ["Sample 1", "Sample 2", "Sample 3", "Sample 4", "Sample 5"],
                    value="Sample 1",
                    label="Sample other generations (only work in 'Sample' mode)",
                    type="index",
                )

                with gr.Row():
                    submit = gr.Button("Submit")
                    load_image = gr.Button("Generate Image")
            with gr.Column():
                text_error = gr.Markdown(label="Log information")
                text_out = gr.Textbox(label="Output")
                display_out = gr.HTML(label="Image")
                # Inject the html2canvas <script> once the HTML component loads.
                # NOTE(review): set_event_trigger is a private Blocks API and the
                # "load" trigger spelling is version-specific — confirm against
                # the gradio version pinned for this Space.
                display_out.set_event_trigger(
                    "load",
                    fn=None,
                    inputs=None,
                    outputs=None,
                    no_target=True,
                    js=js_load_script,
                )
        with gr.Row():
            gr.Examples(examples=examples, inputs=[text, tokens, sampling, sampling2])

        # Main generation path: inference() returns (screenshot HTML, raw text,
        # error markdown) matching the three outputs in order.
        submit.click(
            inference,
            inputs=[text, tokens, sampling, sampling2],
            outputs=[display_out, text_out, text_error],
        )

        # Browser-side only: js_save screenshots the hidden #capture element.
        # NOTE(review): `_js` is the older underscore spelling (plain `js` is
        # used above) — verify which one the pinned gradio version expects.
        load_image.click(fn=None, inputs=None, outputs=None, _js=js_save)

    demo.launch()
screenshot.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
## HTML and JS code to give Gradio HTML
# app.py assembles the screenshot markup as:
#   before_prompt + prompt + prompt_to_generation + generation + after_generation
# yielding a hidden #capture card (prompt in white, generation in #FE57A0) that
# js_save later renders to an image via html2canvas into #img_placeholder.
before_prompt = """
<div id = "img_placeholder">
</div>
<div class="relative" id="capture" align="justify" style="display:none;">
<div class="absolute font-semibold" style="left:7%; right:7%; bottom:32%; top:7%; font-size: 8rem; line-height: 1; padding: 1rem; font-family:-apple-system, BlinkMacSystemFont, 'Arial', sans-serif;" id="text_box">
<p class="text" style="color:white; white-space:pre-wrap;" dir="auto" id = "prompt">"""
# Closes the prompt paragraph and opens the generation paragraph.
prompt_to_generation = """</p>
<p class="text" style="color:#FE57A0; white-space:pre-wrap;" dir="auto" id="generation">"""
# Closes the generation paragraph and adds the card's background image.
after_generation = """</p>
</div>
<img src="https://huggingface.co/spaces/huggingface/bloom_demo/raw/main/bg.jpg" class="w-full" />
</div>
"""
15
+
16
# Click handler for the "Generate Image" button (wired via Button.click in
# app.py). Runs entirely in the browser: reveals the hidden #capture element
# in a cloned document, shrinks its font until the text fits the box, renders
# it with html2canvas, and prepends the resulting <canvas> to #img_placeholder.
# NOTE(review): depends on html2canvas having been loaded by js_load_script.
# NOTE(review): the while-condition uses bitwise '&' rather than '&&'; it
# behaves correctly via boolean coercion, but '&&' was probably intended.
js_save = """() => {
/*might need to add .root to launch locally */
var gradioapp = document.body.getElementsByTagName('gradio-app')[0];

/* Save image */
capture = gradioapp.querySelector('#capture')
img_placeholder = gradioapp.querySelector('#img_placeholder')
html2canvas(capture, {
useCORS: true,
onclone: function (clonedDoc) {
clonedDoc.querySelector('#capture').style.display = 'block';

/*Fits text to box*/
var text_box = clonedDoc.querySelector('#text_box');
var prompt = clonedDoc.querySelector('#prompt');
var generation = clonedDoc.querySelector('#generation');
console.log(text_box, generation, prompt)
cur_font_size = getComputedStyle(text_box).getPropertyValue("font-size")
while( (text_box.clientHeight < text_box.scrollHeight || text_box.clientWidth < text_box.scrollWidth) & parseInt(cur_font_size) > 10) {
console.log(cur_font_size, text_box.clientHeight, text_box.scrollHeight, text_box.clientWidth, text_box.scrollWidth)
cur_font_size = 0.98 * parseInt(cur_font_size) + "px"
cur_line_height = 1.1 * parseInt(cur_font_size) + "px"
text_box.style.fontSize = cur_font_size
prompt.style.fontSize = cur_font_size
generation.style.fontSize = cur_font_size
text_box.style.lineHeight = cur_line_height
prompt.style.lineHeight = cur_line_height
generation.style.lineHeight = cur_line_height
}
}
}).then((canvas)=>{
img_placeholder.prepend(canvas);
})
}"""
50
+
51
+
52
# Injected once when the HTML output component loads (see set_event_trigger in
# app.py): appends the html2canvas library from a CDN so js_save can call it.
js_load_script="""() => {
var script = document.createElement('script');
script.src = "https://cdnjs.cloudflare.com/ajax/libs/html2canvas/1.4.1/html2canvas.min.js";
document.head.appendChild(script);
}"""
spaces_info.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Markdown blurb rendered at the top of the demo page by gr.Markdown in app.py.
description = """Gradio Demo for BLOOM. To use it, simply add your text, or click one of the examples to load them.
Tips:
- Do NOT talk to BLOOM as an entity, it's not a chatbot but a webpage/blog/article completion model.
- For the best results: MIMIC a few sentences of a webpage similar to the content you want to generate.
Start a paragraph as if YOU were writing a blog, webpage, math post, coding article and BLOOM will generate a coherent follow-up. Longer prompts usually give more interesting results.
- Content: Please see our [content disclaimer](https://hf.co/spaces/bigscience/bloom-book) before using the model, as it may sometimes behave in unexpected ways.

Options:
- sampling: imaginative completions (may be not super accurate e.g. math/history)
- greedy: accurate completions (may be more boring or have repetitions)
"""
12
+
13
# Alternative blurb for the JAX/Flax TPU variant of the demo.
# NOTE(review): not referenced anywhere in app.py as committed — confirm it is
# intentionally kept for a future variant before removing.
wip_description = """JAX / Flax Gradio Demo for BLOOM. The 176B BLOOM model running on a TPU v3-256 pod, with 2D model parallelism and custom mesh axes.
Note:
1. For this WIP demo, only **sampling** is supported.
2. Rendering of the screenshot is currently not optimised. To experience the true speed of JAX / Flax, tick 'just output raw text'.
"""
18
+
19
# Example rows for gr.Examples in app.py. Each row matches the wired inputs:
#   [prompt text, tokens to generate, "Sample" | "Greedy", "Sample N" slot label]
# The 4th column is the value of the sampling2 radio (which app.py converts to
# an index feeding inference's `seed` parameter).
examples = [
    [
        'A "whatpu" is a small, furry animal native to Tanzania. An example of a sentence that uses the word whatpu is: We were traveling in Africa and we saw these very cute whatpus. To do a "farduddle" means to jump up and down really fast. An example of a sentence that uses the word farduddle is:',
        32,
        "Sample",
        "Sample 1",
    ],
    [
        "A poem about the beauty of science by Alfred Edgar Brittle\nTitle: The Magic Craft\nIn the old times",
        50,
        "Sample",
        "Sample 1",
    ],
    ["استخراج العدد العاملي في لغة بايثون:", 30, "Greedy", "Sample 1"],
    ["Pour déguster un ortolan, il faut tout d'abord", 32, "Sample", "Sample 1"],
    [
        "Traduce español de España a español de Argentina\nEl coche es rojo - el auto es rojo\nEl ordenador es nuevo - la computadora es nueva\nel boligrafo es negro -",
        16,
        "Sample",
        "Sample 1",
    ],
    [
        "Estos ejemplos quitan vocales de las palabras\nEjemplos:\nhola - hl\nmanzana - mnzn\npapas - pps\nalacran - lcrn\npapa -",
        16,
        "Sample",
        "Sample 1",
    ],
    [
        "Question: If I put cheese into the fridge, will it melt?\nAnswer:",
        32,
        "Sample",
        "Sample 1",
    ],
    ["Math exercise - answers:\n34+10=44\n54+20=", 16, "Greedy", "Sample 1"],
    [
        "Question: Where does the Greek Goddess Persephone spend half of the year when she is not with her mother?\nAnswer:",
        24,
        "Greedy",
        "Sample 1",
    ],
    [
        "spelling test answers.\nWhat are the letters in « language »?\nAnswer: l-a-n-g-u-a-g-e\nWhat are the letters in « Romanian »?\nAnswer:",
        24,
        "Greedy",
        "Sample 1",
    ],
]
66
+
67
# Default prompt for the input textbox: an Arabic instruction ("compute the
# factorial in Python") followed by a worked code sample for the model to echo.
# NOTE(review): the inner indentation of this snippet is part of the string
# and was reconstructed here — confirm it matches the intended display.
initial_prompt_value = """استخراج العدد العاملي في لغة بايثون :
def factorial(n):
    if n == 0:
        return 1
    else:
        result = 1
        for i in range(1, n + 1) :
            result *= i
        return result
print(factorial(5))"""