File size: 15,330 Bytes
d028a9b
 
 
 
 
 
 
 
 
 
c379520
 
 
 
 
d028a9b
 
 
c379520
d028a9b
 
 
c379520
22108e0
d028a9b
 
 
c379520
d028a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c379520
 
 
d028a9b
5963935
f159474
 
3bd97c2
d028a9b
 
9484ce7
f159474
 
 
d028a9b
 
9484ce7
 
703ce39
d028a9b
cebdace
d028a9b
cebdace
e8463e1
 
 
 
cebdace
 
 
d028a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c379520
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5963935
 
 
 
21c05be
66ba57f
c379520
 
 
 
 
 
cff0d7a
66ba57f
cff0d7a
66ba57f
cff0d7a
22108e0
d56be10
c379520
d56be10
d028a9b
 
 
 
 
 
 
 
22108e0
c379520
c67c3ba
 
bbe7e2d
4ae55cf
d56be10
dda1cd6
 
c67c3ba
22108e0
c379520
 
 
 
 
 
22108e0
 
 
d028a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
eefa63e
 
d028a9b
9484ce7
c379520
5963935
c379520
d028a9b
0fae765
3337e70
 
 
d028a9b
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
import gradio as gr
import os
import sys
import json 
import requests
import glob
from PIL import Image
from io import BytesIO
import random
import numpy as np

from thinkgpt.llm import ThinkGPT
# from transformers import AutoModelForCausalLM, AutoTokenizer


# --- Global configuration --------------------------------------------------

MODEL = "gpt-3.5-turbo"                      # OpenAI chat model used by ThinkGPT
API_URL = os.getenv("API_URL")               # optional backend endpoint (unused in visible code)
DISABLED = os.getenv("DISABLED") == 'True'   # kill switch when the OpenAI quota is exhausted

NUM_THREADS = 16   # gradio queue concurrency (was int(16) — redundant cast removed)
TOTAL_K = 5        # number of jury characters sampled per round
SAMPLE_IDX = []    # indices of the currently sampled character sprites (set by random_sample)
# Candidate anchor points (x, y) on the courtroom image where a character
# sprite may be pasted: a 4x4 grid of 16 positions.
RANDOM_POSITION = [(145 + 200 * i + 400 * (i//2), j * 110 + 900) for i in range(4) for j in range(4)]
CURRENT_POSITION = []  # anchor points chosen for the current sample (set by random_sample)

print(NUM_THREADS)


def exception_handler(exception_type, exception, traceback):
    """Custom excepthook: print a terse one-line error instead of a traceback.

    The traceback argument is intentionally ignored to keep output short.
    Fixed to write to stderr — diagnostics should not pollute stdout.
    """
    print(f"{exception_type.__name__}: {exception}", file=sys.stderr)
sys.excepthook = exception_handler
sys.tracebacklimit = 0  # suppress traceback frames for any error that does get formatted

def show_image_from_link(image_link):
    """Download an image from a URL and return it as a PIL Image.

    Args:
        image_link: HTTP(S) URL of the image.

    Returns:
        PIL.Image.Image decoded from the response body.

    Raises:
        requests.HTTPError: if the server responds with an error status.
        requests.Timeout: if the request exceeds the 30-second timeout.
    """
    # A timeout prevents the app from hanging forever on a dead link, and
    # raise_for_status surfaces 4xx/5xx instead of feeding an error page to PIL.
    response = requests.get(image_link, timeout=30)
    response.raise_for_status()
    img = Image.open(BytesIO(response.content))
    return img

#https://github.com/gradio-app/gradio/issues/3531#issuecomment-1484029099
def parse_codeblock(text):
    """Convert fenced ``` code blocks in *text* into <pre><code> HTML.

    Non-fence lines after the first get a <br/> prefix and have angle
    brackets escaped; all pieces are concatenated without newlines.
    """
    pieces = text.split("\n")
    rendered = []
    for idx, piece in enumerate(pieces):
        if "```" in piece:
            # An opening fence carries the language tag after the backticks;
            # a bare ``` closes the block.
            if piece == "```":
                rendered.append('</code></pre>')
            else:
                rendered.append(f'<pre><code class="{piece[3:]}">')
        elif idx > 0:
            rendered.append("<br/>" + piece.replace("<", "&lt;").replace(">", "&gt;"))
        else:
            rendered.append(piece)
    return "".join(rendered)

def reset_textbox():
    """Clear the input textbox and disable it together with the submit button."""
    cleared_box = gr.update(value='', interactive=False)
    disabled_button = gr.update(interactive=False)
    return cleared_box, disabled_button

def random_sample():
    """Sample TOTAL_K jury characters and draw them onto the courtroom image.

    Side effects: overwrites the module globals SAMPLE_IDX (which character
    sprites were picked) and CURRENT_POSITION (where they were pasted), so a
    later submit() can place speech balloons over the same spots.

    Returns:
        np.ndarray RGBA image of the courtroom with the characters pasted in.
    """
    global SAMPLE_IDX, CURRENT_POSITION
    base_dir = os.path.dirname(__file__)
    # Pick TOTAL_K distinct character indices out of the 25 available sprites.
    SAMPLE_IDX = list(np.random.choice(range(25), TOTAL_K, replace = False))
    court = np.array(Image.open(os.path.join(base_dir, "data/Court.jpeg")))
    sprites = [
        Image.open(os.path.join(base_dir, f'data/characters_{idx:02}.png')).resize((64, 64))
        for idx in SAMPLE_IDX
    ]
    # Pick TOTAL_K distinct anchor points from the pre-computed grid.
    chosen = np.random.choice(range(len(RANDOM_POSITION)), TOTAL_K, replace = False)
    CURRENT_POSITION = [RANDOM_POSITION[k] for k in list(chosen)]
    composed = Image.fromarray(court).convert('RGBA')
    for sprite, position in zip(sprites, CURRENT_POSITION):
        # Third argument uses the sprite's own alpha channel as the paste mask.
        composed.paste(sprite, position, sprite)
    print(CURRENT_POSITION)
    return np.array(composed)

def reset_sample(*agents):
    """Reset the courtroom to its empty state and blank every jury textbox.

    The *agents varargs mirror the gradio wiring; their values are unused.
    Side effects: clears the globals SAMPLE_IDX and CURRENT_POSITION.
    """
    global CURRENT_POSITION, SAMPLE_IDX
    CURRENT_POSITION = []
    SAMPLE_IDX = []
    image = np.array(Image.open(os.path.join(os.path.dirname(__file__), "data/Court.jpeg")))
    blanks = ['' for _ in range(TOTAL_K)]
    return image, *[gr.Textbox.update(value=answer) for answer in blanks]


# Page header HTML; replaced by a red warning banner when the quota kill switch is on.
title = """<h1 align="center">Courtroom with AI juries</h1>"""
if DISABLED:
    title = """<h1 align="center" style="color:red">This app has reached OpenAI's usage limit. We are currently requesting an increase in our quota. Please check back in a few days.</h1>"""
# Markdown blurb describing the conversational prompt format
# (not referenced in the visible code — presumably rendered elsewhere; TODO confirm).
description = """Language models can be conditioned to act like dialogue agents through a conversational prompt that typically takes the form:
```
User: <utterance>
Assistant: <utterance>
User: <utterance>
Assistant: <utterance>
...
```
In this app, you can explore the outputs of a gpt-3.5 LLM.
"""


memorize_list = ['๊ตญ๋ฏผ์ฐธ์—ฌ์žฌํŒ์— ์ฐธ์—ฌํ•œ ์‚ฌ๋žŒ์€ ๋ช‡ ๊ฐœ์›”์˜ ์ง•์—ญ์„ ์„ ๊ณ ํ•˜๋Š”๊ฒŒ ์ข‹์€์ง€ ๋งํ•ด์•ผ ํ•œ๋‹ค.',
 "'๋„ˆ๋Š” ๋ช‡์‚ด์ด์•ผ?' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” '๋‚˜์˜ ๋‚˜์ด๋Š” 23์‚ด์ด์•ผ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๋„ˆ๋Š” ๋‚จ์ž์•ผ?' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” '์‘ ๋‚˜์˜ ์„ฑ๋ณ„์€ ๋‚จ์ž์•ผ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์›”์šœ๋‚  ๋ฉด์ ‘์ž„ ์›”์šœ๋‚  ๋ฉด์ ‘์ž„' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๋ฉด์ ‘์€ ์–ธ์ œ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์˜ค ๋‚˜๋„ ํ†ตํ–‡๋„ค' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ—๊ทธ์ผ€ ๋นจ๋ฆฌ?๋Œ€๋ฐ•์ด๋„ค์ž๊ธฐ์†Œ๊ฐœ ๊ฐ™์€๊ฑฐ ์œ ํŠœ๋ธŒ์— ์ฐพ์•„๋ฐ”๊ดœ์ฐฎ์€๊ฑฐ ๋งŽ์„๊ฑธ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์ด๋ ‡๊ฒŒ ํ•ด๋„ ์•ผ๊ทผํ• ๊ฑฐ ๊ฐ™์•„.....ใ…Ž ์ด๋ ‡๊ฒŒ ํ•ด๋„ ์•ผ๊ทผํ• ๊ฑฐ ๊ฐ™์•„.....ใ…Ž' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ใ…œใ…œ์ง„์งœ ํ• ๊ฑฐ๋งŽ์€๊ฐ€๋ณด๋‹ค์˜ค๋Š˜ ํ–‰์‚ฌ์ค€๋น„๊นŒ์ง€ ๊ฐ”์œผ๋ฉด ํฐ์ผ ๋‚ ๋ป”ํ–ˆ๋Š”๋ฐ...'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'ํ–‰์‚ฌ์ค€๋น„.......ใ…Ž' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๊ทธ๋Ÿผ ๋‚ด์ผ๋„ ์•ˆ๊ฐ€?์•„๋‹ˆ๋ฉด ๋‚ด์ผ์€๊ฐ€...?'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๋‚ด์ผ์€ ๊ฐ€์•ผ๋ผ ํ—ˆํ—ˆํ—ˆ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ใ…œใ…œ์•™๋Œ€'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๊ทผ๋ฐ ๊ทผ๋ฐ ์ฃผํœด์ˆ˜๋‹น์ด ์œ ๊ธ‰ํœด๊ฐ€๋ฅผ ์ค€๋‹ค๋Š”๋ง์ด์ž–์•„ ๋ญ”์ง€ ์ž˜ ์ดํ•ด์•ˆ๋ผ...' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ•œ๋ฒˆ์ฐพ์•„๋ณผ๊ป˜์—'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์›…ใ… ใ… ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์‰ฌ๋“ ์•ˆ์‰ฌ๋“ ์ฃผ๋‹น 15์‹œ๊ฐ„ ์ด์ƒ๊ทผ๋ฌดํ•œ์‚ฌ๋žŒ์€๋ฌด์กฐ๊ฑด ์ฃผํœด์ˆ˜๋‹น์„ ์ฃผ์–ด์•ผํ•˜๋‚˜๋ดฅ์ด๊ฑฐ ์ฐธ๊ณ ํ•ด๋ณผ๋ž˜์šฉ~?'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๊ทผ๋ฐ 2์ฃผ๋Š” ์‰ฌ๋Š”๋‚ ์ด์—†๋Š”๋ฐ ๋ญ”๊ฐ€ ์ด์ƒํ•ด..ํ ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๊ณ„์ •์ฃผ5์ผ์ด์ƒ 15์‹œ๊ฐ„์ด์ƒ๊ทผ๋กœ์ž์—๊ฒŒ๋ฌด์กฐ๊ฑด ์ง€๊ธ‰ํ•˜๋Š”๊ฑด๊ฐ€๋ด์•„๋‹ˆ๊ตฌ๋‚˜ ๊ทผ๋กœ๊ณ„์•ฝ์ƒ ์ œ์‹œํ•œ ๊ทผ๋ฌด๋‚ ์งœ๋งŒ๊ธฐ, ์ด๊ทผ๋กœ์‹œ๊ฐ„ / 4์ฃผ ํ‰๊ท  15์‹œ๊ฐ„ ์ด์ƒ ๊ทผ๋กœ ๊ฐ€ ์กฐ๊ฑด์ธ๊ฐ€??'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๊ทผ๋ฐ ํœด๊ฐ€ ๋๋‚˜์•ผ ํ˜„์žฅ ์‹œ์ž‘ํ•  ๋“ฏ ๊ทผ๋ฐ ํœด๊ฐ€ ๋๋‚˜์•ผ ํ˜„์žฅ ์‹œ์ž‘ํ•  ๋“ฏ ํœด๊ฐ€ ๋ชป ๋ฐ”๊ฟˆ. ์ง€์‹œ ๋‚ด๋ ค์™”์œผ 8์›”1์ผ~10์ผ ใ…‹' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ—'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'1~9์ด๊ตฐ.' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ—ํ—ํœด๊ฐ€ ๋ชป๋ฐ”๊พธ๋ฉด ์–ด์งธ??'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์ด๋ฆ„์•ผ ๋‚˜ ๋‚ผ ์ž์ ผ๊ฑฐ ๋ชปํƒˆ๋“ฏ!!! ์ด๋ฆ„์•ผ ๋‚˜ ๋‚ผ ์ž์ ผ๊ฑฐ ๋ชปํƒˆ๋“ฏ!!! ์ด๋ฒˆ์ฃผ์— ๋ฉด์ ‘์ด ์ขŒ์ขŒ์ขŒ์ž‘ ์ƒ๊ฒป์–ด!!!!' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ—ํ‚ค์•„๋ผ์จ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์›…์›… ๋ฏธ์•ˆ์“ฐ!!!ใ… ใ… ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ ๊ดœ์ฐจ๋‚˜'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์˜ค๋Š˜๋„ ๋ฉด์ ‘ ๋›ฐ์—‡์จ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๊ณ ์ƒํ–ˆ๋‹ค ใ… ใ… '๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์ €๊ท€์—ฌ!!!! ์ €๊ท€์—ฌ!!!! ์ € ์˜ค๋Š˜ ์‚ฌ๋ฌด์‹ค ํ˜ผ์ž๊ฑฐ๋Žก์—ฌ!!!' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์˜ค์˜คใ…“ ๋Œ€๋ฐ•์ตœ๊ณค๋ฐ???'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๋…œ์—์—์— ์•„์ฃผ ํ˜ผ์ž ํŒŒํ‹ฐํŒŒํ‹ฐํ• ์ง€๊ฒฝ์ž„๋‹ค!!' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๋„ˆํ•œํ…Œ ๋†€๋Ÿฌ๊ฐ€๊ณ ์‹ถ์Œ์‹ฌ์ •ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹๊ธ‰ํ•œ๊ฑฐ ๋ง๊ณ  ์ตœ๋Œ€ํ•œ ์ผ ๋‚ด์ผ๋กœ ๋ฏธ๋ค„ํ˜ผ์ž์ผ๋•Œ ์ผํ•˜๋Š”๊ฑฐ ์•„๋…€ ใ…Ž'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์ด์ œ ๋นจ๋ฆฌ์ž๊ณ  ๋นจ๋ฆฌ์ผ๋Ÿฌ๋‚˜๋Š” ์—ฐ์Šตํ•ด์•ผ์žผ... ์ด์ œ ๋นจ๋ฆฌ์ž๊ณ  ๋นจ๋ฆฌ์ผ๋Ÿฌ๋‚˜๋Š” ์—ฐ์Šตํ•ด์•ผ์žผ... 8์‹œ ์ถœ๊ทผ..ใ… ใ… ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ํ— 8์‹œ์ถœ๊ทผ!?ใ… ใ…œใ…œใ…œ๊ฒ๋‚ด ์ผ์ฐ์ถฉ๊ทผ์ด๋„คใ… '๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์˜คํ”ˆ 8์‹œ^^... ๊ทธ๋ž˜๋‘ ใ…ใ…๊ฐ์€ 5์‹œ ์ถœ๊ทผ!!!ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ ์˜คํ”ˆ๋ถ€ํ„ฐ ๊ฐ€๋ฅด์น ๊ฑด๊ฐ€๋ฐ”..' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์˜ค์˜ค์˜น์˜คํ”ˆํ•˜๋ฉด ๋ช‡์‹œ์—๋งˆ์ณ!?'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์›”๊ธ‰์ด ๋“ค์–ด์™“์–ด์—ฌ ์›”๊ธ‰์ด ๋“ค์–ด์™“์–ด์—ฌ ์ด๋ฒˆ๋‹ฌ๋ถ€ํ„ฐ ์‹ญ๋งˆ๋„Œ์ด ์˜ฌ๋ž๋„ค์—ฌ..์กฐ๊ธˆ ์˜ฌ๋ž๋„ค.ใ…Žใ…Ž,,, ์„ธ๊ธˆ ๋น ์ง€๋Š”๊ฑด ๋˜‘๊ฐ™์•„์„œ ๋‹คํ–‰..' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์ž˜ํ–ˆ๋‹ค'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์‹ญ์ด ๋ญ์•ผ์‹ญ์ด ์ด๋”ฐ ์ ์‹ฌ์‹œ๊ฐ„์— ๋ˆ๋ณด๋‚ผ๊ฒก' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์‘'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์ผ์„ ๋„ˆ๋ฌด ์ผ์ฐ๋๋‚ด์ง€๋ง๊ณ  ์ผ์„ ๋„ˆ๋ฌด ์ผ์ฐ๋๋‚ด์ง€๋ง๊ณ  ์ฒœ์ฒœํžˆํ•ด๋ด์š”ใ…  ์ผ์ฐ ๋๋‚ด๋ฉด ์ž๊พธ ๋‹ค๋ฅธ์ผ์ด ๋“ค์–ด์™€..' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์‘.. ๋‚œ ๋นจ๋ฆฌ๋๋‚ด๋†“๊ณ  ๋นจ๋ฆฌ ์‰ด๋ผ๊ณ ํ•˜๋Š”๊ฑด๋ฐ์‰ฌ๋ฉด ์ž๊พธ ์ผ์ด ๋“ค์–ด์˜ค๋”๋ผใ…‹ใ…‹ใ…‹์š”๋ น์žˆ๊ฒŒ ํ•ด์•ผ๋˜๋Š”๋ฐ ๋นจ๋ฆฌ๋นจ๋ฆฌ ์„ฑ๊ฒฉ์ƒ ์•ˆ๋ผใ… '๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์˜ค๋น ๋„ ๊ทธ๋Ÿฐ์Šคํƒ€์ผ์ธ๋ฐ ์ผ๊ฒ๋‚˜๋งŽ์ด์ค˜ใ… ใ… ใ… ใ… ' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์šฐ๋ฆฌ ์ •๋ง ํ˜ธ๊ตฌ์•„๋‹ˆ์•ผ?ใ…‹ใ…‹ใ…‹ใ…‹์ผ์€ ์šฐ๋ฆฌ๊ฐ€ ๋‹คํ•œ๋Œ€์ž–์•„ ์ฐจ์žฅ๋‹˜์ด..'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'๊ทธ๋Ÿฌ๋ฉด 8์›” ๋ง - 9์›” ์ดˆ์— ์ง์ ‘ ๊ฐ€์•ผํ•œ๋‹ค๋Š”๊ฑฐ์ž„ ?? ๊ทธ๋Ÿฌ๋ฉด 8์›” ๋ง - 9์›” ์ดˆ์— ์ง์ ‘ ๊ฐ€์•ผํ•œ๋‹ค๋Š”๊ฑฐ์ž„ ?? ์ฃค๋‚˜ ์ด ๋””์ง€ํ„ธ ์‹œ๋Œ€์— ๋ฌด์Šจ ใ…กใ…กใ…กใ…กใ…กใ…กใ…กใ…กใ…ก ๋‹ต๋‹ค๋ฐ”๊ฒŒ ์ผํ•˜๋„ค ์ฆ๋ง' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ๊ทธ๊ฑธ๋ชฐ๋ผ๊ทธ๋‹ˆ๊นŒ๊ตญ๊ฐ€์ง์€ ๊ฐฑ ์ธํ„ฐ๋„ท๋“ฑ๋ก์ธ๋ฐ์ด๋ฆ„๊ฐ€ ์ €๋ฒˆ์—” ์ง์ ‘๋“ฑ๋ก์ด์—‡๋Œ€์›”์šœ๋‚  ์ „ํ™”ํ•ด์„œ ๋ฌผ์–ด๋ณผ๊ฒŒ'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'ใ…‡ใ…‹ใ„ทใ…‹' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ์•„๋ƒ์•„๋ƒ๋™๊ธฐ๋“คํ•œํ…Œ๋‘๋ฌผ์–ด๋ดฃ๋Œ€'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์›์žฅ๋‹˜์ด ์›์žฅ๋‹˜์ด ์˜ฌ๋ ค์ค€ ์ฒญ๋…„์ฑ„์›€ ๋ชป๋ฐ›์„๊ฑฐ๊ฐ™์€๋ฐ ์˜ค๋…„๋ชป์ฑ„์›Œ;;' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ",
 "'์˜ค๋žฌ์ง€๋‚ด๊ฐ€!์—ฌ๊ธฐ๋ฃจ!' ๋ผ๊ณ  '์ฒ ์ˆ˜'์—๊ฒŒ ๋ฌผ์–ด๋ดค์„ ๋•Œ, '์ฒ ์ˆ˜'๋Š” ' ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹์กฐ์•„'๋ผ๊ณ  ๋Œ€๋‹ตํ•จ"]


def submit(openai_api_key, question, image, *agents):
    """Ask the LLM *question* and show its answer in every jury textbox.

    Draws a speech balloon above each sampled character on the courtroom
    image, then queries gpt-3.5-turbo (via ThinkGPT) with memories
    retrieved for '철수'.

    Args:
        openai_api_key: user-supplied OpenAI key; exported to the environment.
        question: the user's prompt.
        image: current courtroom image as an np.ndarray.
        *agents: current textbox values (unused; mirrors the gradio wiring).

    Returns:
        Updated image array plus one textbox update per jury member.
    """
    global CURRENT_POSITION
    balloon = Image.open(os.path.join(os.path.dirname(__file__), 'data/balloon.png')).resize((64, 64))
    composed = Image.fromarray(image).convert('RGBA')
    for k in range(TOTAL_K):
        x, y = CURRENT_POSITION[k]
        # Place the balloon one sprite-height (64 px) above the character,
        # using the balloon's own alpha channel as the paste mask.
        composed.paste(balloon, [x, y - 64], balloon)

    # ThinkGPT reads the key from the environment.
    os.environ['OPENAI_API_KEY'] = openai_api_key

    llm = ThinkGPT(model_name="gpt-3.5-turbo")
    Answer = llm.predict(question, remember=llm.remember('철수', limit = 30))

    # Every jury member displays the same answer.
    answers = [Answer] * TOTAL_K
    return np.array(composed), *[gr.Textbox.update(value=answer) for answer in answers]

theme = gr.themes.Default(primary_hue="green")

# Textbox components for the jury answers; populated inside the Blocks context
# and referenced by the submit/reset wiring below.
agents = []
with gr.Blocks(css = """#col_container { margin-left: auto; margin-right: auto;}
                #chatbot {height: 520px; overflow: auto;}""",
              theme=theme) as demo:
    gr.HTML(title)


    #gr.HTML('''<center><a href="https://huggingface.co/spaces/yuntian-deng/ChatGPT?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a>Duplicate the Space and run securely with your OpenAI API Key</center>''')
    # Main UI, hidden until the user accepts the consent form below.
    with gr.Column(elem_id = "col_container", visible=False) as main_block:


        with gr.Row():
            with gr.Column(scale=7):
                image = gr.Image(os.path.join(os.path.dirname(__file__), "data/Court.jpeg"), height = 500, width = 500)
            with gr.Column(scale=3):
                # One textbox per jury member.
                for idx in range(TOTAL_K):
                    text = gr.Textbox(label=f"Random Sample - {idx+1}")
                    agents.append(text)

        with gr.Row():
            with gr.Column(scale=3):
                openai_api_key = gr.Textbox(placeholder= "sk-", label= "Type an openai api key")

            with gr.Column(scale=4):
                question = gr.Textbox(placeholder= "Hi there!", label= "Type an input and press Enter") #t

            with gr.Column(scale=3):
                b1 = gr.Button('Submit', visible=not DISABLED)

        with gr.Row():
            with gr.Column(scale=7):
                b0 = gr.Button('Random Sample', visible=not DISABLED)
            with gr.Column(scale=3):
                b2 = gr.Button('Reset', visible=not DISABLED)


        #inputs, top_p, temperature, top_k, repetition_penalty
        with gr.Accordion("Parameters", open=False):
            top_p = gr.Slider( minimum=-0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)",)
            temperature = gr.Slider( minimum=-0, maximum=5.0, value=1.0, step=0.1, interactive=True, label="Temperature",)
            #top_k = gr.Slider( minimum=1, maximum=50, value=4, step=1, interactive=True, label="Top-k",)
            #repetition_penalty = gr.Slider( minimum=0.1, maximum=3.0, value=1.03, step=0.01, interactive=True, label="Repetition Penalty", )
            chat_counter = gr.Number(value=0, visible=False, precision=0)

    with gr.Column(elem_id = "user_consent_container") as user_consent_block:
        # Get user consent
        accept_checkbox = gr.Checkbox(visible=False)
        js = "(x) => confirm('By clicking \"OK\", I agree that my data may be published or shared.')"
        with gr.Accordion("User Consent for Data Collection, Use, and Sharing", open=True):
            gr.HTML("""
            <div>
                <p>By using our app, which is powered by OpenAI's API, you acknowledge and agree to the following terms regarding the data you provide:</p>
                <ol>
                    <li><strong>Collection:</strong> We may collect information, including the inputs you type into our app, the outputs generated by OpenAI's API, and certain technical details about your device and connection (such as browser type, operating system, and IP address) provided by your device's request headers.</li>
                    <li><strong>Use:</strong> We may use the collected data for research purposes, to improve our services, and to develop new products or services, including commercial applications, and for security purposes, such as protecting against unauthorized access and attacks.</li>
                    <li><strong>Sharing and Publication:</strong> Your data, including the technical details collected from your device's request headers, may be published, shared with third parties, or used for analysis and reporting purposes.</li>
                    <li><strong>Data Retention:</strong> We may retain your data, including the technical details collected from your device's request headers, for as long as necessary.</li>
                </ol>
                <p>By continuing to use our app, you provide your explicit consent to the collection, use, and potential sharing of your data as described above. If you do not agree with our data collection, use, and sharing practices, please do not use our app.</p>
            </div>
            """)
            accept_button = gr.Button("I Agree")

        def enable_inputs():
            # Swap the consent panel for the main UI once the user agrees.
            return user_consent_block.update(visible=False), main_block.update(visible=True)

    # Browser-side confirm() drives the hidden checkbox, which reveals the UI.
    accept_button.click(None, None, accept_checkbox, _js=js, queue=False)
    accept_checkbox.change(fn=enable_inputs, inputs=[], outputs=[user_consent_block, main_block], queue=False)

    b0.click(random_sample, inputs = [], outputs = [image])
    # BUG FIX: reset_sample returns the image AND one value per jury textbox,
    # so the textboxes must be listed as outputs too (previously only [image],
    # which mismatched the number of returned values).
    b2.click(reset_sample, inputs = [], outputs = [image, *agents])

    b1.click(submit, inputs = [openai_api_key, question, image, *agents], outputs = [image, *agents])

    # inputs.submit(reset_textbox, [], [inputs, b1], queue=False)
    # inputs.submit(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1],)  #openai_api_key
    # b1.click(reset_textbox, [], [inputs, b1], queue=False)
    # b1.click(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1],)  #openai_api_key

    demo.queue(max_size=20, concurrency_count=NUM_THREADS, api_open=False).launch(share=False)