itramb commited on
Commit
12f57a8
·
verified ·
1 Parent(s): 6287118

Upload index.html

Browse files
Files changed (1) hide show
  1. index.html +727 -18
index.html CHANGED
@@ -1,19 +1,728 @@
1
- <!doctype html>
2
- <html>
3
- <head>
4
- <meta charset="utf-8" />
5
- <meta name="viewport" content="width=device-width" />
6
- <title>My static Space</title>
7
- <link rel="stylesheet" href="style.css" />
8
- </head>
9
- <body>
10
- <div class="card">
11
- <h1>Welcome to your static Space!</h1>
12
- <p>You can modify this app directly by editing <i>index.html</i> in the Files and versions tab.</p>
13
- <p>
14
- Also don't forget to check the
15
- <a href="https://huggingface.co/docs/hub/spaces" target="_blank">Spaces documentation</a>.
16
- </p>
17
- </div>
18
- </body>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
  </html>
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>AIRI institute</title>
7
+ <link
8
+ href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css"
9
+ rel="stylesheet"
10
+ />
11
+ <meta name="description" content="AIRI is an autonomous non-profit organization bringing together researchers, scientists and data engineers engaged in breakthrough AI research." />
12
+ <meta name="author" content="AIRI" />
13
+ <link rel="stylesheet" href="styles/video.css" />
14
+ <link rel="stylesheet" href="styles/main.css" />
15
+ <link rel="stylesheet" href="styles/header.css" />
16
+ <link rel="stylesheet" href="styles/columnsSection.css" />
17
+ <link rel="stylesheet" href="styles/textSection.css" />
18
+ <link rel="stylesheet" href="styles/textSectionBorder.css" />
19
+ <link rel="stylesheet" href="styles/swiper.css" />
20
+ <link rel="stylesheet" href="styles/charts.css" />
21
+ <link rel="stylesheet" href="styles/numberSection.css" />
22
+ <link rel="stylesheet" href="styles/table.css" />
23
+ <link rel="stylesheet" href="styles/lightbox.css" />
24
+ <link rel="stylesheet" href="styles/dropdown.css" />
25
+ <link rel="stylesheet" href="styles/swiperOnButton.css" />
26
+ <link rel="stylesheet" href="styles/codeSection.css" />
27
+ <link rel="stylesheet" href="styles/sidebar.css" />
28
+ <link rel="stylesheet" href="styles/verticalLine.css" />
29
+ <link rel="stylesheet" href="styles/bibTex.css" />
30
+ <link rel="stylesheet" href="styles/footer.css" />
31
+ <link rel="stylesheet" href="styles/beforeAfter.css" />
32
+ <link rel="preconnect" href="https://fonts.googleapis.com" />
33
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
34
+ <link
35
+ href="https://fonts.googleapis.com/css2?family=Inter:ital,opsz,wght@0,14..32,100..900;1,14..32,100..900&family=Montserrat:ital,wght@0,100..900;1,100..900&family=PT+Sans&family=Roboto:wght@100..900&display=swap"
36
+ rel="stylesheet"
37
+ />
38
+ <link
39
+ rel="stylesheet"
40
+ href="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.css"
41
+ />
42
+ </head>
43
+ <body>
44
+ <header>
45
+ <section class="header__container" id="header">
46
+ <span class="header-gradient"></span>
47
+ <div class="header__participants-container">
48
+ <img
49
+ class="header__participants-logo"
50
+ src="public/airi_logo_black.svg"
51
+ alt="AIRI"
52
+ />
53
+ <img
54
+ class="header__participants-logo"
55
+ src="public/mtuci-logo.svg"
56
+ alt="MTUCI"
57
+ />
58
+ <!-- <img
59
+ class="header__participants-logo"
60
+ src="public/hse-logo.svg"
61
+ alt="HSE"
62
+ />
63
+ <img
64
+ class="header__participants-logo"
65
+ src="public/mirea-logo.svg"
66
+ alt="MIREA"
67
+ />
68
+ <img
69
+ class="header__participants-logo"
70
+ src="public/MISIS-logo.svg"
71
+ alt="MISIS"
72
+ />
73
+ <img
74
+ class="header__participants-logo"
75
+ src="public/mei-logo.svg"
76
+ alt="MEI"
77
+ />
78
+ <img
79
+ class="header__participants-logo"
80
+ src="public/mirea-logo.svg"
81
+ alt="MIREA"
82
+ />
83
+ <img
84
+ class="header__participants-logo"
85
+ src="public/MISIS-logo.svg"
86
+ alt="MISIS"
87
+ />
88
+ <img
89
+ class="header__participants-logo"
90
+ src="public/mei-logo.svg"
91
+ alt="MEI"
92
+ /> -->
93
+ </div>
94
+ <h1 class="header-projectName">
95
+ Fact-Checking the Output of Large Language Models via Token-Level
96
+ Uncertainty Quantification.
97
+ </h1>
98
+ <div class="header__link-container">
99
+ <div class="linkContainer__authors">
100
+ <a href="">Ekaterina Fadeeva<sup>3,4</sup></a>
101
+ <a href="">Aleksandr Rubashevskii<sup>1,3</sup></a>
102
+ <a href="">Artem Shelmanov<sup>1</sup></a>
103
+ <a href="">Sergey Petrakov<sup>3</sup></a>
104
+ <a href="">Haonan Li<sup>1</sup></a>
105
+ <a href="">Hamdy Mubarak<sup>7</sup></a>
106
+ <a href="">Evgenii Tsymbalov<sup>8</sup></a>
107
+ <a href="">Gleb Kuzmin<sup>2,5</sup></a>
108
+ <a href="">Alexander Panchenko<sup>2,3</sup></a>
109
+ <a href="">Timothy Baldwin<sup>1,6</sup></a>
110
+ <a href="">Preslav Nakov<sup>1</sup></a>
111
+ <a href="">Maxim Panov<sup>1</sup></a>
112
+ </div>
113
+ <div class="linkContainer__institutes">
114
+ <a href=""><sup>1</sup>MBZUAI</a>
115
+ <a href=""><sup>2</sup>AIRI</a>
116
+ <a href=""
117
+ ><sup>3</sup>Center for Artificial Intelligence Technology</a
118
+ >
119
+ <a href=""><sup>4</sup>HSE University</a>
120
+ <a href=""><sup>5</sup>FRC CSC RAS</a>
121
+ <a href=""><sup>6</sup>The University of Melbourne</a>
122
+ <a href=""><sup>7</sup>QCRI</a>
123
+ <a href=""><sup>8</sup>Independent Researcher</a>
124
+ </div>
125
+ </div>
126
+
127
+ <div class="header__button-container">
128
+ <button class="header__button">
129
+ <img
130
+ class="header__button-logo"
131
+ src="public/paper-svgrepo-com.svg"
132
+ alt=""
133
+ />
134
+ <p class="header__button-text">Paper</p>
135
+ </button>
136
+ <button class="header__button">
137
+ <img
138
+ class="header__button-logo"
139
+ src="public/GithubOutlined.svg"
140
+ alt=""
141
+ />
142
+ <p class="header__button-text">Github</p>
143
+ </button>
144
+ <button class="header__button">
145
+ <img
146
+ class="header__button-logo"
147
+ src="public/SearchOutlined.svg"
148
+ alt=""
149
+ />
150
+ <p class="header__button-text">Search</p>
151
+ </button>
152
+ </div>
153
+ </section>
154
+ </header>
155
+ <div class="body-container">
156
+ <aside class="sidebar-section">
157
+ <div class="sidebar">
158
+ <ul class="nav flex-column">
159
+ <li class="nav-item" data-target="#header">
160
+ <a class="nav-link" href="#header">Name</a>
161
+ <span class="indicator"></span>
162
+ </li>
163
+ <li class="nav-item" data-target="#text-section">
164
+ <a class="nav-link" href="#text-section">Text</a>
165
+ <span class="indicator"></span>
166
+ </li>
167
+ <li class="nav-item" data-target="#video-section">
168
+ <a class="nav-link" href="#video-section">Video</a>
169
+ <span class="indicator"></span>
170
+ </li>
171
+ <li class="nav-item" data-target="#text-section-border">
172
+ <a class="nav-link" href="#text-section-border">Text border</a>
173
+ <span class="indicator"></span>
174
+ </li>
175
+ <li class="nav-item" data-target="#swiper-section">
176
+ <a class="nav-link" href="#swiper-section">Swiper</a>
177
+ <span class="indicator"></span>
178
+ </li>
179
+ <li class="nav-item" data-target="#lighbox-section">
180
+ <a class="nav-link" href="#lighbox-section">Lightbox</a>
181
+ <span class="indicator"></span>
182
+ </li>
183
+ <li class="nav-item" data-target="#before-after-section">
184
+ <a class="nav-link" href="#before-after-section">Before-after</a>
185
+ <span class="indicator"></span>
186
+ </li>
187
+
188
+ <li class="nav-item" data-target="#swiper-on-button-section">
189
+ <a class="nav-link" href="#swiper-on-button-section"
190
+ >Swiper diagramms</a
191
+ >
192
+ <span class="indicator"></span>
193
+ </li>
194
+
195
+ <li class="nav-item" data-target="#diagrams-section">
196
+ <a class="nav-link" href="#diagrams-section">Diagramms</a>
197
+ <span class="indicator"></span>
198
+ </li>
199
+ <li class="nav-item ms-3" data-target="#BarChart">
200
+ <a class="nav-link small" href="#BarChart">Bar chart</a>
201
+ <span class="indicator"></span>
202
+ </li>
203
+ <li class="nav-item ms-3" data-target="#LineChart">
204
+ <a class="nav-link small" href="#LineChart">Line chart</a>
205
+ <span class="indicator"></span>
206
+ </li>
207
+ <li class="nav-item ms-3" data-target="#PieChart">
208
+ <a class="nav-link small" href="#PieChart">Pie chart</a>
209
+ <span class="indicator"></span>
210
+ </li>
211
+
212
+ <li class="nav-item" data-target="#table-section">
213
+ <a class="nav-link" href="#table-section">Table</a>
214
+ <span class="indicator"></span>
215
+ </li>
216
+ <li class="nav-item" data-target="#numbers-section">
217
+ <a class="nav-link" href="#numbers-section">Number of claims</a>
218
+ <span class="indicator"></span>
219
+ </li>
220
+ <li class="nav-item" data-target="#columns-section">
221
+ <a class="nav-link" href="#columns-section">Columns</a>
222
+ <span class="indicator"></span>
223
+ </li>
224
+ <li class="nav-item" data-target="#dropdown-section">
225
+ <a class="nav-link" href="#dropdown-section">Dropdown</a>
226
+ <span class="indicator"></span>
227
+ </li>
228
+ <li class="nav-item" data-target="#code-section">
229
+ <a class="nav-link" href="#code-section">Code</a>
230
+ <span class="indicator"></span>
231
+ </li>
232
+ <li class="nav-item" data-target="#bibTeX-section">
233
+ <a class="nav-link" href="#bibTeX-section">bibTeX</a>
234
+ <span class="indicator"></span>
235
+ </li>
236
+ </ul>
237
+ </div>
238
+ </aside>
239
+ <section class="text-section" id="text-section">
240
+ <div class="text-section__container">
241
+ <h2>Abstract</h2>
242
+ <p>Large language models (LLMs) are notorious for hallucinating, i. e., producing erroneous claims in&nbsp;their output. Such hallucinations can be&nbsp;dangerous, as&nbsp;occasional factual inaccuracies in&nbsp;the generated text might be&nbsp;obscured by&nbsp;the rest of&nbsp;the output being generally factually correct, making it&nbsp;extremely hard for the users to&nbsp;spot them. Current services that leverage LLMs usually do&nbsp;not provide any means for detecting unreliable generations. Here, we&nbsp;aim to&nbsp;bridge this gap. In&nbsp;particular, we&nbsp;propose a&nbsp;novel <nobr>fact-checking</nobr> and hallucination detection pipeline based on&nbsp;<nobr>token-level</nobr> uncertainty quantification. Uncertainty scores leverage information encapsulated in&nbsp;the output of&nbsp;a&nbsp;neural network or&nbsp;its layers to&nbsp;detect unreliable predictions, and we&nbsp;show that they can be&nbsp;used to&nbsp;<nobr>fact-check</nobr> the atomic claims in&nbsp;the LLM output. Moreover, we&nbsp;present a&nbsp;novel <nobr>token-level</nobr> uncertainty quantification method that removes the impact of&nbsp;uncertainty about what claim to&nbsp;generate on&nbsp;the current step and what surface form to&nbsp;use. Our method Claim Conditioned Probability (CCP) measures only the uncertainty of&nbsp;a&nbsp;particular claim value expressed by&nbsp;the model. Experiments on&nbsp;the task of&nbsp;biography generation demonstrate strong improvements for CCP compared to&nbsp;the baselines for seven LLMs and four languages. Human evaluation reveals that the <nobr>fact-checking</nobr> pipeline based on&nbsp;uncertainty quantification is&nbsp;competitive with a&nbsp;<nobr>fact-checking</nobr> tool that leverages external knowledge.</p>
243
+ </div>
244
+ </section>
245
+ <section class="video-section" id="video-section">
246
+ <div class="video-container">
247
+ <h2>Video</h2>
248
+ <div class="video-wrapper">
249
+ <iframe
250
+ class="iframe"
251
+ src="https://rutube.ru/play/embed/c6cc4d620b1d4338901770a44b3e82f4/"
252
+ frameborder="0"
253
+ allow="clipboard-write; autoplay"
254
+ webkitAllowFullScreen
255
+ mozallowfullscreen
256
+ allowfullscreen
257
+ ></iframe>
258
+ </div>
259
+ </div>
260
+ </section>
261
+ <section class="text-section-border" id="text-section-border">
262
+ <div class="text-section-border__container">
263
+ <h2>Introduction</h2>
264
+ <p> Large language models (LLMs) have become a&nbsp;ubiquitous and versatile tool for addressing a&nbsp;variety of&nbsp;natural language processing (NLP) tasks. People use these models for tasks including information search Sun et&nbsp;al. (2023b), to&nbsp;ask medical questions Thirunavukarasu et&nbsp;al. (2023), or&nbsp;to&nbsp;generate new content Sun et&nbsp;al. (2023a). Recently, there has been a&nbsp;notable shift in&nbsp;user behavior, indicating an&nbsp;increasing reliance on&nbsp;and trust in&nbsp;LLMs as&nbsp;primary information sources, often surpassing traditional channels. However, a&nbsp;significant challenge with the spread of&nbsp;these models is&nbsp;their tendency to&nbsp;produce &laquo;hallucinations&raquo;, i.e., factually incorrect generations that contain misleading information Bang et&nbsp;al. (2023); Dale et&nbsp;al. (2023). This is&nbsp;a&nbsp;<nobr>side-effect</nobr> of&nbsp;the way modern LLMs are designed and trained Kalai and Vempala (2023). </p> <p> LLM hallucinations are a&nbsp;major concern because the deceptive content at&nbsp;the surface level can be&nbsp;highly coherent and persuasive. Common examples include the creation of&nbsp;fictitious biographies or&nbsp;the assertion of&nbsp;unfounded claims. The danger is&nbsp;that a&nbsp;few occasional false claims might be&nbsp;easily obscured by&nbsp;a&nbsp;large number of&nbsp;factual statements, making it&nbsp;extremely hard for people to&nbsp;spot them. As&nbsp;hallucinations in&nbsp;LLM outputs are hard to&nbsp;eliminate completely, users of&nbsp;such systems could be&nbsp;informed via highlighting some potential caveats in&nbsp;the text, and this is&nbsp;where our approach can help. </p> <p> <nobr>Fact-checking</nobr> is&nbsp;a&nbsp;research direction that addresses this problem. It&nbsp;is&nbsp;usually approached using complex systems that leverage external knowledge sources Guo et&nbsp;al. (2022); Nakov et&nbsp;al. (2021); Wadden et&nbsp;al. (2020). 
This introduces problems related to&nbsp;the incomplete nature of&nbsp;such sources and notable overhead in&nbsp;terms of&nbsp;storing the knowledge. We&nbsp;argue that information about whether a&nbsp;generation is&nbsp;a&nbsp;hallucination is&nbsp;encapsulated in&nbsp;the model output itself, and can be&nbsp;extracted using uncertainty quantification (UQ) Gal et&nbsp;al. (2016); Kotelevskii et&nbsp;al. (2022); Vazhentsev et&nbsp;al. (2022, 2023a). This avoids implementing complex and expensive <nobr>fact-checking</nobr> systems that require additional computational overhead and rely on&nbsp;external resources. </p> <p> Prior work has mainly focused on&nbsp;quantification of&nbsp;uncertainty for the whole generated text and been mostly limited to&nbsp;tasks such as&nbsp;machine translation Malinin and Gales (2020), question answering Kuhn et&nbsp;al. (2023), and text summarization van der Poel et&nbsp;al. (2022). However, the need for an&nbsp;uncertainty score for only a&nbsp;part of&nbsp;the generation substantially complicates the problem. We&nbsp;approach it&nbsp;by&nbsp;leveraging <nobr>token-level</nobr> uncertainty scores and aggregating them into <nobr>claim-level</nobr> scores. Moreover, we&nbsp;introduce a&nbsp;new <nobr>token-level</nobr> uncertainty score, namely <nobr>claim-conditioned</nobr> probability (CCP), which demonstrates confident improvements over several baselines for seven LLMs and four languages. </p> <p> To&nbsp;the best of&nbsp;our knowledge, there is&nbsp;no&nbsp;previous work that has investigated the quality of&nbsp;<nobr>claim-level</nobr> UQ&nbsp;techniques for LLM generation. Therefore, for this purpose, we&nbsp;construct a&nbsp;novel benchmark based on&nbsp;<nobr>fact-checking</nobr> of&nbsp;biographies of&nbsp;individuals generated using a&nbsp;range of&nbsp;LLMs. 
Note that different LLMs produce different outputs, which generally have higher variability than, e.g., outputs in&nbsp;such tasks as&nbsp;machine translation or&nbsp;question answering. Therefore, we&nbsp;compare the predictions and uncertainty scores to&nbsp;the results of&nbsp;an&nbsp;automatic external <nobr>fact-checking</nobr> system FactScore Min et&nbsp;al. (2023). Human evaluation verifies that our constructed benchmark based on&nbsp;FactScore can adequately evaluate the performance of&nbsp;the uncertainty scores. </p> <p>Our contributions are as&nbsp;follows:</p> <ul> <li> We&nbsp;propose a&nbsp;novel framework for <nobr>fact-checking</nobr> LLM generations using <nobr>token-level</nobr> uncertainty quantification. We&nbsp;provide a&nbsp;procedure for efficiently estimating the uncertainty of&nbsp;atomic claims generated by&nbsp;a&nbsp;<nobr>white-box</nobr> model and highlighting potentially deceptive fragments by&nbsp;mapping them back to&nbsp;the original response. </li> <li> We&nbsp;propose a&nbsp;novel method for <nobr>token-level</nobr> uncertainty quantification that outperforms baselines and can be&nbsp;used as&nbsp;a&nbsp;<nobr>plug-in</nobr> a&nbsp;<nobr>fact-checking</nobr> framework. </li> <li> We&nbsp;design a&nbsp;novel approach to&nbsp;evaluation of&nbsp;<nobr>token-level</nobr> UQ&nbsp;methods for <nobr>white-box</nobr> LLMs based on&nbsp;<nobr>fact-checking</nobr>, which can be&nbsp;applied to&nbsp;other <nobr>white-box</nobr> LLMs. </li> <li> We&nbsp;provide an&nbsp;empirical and ablation analysis of&nbsp;the method for <nobr>fact-checking</nobr> of&nbsp;LLM generations, and find that the uncertainty scores we&nbsp;produce can help to&nbsp;spot claims with factual errors for seven LLMs over four languages: English, Chinese, Arabic, and Russian. </li> <li> The method is&nbsp;implemented as&nbsp;a&nbsp;part of&nbsp;the <nobr>LM-Polygraph</nobr> library Fadeeva et&nbsp;al. (2023). 
All the code and data for experiments is&nbsp;publicly available1. </li>
265
+ </ul>
266
+ </div>
267
+ </section>
268
+ <section class="swiper-section" id="swiper-section">
269
+ <div class="swiper-container">
270
+ <h2>Swiper</h2>
271
+ <div class="swiper mySwiper">
272
+ <div class="swiper-wrapper">
273
+ <div class="swiper-slide"><img src="public/flower1.jpg" alt="" /></div>
274
+ <div class="swiper-slide"><img src="public/flower2.jpg" alt="" /></div>
275
+ <div class="swiper-slide"><img src="public/flower3.jpg" alt="" /></div>
276
+ <div class="swiper-slide"><img src="public/flower4.jpg" alt="" /></div>
277
+ <div class="swiper-slide"><img src="public/flower5.jpg" alt="" /></div>
278
+ </div>
279
+ <div class="swiper-button-next"></div>
280
+ <div class="swiper-button-prev"></div>
281
+ </div>
282
+ <div class="swiper-lightbox" id="swiper-lightbox">
283
+ <span class="close-btn" id="swiper-lightbox__close">&times;</span>
284
+ <img class="swiper-lightbox-content" id="swiper-lightbox-img" />
285
+ </div>
286
+ </section>
287
+ <section class="lighbox-section" id="lighbox-section">
288
+ <div class="lighbox-container">
289
+ <h2>Lightbox</h2>
290
+
291
+ <div class="lighbox-row">
292
+ <img src="public/x5.png" alt="" class="thumbnail" id="openLightbox1" />
293
+
294
+ <img src="public/x4.jpg" alt="" class="thumbnail" id="openLightbox2" />
295
+
296
+ <img src="public/x6.png" alt="" class="thumbnail" id="openLightbox3" />
297
+
298
+ </div>
299
+
300
+ <div class="lightbox" id="lightbox1">
301
+ <span class="close-btn" id="closeLightbox1">&times;</span>
302
+ <img class="lightbox-content" id="lightbox-img1" />
303
+ </div>
304
+
305
+ <div class="lightbox" id="lightbox2">
306
+ <span class="close-btn" id="closeLightbox2">&times;</span>
307
+ <img class="lightbox-content" id="lightbox-img2" />
308
+ </div>
309
+
310
+ <div class="lightbox" id="lightbox3">
311
+ <span class="close-btn" id="closeLightbox3">&times;</span>
312
+ <img class="lightbox-content" id="lightbox-img3" />
313
+ </div>
314
+ </div>
315
+ </section>
316
+
317
+ <section class="before-after-section" id="before-after-section">
318
+ <h2>Before-after</h2>
319
+ <div class="compare-container">
320
+ <div class="compare-images">
321
+ <img
322
+ src="public/flower1.jpg"
323
+ alt="Before"
324
+ class="compare-img before-img"
325
+ />
326
+
327
+ <img
328
+ src="public/flower2.jpg"
329
+ alt="After"
330
+ class="compare-img after-img"
331
+ />
332
+
333
+ <div class="slider-handle"></div>
334
+ </div>
335
+ </div>
336
+ </section>
337
+ <section class="swiper-on-button-section" id="swiper-on-button-section">
338
+ <div class="swiper-on-button__container">
339
+ <h2>Swiper with buttons</h2>
340
+ <div class="swiper-buttons">
341
+ <button class="swiper-button active" data-slide-to="0">
342
+ Slide 1
343
+ </button>
344
+ <button class="swiper-button" data-slide-to="1">Slide 2</button>
345
+ <button class="swiper-button" data-slide-to="2">Slide 3</button>
346
+ </div>
347
+
348
+ <div class="swiper my-swiper-on-button">
349
+ <div class="swiper-wrapper">
350
+ <div class="swiper-slide">
351
+ <canvas
352
+ class="myChart-slide swiper-slide"
353
+ id="BarChart-slider"
354
+ ></canvas>
355
+ </div>
356
+ <div class="swiper-slide">
357
+ <canvas
358
+ class="myChart-slide swiper-slide"
359
+ id="LineChart-slider"
360
+ ></canvas>
361
+ </div>
362
+ <div class="swiper-slide">
363
+ <canvas
364
+ class="myChart-slide swiper-slide"
365
+ id="PieChart-slider"
366
+ ></canvas>
367
+ </div>
368
+ </div>
369
+ </div>
370
+ </div>
371
+ </section>
372
+
373
+ <section class="diagrams-section" id="diagrams-section">
374
+ <div class="diagrams-section__container">
375
+ <h2>Charts</h2>
376
+ <canvas class="myChart" id="BarChart"></canvas>
377
+ <p class="caption">
378
+ The comparison of token-level uncertainty quantification methods in
379
+ terms of ROC-AUC scores, measured for Chinese dataset. The results
380
+ are split into bins when considering only facts from the first 2, 5,
381
+ and all sentences.
382
+ </p>
383
+ <canvas class="myChart" id="LineChart"></canvas>
384
+ <p class="caption">
385
+ The comparison of token-level uncertainty quantification methods in
386
+ terms of ROC-AUC scores, measured for Chinese dataset. The results
387
+ are split into bins when considering only facts from the first 2, 5,
388
+ and all sentences.
389
+ </p>
390
+ <canvas class="myChart" id="PieChart"></canvas>
391
+ <p class="caption">
392
+ The comparison of token-level uncertainty quantification methods in
393
+ terms of ROC-AUC scores, measured for Chinese dataset. The results
394
+ are split into bins when considering only facts from the first 2, 5,
395
+ and all sentences.
396
+ </p>
397
+ </div>
398
+ </section>
399
+
400
+ <section class="table-section" id="table-section">
401
+ <h2>
402
+ ROC-AUC of claim-level UQ methods with manual annotation as the ground
403
+ truth.
404
+ </h2>
405
+ <div class="table-section__container">
406
+ <table>
407
+ <thead>
408
+ <th>Model</th>
409
+ <th>Yi 6b, Chinese</th>
410
+ <th>Jais 13b, Arabic</th>
411
+ <th>GPT-4, Arabic</th>
412
+ <th>Vikhr 7b, Russian</th>
413
+ </thead>
414
+ <tbody>
415
+ <tr>
416
+ <td>CCP (ours)</td>
417
+ <td>0.64 ± 0.03</td>
418
+ <td>0.66 ± 0.02</td>
419
+ <td>0.56 ± 0.05</td>
420
+ <td>0.68 ± 0.04</td>
421
+ </tr>
422
+ <tr>
423
+ <td>Maximum Prob.</td>
424
+ <td>0.52 ± 0.03</td>
425
+ <td>0.59 ± 0.02</td>
426
+ <td>0.55 ± 0.08</td>
427
+ <td>0.63 ± 0.04</td>
428
+ </tr>
429
+ <tr>
430
+ <td>Perplexity</td>
431
+ <td>0.51 ± 0.04</td>
432
+ <td>0.56 ± 0.02</td>
433
+ <td>0.54 ± 0.08</td>
434
+ <td>0.58 ± 0.04</td>
435
+ </tr>
436
+ <tr>
437
+ <td>Token Entropy</td>
438
+ <td>0.51 ± 0.04</td>
439
+ <td>0.56 ± 0.02</td>
440
+ <td>0.54 ± 0.08</td>
441
+ <td>0.58 ± 0.04</td>
442
+ </tr>
443
+ <tr>
444
+ <td>P(True)</td>
445
+ <td>0.52 ± 0.03</td>
446
+ <td>0.59 ± 0.02</td>
447
+ <td>0.55 ± 0.08</td>
448
+ <td>0.63 ± 0.04</td>
449
+ </tr>
450
+ </tbody>
451
+ </table>
452
+ </div>
453
+ </section>
454
+ <section class="numbers-section" id="numbers-section">
455
+ <div class="numbers-section__container">
456
+ <h2>Number of claims</h2>
457
+ <div class="numbers__container">
458
+ <div class="number__card">
459
+ <h1>100</h1>
460
+ <p>Vicuna 13b, English</p>
461
+ </div>
462
+ <div class="number__card">
463
+ <h1>1,603</h1>
464
+ <p>Yi 6b, Chinese</p>
465
+ </div>
466
+ <div class="number__card">
467
+ <h1>200</h1>
468
+ <p>GPT-4, Arabic</p>
469
+ </div>
470
+ <div class="number__card">
471
+ <h1>146</h1>
472
+ <p>Vikhr 7b, Russian</p>
473
+ </div>
474
+ </div>
475
+ </div>
476
+ </section>
477
+ <section class="columns-section" id="columns-section">
478
+ <div class="columns-section__container">
479
+ <h2>Datasets and Statistics</h2>
480
+
481
+ <div class="row g-6 justify-content-between">
482
+ <div class="col-md-6 p-2">
483
+ <div class="column">
484
+ <p>For Arabic, using <nobr>GPT-4</nobr>, we&nbsp;generate 100 biographies of&nbsp;people randomly selected from the list of&nbsp;the most visited websites in&nbsp;Arabic Wikipedia. The used Arabic prompt is&nbsp;the translation of: &laquo;Tell me&nbsp;the biography of&nbsp;{person name}&raquo;. To&nbsp;extract claims, we&nbsp;prompt <nobr>GPT-4</nobr> in&nbsp;the following way: &laquo;Convert the following biography into Arabic atomic factual claims that can be&nbsp;verified, one claim per line. Biography is: {biography}&raquo;. Arabic biographies and claims are translated into English using Google Translate. It&nbsp;is&nbsp;worth mentioning that almost <nobr>one-third</nobr> of&nbsp;the names in&nbsp;the list of&nbsp;person names are foreign</p>
485
+ </div>
486
+ </div>
487
+ <div class="col-md-6 p-2">
488
+ <div class="column">
489
+ <p>For Jais 13b experiments, we&nbsp;use the same prompts used for <nobr>GPT-4</nobr>. We&nbsp;notice that the biographies generated by&nbsp;Jais 13b are much shorter than the ones generated by&nbsp;<nobr>GPT-4</nobr> (almost <nobr>half-length</nobr>). Similarly, we&nbsp;use <nobr>GPT-4</nobr> to&nbsp;extract claims from the generated biographies. On&nbsp;average, biographies generated by&nbsp;Jais 13b have nine claims. Jais 13b generates empty biographies for seven names (out of&nbsp;100) with response messages like: &laquo;I&nbsp;am&nbsp;sorry! I&nbsp;cannot provide information about {name}&raquo;, or&nbsp;&laquo;What do&nbsp;you want to&nbsp;know exactly?&raquo;. Two random claims from each biography are verified manually (total = 186 claims).</p>
490
+ </div>
491
+ </div>
492
+ </div>
493
+ <div class="row g-6 justify-content-between">
494
+ <div class="col-md-4 p-2">
495
+ <div class="column">
496
+ <p>Since FactScore only supports English, for Arabic, Chinese, and Russian, we&nbsp;generate biographies of&nbsp;<nobr>well-known</nobr> people and annotate them only manually. We&nbsp;also manually annotate English claims generated by&nbsp;Vicuna 13b. The statistics for annotated datasets are presented in&nbsp;Table 7.</p>
497
+ </div>
498
+ </div>
499
+ <div class="col-md-4 p-2">
500
+ <div class="column">
501
+ <p>For Chinese, we&nbsp;first prompt ChatGPT to&nbsp;generate a&nbsp;list of&nbsp;famous people. Then use the same way as&nbsp;we&nbsp;have done Arabic, but change the prompt to&nbsp;Chinese, to&nbsp;biographies and claims. We&nbsp;use Yi&nbsp;6b to&nbsp;generate texts <nobr>GPT-4</nobr> to&nbsp;split them into atomic claims.</p>
502
+ </div>
503
+ </div>
504
+ <div class="col-md-4 p-2">
505
+ <div class="column">
506
+ <p>For Russian, we&nbsp;conduct a&nbsp;similar approach, prompting to&nbsp;generate a&nbsp;list of&nbsp;100 famous people and checking result to&nbsp;obtain representative personalities from different areas such as&nbsp;science, sport, literature, art, activity, cinematography, heroes, etc.</p>
507
+ </div>
508
+ </div>
509
+ </div>
510
+ </div>
511
+ </section>
512
+ <section class="dropdown-section" id="dropdown-section">
513
+ <div class="dropdown-section__container">
514
+ <h2>Ablation Studies</h2>
515
+
516
+ <div class="dropdown__row">
517
+ <div class="dropdown" data-dropdown>
518
+ <button class="dropdown__button">
519
+ Aggregation of C⁢C⁢Pword for obtaining C⁢C⁢Pclaim.
520
+ </button>
521
+ <div class="dropdown__content">
522
+ <div class="withline__container">
523
+ <span class="vertical-line blue-line"></span>
524
+ <div class="dropdown__item" data-value="opt1-1">
525
+ Besides the product of probabilities, we also tried the
526
+ normalized product, minimum, and average probability. All
527
+ these approaches perform slightly worse than the product
528
+ (see Table 9).
529
+ </div>
530
+ </div>
531
+ </div>
532
+ </div>
533
+ <div class="dropdown" data-dropdown>
534
+ <button class="dropdown__button">NLI model</button>
535
+ <div class="dropdown__content">
536
+ <div class="withline__container">
537
+ <span class="vertical-line red-line"></span>
538
+ <div class="dropdown__item" data-value="opt2-1">
539
+ We investigate the influence of the specific NLI model on
540
+ the performance of CCP. Table 10 shows that CCP’s
541
+ effectiveness is not critically dependent on the complexity
542
+ of the NLI model employed. Notably, even a relatively small
543
+ model with 22M parameters maintains strong performance
544
+ without any degradation.
545
+ </div>
546
+ </div>
547
+ </div>
548
+ </div>
549
+
550
+ <div class="dropdown" data-dropdown>
551
+ <button class="dropdown__button">NLI context</button>
552
+ <div class="dropdown__content">
553
+ <div class="withline__container">
554
+ <span class="vertical-line green-line"></span>
555
+ <div class="dropdown__item" data-value="opt3-1">
556
+ We analyze what context is sufficient for NLI in CCP (Table
557
+ 11). In addition to the standard variant in CCP, where we
558
+ keep the claim that precedes the word in question, we
559
+ experiment with a single target word without context and the
560
+ whole sentence that precedes the target word. All variants
561
+ demonstrate lower performance. No context results in a drop
562
+ of 0.02 ROC-AUC, and longer contexts – of more than 0.07.
563
+ </div>
564
+ </div>
565
+ </div>
566
+ </div>
567
+ </div>
568
+ </div>
569
+ </section>
570
+ <section class="code-section" id="code-section">
571
+ <div class="code-section__container">
572
+ <h2>Code</h2>
573
+ <pre><code><div class="code__container" ><p class="code">items.forEach((item) => {
574
+ item.addEventListener('click', function () {
575
+ const value = this.textContent
576
+ selectedValue.textContent = value
577
+ dropdown.classList.remove('open')
578
+ })
579
+ })</p></div></code></pre>
580
+ </div>
581
+ </section>
582
+
583
+ <section class="bibTeX-section" id="bibTeX-section">
584
+ <div class="bibTeX-section__container">
585
+ <h2>BibTeX</h2>
586
+ <pre><code><div class="bibTeX__container" ><p class="bibTeX">@misc{salnikov2025geopolitical,
587
+ title={Geopolitical biases in LLMs: what are the "good" and the "bad" countries according to contemporary language models},
588
+ author={Mikhail Salnikov and Dmitrii Korzh and Ivan Lazichny and Elvir Karimov and Artyom Iudin and Ivan Oseledets and Oleg Y. Rogov and Alexander Panchenko and Natalia Loukachevitch and Elena Tutubalina},
589
+ year={2025},
590
+ eprint={2506.06751},
591
+ archivePrefix={arXiv},
592
+ primaryClass={cs.CL},
593
+ url={https://arxiv.org/abs/2506.06751}
594
+ }</p></div></code></pre>
595
+ <button class="copy-button copy-button--active">
596
+ <p class="active-content">Copy</p>
597
+ <img src="public/ok.svg" class="inactive-content" alt="" />
598
+ </button>
599
+ </div>
600
+ </section>
601
+ </div>
602
+ <footer id="footer">
603
+ <section class="footer-section">
604
+ <div class="footer-section__container">
605
+ <div class="footer__information">
606
+ <div class="airi-logo-section">
607
+ <img
608
+ class="participants-logo"
609
+ src="public/airi_logo_white.svg"
610
+ alt="AIRI"
611
+ />
612
+ <p>This page was built using the AIRI Institute template</p>
613
+ </div>
614
+
615
+ <div class="footer__mails">
616
+
617
+ <div class="mail__contanct">
618
+ <a href="mailto:partner@airi.net" class="mail" type="email"
619
+ >partner@airi.net</a
620
+ >
621
+ <p class="mail__description">
622
+ For scientific cooperation and partnership
623
+ </p>
624
+ </div>
625
+ <div class="mail__contanct">
626
+ <a href="mailto:people@airi.net" class="mail" type="email"
627
+ >people@airi.net</a
628
+ >
629
+ <p class="mail__description">
630
+ For any questions connected with employees and employment
631
+ </p>
632
+ </div>
633
+ </div>
634
+ <div class="footer__social-media">
635
+ <p class="gray">Join AIRI</p>
636
+ <div class="social-media__row">
637
+ <svg
638
+ class="media-logo"
639
+ width="40"
640
+ height="40"
641
+ viewBox="0 0 50 50"
642
+ fill="none"
643
+ xmlns="http://www.w3.org/2000/svg"
644
+ >
645
+ <path
646
+ d="M25 6.10352e-05C11.193 6.10352e-05 0 11.1931 0 25.0001C0 38.807 11.193 50.0001 25 50.0001C38.807 50.0001 50 38.807 50 25.0001C50 11.1931 38.807 6.10352e-05 25 6.10352e-05ZM18.3059 36.6565H12.2754V18.5148H18.3059V36.6565V36.6565ZM15.2909 16.0393H15.2503C13.2275 16.0393 11.9165 14.6446 11.9165 12.9031C11.9165 11.1231 13.2665 9.76898 15.3304 9.76898C17.3943 9.76898 18.6633 11.1226 18.7029 12.9031C18.7029 14.6451 17.3949 16.0393 15.2909 16.0393ZM40.0547 36.6565H34.0237V26.9504C34.0237 24.5124 33.1512 22.8485 30.9686 22.8485C29.3022 22.8485 28.3106 23.9699 27.874 25.0548C27.7149 25.4427 27.6738 25.9826 27.6738 26.525V36.6565H21.6408C21.6408 36.6565 21.7219 20.2178 21.6408 18.5148H27.6728V21.0862C28.4743 19.8517 29.9055 18.0885 33.1091 18.0885C37.0782 18.0885 40.0536 20.6811 40.0536 26.2538V36.6565H40.0547Z"
647
+ ></path>
648
+ </svg>
649
+
650
+ <svg
651
+ class="media-logo"
652
+ xmlns="http://www.w3.org/2000/svg"
653
+ width="40"
654
+ height="40"
655
+ fill="none"
656
+ >
657
+ <path
658
+ fill-rule="evenodd"
659
+ d="M20 40c11.046 0 20-8.954 20-20S31.046 0 20 0 0 8.954 0 20s8.954 20 20 20Zm9.754-30H26.38l-5.622 6.391L15.957 10H9l8.284 10.922L9.44 30h3.372l6.085-6.996L24.212 30H31l-8.65-11.513L29.754 10ZM27.07 28.007H25.2L12.966 11.92h2.009L27.07 28.007Z"
660
+ clip-rule="evenodd"
661
+ ></path>
662
+ </svg>
663
+ <svg
664
+ class="media-logo"
665
+ xmlns="http://www.w3.org/2000/svg"
666
+ width="40"
667
+ height="40"
668
+ fill="none"
669
+ >
670
+ <path
671
+ fill-rule="evenodd"
672
+ d="M40 20c0 11.046-8.954 20-20 20S0 31.046 0 20 8.954 0 20 0s20 8.954 20 20Zm-20.068-6.124c-2.06.854-6.177 2.622-12.35 5.303-1.003.398-1.529.787-1.576 1.167-.081.642.726.895 1.825 1.24.15.047.304.095.463.147 1.082.35 2.536.76 3.292.776.686.015 1.452-.267 2.297-.846 5.769-3.881 8.746-5.843 8.933-5.885.132-.03.315-.068.439.042.124.11.111.318.098.373-.08.34-3.248 3.276-4.887 4.796-.512.473-.874.81-.948.886-.166.172-.335.335-.498.49-1.005.966-1.758 1.69.042 2.872.864.568 1.556 1.038 2.247 1.507.754.512 1.506 1.022 2.479 1.658.248.162.485.33.715.494.878.624 1.666 1.184 2.64 1.095.566-.052 1.15-.583 1.447-2.165.702-3.739 2.081-11.84 2.4-15.178a3.714 3.714 0 0 0-.036-.83c-.028-.165-.087-.4-.301-.572-.254-.206-.645-.249-.82-.246-.797.014-2.02.438-7.901 2.876Z"
673
+ clip-rule="evenodd"
674
+ ></path>
675
+ </svg>
676
+ <svg
677
+ class="media-logo"
678
+ xmlns="http://www.w3.org/2000/svg"
679
+ width="40"
680
+ height="40"
681
+ fill="none"
682
+ >
683
+ <path
684
+ fill-rule="evenodd"
685
+ d="M20 40c11.046 0 20-8.954 20-20S31.046 0 20 0 0 8.954 0 20s8.954 20 20 20ZM7 13c.211 9.994 5.278 16 14.161 16h.504v-5.718c3.264.32 5.732 2.675 6.723 5.718H33c-1.267-4.549-4.596-7.063-6.675-8.024 2.079-1.185 5.002-4.068 5.7-7.976h-4.19c-.909 3.171-3.604 6.054-6.17 6.326V13h-4.19v11.083c-2.599-.64-5.88-3.748-6.025-11.083H7Z"
686
+ clip-rule="evenodd"
687
+ ></path>
688
+ </svg>
689
+ <svg
690
+ class="media-logo"
691
+ xmlns="http://www.w3.org/2000/svg"
692
+ width="40"
693
+ height="40"
694
+ fill="none"
695
+ >
696
+ <path
697
+ fill-rule="evenodd"
698
+ d="M40 20c0 11.046-8.954 20-20 20S0 31.046 0 20 8.954 0 20 0s20 8.954 20 20ZM20.198 8.282c6.187 0 11.203 4.985 11.203 11.134a11.1 11.1 0 0 1-.302 2.577 1.678 1.678 0 0 0-1.266-.55c-.416 0-.843.136-1.27.406-.285.179-.598.496-.921.895-.3-.412-.718-.686-1.197-.761a1.788 1.788 0 0 0-.277-.022c-1.094 0-1.752.943-2 1.791-.124.287-.717 1.592-1.608 2.477-1.361 1.352-1.7 2.747-1.02 4.242a11.39 11.39 0 0 1-2.736-.006c.676-1.493.337-2.886-1.022-4.236-.892-.885-1.484-2.19-1.608-2.477-.248-.848-.906-1.79-2-1.79-.092 0-.185.006-.277.02-.48.076-.898.35-1.197.762-.323-.399-.637-.716-.92-.895-.428-.27-.856-.407-1.271-.407a1.68 1.68 0 0 0-1.223.504c-.19-.813-.29-1.66-.29-2.53 0-6.15 5.015-11.134 11.202-11.134ZM32.69 19.416c0 1.297-.2 2.548-.571 3.723l.07-.001c.496 0 .945.19 1.263.533.407.441.589.984.51 1.526-.038.259-.125.49-.255.705.274.22.476.526.574.895.076.29.154.891-.255 1.511.026.04.05.083.074.126.245.463.26.987.044 1.474-.328.739-1.145 1.321-2.731 1.946a21.05 21.05 0 0 1-1.897.64c-1.305.336-2.484.507-3.506.507-1.668 0-2.915-.452-3.715-1.344a12.648 12.648 0 0 1-4.24-.008c-.8.897-2.05 1.352-3.724 1.352-1.021 0-2.2-.171-3.505-.507-.008-.003-.91-.251-1.897-.64-1.586-.625-2.403-1.207-2.732-1.946a1.653 1.653 0 0 1 .118-1.6 1.861 1.861 0 0 1-.254-1.51c.098-.37.3-.676.573-.896a1.856 1.856 0 0 1-.254-.705c-.079-.542.102-1.085.51-1.526a1.693 1.693 0 0 1 1.39-.529 12.34 12.34 0 0 1-.573-3.726C7.707 12.559 13.299 7 20.198 7c6.9 0 12.492 5.559 12.492 12.416Zm-6.68 12.16c-.751 0-1.49-.047-2.085-.306l.004-.002a2.272 2.272 0 0 1-.872-.737c-.07-.101-.135-.2-.193-.298l-.009.002c-.64-1.117-.487-2.013.612-3.104 1.21-1.203 1.915-2.962 1.915-2.962s.24-.933.786-.933c.025 0 .05.002.077.006.6.094 1.04 1.62-.216 2.554-.835.621-.449 1.11.046 1.11.249 0 .525-.125.687-.418.483-.877 1.803-3.132 2.487-3.563.22-.139.42-.208.584-.208.344 0 .53.305.42.907-.08.441-.825 1.194-1.52 1.898-.707.713-1.363 1.376-1.223 1.61a.239.239 0 0 0 .219.123c.392 0 1.04-.67 1.04-.67s2.403-2.172 3.42-2.172c.132 0 
.24.036.318.12.592.64-.279 1.154-1.649 1.963-.177.105-.362.214-.554.33-1.674 1.007-1.784 1.317-1.546 1.698.014.023.04.033.076.033.193 0 .682-.292 1.262-.638.845-.504 1.885-1.124 2.483-1.124.233 0 .4.094.46.326.188.709-.93 1.12-1.959 1.498-.876.322-1.686.62-1.564 1.057.038.137.138.24.27.24.281 0 .735-.249 1.217-.513.546-.298 1.127-.616 1.532-.616.189 0 .339.069.43.24.542 1.023-3.735 2.144-3.77 2.153a13.92 13.92 0 0 1-3.185.396Zm-8.517-1.353c.633-1.112.477-2.005-.618-3.092-1.21-1.203-1.916-2.962-1.916-2.962s-.24-.933-.785-.933a.49.49 0 0 0-.077.006c-.6.094-1.04 1.62.215 2.554.836.621.45 1.11-.045 1.11a.781.781 0 0 1-.688-.418c-.483-.877-1.803-3.132-2.487-3.563-.22-.139-.42-.208-.583-.208-.344 0-.53.305-.42.907.08.441.824 1.194 1.52 1.898.707.713 1.363 1.376 1.223 1.61a.239.239 0 0 1-.219.123c-.392 0-1.04-.67-1.04-.67s-2.403-2.172-3.42-2.172c-.132 0-.241.036-.318.12-.592.64.278 1.154 1.648 1.963.177.105.363.214.554.33 1.675 1.007 1.785 1.317 1.547 1.698-.014.023-.04.033-.076.033-.193 0-.682-.292-1.262-.637-.846-.505-1.885-1.125-2.483-1.125-.234 0-.4.094-.461.326-.188.709.931 1.12 1.96 1.498.876.322 1.686.62 1.564 1.057-.039.137-.138.24-.271.24-.28 0-.735-.249-1.217-.513-.545-.298-1.126-.616-1.531-.616-.189 0-.34.069-.43.24-.542 1.023 3.734 2.144 3.769 2.153.703.181 1.96.396 3.185.396.761 0 1.51-.048 2.108-.317l-.005-.001c.34-.173.632-.41.852-.727a5.21 5.21 0 0 0 .2-.31l.007.002Zm5.56-10.463c.84-.414 1.367-.673 1.367.092 0 .956-.458 2.511-1.687 3.454-.62.477-1.439.798-2.495.805h-.001c-1.056-.008-1.875-.328-2.496-.805-1.23-.943-1.687-2.498-1.687-3.454 0-.765.527-.506 1.366-.092.743.366 1.731.853 2.817.858 1.086-.005 2.074-.492 2.817-.858Zm-2.519 2.688c.063-.085.11-.168.152-.243-.043.076-.09.158-.152.243Zm7.16-4.602a1.036 1.036 0 1 0 0-2.072 1.036 1.036 0 0 0 0 2.072ZM13.798 16.81a1.036 1.036 0 1 1-2.071 0 1.036 1.036 0 0 1 2.071 0Zm10.242-.451c.174.061.304.249.428.427.166.241.322.465.56.34a1.604 1.604 0 0 0 .666-2.173 1.613 1.613 0 0 0-2.178-.664 1.604 1.604 0 0 0-.666 
2.172c.11.207.356.11.614.006.203-.08.414-.165.576-.108Zm-8.015.427c.124-.178.254-.366.428-.427.161-.057.373.027.575.108.26.104.504.201.615-.006a1.604 1.604 0 0 0-.666-2.172 1.613 1.613 0 0 0-2.178.664 1.604 1.604 0 0 0 .666 2.172c.238.127.393-.098.56-.339Z"
699
+ clip-rule="evenodd"
700
+ ></path>
701
+ </svg>
702
+ </div>
703
+ </div>
704
+ </div>
705
+
706
+ <p class="copyrighted">© 2025, AIRI</p>
707
+ </div>
708
+ </section>
709
+ </footer>
710
+
711
+ <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
712
+ <script src="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.js"></script>
713
+ <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
714
+ <script src="scripts/sidebar.js"></script>
715
+ <script src="scripts/beforeAfter.js"></script>
716
+ <script src="scripts/swiper.js"></script>
717
+ <script src="scripts/swiperCharts.js"></script>
718
+ <script src="scripts/pieChart.js"></script>
719
+ <script src="scripts/barChart.js"></script>
720
+ <script src="scripts/lineChart.js"></script>
721
+ <script src="scripts/pieChartSlider.js"></script>
722
+ <script src="scripts/barChartSlider.js"></script>
723
+ <script src="scripts/lineChartSlider.js"></script>
724
+ <script src="scripts/lightbox.js"></script>
725
+ <script src="scripts/dropdown.js"></script>
726
+ <script src="scripts/codeCopy.js"></script>
727
+ </body>
728
  </html>