Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -270,276 +270,5 @@ demo = gr.Interface(
|
|
| 270 |
)
|
| 271 |
|
| 272 |
if __name__ == "__main__":
|
| 273 |
-
demo.launch()
|
| 274 |
-
# Module setup: thread limits, imports, and model configuration.
# Fix: `os` was used below without being imported anywhere in this section.
import os

# Keep OpenMP single-threaded to avoid CPU oversubscription in the Space.
os.environ["OMP_NUM_THREADS"] = "1"

import re

import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
import spaces

# Model configuration: Hub ID of the fine-tuned argument-mining model.
MODEL_ID = "oberbics/newspaper-argument-mining-V1"
|
| 284 |
-
|
| 285 |
-
# System prompt for the argument-mining model. The German passages below are
# raw OCR output and are intentionally left unedited so the model learns to
# quote noisy historical text verbatim. Fixed typos in the instructions
# ("EXTRACTOM" -> "EXTRACTION", "aticle" -> "article", "clame" -> "claim",
# "implicite" -> "implicit", "of summary" -> "or summary").
SYSTEM_PROMPT = """You are an expert at analyzing historical texts and you hate to summarize

OUTPUT FORMAT - EXACTLY these 4 XML tags and NOTHING else:
<argument>Original argument text OR "NA"</argument>
<claim>Core claim (implication) in one sentence OR "NA"</claim>
<explanation>Why this is an argument OR "NA"</explanation>
<human_verification_needed>True OR False</human_verification_needed>

EXAMPLE WITH ARGUMENT:
<argument>Es sind furchtbare Bilder, die sich dabei entrollen. Unter den Trümmern des einen Hause», so erzählt Luigt Barsint im Corrtcre della sera, findet man die Leichen von Unglück lichen, die in anderen Häusern gewohnt baben und die in der Ber- Wirrung de» schrcck.ichen Augenblickes instinktiv bet Fremden Hülfe und Unterschlupf suchten. Niemand erkennt jetzt diese armen Ein dringlinge, ihre Leichen werden nicht reklamiert, und man trägt sie hinunter an de» Strand, wo sie in langer Reihe einer neben den anderen hingebettet weiden, in denselben Tüchern und Decken, in denen sie tbren Tod gesunden.</argument>
<claim>The earthquake's chaos led to unidentified victims dying in unfamiliar places.</claim>
<explanation>Describes how people fled to other houses seeking help during the disaster, died there, and now cannot be identified or claimed by relatives. Shows cause (panic/confusion) and effect (anonymous deaths).</explanation>
<human_verification_needed>False</human_verification_needed>

EXAMPLE WITHOUT ARGUMENT:
<argument>NA</argument>
<claim>NA</claim>
<explanation>NA</explanation>
<human_verification_needed>False</human_verification_needed>

RULES:
- NO SUMMARY; ONLY ORIGINAL EXTRACTION FROM THE TEXT; don't extract anything that is not in the text. Only extract word by word
- ONLY output these 4 XML tags
- Factual reportings such as "Dem Vulkanausbruch folgten drei Sturzwellen in etwa 10 Meter Höhe" or "Almost all the inhabitants were killed; only a few thousands escaped death" are NO arguments
- Extract only original text without changes or use NA when you did not find an argument
- The claim is not a translation or summary of the argument. It should say what the (implicit) argument implies
- In cases of uncertainty or ambiguity, say human_verification_needed True
- If no argument exists, use NA for all fields except <human_verification_needed>False OR True</human_verification_needed>
- More than one argumentative unit possible for one article, one unit has one clear claim and all the xml structures"""
|
| 314 |
-
|
| 315 |
-
print("Loading tokenizer...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
# Llama-style tokenizers ship without a pad token; reuse EOS so padding works.
tokenizer.pad_token = tokenizer.eos_token

print("Loading model...")
# 4-bit NF4 quantization with double quantization keeps the model inside the
# Space's GPU memory budget; matmuls still run in bfloat16.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_use_double_quant=True
)

# device_map="auto" lets accelerate place layers on the available GPU(s).
# NOTE(review): trust_remote_code=True executes code from the Hub repo —
# acceptable only because MODEL_ID is a fixed, first-party model.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    quantization_config=bnb_config,
    device_map="auto",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True
)
print("Model loaded successfully!")
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
def parse_argumentative_units(response):
    """Extract every argumentative unit from the model's raw response.

    A unit is a consecutive quadruple of <argument>, <claim>, <explanation>
    and <human_verification_needed> tags. Returns a list of dicts (possibly
    empty); 'human_verification_needed' is parsed into a bool while the raw
    tag text is preserved under 'raw_verification'.
    """
    pattern = (
        r'<argument>(.*?)</argument>\s*'
        r'<claim>(.*?)</claim>\s*'
        r'<explanation>(.*?)</explanation>\s*'
        r'<human_verification_needed>(.*?)</human_verification_needed>'
    )

    parsed = []
    for fields in re.findall(pattern, response, re.DOTALL | re.IGNORECASE):
        arg_text, claim_text, expl_text, verif_text = (f.strip() for f in fields)
        parsed.append({
            'argument': arg_text,
            'claim': claim_text,
            'explanation': expl_text,
            # Case-insensitive truthiness check on the tag content.
            'human_verification_needed': verif_text.lower() == 'true',
            'raw_verification': verif_text,
        })
    return parsed
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
def calculate_confidence_score(unit, position):
    """Score a unit's confidence from its rank in the model output.

    The model lists its strongest argument first, so `position` supplies a
    declining baseline (0.95, 0.80, 0.65, ... floored at 0.20). Light
    penalties are applied when the model flags the unit for review or the
    extraction is suspiciously short. Result is clamped to [0.0, 1.0].
    """
    # No extracted argument means no confidence at all.
    if unit['argument'] == 'NA':
        return 0.0

    # Baseline from the model's implicit ranking.
    score = max(0.2, 0.95 - position * 0.15)

    # The model itself asked for human verification: small penalty.
    if unit['human_verification_needed']:
        score -= 0.1

    # Very short extractions are likely incomplete: larger penalty.
    if len(unit['argument']) < 30:
        score -= 0.2

    return max(0.0, min(1.0, score))
|
| 379 |
-
|
| 380 |
-
|
| 381 |
-
def filter_high_confidence_arguments(units, confidence_threshold=0.6):
    """Score argumentative units and split off the high-confidence ones.

    Bug fix: calculate_confidence_score requires the unit's position in the
    model output (its implicit ranking), but was called without it — raising
    TypeError whenever any units were parsed. The position is now supplied
    via enumerate over the original order.

    Returns (high_confidence_units, all_scored_units); both are sorted by
    descending confidence, and every unit gains a 'confidence_score' key.
    """
    scored_units = []
    for position, unit in enumerate(units):
        unit['confidence_score'] = calculate_confidence_score(unit, position)
        scored_units.append(unit)

    # Sort by confidence (highest first).
    scored_units.sort(key=lambda u: u['confidence_score'], reverse=True)

    # Keep only units at or above the caller's threshold.
    high_confidence_units = [
        u for u in scored_units if u['confidence_score'] >= confidence_threshold
    ]

    return high_confidence_units, scored_units
|
| 397 |
-
|
| 398 |
-
|
| 399 |
-
def format_filtered_output(high_confidence_units, show_scores=True, debug=False):
    """Render high-confidence argumentative units as display text.

    Each unit is emitted as a header line (optionally carrying its confidence
    score), an optional debug line with field lengths, the four XML tags, and
    a trailing blank line. Returns a fallback message when the list is empty.
    """
    if not high_confidence_units:
        return "No high-confidence arguments found."

    lines = []
    for idx, unit in enumerate(high_confidence_units, 1):
        if show_scores:
            lines.append(f"=== ARGUMENT {idx} (Confidence: {unit['confidence_score']:.3f}) ===")
        else:
            lines.append(f"=== ARGUMENT {idx} ===")

        if debug:
            # Field lengths help spot truncated generations; 'NA' counts as 0.
            sizes = {
                field: (len(unit[field]) if unit[field] != 'NA' else 0)
                for field in ('argument', 'claim', 'explanation')
            }
            lines.append(
                f"[DEBUG: arg_len={sizes['argument']}, claim_len={sizes['claim']}, "
                f"exp_len={sizes['explanation']}, verification={unit['human_verification_needed']}]"
            )

        lines.append(f"<argument>{unit['argument']}</argument>")
        lines.append(f"<claim>{unit['claim']}</claim>")
        lines.append(f"<explanation>{unit['explanation']}</explanation>")
        lines.append(f"<human_verification_needed>{unit['raw_verification']}</human_verification_needed>")
        lines.append("")

    return "\n".join(lines)
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
@spaces.GPU
def extract_arguments(text, temperature=0.1, confidence_threshold=0.6, show_all=False):
    """Run the model on `text` and return (formatted_output, status_message).

    Fix: the temperature-sanitising block used a bare `except:`, which would
    also swallow KeyboardInterrupt/SystemExit and any unrelated bug; it now
    catches only the conversion errors float() can raise.

    Args:
        text: newspaper text to analyze; blank input short-circuits.
        temperature: sampling temperature, clamped to [0.01, 0.3].
        confidence_threshold: minimum score for a unit to be shown.
        show_all: when True, show every unit annotated with its score.
    """
    if not text or not text.strip():
        return "", "Please enter some text to analyze."

    try:
        # Llama-3 chat template assembled by hand.
        prompt = f"""<|begin_of_text|><|start_header_id|>system<|end_header_id|>
{SYSTEM_PROMPT}<|eot_id|>
<|start_header_id|>user<|end_header_id|>
Extract arguments from historical text.
{text}<|eot_id|>
<|start_header_id|>assistant<|end_header_id|>"""

        # NOTE(review): max_length=5048 is an odd cap (4096 or 5120 would be
        # conventional) — kept as-is; confirm against the model's context size.
        inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=5048).to(model.device)
        input_length = inputs['input_ids'].shape[1]

        # Sanitize and clamp temperature to [0.01, 0.3]; fall back to 0.1
        # when the value cannot be converted to float.
        try:
            if temperature is None:
                temperature = 0.1
            else:
                temperature = float(temperature)

            if temperature < 0.01:
                temperature = 0.01
            elif temperature > 0.3:
                temperature = 0.3
        except (TypeError, ValueError):
            temperature = 0.1

        print(f"DEBUG: Generating with temperature {temperature}")

        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=5000,
                temperature=temperature,
                # Greedy decoding at the minimum temperature, sampling otherwise.
                do_sample=temperature > 0.01,
                top_p=0.9,
                pad_token_id=tokenizer.eos_token_id,
                repetition_penalty=1.1
            )

        # Decode only the newly generated tokens, not the prompt.
        generated_tokens = outputs[0][input_length:]
        response = tokenizer.decode(generated_tokens, skip_special_tokens=True)

        print(f"DEBUG: Raw response length: {len(response)}")
        print(f"DEBUG: Response starts with: {response[:100]}")

        # Fix XML start: drop any preamble before the first <argument> tag.
        if not response.startswith('<argument>'):
            arg_start = response.find('<argument>')
            if arg_start != -1:
                response = response[arg_start:]
                print(f"DEBUG: Fixed response start, new length: {len(response)}")

        # Parse and filter arguments.
        units = parse_argumentative_units(response)

        if not units:
            print("DEBUG: No units found, returning raw response")
            return response, f"Raw output returned (no parseable argumentative units found)"

        print(f"DEBUG: Found {len(units)} units, applying confidence filtering")

        try:
            high_confidence_units, all_units = filter_high_confidence_arguments(units, confidence_threshold)
            print(f"DEBUG: {len(high_confidence_units)} high confidence units")
        except Exception as e:
            print(f"DEBUG: Error in confidence filtering: {str(e)}")
            return response, f"Error in confidence filtering: {str(e)}"

        if show_all:
            # Show every unit, annotated with its confidence status.
            all_output = []
            for i, unit in enumerate(all_units, 1):
                status = "✓ HIGH CONFIDENCE" if unit['confidence_score'] >= confidence_threshold else "⚠ LOW CONFIDENCE"
                all_output.append(f"=== ARGUMENT {i} - {status} (Score: {unit['confidence_score']:.3f}) ===")
                all_output.append(f"<argument>{unit['argument']}</argument>")
                all_output.append(f"<claim>{unit['claim']}</claim>")
                all_output.append(f"<explanation>{unit['explanation']}</explanation>")
                all_output.append(f"<human_verification_needed>{unit['raw_verification']}</human_verification_needed>")
                all_output.append("")

            return "\n".join(all_output), f"Found {len(units)} total units, {len(high_confidence_units)} high-confidence"

        else:
            # Show only the units that passed the threshold.
            filtered_output = format_filtered_output(high_confidence_units, show_scores=True)
            return filtered_output, f"Showing {len(high_confidence_units)}/{len(units)} high-confidence arguments (threshold: {confidence_threshold})"

    except Exception as e:
        # Top-level boundary: report the failure in the UI instead of crashing.
        error_msg = f"Error during processing: {str(e)}"
        print(f"DEBUG: {error_msg}")
        return error_msg, "Processing failed - check console for details"
|
| 521 |
-
|
| 522 |
-
|
| 523 |
-
# Gradio interface: wires extract_arguments to a simple form — input text,
# generation temperature, confidence threshold, and a toggle for showing
# low-confidence units. Outputs are the filtered arguments plus a one-line
# summary. The examples are 1908 Messina-earthquake newspaper excerpts in
# the shape [text, temperature, threshold, show_all].
demo = gr.Interface(
    fn=extract_arguments,
    inputs=[
        gr.Textbox(label="Input Text", placeholder="Enter newspaper text here...", lines=10),
        gr.Slider(minimum=0.01, maximum=0.3, value=0.1, step=0.01, label="Temperature (lower = more consistent)"),
        gr.Slider(minimum=0.3, maximum=0.9, value=0.6, step=0.05, label="Confidence Threshold (higher = more selective)"),
        gr.Checkbox(label="Show All Arguments (including low confidence)", value=False)
    ],
    outputs=[
        gr.Textbox(label="Filtered Arguments", lines=12),
        gr.Textbox(label="Summary", lines=1)
    ],
    title="Newspaper Argumentative Unit Extractor with Confidence Filtering",
    description="Extract and filter argumentative units from news sources based on confidence scores",
    examples=[
        ["Reggio, January 8. Frequent shocks of earthquake were felt here dur ing the night, accompanied at times by loud subter ranean reports. A few buildings that had not been completely destroyed were further damaged. The work of reconstructing the railway is being pushed forward energetically. News has been received from Brancaleone, Catanzaro, and Palmi of earthquakes by which the inhabitants were alarmed last night", 0.1, 0.6, False],
        ["The bourses and theatres are closed. In every quarter help committees have been estab lished. A central committee has been organised at Rome for the purpose of privately and publicly collecting donations, and organising relief expedi tions to the afflicted districts. The Duke of Aosta has accepted the presidency. From all parts of the globe come telegrams of sympathy. The entire press has founded relief funds. Every Ambassador and Minister in Rome personally visited the Ministry of the Exterior yesterday morning, and expressed sympathy on behalf of their respective countries. Doctors, firemen, and municipal guards have been despatched to Messina and Calabria from many Italian towns. The Lombard Bank of Milan has already distributed 250,000 lire to sufferers from the earthquake, and the city of Milan has sent 25 firemen to Messina. A curious result of the earthquake is that the craters of Aetna, Vesuvius, and Stromboli ceased their activity immediately after the shock. It is reported from Malta that the British war- ships \"Exmouth,\" \"Euryalus,\" \"Minerva,\" and \"Sutlej\" have left for Messina. The French Government has sent two armoured ships and three destroyers to Messina. President Fallieres, Premier Clemenceau, Minister Pichon, and the Presidents of the Senate and Chamber have all sent messages of sympathy to the Italian Government. Palermo, December 30. Yesterday evening the first official telegraphic des- patches from the prefect of Messina reached here, They state that the catastrophe is beyond human description. Many thousands of people are known to have perished. It is impossible, says the prefect, to accurately relate the frightful scenes witnessed. The help already proffered and accepted is insuffi cient for the purpose. There is pressing need of extraordinary measures of help, and provisions are in great demand. At the time of wiring the fires in many parts of the ruined city have not been got under control, and are spreading in many directions. Catania, December 30. A survivor from the catastrophe at Messina who has arrived here says: \"It is impossible to describe the appalling scene. The city has been transformed into a vast heap of ruins. Almost all the inhabitants were killed; only a few thousands escaped death. There is need of doctors, tents, clothing, and pro visions for the survivors, who, deprived of all ne cessaries, are exposed to the inclemencies of the winter weather. There is need of fire engines to cope with the flames that are raging among the ruins. Messina appears as if it had been swept away by the earthquake. The railway station has collapsed. Railway carriages have been destroyed. Almost all the railway employes are dead. The streets are no longer recognisable; they look like enormous fissures in a distant and extensive heap of ruins. The Uni versity, the Post and Telegraph Office, and all the other public buildings have disappeared. The gas mains are entirely destroyed. For hours after the catastrophe the town was without any help, as the authorities, the garrison, the doctors, and apothe* * caries,—in short, all classes of the population, wjere buried under the ruins.\" Three more trains and** a steamer have left Messina with vjarKled and gitives. Bremen, In cembcr 30, Information has been recei\ o, i rom the Rprt authorities at Naples that the ! ' -house in the Straits of Messina has been destrc* - ' It seems doubtful whether the navigation of ■ < l traits will be pos sible without risk. The N r.h German Lloyd has therefore ordered all its ship commanders to avoid the Straits. All communicauon with Sicily is inter rupted. Rome, December 30. Newspaper reports from Catanzaro state that the prefect of. Reggio, who was believed to have pe rished, has arrived there and says that he managed to escape from the prefecture when the greater part of the building had fallen in. The surrounding streets and the centre of the town down to the harbour have been totally destroyed. Only the small villas clustering in the hills surrounding the town and on the Promenade of Reggio and Campi are intact. The castle, the cathedral, and the Lyceum all collapsed, and practically every student in the Ly ceum met his or her death. The prefect adds that he believes the Bishop to be dead. The barracks fell in, burying hundreds of soldiers. Reports as to the fate of the council house are contradictory. All the fugitives from Reggio describe the disaster as frightful, and estimate the number of dead as (Continued on page 3)", 0.1, 0.6, False]
    ]
)
|
| 543 |
-
|
| 544 |
-
# Launch the Gradio app only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()
|
|
|
|
| 270 |
)
|
| 271 |
|
| 272 |
# Script entry point: start the Gradio server.
if __name__ == "__main__":
    demo.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 274 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|