blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
71364d0e80490815581c647975d2b3ccd944b25c | e04c20092aa7b917360aec8c6a4ba745c43b01cb | /TheHood/wsgi.py | 26b819fb20bdb57cfc41d4b108e82b8589463057 | [
"MIT"
] | permissive | Moses-254-Mugo/The_Hood | ea45e0285568afb1d028e51f8ec9de575ecd8519 | f9075419f1d1e2f5027b26ba2644c74b6264715d | refs/heads/master | 2023-08-25T10:56:26.700867 | 2021-11-01T18:42:06 | 2021-11-01T18:42:06 | 422,486,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
WSGI config for TheHood project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Register the project's settings module before building the WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'TheHood.settings')

# WSGI servers (gunicorn, uWSGI, mod_wsgi, ...) look up this module-level callable.
application = get_wsgi_application()
| [
"moses.mugo@student.moringaschool.com"
] | moses.mugo@student.moringaschool.com |
02fa26b9e0df086043b85ec343a517613b75611b | 35fc6f805f97de291436ee52d2ec2ee0562162de | /ppExercise 11.py | e7d863451d4c24860ecc2d654fa365e87ee015da | [] | no_license | drfarre/learningpython | ae62cc7b242457683d09a7dce6ae981e7ba3ab26 | 38708e240330e3d2c369c311482820df8524b7d8 | refs/heads/master | 2020-03-10T17:49:32.177770 | 2018-04-23T19:50:40 | 2018-04-23T19:50:40 | 129,509,456 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | import math
def isitprime(n):
    """Trial-division primality check with a printed (Spanish) verdict.

    Returns a ``(bool, None)`` pair: the primality flag plus the return
    value of the ``print()`` call that announces the result.
    """
    if n in (2, 3):
        return True, print("Es primo")
    if n < 2:
        return False, print("NO Es primo 1 ")
    if n % 2 == 0:
        return False, print("NO Es primo 2")
    # Only odd divisors up to floor(sqrt(n)) need to be tried.
    divisor = 3
    while divisor * divisor <= n:
        if n % divisor == 0:
            return False, print("No es primo 3")
        divisor += 2
    return True, print("Es primo 2")
# Script entry point: read an integer from stdin and report (in Spanish)
# whether it is prime.
n = int(input("Ingresar numero a evaluar: "))
isitprime(n)
| [
"noreply@github.com"
] | drfarre.noreply@github.com |
31e8a034979e3d16f32838f9c35fd13b1d519474 | 61cb2bc04357af37ce43e665c943b2763681907c | /scripts/performC.py | cd830ccf7b1fa1c8c4ec956dfb0439bfacca89d4 | [] | no_license | XiaominWuFred/NeuroGRS | ac95662d65ffb21353e5fc56158e3fa3305c9dd4 | 09781bd1eae76384cdce2fdcfaebad7f1fbd53d4 | refs/heads/main | 2023-04-14T10:04:47.152718 | 2021-04-16T00:29:12 | 2021-04-16T00:29:12 | 334,046,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,383 | py | import csv
import numpy as np
def accSapa(row):
    """Parse a ``"<value>(<percent>%)"`` CSV cell into two floats.

    Returns the ``(value, percent)`` pair.
    """
    value_text, _, remainder = row.partition('(')
    percent_text, _, _ = remainder.partition('%')
    return float(value_text), float(percent_text)
def accAssem(num, percentage):
    """Render a float metric as ``"<num>(<pct>%)"``.

    ``num`` is rounded to 3 decimal places, ``percentage`` to 2.
    """
    return f"{round(num, 3)}({round(percentage, 2)}%)"
def accAssemInt(num, percentage):
    """Render a count metric as ``"<int(num)>(<pct>%)"``.

    ``num`` is truncated toward zero; ``percentage`` is rounded to 2 decimals.
    """
    return f"{int(num)}({round(percentage, 2)}%)"
# Post-processing script: for each of four model families, read the 10-run
# design-evaluation CSVs of nine dataset/encoding variants, average the GRS
# ("S") and TQ ("F") pruning metrics, and write two summary CSVs
# (per-dataset rows and per-model averaged rows).
# NOTE(review): the name `i` is reused as the loop index at four different
# nesting depths below; this only works because each outer `i` is fully
# consumed before the inner loops rebind it.
ccsv = []      # summary rows, one per (model, dataset) pair
avg_ccsv = []  # averaged summary rows, one per model family

for i in range(4):
    # Pick the file list (3 datasets x 3 encodings) and the number of
    # entries in the pruned-structure column for this model family.
    if i == 0:
        files = ['cnnsingle_1004_e1',
                 'cnnsingle_1004_e2',
                 'cnnsingle_1004_e3',
                 'cnnsingle_1005_e1',
                 'cnnsingle_1005_e2',
                 'cnnsingle_1005_e3',
                 'cnnsingle_1006_e1',
                 'cnnsingle_1006_e2',
                 'cnnsingle_1006_e3'
                 ]
        layers = 4
    if i == 1:
        files = ['mlpmulti_1004_e1',
                 'mlpmulti_1004_e2',
                 'mlpmulti_1004_e3',
                 'mlpmulti_1005_e1',
                 'mlpmulti_1005_e2',
                 'mlpmulti_1005_e3',
                 'mlpmulti_1006_e1',
                 'mlpmulti_1006_e2',
                 'mlpmulti_1006_e3'
                 ]
        layers = 4
    if i == 2:
        files = ['cnnmulti_1004_e1',
                 'cnnmulti_1004_e2',
                 'cnnmulti_1004_e3',
                 'cnnmulti_1005_e1',
                 'cnnmulti_1005_e2',
                 'cnnmulti_1005_e3',
                 'cnnmulti_1006_e1',
                 'cnnmulti_1006_e2',
                 'cnnmulti_1006_e3'
                 ]
        layers = 6
    if i == 3:
        files = ['mlpsingle_1004_e1',
                 'mlpsingle_1004_e2',
                 'mlpsingle_1004_e3',
                 'mlpsingle_1005_e1',
                 'mlpsingle_1005_e2',
                 'mlpsingle_1005_e3',
                 'mlpsingle_1006_e1',
                 'mlpsingle_1006_e2',
                 'mlpsingle_1006_e3'
                 ]
        layers = 2

    # Accumulators for the per-model average over all files of this family.
    avg_GRS_shape = np.zeros(layers)
    avg_GRS_val = 0
    avg_GRS_valloss = 0
    avg_GRS_test = 0
    avg_GRS_testloss = 0
    avg_GRS_flop = 0
    avg_GRS_floploss = 0
    avg_GRS_para = 0
    avg_GRS_paraloss = 0
    avg_TQ_val = 0
    avg_TQ_valloss = 0
    avg_TQ_test = 0
    avg_TQ_testloss = 0
    avg_TQ_flop = 0
    avg_TQ_floploss = 0
    avg_TQ_para = 0
    avg_TQ_paraloss = 0
    avgModel = []
    model = None
    for fileName in files:
        # Family prefix (first 9 characters of the file name).
        model = fileName[0:9]
        # Dataset tag built from the trailing characters,
        # e.g. 'mlpmulti_1004_e1' -> 'm04e1'.
        ds = ''.join(['m', fileName[len(fileName)-5], fileName[len(fileName)-4],
                      fileName[len(fileName)-2], fileName[len(fileName)-1]])

        # Accumulators for the average over this file's `runs` repetitions.
        GRS_shape = np.zeros(layers)
        GRS_val = 0
        GRS_valloss = 0
        GRS_test = 0
        GRS_testloss = 0
        GRS_flop = 0
        GRS_floploss = 0
        GRS_para = 0
        GRS_paraloss = 0
        TQ_val = 0
        TQ_valloss = 0
        TQ_test = 0
        TQ_testloss = 0
        TQ_flop = 0
        TQ_floploss = 0
        TQ_para = 0
        TQ_paraloss = 0
        oriShape = None  # NOTE(review): assigned below but never read
        dataSet = []
        runs = 10
        title = None  # NOTE(review): never used
        for i in range(runs):
            #print('comparisonEXPT/prunedM/'+fileName+'_seed0V'+str(i)+'_comparisonstats.csv')
            with open('outputs0602bw/regenPrunedM/'+fileName+'_V{'+str(i)+'}seed0_designEvaluation.csv', newline='') as csvfile:
                spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
                for i, row in enumerate(spamreader):
                    #print(row)
                    if i != 0:  # skip the header row
                        oriShape = row[9]
                        #print(row[5])
                        # Columns 1-4 hold the GRS test acc, val acc, FLOPs and
                        # params; columns 5-8 the TQ equivalents. Each cell is a
                        # "value(pct%)" string parsed by accSapa().
                        grs_val, grs_valloss = accSapa(row[2])
                        grs_test, grs_testloss = accSapa(row[1])
                        grs_flop, grs_floploss = accSapa(row[3])
                        grs_para, grs_paraloss = accSapa(row[4])
                        tq_val, tq_valloss = accSapa(row[6])
                        tq_test, tq_testloss = accSapa(row[5])
                        tq_flop, tq_floploss = accSapa(row[7])
                        tq_para, tq_paraloss = accSapa(row[8])
                        GRS_val += grs_val
                        GRS_valloss += grs_valloss
                        GRS_test += grs_test
                        GRS_testloss += grs_testloss
                        GRS_flop += grs_flop
                        GRS_floploss += grs_floploss
                        GRS_para += grs_para
                        GRS_paraloss += grs_paraloss
                        TQ_val += tq_val
                        TQ_valloss += tq_valloss
                        TQ_test += tq_test
                        TQ_testloss += tq_testloss
                        TQ_flop += tq_flop
                        TQ_floploss += tq_floploss
                        TQ_para += tq_para
                        TQ_paraloss += tq_paraloss
                        # Column 10 holds the pruned structure as
                        # 'X'-separated per-layer sizes.
                        for i in range(layers):
                            GRS_shape[i] += int(row[10].split('X')[i])
            csvfile.close()  # redundant: the with-statement already closed it
        print(fileName)
        # Average every metric over the runs.
        GRS_val = GRS_val/runs
        GRS_valloss = GRS_valloss/runs
        GRS_test = GRS_test/runs
        GRS_testloss = GRS_testloss/runs
        GRS_flop = GRS_flop/runs
        GRS_floploss = GRS_floploss/runs
        GRS_para = GRS_para/runs
        GRS_paraloss = GRS_paraloss/runs
        TQ_val = TQ_val/runs
        TQ_valloss = TQ_valloss/runs
        TQ_test = TQ_test/runs
        TQ_testloss = TQ_testloss/runs
        TQ_flop = TQ_flop/runs
        TQ_floploss = TQ_floploss/runs
        TQ_para = TQ_para/runs
        TQ_paraloss = TQ_paraloss/runs
        GRS_shape = GRS_shape / runs
        #cumulative for averaging model on 9 datasets
        avg_GRS_val += GRS_val
        avg_GRS_valloss += GRS_valloss
        avg_GRS_test += GRS_test
        avg_GRS_testloss += GRS_testloss
        avg_GRS_flop += GRS_flop
        avg_GRS_floploss += GRS_floploss
        avg_GRS_para += GRS_para
        avg_GRS_paraloss += GRS_paraloss
        avg_TQ_val += TQ_val
        avg_TQ_valloss += TQ_valloss
        avg_TQ_test += TQ_test
        avg_TQ_testloss += TQ_testloss
        avg_TQ_flop += TQ_flop
        avg_TQ_floploss += TQ_floploss
        avg_TQ_para += TQ_para
        avg_TQ_paraloss += TQ_paraloss
        avg_GRS_shape += GRS_shape
        ##############################################
        # Format this dataset's summary cells ("S" = GRS, "F" = TQ).
        testacc_s = accAssem(GRS_test, GRS_testloss)
        valacc_s = accAssem(GRS_val, GRS_valloss)
        flops_s = accAssemInt(GRS_flop, 100-GRS_floploss)
        paras_s = accAssemInt(GRS_para, 100-GRS_paraloss)
        testacc_f = accAssem(TQ_test, TQ_testloss)
        valacc_f = accAssem(TQ_val, TQ_valloss)
        flops_f = accAssemInt(TQ_flop, 100-TQ_floploss)
        paras_f = accAssemInt(TQ_para, 100-TQ_paraloss)
        tmp = []
        # NOTE(review): range(len-1) drops the last shape entry -- confirm intended.
        for i in range(len(GRS_shape)-1):
            tmp.append(str(GRS_shape[i]))
        grsStr = 'X'.join(tmp)
        print('GRSshape: '+grsStr)
        print('')
        dataSet.append(model)
        dataSet.append(ds)
        dataSet.append(testacc_s)
        dataSet.append(valacc_s)
        dataSet.append(flops_s)
        dataSet.append(paras_s)
        dataSet.append(grsStr)
        dataSet.append(testacc_f)
        dataSet.append(valacc_f)
        dataSet.append(flops_f)
        dataSet.append(paras_f)
        ccsv.append(dataSet)
    #finished 9 files for one model
    numOfFiles = len(files)
    print("numOfFiles: "+str(numOfFiles))
    # Average the cumulative sums over the number of files in this family.
    avg_GRS_val = avg_GRS_val / numOfFiles
    avg_GRS_valloss = avg_GRS_valloss / numOfFiles
    avg_GRS_test = avg_GRS_test / numOfFiles
    avg_GRS_testloss = avg_GRS_testloss / numOfFiles
    avg_GRS_flop = avg_GRS_flop / numOfFiles
    avg_GRS_floploss = avg_GRS_floploss / numOfFiles
    avg_GRS_para = avg_GRS_para / numOfFiles
    avg_GRS_paraloss = avg_GRS_paraloss / numOfFiles
    avg_TQ_val = avg_TQ_val / numOfFiles
    avg_TQ_valloss = avg_TQ_valloss / numOfFiles
    avg_TQ_test = avg_TQ_test / numOfFiles
    avg_TQ_testloss = avg_TQ_testloss / numOfFiles
    avg_TQ_flop = avg_TQ_flop / numOfFiles
    avg_TQ_floploss = avg_TQ_floploss / numOfFiles
    avg_TQ_para = avg_TQ_para / numOfFiles
    avg_TQ_paraloss = avg_TQ_paraloss / numOfFiles
    avg_GRS_shape = avg_GRS_shape / numOfFiles

    # Format the per-model averaged cells.
    avg_testacc_s = accAssem(avg_GRS_test, avg_GRS_testloss)
    avg_valacc_s = accAssem(avg_GRS_val, avg_GRS_valloss)
    avg_flops_s = accAssemInt(avg_GRS_flop, 100 - avg_GRS_floploss)
    avg_paras_s = accAssemInt(avg_GRS_para, 100 - avg_GRS_paraloss)
    avg_testacc_f = accAssem(avg_TQ_test, avg_TQ_testloss)
    avg_valacc_f = accAssem(avg_TQ_val, avg_TQ_valloss)
    avg_flops_f = accAssemInt(avg_TQ_flop, 100 - avg_TQ_floploss)
    avg_paras_f = accAssemInt(avg_TQ_para, 100 - avg_TQ_paraloss)
    tmp = []
    # Round the averaged shape to one decimal, then join all but the last entry.
    for i, each in enumerate(avg_GRS_shape):
        avg_GRS_shape[i] = round(each, 1)
    for i in range(len(avg_GRS_shape) - 1):
        tmp.append(str(avg_GRS_shape[i]))
    avg_grsStr = 'X'.join(tmp)
    print('GRSshape: ' + avg_grsStr)
    avgModel.append(model)
    avgModel.append(avg_testacc_s)
    avgModel.append(avg_valacc_s)
    avgModel.append(avg_flops_s)
    avgModel.append(avg_paras_s)
    avgModel.append(avg_grsStr)
    avgModel.append(avg_testacc_f)
    avgModel.append(avg_valacc_f)
    avgModel.append(avg_flops_f)
    avgModel.append(avg_paras_f)
    avg_ccsv.append(avgModel)

print('done')

# Write the per-(model, dataset) summary.
with open('performCbwRegen.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=',')
    spamwriter.writerow(['model', 'Dataset', 'TestAcc_S(lost%)', 'ValAcc_S(lost%)', 'FLOPs_S(% of initial)', 'Paras_S(% of initial)', 'Structure', 'TestAcc_F(lost%)', 'ValAcc_F(lost%)', 'FLOPs_F(% of initial)', 'Paras_F(% of initial)'])
    for i in range(len(ccsv)):
        spamwriter.writerow(ccsv[i])
csvfile.close()  # redundant: the with-statement already closed it

# Write the per-model averaged summary.
with open('performAvgModelRegen.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=',')
    spamwriter.writerow(['model', 'TestAcc_S(lost%)', 'ValAcc_S(lost%)', 'FLOPs_S(% of initial)', 'Paras_S(% of initial)', 'Structure', 'TestAcc_F(lost%)', 'ValAcc_F(lost%)', 'FLOPs_F(% of initial)', 'Paras_F(% of initial)'])
    for i in range(len(avg_ccsv)):
        spamwriter.writerow(avg_ccsv[i])
csvfile.close()  # redundant: the with-statement already closed it
| [
"xiaomwu@umiacs.umd.edu"
] | xiaomwu@umiacs.umd.edu |
37b2e3d95e07aa0990e27d3c02ae008f0db3dda5 | 7470d991d82b4e577df843c25efd02bf90d1cb38 | /butter/splice.py | c5cded3deb06da1e2a3f64f364851d358c6d4502 | [
"BSD-3-Clause"
] | permissive | RIAPS/butter | eccee0f2423b938eaec24fbb92e022dc8a245206 | 17f678043d2ac32e2d98d1a83b55e7e987596729 | refs/heads/master | 2023-05-29T12:21:32.198480 | 2021-06-10T20:25:48 | 2021-06-10T20:25:48 | 278,401,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,773 | py | #!/usr/bin/env python
"""splice: wrapper around the splice() syscall"""
from .utils import UnknownError
import errno as _errno
from ._splice_c import ffi as _ffi
from ._splice_c import lib as _lib
NULL_TERMINATOR = 1 # length of \0 in bytes
def splice(fd_in, fd_out, in_offset=0, out_offset=0, len=0, flags=0):
    """Take data from fd_in and pass it to fd_out without going through userspace

    Arguments
    ----------
    :param file fd_in: File object or fd to splice from
    :param file fd_out: File object or fd to splice to
    :param int in_offset: Offset inside fd_in to read from
    :param int out_offset: Offset inside fd_out to write to
    :param int len: Amount of data to transfer
    :param int flags: Flags to specify extra options

    Flags
    ------
    SPLICE_F_MOVE: This is a noop in modern kernels and is left here for compatibility
    SPLICE_F_NONBLOCK: Make splice operations Non blocking (as long as the fd's are non blocking)
    SPLICE_F_MORE: After splice() more data will be sent, this is a hint to add TCP_CORK like buffering
    SPLICE_F_GIFT: unused for splice() (vmsplice compatibility)

    Returns
    --------
    :return: Number of bytes written
    :rtype: int

    Exceptions
    -----------
    :raises ValueError: One of the file descriptors is unseekable
    :raises ValueError: Neither descriptor refers to a pipe
    :raises ValueError: Target filesystem does not support splicing
    :raises OSError: supplied fd does not refer to a file
    :raises OSError: Incorrect mode for file
    :raises MemoryError: Insufficient kernel memory
    :raises OSError: No writers waiting on fd_in
    :raises OSError: one or both fd's are in blocking mode and SPLICE_F_NONBLOCK specified
    """
    # Accept file-like objects as well as raw integer descriptors.
    if hasattr(fd_in, 'fileno'):
        fd_in = fd_in.fileno()
    if hasattr(fd_out, 'fileno'):
        fd_out = fd_out.fileno()

    assert isinstance(fd_in, int), 'fd_in must be an integer'
    # BUGFIX: this failure message previously (incorrectly) named fd_in.
    assert isinstance(fd_out, int), 'fd_out must be an integer'
    assert isinstance(in_offset, int), 'in_offset must be an integer'
    assert isinstance(out_offset, int), 'out_offset must be an integer'
    # note: the `len` parameter shadows the builtin len() inside this function
    assert isinstance(len, int), 'len must be an integer'
    assert isinstance(flags, int), 'flags must be an integer'

    # An offset of 0 becomes a NULL pointer, i.e. "use the fd's own offset".
    in_offset = _ffi.cast("long long *", in_offset)
    out_offset = _ffi.cast("long long *", out_offset)

    size = _lib.splice(fd_in, in_offset, fd_out, out_offset, len, flags)

    if size < 0:
        # Map the C errno to a meaningful Python exception.
        err = _ffi.errno
        if err == _errno.EINVAL:
            # A NULL (zero) offset is falsy on the cffi pointer, which lets us
            # distinguish "offset supplied for an unseekable fd" from the rest.
            if in_offset or out_offset:
                raise ValueError("fds may not be seekable")
            else:
                raise ValueError("Target filesystem does not support slicing or file may be in append mode")
        elif err == _errno.EBADF:
            raise ValueError("fds are invalid or incorrect mode for file")
        elif err == _errno.EPIPE:
            raise ValueError("offset specified but one of the fds is a pipe")
        elif err == _errno.ENOMEM:
            raise MemoryError("Insufficent kernel memory available")
        elif err == _errno.EAGAIN:
            raise OSError("No writers on fd_in or a fd is open in BLOCKING mode and NON_BLOCK specified to splice()")
        else:
            # If you are here, its a bug. send us the traceback
            raise UnknownError(err)

    return size
def tee(fd_in, fd_out, len=0, flags=0):
    """Splice data like the :py:func:`.splice` but also leave a copy of the data in the original fd's buffers

    Arguments
    ----------
    :param file fd_in: File object or fd to splice from
    :param file fd_out: File object or fd to splice to
    :param int len: Amount of data to transfer
    :param int flags: Flags to specify extra options

    Flags
    ------
    SPLICE_F_MOVE: This is a noop in modern kernels and is left here for compatibility
    SPLICE_F_NONBLOCK: Make tee operations Non blocking (as long as the fd's are non blocking)
    SPLICE_F_MORE: unused for tee()
    SPLICE_F_GIFT: unused for tee() (:py:func:`.vmsplice` compatibility)

    Returns
    --------
    :return: Number of bytes written
    :rtype: int

    Exceptions
    -----------
    :raises ValueError: One of the file descriptors is not a pipe
    :raises ValueError: Both file descriptors refer to the same pipe
    :raises MemoryError: Insufficient kernel memory
    """
    # Accept file-like objects as well as raw integer descriptors.
    if hasattr(fd_in, 'fileno'):
        fd_in = fd_in.fileno()
    if hasattr(fd_out, 'fileno'):
        fd_out = fd_out.fileno()

    assert isinstance(fd_in, int), 'fd_in must be an integer'
    # BUGFIX: this failure message previously (incorrectly) named fd_in.
    assert isinstance(fd_out, int), 'fd_out must be an integer'
    # note: the `len` parameter shadows the builtin len() inside this function
    assert isinstance(len, int), 'len must be an integer'
    assert isinstance(flags, int), 'flags must be an integer'

    size = _lib.tee(fd_in, fd_out, len, flags)

    if size < 0:
        # Map the C errno to a meaningful Python exception.
        err = _ffi.errno
        if err == _errno.EINVAL:
            raise ValueError("fd_in or fd_out are not a pipe or refer to the same pipe")
        elif err == _errno.ENOMEM:
            raise MemoryError("Insufficent kernel memory available")
        else:
            # If you are here, its a bug. send us the traceback
            raise UnknownError(err)

    return size
def vmsplice(fd, vec, flags=0):
    """Unsupported: vmsplice() cannot be implemented safely from Python.

    The garbage collector may move or reclaim the user pages while the
    kernel still references them.  Use os.writev() instead, which has a
    near identical API and effect.

    :raises NotImplementedError: always; the operation has been removed
    """
    message = "Removed due to safety concerns"
    raise NotImplementedError(message)
# Re-export the C-level flag constants and the iovec limit so callers can use
# them without touching the generated cffi module directly.
SPLICE_F_MOVE = _lib.SPLICE_F_MOVE
SPLICE_F_NONBLOCK = _lib.SPLICE_F_NONBLOCK
SPLICE_F_MORE = _lib.SPLICE_F_MORE
SPLICE_F_GIFT = _lib.SPLICE_F_GIFT

IOV_MAX = _lib.IOV_MAX
| [
"mary.a.metelko@vanderbilt.edu"
] | mary.a.metelko@vanderbilt.edu |
865cb0d36984b1318d80985738529bf318f15a09 | b7e2cca563ef8b1a7f29070bb1c7a8eb3f11d3a9 | /ReinforcementLearning/tic_tac_toe.py | acb97d8cd00d5b560bc25b8cafe05558472c9790 | [] | no_license | ahndroo/Udemy | a0aa9cbfcba710c8b729fba32cc72ae8cc2c83f6 | 0f5678eae17cbb5ed2496f847e1c397108d19d46 | refs/heads/master | 2021-01-19T17:14:38.790027 | 2018-04-13T21:12:51 | 2018-04-13T21:12:51 | 86,938,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,041 | py | import numpy as np
import matplotlib.pyplot as plt
LENGTH = 3; # length of game board
class Agent:
    """Epsilon-greedy tabular value-function player.

    V maps a hashed board state to an estimated value; after each episode
    update() runs a TD(0)-style backup over the states visited in the game.
    """

    def __init__(self, eps=0.1, alpha=0.5):
        self.eps = eps  # probability of taking a random (exploratory) action
        self.alpha = alpha  # learning rate for the value backups
        self.verbose = False
        self.state_history = []  # state hashes visited during the current episode

    def setV(self, V):
        # V: array of state values indexed by the hash from Environment.get_state()
        self.V = V

    def set_symbol(self, symbol):
        # symbol is env.x or env.o; written onto the board when acting
        self.sym = symbol;

    def set_verbose(self, v):
        # if true will print values for each position on the board
        self.verbose = v

    def reset_history(self):
        # forget the episode's states (called after each value update)
        self.state_history = []

    def take_action(self, env):
        # choose action based on epsilon greedy
        r = np.random.rand()
        best_state = None
        if r < self.eps:
            # take a random action among the currently empty cells
            if self.verbose:
                print "taking random action"

            possible_moves = []
            for i in xrange(LENGTH):
                for j in xrange(LENGTH):
                    if env.is_empty(i ,j):
                        possible_moves.append((i, j))
            idx = np.random.choice(len(possible_moves))
            next_move = possible_moves[idx]
        else:
            # greedy: simulate each legal move and keep the one whose
            # resulting state has the highest value
            pos2value = {}  # value of each candidate move, for the verbose printout
            next_move = None
            best_value = -1
            for i in xrange(LENGTH):
                for j in xrange(LENGTH):
                    if env.is_empty(i,j):
                        # what is state if made this move?
                        env.board[i,j] = self.sym
                        state = env.get_state()
                        env.board[i,j] = 0 # change it back!
                        pos2value[(i,j)] = self.V[state]
                        if self.V[state] > best_value:
                            best_value = self.V[state]
                            best_state = state
                            next_move = (i,j)

            if self.verbose:
                print("Taking greedy action")
                # draw the board, showing the value of each empty position
                for i in xrange(LENGTH):
                    print "------------------"
                    for j in xrange(LENGTH):
                        if env.is_empty(i,j):
                            print "%.2f|" % pos2value[(i, j)],
                        else:
                            print " ",
                            if env.board[i,j] == env.x:
                                print "x |",
                            elif env.board[i,j] == env.o:
                                print "o |",
                            else:
                                print " |",
                    print ""
                print "------------------"

        # make the chosen move
        env.board[next_move[0], next_move[1]] = self.sym

    def update_state_history(self, s):
        # needs to be updated every iteration--cannot be put in take_action since it
        # only happens once every other iteration for each player
        self.state_history.append(s)

    def update(self, env):
        # want to backtrack over the states so that:
        # V(prev_state) = V(prev_state) + alpha*(V(next_state) - V(prev_state))
        # where V(next_state) = reward if it's the most current state
        # only do this at end of episode
        reward = env.reward(self.sym)
        target = reward
        for prev in reversed(self.state_history):
            value = self.V[prev] + self.alpha*(target - self.V[prev])
            self.V[prev] = value
            target = value
        self.reset_history()
class Environment:
def __init__(self):
self.board = np.zeros((LENGTH, LENGTH))
self.x = -1; # represents x on board, p1
self.o = 1; # represents o on board, p2
self.winner = None;
self.ended = False;
self.num_states = 3**(LENGTH*LENGTH);
def is_empty(self, i, j):
return self.board[i,j]==0; # true if pos on board is 0, false otherwise
def reward(self, sym):
# no reward until game is over
if not self.game_over():
return 0
# sym will be self.x or self.o -- agents needs to know its own symbol
return 1 if self.winner == sym else 0;
def get_state(self):
# returns current state, represented as an int from 0....|S|-1, where
# S=set of all possible states. |S| 3^(BOARD_SIZE), since each cell can
# have 3 possible values- empty(0), x(1), o(2).
k = 0;
h = 0;
for i in xrange(LENGTH):
for j in xrange(LENGTH):
if self.board[i,j] == 0:
v=0;
elif self.board[i,j] == self.x:
v = 1;
elif self.board[i,j] == self.o:
v = 2;
h += (3**k) * v;
k += 1;
return h;
def game_over(self, force_recalculate=False):
if not force_recalculate and self.ended:
return self.ended
# check rows
for i in xrange(LENGTH):
for player in (self.x, self.o):
if self.board[i].sum() == player*LENGTH:
self.winner = player
self.ended = True;
return True
# check cols
for j in xrange(LENGTH):
for player in (self.x, self.o):
if self.board[j].sum() == player*LENGTH:
self.winner = player
self.ended = True
return True
# check diags
for player in (self.x, self.o):
if self.board.trace() == player*LENGTH:
self.winner = player;
self.ended = True;
return True
# top-right -> bottom-left diag
if np.fliplr(self.board).trace() == player*LENGTH:
self.winner = player
self.ended = True
return True
# check if draw
if np.all((self.board==0) == False):
self.winner = None
self.ended = True
return True
self.winner = None
return False
def is_draw(self):
return self.ended and self.winner is None
def draw_board(self):
for i in xrange(LENGTH):
print "-------------"
for j in xrange(LENGTH):
print " ",
if self.board[i, j] == self.x:
print "x",
elif self.board[i,j] == self.o:
print "o",
else:
print " ",
print ""
print "-------------"
class Human:
    """Interactive player that reads moves from stdin."""

    def __init__(self):
        pass

    def set_symbol(self, sym):
        # sym is env.x or env.o
        self.sym = sym

    def take_action(self, env):
        # keep prompting until the user enters the coordinates of an empty cell
        while True:
            # break once a legal move has been entered and applied
            move = raw_input("Enter coordinates i,j for next move (i,j=0...2): ")
            i, j = move.split(',')
            i = int(i)
            j = int(j)
            if env.is_empty(i, j):
                env.board[i, j] = self.sym
                break

    def update(self, env):
        # humans do not learn a value function
        pass

    def update_state_history(self, s):
        # humans keep no state history
        pass
def play_game(p1, p2, env, draw=False):
    """Play one full episode between p1 and p2 on env; p1 always moves first.

    draw=1 shows the board before each of p1's turns, draw=2 before p2's.
    After the game ends, both players run their value-function update.
    """
    current_player = None
    while not env.game_over():
        # alternate turns; p1 opens the game
        current_player = p2 if current_player == p1 else p1

        # show the board to whichever player asked to see it before moving
        if draw:
            if (draw == 1 and current_player == p1) or (draw == 2 and current_player == p2):
                env.draw_board()

        current_player.take_action(env)

        # both players record the state reached after this move
        state = env.get_state()
        p1.update_state_history(state)
        p2.update_state_history(state)

    if draw:
        env.draw_board()

    # end-of-episode value updates
    p1.update(env)
    p2.update(env)
def get_state_hash_and_winner(env, i=0, j=0):
    """Enumerate every possible board filling from cell (i, j) onward.

    Recursively assigns each cell the values (empty, x, o); at each fully
    filled board it records a (state_hash, winner, ended) triple.  Returns
    the list of all triples in depth-first order.
    """
    last_col = (j == 2)
    last_cell = last_col and (i == 2)
    results = []
    for cell_value in (0, env.x, env.o):
        env.board[i, j] = cell_value  # empty board cells hold 0
        if last_cell:
            # board is fully assigned: hash it and record the outcome
            state = env.get_state()
            ended = env.game_over(force_recalculate=True)
            results.append((state, env.winner, ended))
        elif last_col:
            # wrap to the first column of the next row
            results.extend(get_state_hash_and_winner(env, i + 1, 0))
        else:
            results.extend(get_state_hash_and_winner(env, i, j + 1))
    return results
def initialV_x(env, state_winner_triples):
    """Build the initial value function for the x player.

    V(s) = 1 for states where x has won, 0 for losses and draws,
    and 0.5 for every state where the game is still in progress.
    """
    V = np.zeros(env.num_states)
    for state, winner, ended in state_winner_triples:
        if not ended:
            V[state] = 0.5
        elif winner == env.x:
            V[state] = 1
        # losses and draws keep the default value of 0
    return V
def initialV_o(env, state_winner_triples):
    """Build the initial value function for the o player (mirror of initialV_x).

    V(s) = 1 for states where o has won, 0 for losses and draws,
    and 0.5 for states where the game is still in progress.
    """
    V = np.zeros(env.num_states)
    for state, winner, ended in state_winner_triples:
        if not ended:
            V[state] = 0.5
        elif winner == env.o:
            V[state] = 1
        # losses and draws keep the default value of 0
    return V
if __name__ == "__main__":
    # train agent: two epsilon-greedy agents play against each other
    p1 = Agent()
    p2 = Agent()

    # set initial V for p1 and p2 by enumerating every possible board
    env = Environment()
    state_winner_triples = get_state_hash_and_winner(env)

    Vx = initialV_x(env, state_winner_triples)
    p1.setV(Vx)
    Vo = initialV_o(env, state_winner_triples)
    p2.setV(Vo)

    # give each player its symbol (p1 plays x, p2 plays o)
    p1.set_symbol(env.x)
    p2.set_symbol(env.o)

    # self-play training loop; progress is printed every 200 episodes
    T = 10000
    for t in xrange(T):
        if t % 200 == 0:
            print t
        play_game(p1, p2, Environment())

    # then play the trained x-agent against a human playing o,
    # drawing the board before each human move
    human = Human()
    human.set_symbol(env.o)
    while True:
        p1.set_verbose(True)
        play_game(p1, human, Environment(), draw=2)
        answer = raw_input("Play again? [Y/n]:")
        if answer and answer.lower()[0] == 'n':
            break
| [
"andrew.kirk@outlook.com"
] | andrew.kirk@outlook.com |
1f0e61c90c771060405321abaca52698270d1e36 | 0fa3a670019574ad2d1577b035070058147193a1 | /examples/compare_to_others.py | 5d29a5fcea0ba08bf91a9cccdc1576a67e5ee4ff | [
"MIT"
] | permissive | rubens1287/pairwise | f41cb86b61f1bcd33cf8f4713630bf32ec55ef20 | 1ce95932bb1496eb4fb04988c7d464bc885199fa | refs/heads/master | 2023-04-04T23:52:39.643283 | 2021-04-19T00:32:25 | 2021-04-19T00:32:25 | 359,283,730 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | import metacomm.combinatorics.all_pairs2
all_pairs = metacomm.combinatorics.all_pairs2.all_pairs2
"""
Provided to make it easier to compare efficiency with other tools
as per http://pairwise.org/tools.asp
Current output is:
3^4: produces 9 rows
3^13: produces 17 rows
4^15 * 3^17 * 2^29: produces 37 rows
4^1 * 3^39 * 2^35: produces 27 rows
3^100: produces 29 rows
10^20: produces 219 rows
10^10: produces 172 rows
"""
def get_arrays( dimensions ):
    """Expand ``(size, count)`` dimension specs into a flat list of ranges.

    Each ``(size, count)`` pair contributes ``count`` copies of
    ``range(size)`` to the result.
    """
    opts = []
    for size, count in dimensions:
        opts.extend(range(size) for _ in range(count))
    return opts
def print_result( dimensions ):
    """Run the pairwise generator on *dimensions* and report the row count.

    The header renders each ``(size, count)`` pair as ``size^count``,
    matching the notation used on pairwise.org.
    """
    header = " * ".join("%i^%i" % d for d in dimensions)
    n = len(list(all_pairs(get_arrays(dimensions))))
    print ("%s: produces %i rows" % (header, n))
# Benchmark cases listed at http://pairwise.org/tools.asp, for comparing
# row counts against other pairwise-testing tools.
print_result(( (3, 4), ))
print_result(( (3, 13), ))
print_result(( (4, 15), (3, 17), (2, 29) ))
print_result(( (4, 1), (3, 39), (2, 35) ))
print_result(( (3, 100), ))
print_result(( (10, 20), ))
print_result(( (10, 10), ))
| [
"rslo@gft.com"
] | rslo@gft.com |
1eef8e93e8de7612f888f85a4c6212bf86dade5c | 28aa8a2d8d0f4c2e105d567500c626e78997e4be | /site-packages/shinken/objects/service.py | 714093f417ca7feefd04603f2c192dfc2019199e | [] | no_license | kastesh/shinken-2.4.2-replace | c4a7bb4eb182a0f59b70e9eadd5eb259547b8a18 | 7721de0897bfee7b926ab5212e299c237779414c | refs/heads/master | 2021-01-10T08:50:50.355931 | 2016-03-02T11:19:39 | 2016-03-02T11:19:39 | 52,957,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 80,253 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
""" This Class is the service one, s it manage all service specific thing.
If you look at the scheduling part, look at the scheduling item class"""
import time
import re
import itertools
import uuid
try:
from ClusterShell.NodeSet import NodeSet, NodeSetParseRangeError
except ImportError:
NodeSet = None
from shinken.objects.item import Items
from shinken.objects.schedulingitem import SchedulingItem
from shinken.autoslots import AutoSlots
from shinken.util import strip_and_uniq, format_t_into_dhms_format, to_svc_hst_distinct_lists, \
get_key_value_sequence, GET_KEY_VALUE_SEQUENCE_ERROR_SYNTAX, GET_KEY_VALUE_SEQUENCE_ERROR_NODEFAULT, \
GET_KEY_VALUE_SEQUENCE_ERROR_NODE, to_list_string_of_names, to_list_of_names, to_name_if_possible, \
is_complex_expr
from shinken.property import BoolProp, IntegerProp, FloatProp,\
CharProp, StringProp, ListProp, DictProp
from shinken.macroresolver import MacroResolver
from shinken.eventhandler import EventHandler
from shinken.log import logger, naglog_result
from shinken.util import filter_service_by_regex_name
from shinken.util import filter_service_by_host_name
class Service(SchedulingItem):
    """A monitored service attached to a host.

    Holds the configuration and runtime state of one service; the
    scheduling behaviour itself lives in SchedulingItem.
    """
    # AutoSlots create the __slots__ with properties and
    # running_properties names
    __metaclass__ = AutoSlots
    # Every service have a unique ID, and 0 is always special in
    # database and co...
    id = 1
    # The host and service do not have the same 0 value, now yes :)
    # 'OK' is the "everything fine" state label for a service
    # (hosts use 'UP' instead).
    ok_up = 'OK'
    # used by item class for format specific value like for Broks
    my_type = 'service'
# properties defined by configuration
# required: is required in conf
# default: default value if no set in conf
# pythonize: function to call when transforming string to python object
# fill_brok: if set, send to broker. there are two categories:
# full_status for initial and update status, check_result for check results
# no_slots: do not take this property for __slots__
properties = SchedulingItem.properties.copy()
properties.update({
'host_name':
StringProp(fill_brok=['full_status', 'check_result', 'next_schedule']),
'hostgroup_name':
StringProp(default='', fill_brok=['full_status'], merging='join'),
'service_description':
StringProp(fill_brok=['full_status', 'check_result', 'next_schedule']),
'display_name':
StringProp(default='', fill_brok=['full_status']),
'servicegroups':
ListProp(default=[], fill_brok=['full_status'],
brok_transformation=to_list_string_of_names, merging='join'),
'is_volatile':
BoolProp(default=False, fill_brok=['full_status']),
'check_command':
StringProp(fill_brok=['full_status']),
'initial_state':
CharProp(default='', fill_brok=['full_status']),
'initial_output':
StringProp(default='', fill_brok=['full_status']),
'max_check_attempts':
IntegerProp(default=1, fill_brok=['full_status']),
'check_interval':
IntegerProp(fill_brok=['full_status', 'check_result']),
'retry_interval':
IntegerProp(fill_brok=['full_status', 'check_result']),
'active_checks_enabled':
BoolProp(default=True, fill_brok=['full_status'], retention=True),
'passive_checks_enabled':
BoolProp(default=True, fill_brok=['full_status'], retention=True),
'check_period':
StringProp(brok_transformation=to_name_if_possible, fill_brok=['full_status']),
'obsess_over_service':
BoolProp(default=False, fill_brok=['full_status'], retention=True),
'check_freshness':
BoolProp(default=False, fill_brok=['full_status']),
'freshness_threshold':
IntegerProp(default=0, fill_brok=['full_status']),
'event_handler':
StringProp(default='', fill_brok=['full_status']),
'event_handler_enabled':
BoolProp(default=False, fill_brok=['full_status'], retention=True),
'low_flap_threshold':
IntegerProp(default=-1, fill_brok=['full_status']),
'high_flap_threshold':
IntegerProp(default=-1, fill_brok=['full_status']),
'flap_detection_enabled':
BoolProp(default=True, fill_brok=['full_status'], retention=True),
'flap_detection_options':
ListProp(default=['o', 'w', 'c', 'u'], fill_brok=['full_status'], split_on_coma=True),
'process_perf_data':
BoolProp(default=True, fill_brok=['full_status'], retention=True),
'retain_status_information':
BoolProp(default=True, fill_brok=['full_status']),
'retain_nonstatus_information':
BoolProp(default=True, fill_brok=['full_status']),
'notification_interval':
IntegerProp(default=60, fill_brok=['full_status']),
'first_notification_delay':
IntegerProp(default=0, fill_brok=['full_status']),
'notification_period':
StringProp(brok_transformation=to_name_if_possible, fill_brok=['full_status']),
'notification_options':
ListProp(default=['w', 'u', 'c', 'r', 'f', 's'],
fill_brok=['full_status'], split_on_coma=True),
'notifications_enabled':
BoolProp(default=True, fill_brok=['full_status'], retention=True),
'contacts':
ListProp(default=[], brok_transformation=to_list_of_names,
fill_brok=['full_status'], merging='join'),
'contact_groups':
ListProp(default=[], fill_brok=['full_status'], merging='join'),
'stalking_options':
ListProp(default=[''], fill_brok=['full_status'], merging='join'),
'notes':
StringProp(default='', fill_brok=['full_status']),
'notes_url':
StringProp(default='', fill_brok=['full_status']),
'action_url':
StringProp(default='', fill_brok=['full_status']),
'icon_image':
StringProp(default='', fill_brok=['full_status']),
'icon_image_alt':
StringProp(default='', fill_brok=['full_status']),
'icon_set':
StringProp(default='', fill_brok=['full_status']),
'failure_prediction_enabled':
BoolProp(default=False, fill_brok=['full_status']),
'parallelize_check':
BoolProp(default=True, fill_brok=['full_status']),
# Shinken specific
'poller_tag':
StringProp(default='None'),
'reactionner_tag':
StringProp(default='None'),
'resultmodulations':
ListProp(default=[], merging='join'),
'business_impact_modulations':
ListProp(default=[], merging='join'),
'escalations':
ListProp(default=[], fill_brok=['full_status'], merging='join', split_on_coma=True),
'maintenance_period':
StringProp(default='',
brok_transformation=to_name_if_possible, fill_brok=['full_status']),
'time_to_orphanage':
IntegerProp(default=300, fill_brok=['full_status']),
'merge_host_contacts':
BoolProp(default=False, fill_brok=['full_status']),
'labels':
ListProp(default=[], fill_brok=['full_status'], merging='join'),
'host_dependency_enabled':
BoolProp(default=True, fill_brok=['full_status']),
# BUSINESS CORRELATOR PART
# Business rules output format template
'business_rule_output_template':
StringProp(default='', fill_brok=['full_status']),
# Business rules notifications mode
'business_rule_smart_notifications':
BoolProp(default=False, fill_brok=['full_status']),
# Treat downtimes as acknowledgements in smart notifications
'business_rule_downtime_as_ack':
BoolProp(default=False, fill_brok=['full_status']),
# Enforces child nodes notification options
'business_rule_host_notification_options':
ListProp(default=[], fill_brok=['full_status'], split_on_coma=True),
'business_rule_service_notification_options':
ListProp(default=[], fill_brok=['full_status'], split_on_coma=True),
# Easy Service dep definition
'service_dependencies': # TODO: find a way to brok it?
ListProp(default=None, merging='join', split_on_coma=True),
# service generator
'duplicate_foreach':
StringProp(default=''),
'default_value':
StringProp(default=''),
# Business_Impact value
'business_impact':
IntegerProp(default=2, fill_brok=['full_status']),
# Load some triggers
'trigger':
StringProp(default=''),
'trigger_name':
StringProp(default=''),
'trigger_broker_raise_enabled':
BoolProp(default=False),
# Trending
'trending_policies':
ListProp(default=[], fill_brok=['full_status'], merging='join'),
# Our check ways. By defualt void, but will filled by an inner if need
'checkmodulations':
ListProp(default=[], fill_brok=['full_status'], merging='join'),
'macromodulations':
ListProp(default=[], merging='join'),
# Custom views
'custom_views':
ListProp(default=[], fill_brok=['full_status'], merging='join'),
# UI aggregation
'aggregation':
StringProp(default='', fill_brok=['full_status']),
# Snapshot part
'snapshot_enabled':
BoolProp(default=False),
'snapshot_command':
StringProp(default=''),
'snapshot_period':
StringProp(default=''),
'snapshot_criteria':
ListProp(default=['w', 'c', 'u'], fill_brok=['full_status'], merging='join'),
'snapshot_interval':
IntegerProp(default=5),
})
    # properties used in the running state
    # NOTE: these are runtime-only fields (state, timestamps, in-flight
    # checks/notifications, problem/impact links), not configuration.
    running_properties = SchedulingItem.running_properties.copy()
    running_properties.update({
        'modified_attributes':
            IntegerProp(default=0L, fill_brok=['full_status'], retention=True),
        'last_chk':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'next_chk':
            IntegerProp(default=0, fill_brok=['full_status', 'next_schedule'], retention=True),
        'in_checking':
            BoolProp(default=False,
                     fill_brok=['full_status', 'check_result', 'next_schedule'], retention=True),
        'in_maintenance':
            IntegerProp(default=None, fill_brok=['full_status'], retention=True),
        'latency':
            FloatProp(default=0, fill_brok=['full_status', 'check_result'], retention=True,),
        'attempt':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'state':
            StringProp(default='PENDING',
                       fill_brok=['full_status', 'check_result'], retention=True),
        'state_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'current_event_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_event_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_state':
            StringProp(default='PENDING',
                       fill_brok=['full_status', 'check_result'], retention=True),
        'last_state_type':
            StringProp(default='HARD', fill_brok=['full_status', 'check_result'], retention=True),
        'last_state_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_state_change':
            FloatProp(default=0.0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_hard_state_change':
            FloatProp(default=0.0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_hard_state':
            StringProp(default='PENDING', fill_brok=['full_status'], retention=True),
        'last_hard_state_id':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'last_time_ok':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_time_warning':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_time_critical':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'last_time_unknown':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'duration_sec':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'state_type':
            StringProp(default='HARD', fill_brok=['full_status', 'check_result'], retention=True),
        'state_type_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'output':
            StringProp(default='', fill_brok=['full_status', 'check_result'], retention=True),
        'long_output':
            StringProp(default='', fill_brok=['full_status', 'check_result'], retention=True),
        'is_flapping':
            BoolProp(default=False, fill_brok=['full_status'], retention=True),
        # dependencies for actions like notif of event handler,
        # so AFTER check return
        'act_depend_of':
            ListProp(default=[]),
        # dependencies for checks raise, so BEFORE checks
        'chk_depend_of':
            ListProp(default=[]),
        # elements that depend of me, so the reverse than just upper
        'act_depend_of_me':
            ListProp(default=[]),
        # elements that depend of me
        'chk_depend_of_me':
            ListProp(default=[]),
        'last_state_update':
            FloatProp(default=0.0, fill_brok=['full_status'], retention=True),
        # no brok because checks are too linked
        'checks_in_progress':
            ListProp(default=[]),
        # no broks because notifications are too linked
        'notifications_in_progress': DictProp(default={}, retention=True),
        'downtimes':
            ListProp(default=[], fill_brok=['full_status'], retention=True),
        'comments':
            ListProp(default=[], fill_brok=['full_status'], retention=True),
        'flapping_changes':
            ListProp(default=[], fill_brok=['full_status'], retention=True),
        'flapping_comment_id':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'percent_state_change':
            FloatProp(default=0.0, fill_brok=['full_status', 'check_result'], retention=True),
        'problem_has_been_acknowledged':
            BoolProp(default=False, fill_brok=['full_status', 'check_result'], retention=True),
        'acknowledgement':
            StringProp(default=None, retention=True),
        'acknowledgement_type':
            IntegerProp(default=1, fill_brok=['full_status', 'check_result'], retention=True),
        'check_type':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'has_been_checked':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'should_be_scheduled':
            IntegerProp(default=1, fill_brok=['full_status'], retention=True),
        'last_problem_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'current_problem_id':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'execution_time':
            FloatProp(default=0.0, fill_brok=['full_status', 'check_result'], retention=True),
        'u_time':
            FloatProp(default=0.0),
        's_time':
            FloatProp(default=0.0),
        'last_notification':
            FloatProp(default=0.0, fill_brok=['full_status'], retention=True),
        'current_notification_number':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'current_notification_id':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'check_flapping_recovery_notification':
            BoolProp(default=True, fill_brok=['full_status'], retention=True),
        'scheduled_downtime_depth':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'pending_flex_downtime':
            IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        'timeout':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'start_time':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'end_time':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'early_timeout':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'return_code':
            IntegerProp(default=0, fill_brok=['full_status', 'check_result'], retention=True),
        'perf_data':
            StringProp(default='', fill_brok=['full_status', 'check_result'], retention=True),
        'last_perf_data':
            StringProp(default='', retention=True),
        'host':
            StringProp(default=None),
        'customs':
            DictProp(default={}, fill_brok=['full_status']),
        # Warning: for the notified_contacts retention save,
        # we save only the names of the contacts, and we should RELINK
        # them when we load it.
        # use for having all contacts we have notified
        'notified_contacts': ListProp(default=set(),
                                      retention=True,
                                      retention_preparation=to_list_of_names),
        'in_scheduled_downtime': BoolProp(
            default=False, fill_brok=['full_status', 'check_result'], retention=True),
        'in_scheduled_downtime_during_last_check': BoolProp(default=False, retention=True),
        'actions': ListProp(default=[]),  # put here checks and notif raised
        'broks': ListProp(default=[]),  # and here broks raised
        # Problem/impact part
        'is_problem': BoolProp(default=False, fill_brok=['full_status']),
        'is_impact': BoolProp(default=False, fill_brok=['full_status']),
        # the save value of our business_impact for "problems"
        'my_own_business_impact': IntegerProp(default=-1, fill_brok=['full_status']),
        # list of problems that make us an impact
        'source_problems': ListProp(default=[],
                                    fill_brok=['full_status'],
                                    brok_transformation=to_svc_hst_distinct_lists),
        # list of the impact I'm the cause of
        'impacts': ListProp(default=[],
                            fill_brok=['full_status'],
                            brok_transformation=to_svc_hst_distinct_lists),
        # keep a trace of the old state before being an impact
        'state_before_impact': StringProp(default='PENDING'),
        # keep a trace of the old state id before being an impact
        'state_id_before_impact': IntegerProp(default=0),
        # if the state change, we know so we do not revert it
        'state_changed_since_impact': BoolProp(default=False),
        # BUSINESS CORRELATOR PART
        # Say if we are business based rule or not
        'got_business_rule': BoolProp(default=False, fill_brok=['full_status']),
        # Previously processed business rule (with macro expanded)
        'processed_business_rule': StringProp(default="", fill_brok=['full_status']),
        # Our Dependency node for the business rule
        'business_rule': StringProp(default=None),
        # Here it's the elements we are depending on
        # so our parents as network relation, or a host
        # we are depending in a hostdependency
        # or even if we are business based.
        'parent_dependencies': StringProp(default=set(),
                                          brok_transformation=to_svc_hst_distinct_lists,
                                          fill_brok=['full_status']),
        # Here it's the guys that depend on us. So it's the total
        # opposite of the parent_dependencies
        'child_dependencies': StringProp(brok_transformation=to_svc_hst_distinct_lists,
                                         default=set(), fill_brok=['full_status']),
        # Manage the unknown/unreach during hard state
        'in_hard_unknown_reach_phase': BoolProp(default=False, retention=True),
        'was_in_hard_unknown_reach_phase': BoolProp(default=False, retention=True),
        'state_before_hard_unknown_reach_phase': StringProp(default='OK', retention=True),
        # Set if the element just change its father/son topology
        'topology_change': BoolProp(default=False, fill_brok=['full_status']),
        # Trigger list
        'triggers': ListProp(default=[]),
        # snapshots part
        'last_snapshot': IntegerProp(default=0, fill_brok=['full_status'], retention=True),
        # Keep the string of the last command launched for this element
        'last_check_command': StringProp(default=''),
    })
# Mapping between Macros and properties (can be prop or a function)
macros = {
'SERVICEDESC': 'service_description',
'SERVICEDISPLAYNAME': 'display_name',
'SERVICESTATE': 'state',
'SERVICESTATEID': 'state_id',
'LASTSERVICESTATE': 'last_state',
'LASTSERVICESTATEID': 'last_state_id',
'SERVICESTATETYPE': 'state_type',
'SERVICEATTEMPT': 'attempt',
'MAXSERVICEATTEMPTS': 'max_check_attempts',
'SERVICEISVOLATILE': 'is_volatile',
'SERVICEEVENTID': 'current_event_id',
'LASTSERVICEEVENTID': 'last_event_id',
'SERVICEPROBLEMID': 'current_problem_id',
'LASTSERVICEPROBLEMID': 'last_problem_id',
'SERVICELATENCY': 'latency',
'SERVICEEXECUTIONTIME': 'execution_time',
'SERVICEDURATION': 'get_duration',
'SERVICEDURATIONSEC': 'get_duration_sec',
'SERVICEDOWNTIME': 'get_downtime',
'SERVICEPERCENTCHANGE': 'percent_state_change',
'SERVICEGROUPNAME': 'get_groupname',
'SERVICEGROUPNAMES': 'get_groupnames',
'LASTSERVICECHECK': 'last_chk',
'LASTSERVICESTATECHANGE': 'last_state_change',
'LASTSERVICEOK': 'last_time_ok',
'LASTSERVICEWARNING': 'last_time_warning',
'LASTSERVICEUNKNOWN': 'last_time_unknown',
'LASTSERVICECRITICAL': 'last_time_critical',
'SERVICEOUTPUT': 'output',
'LONGSERVICEOUTPUT': 'long_output',
'SERVICEPERFDATA': 'perf_data',
'LASTSERVICEPERFDATA': 'last_perf_data',
'SERVICECHECKCOMMAND': 'get_check_command',
'SERVICEACKAUTHOR': 'get_ack_author_name',
'SERVICEACKAUTHORNAME': 'get_ack_author_name',
'SERVICEACKAUTHORALIAS': 'get_ack_author_name',
'SERVICEACKCOMMENT': 'get_ack_comment',
'SERVICEACTIONURL': 'action_url',
'SERVICENOTESURL': 'notes_url',
'SERVICENOTES': 'notes',
'SERVICEBUSINESSIMPACT': 'business_impact',
# Business rules output formatting related macros
'STATUS': 'get_status',
'SHORTSTATUS': 'get_short_status',
'FULLNAME': 'get_full_name',
}
# This tab is used to transform old parameters name into new ones
# so from Nagios2 format, to Nagios3 ones.
# Or Shinken deprecated names like criticity
old_properties = {
'normal_check_interval': 'check_interval',
'retry_check_interval': 'retry_interval',
'criticity': 'business_impact',
'hostgroup': 'hostgroup_name',
'hostgroups': 'hostgroup_name',
# 'criticitymodulations': 'business_impact_modulations',
}
#######
# __ _ _ _
# / _(_) | | (_)
# ___ ___ _ __ | |_ _ __ _ _ _ _ __ __ _| |_ _ ___ _ __
# / __/ _ \| '_ \| _| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \
# | (_| (_) | | | | | | | (_| | |_| | | | (_| | |_| | (_) | | | |
# \___\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_|
# __/ |
# |___/
######
def get_newid(self):
cls = self.__class__
value = uuid.uuid1().hex
cls.id += 1
return value
def __repr__(self):
return '<Service host_name=%r desc=%r name=%r use=%r />' % (
getattr(self, 'host_name', None),
getattr(self, 'service_description', None),
getattr(self, 'name', None),
getattr(self, 'use', None)
)
__str__ = __repr__
    @property
    def unique_key(self):  # actually only used for (un)indexitem() via name_property..
        # A service is uniquely identified by its (host, description) pair.
        return (self.host_name, self.service_description)
@property
def display_name(self):
display_name = getattr(self, '_display_name', None)
if not display_name:
return self.service_description
return display_name
    @display_name.setter
    def display_name(self, display_name):
        # Stored under a private attribute so the getter can fall back
        # to service_description when unset or empty.
        self._display_name = display_name
# Give a nice name output
def get_name(self):
if hasattr(self, 'service_description'):
return self.service_description
if hasattr(self, 'name'):
return self.name
return 'SERVICE-DESCRIPTION-MISSING'
# Get the servicegroups names
def get_groupnames(self):
return ','.join([sg.get_name() for sg in self.servicegroups])
# Need the whole name for debugging purpose
def get_dbg_name(self):
return "%s/%s" % (self.host.host_name, self.service_description)
def get_full_name(self):
if self.host and hasattr(self.host, 'host_name') and hasattr(self, 'service_description'):
return "%s/%s" % (self.host.host_name, self.service_description)
return 'UNKNOWN-SERVICE'
# Get our realm, so in fact our host one
def get_realm(self):
if self.host is None:
return None
return self.host.get_realm()
    def get_hostgroups(self):
        # Hostgroups are carried by the linked host object.
        return self.host.hostgroups
    def get_host_tags(self):
        # Tags inherited from the host templates we are attached to.
        return self.host.tags
    def get_service_tags(self):
        # Our own template tags.
        return self.tags
def is_duplicate(self):
"""
Indicates if a service holds a duplicate_foreach statement
"""
if getattr(self, "duplicate_foreach", None):
return True
else:
return False
    def set_initial_state(self):
        """Apply the configured initial_state letter by delegating to
        SchedulingItem with the service-specific letter -> state map."""
        # Map each initial_state letter to the matching state fields.
        mapping = {
            "o": {
                "state": "OK",
                "state_id": 0
            },
            "w": {
                "state": "WARNING",
                "state_id": 1
            },
            "c": {
                "state": "CRITICAL",
                "state_id": 2
            },
            "u": {
                "state": "UNKNOWN",
                "state_id": 3
            },
        }
        SchedulingItem.set_initial_state(self, mapping)
    # Check is required prop are set:
    # template are always correct
    # contacts OR contactgroups is need
    def is_correct(self):
        """Run configuration sanity checks on this service.

        Logs every problem found (missing required properties, invalid
        commands, unknown host, illegal characters...) and returns True
        only when the whole definition is usable.  Also normalizes a few
        optional fields (notification_period, check_period, display_name)
        as a side effect.
        """
        state = True
        cls = self.__class__
        source = getattr(self, 'imported_from', 'unknown')
        desc = getattr(self, 'service_description', 'unnamed')
        hname = getattr(self, 'host_name', 'unnamed')
        # These properties get dedicated handling below, so the generic
        # "required property" loop must skip them.
        special_properties = ('check_period', 'notification_interval', 'host_name',
                              'hostgroup_name', 'notification_period')
        for prop, entry in cls.properties.items():
            if prop not in special_properties:
                if not hasattr(self, prop) and entry.required:
                    logger.error("The service %s on host '%s' does not have %s", desc, hname, prop)
                    state = False  # Bad boy...
        # Then look if we have some errors in the conf
        # Juts print warnings, but raise errors
        for err in self.configuration_warnings:
            logger.warning("[service::%s] %s", desc, err)
        # Raised all previously saw errors like unknown contacts and co
        if self.configuration_errors != []:
            state = False
            for err in self.configuration_errors:
                logger.error("[service::%s] %s", self.get_full_name(), err)
        # If no notif period, set it to None, mean 24x7
        if not hasattr(self, 'notification_period'):
            self.notification_period = None
        # Ok now we manage special cases...
        if self.notifications_enabled and self.contacts == []:
            logger.warning("The service '%s' in the host '%s' does not have "
                           "contacts nor contact_groups in '%s'", desc, hname, source)
        # Set display_name if need
        if getattr(self, 'display_name', '') == '':
            self.display_name = getattr(self, 'service_description', '')
        # If we got an event handler, it should be valid
        if getattr(self, 'event_handler', None) and not self.event_handler.is_valid():
            logger.error("%s: my event_handler %s is invalid",
                         self.get_name(), self.event_handler.command)
            state = False
        if not hasattr(self, 'check_command'):
            logger.error("%s: I've got no check_command", self.get_name())
            state = False
        # Ok got a command, but maybe it's invalid
        else:
            if not self.check_command.is_valid():
                logger.error("%s: my check_command %s is invalid",
                             self.get_name(), self.check_command.command)
                state = False
            if self.got_business_rule:
                if not self.business_rule.is_valid():
                    logger.error("%s: my business rule is invalid", self.get_name(),)
                    for bperror in self.business_rule.configuration_errors:
                        logger.error("%s: %s", self.get_name(), bperror)
                    state = False
        if not hasattr(self, 'notification_interval') \
                and self.notifications_enabled is True:
            logger.error("%s: I've got no notification_interval but "
                         "I've got notifications enabled", self.get_name())
            state = False
        if not self.host_name:
            logger.error("The service '%s' is not bound do any host.", desc)
            state = False
        elif self.host is None:
            logger.error("The service '%s' got an unknown host_name '%s'.", desc, self.host_name)
            state = False
        if not hasattr(self, 'check_period'):
            self.check_period = None
        if hasattr(self, 'service_description'):
            for c in cls.illegal_object_name_chars:
                if c in self.service_description:
                    logger.error("%s: My service_description got the "
                                 "character %s that is not allowed.", self.get_name(), c)
                    state = False
        return state
    # The service is dependent of his father dep
    # Must be AFTER linkify
    # TODO: implement "not host dependent" feature.
    def fill_daddy_dependency(self):
        """Register the implicit network dependency between this service
        and its host, on both sides (unless host_dependency_enabled is
        off)."""
        # Depend of host, all status, is a networkdep
        # and do not have timeperiod, and follow parents dep
        if self.host is not None and self.host_dependency_enabled:
            # I add the dep in MY list
            # (element, statuses that trigger it, dep type, timeperiod, inherits_parent)
            self.act_depend_of.append(
                (self.host, ['d', 'u', 's', 'f'], 'network_dep', None, True)
            )
            # I add the dep in Daddy list
            self.host.act_depend_of_me.append(
                (self, ['d', 'u', 's', 'f'], 'network_dep', None, True)
            )
            # And the parent/child dep lists too
            self.host.register_son_in_parent_child_dependencies(self)
    # Register the dependency between 2 service for action (notification etc)
    def add_service_act_dependency(self, srv, status, timeperiod, inherits_parent):
        """Make this service action-dependent on *srv*: notifications of
        self are constrained by srv being in one of *status*."""
        # first I add the other the I depend on in MY list
        self.act_depend_of.append((srv, status, 'logic_dep', timeperiod, inherits_parent))
        # then I register myself in the other service dep list
        srv.act_depend_of_me.append((self, status, 'logic_dep', timeperiod, inherits_parent))
        # And the parent/child dep lists too
        srv.register_son_in_parent_child_dependencies(self)
    # Register the dependency between 2 service for action (notification etc)
    # but based on a BUSINESS rule, so on fact:
    # ERP depend on database, so we fill just database.act_depend_of_me
    # because we will want ERP mails to go on! So call this
    # on the database service with the srv=ERP service
    def add_business_rule_act_dependency(self, srv, status, timeperiod, inherits_parent):
        """One-way business-rule dependency: only record that *srv* will
        be impacted by me (no entry in my own act_depend_of)."""
        # I only register so he know that I WILL be a impact
        self.act_depend_of_me.append((srv, status, 'business_dep',
                                      timeperiod, inherits_parent))
        # And the parent/child dep lists too
        self.register_son_in_parent_child_dependencies(srv)
    # Register the dependency between 2 service for checks
    def add_service_chk_dependency(self, srv, status, timeperiod, inherits_parent):
        """Make this service check-dependent on *srv*: raising checks of
        self is constrained by srv state BEFORE the check runs."""
        # first I add the other the I depend on in MY list
        self.chk_depend_of.append((srv, status, 'logic_dep', timeperiod, inherits_parent))
        # then I register myself in the other service dep list
        srv.chk_depend_of_me.append(
            (self, status, 'logic_dep', timeperiod, inherits_parent)
        )
        # And the parent/child dep lists too
        srv.register_son_in_parent_child_dependencies(self)
    def duplicate(self, host):
        ''' For a given host, look for all copy we must create for for_each property
        :type host: shinken.objects.host.Host
        :return Service
        '''
        # In macro, it's all in UPPER case
        prop = self.duplicate_foreach.strip().upper()
        if prop not in host.customs:  # If I do not have the property, we bail out
            return []
        duplicates = []
        # Get the list entry, and the not one if there is one
        entry = host.customs[prop]
        # Look at the list of the key we do NOT want maybe,
        # for _disks it will be _!disks
        not_entry = host.customs.get('_' + '!' + prop[1:], '').split(',')
        not_keys = strip_and_uniq(not_entry)
        default_value = getattr(self, 'default_value', '')
        # Transform the generator string to a list
        # Missing values are filled with the default value
        (key_values, errcode) = get_key_value_sequence(entry, default_value)
        if key_values:
            for key_value in key_values:
                key = key_value['KEY']
                # Maybe this key is in the NOT list, if so, skip it
                if key in not_keys:
                    continue
                # NOTE(review): 'value' is assigned but never used below;
                # expansion reads key_value[key] directly instead.
                value = key_value['VALUE']
                new_s = self.copy()
                new_s.host_name = host.get_name()
                if self.is_tpl():  # if template, the new one is not
                    new_s.register = 1
                # Beware: this inner loop deliberately rebinds 'key' to
                # iterate over every macro name (KEY, VALUE, VALUE1, ...).
                for key in key_value:
                    if key == 'KEY':
                        if hasattr(self, 'service_description'):
                            # We want to change all illegal chars to a _ sign.
                            # We can't use class.illegal_obj_char
                            # because in the "explode" phase, we do not have access to this data! :(
                            safe_key_value = re.sub(r'[' + "`~!$%^&*\"|'<>?,()=" + ']+', '_',
                                                    key_value[key])
                            new_s.service_description = self.service_description.replace(
                                '$' + key + '$', safe_key_value
                            )
                    # Here is a list of property where we will expand the $KEY$ by the value
                    _the_expandables = ['check_command',
                                        'display_name',
                                        'aggregation',
                                        'event_handler']
                    for prop in _the_expandables:
                        if hasattr(self, prop):
                            # here we can replace VALUE, VALUE1, VALUE2,...
                            setattr(new_s, prop, getattr(new_s, prop).replace('$' + key + '$',
                                                                              key_value[key]))
                    if hasattr(self, 'service_dependencies'):
                        for i, sd in enumerate(new_s.service_dependencies):
                            new_s.service_dependencies[i] = sd.replace(
                                '$' + key + '$', key_value[key]
                            )
                # And then add in our list this new service
                duplicates.append(new_s)
        else:
            # If error, we should link the error to the host, because self is
            # a template, and so won't be checked not print!
            if errcode == GET_KEY_VALUE_SEQUENCE_ERROR_SYNTAX:
                err = "The custom property '%s' of the host '%s' is not a valid entry %s for a service generator" % \
                      (self.duplicate_foreach.strip(), host.get_name(), entry)
                logger.warning(err)
                host.configuration_errors.append(err)
            elif errcode == GET_KEY_VALUE_SEQUENCE_ERROR_NODEFAULT:
                err = "The custom property '%s 'of the host '%s' has empty " \
                      "values %s but the service %s has no default_value" % \
                      (self.duplicate_foreach.strip(),
                       host.get_name(), entry, self.service_description)
                logger.warning(err)
                host.configuration_errors.append(err)
            elif errcode == GET_KEY_VALUE_SEQUENCE_ERROR_NODE:
                err = "The custom property '%s' of the host '%s' has an invalid node range %s" % \
                      (self.duplicate_foreach.strip(), host.get_name(), entry)
                logger.warning(err)
                host.configuration_errors.append(err)
        return duplicates
#####
# _
# (_)
# _ __ _ _ _ __ _ __ _ _ __ __ _
# | '__| | | | '_ \| '_ \| | '_ \ / _` |
# | | | |_| | | | | | | | | | | | (_| |
# |_| \__,_|_| |_|_| |_|_|_| |_|\__, |
# __/ |
# |___/
####
    # Set unreachable: our host is DOWN, but it mean nothing for a service
    def set_unreachable(self):
        # Deliberate no-op: services have no UNREACHABLE state of their
        # own; a DOWN host is handled through the dependency mechanism.
        pass
    # We just go an impact, so we go unreachable
    # but only if it's enable in the configuration
    def set_impact_state(self):
        """Force the service into UNKNOWN because a parent became a
        problem, saving the previous state so it can be restored
        (only if impact state change is globally enabled)."""
        cls = self.__class__
        if cls.enable_problem_impacts_states_change:
            # Keep a trace of the old state (problem came back before
            # a new checks)
            self.state_before_impact = self.state
            self.state_id_before_impact = self.state_id
            # this flag will know if we override the impact state
            self.state_changed_since_impact = False
            self.state = 'UNKNOWN'  # exit code UNDETERMINED
            self.state_id = 3
    # Ok, we are no more an impact, if no news checks
    # override the impact state, we came back to old
    # states
    # And only if we enable the state change for impacts
    def unset_impact_state(self):
        """Restore the state saved by set_impact_state(), unless a real
        check already overrode it in the meantime."""
        cls = self.__class__
        if cls.enable_problem_impacts_states_change and not self.state_changed_since_impact:
            self.state = self.state_before_impact
            self.state_id = self.state_id_before_impact
# Set state with status return by the check
# and update flapping state
def set_state_from_exit_status(self, status):
now = time.time()
self.last_state_update = now
# we should put in last_state the good last state:
# if not just change the state by an problem/impact
# we can take current state. But if it's the case, the
# real old state is self.state_before_impact (it's the TRUE
# state in fact)
# but only if the global conf have enable the impact state change
cls = self.__class__
if cls.enable_problem_impacts_states_change \
and self.is_impact \
and not self.state_changed_since_impact:
self.last_state = self.state_before_impact
else: # standard case
self.last_state = self.state
if status == 0:
self.state = 'OK'
self.state_id = 0
self.last_time_ok = int(self.last_state_update)
state_code = 'o'
elif status == 1:
self.state = 'WARNING'
self.state_id = 1
self.last_time_warning = int(self.last_state_update)
state_code = 'w'
elif status == 2:
self.state = 'CRITICAL'
self.state_id = 2
self.last_time_critical = int(self.last_state_update)
state_code = 'c'
elif status == 3:
self.state = 'UNKNOWN'
self.state_id = 3
self.last_time_unknown = int(self.last_state_update)
state_code = 'u'
else:
self.state = 'CRITICAL' # exit code UNDETERMINED
self.state_id = 2
self.last_time_critical = int(self.last_state_update)
state_code = 'c'
if state_code in self.flap_detection_options:
self.add_flapping_change(self.state != self.last_state)
if self.state != self.last_state:
self.last_state_change = self.last_state_update
self.duration_sec = now - self.last_state_change
# Return True if status is the state (like OK) or small form like 'o'
def is_state(self, status):
if status == self.state:
return True
# Now low status
elif status == 'o' and self.state == 'OK':
return True
elif status == 'c' and self.state == 'CRITICAL':
return True
elif status == 'w' and self.state == 'WARNING':
return True
elif status == 'u' and self.state == 'UNKNOWN':
return True
return False
# The last time when the state was not OK
def last_time_non_ok_or_up(self):
non_ok_times = filter(lambda x: x > self.last_time_ok, [self.last_time_warning,
self.last_time_critical,
self.last_time_unknown])
if len(non_ok_times) == 0:
last_time_non_ok = 0 # program_start would be better
else:
last_time_non_ok = min(non_ok_times)
return last_time_non_ok
# Add a log entry with a SERVICE ALERT like:
# SERVICE ALERT: server;Load;UNKNOWN;HARD;1;I don't know what to say...
def raise_alert_log_entry(self):
naglog_result('critical', 'SERVICE ALERT: %s;%s;%s;%s;%d;%s'
% (self.host.get_name(), self.get_name(),
self.state, self.state_type,
self.attempt, self.output))
# If the configuration allow it, raise an initial log like
# CURRENT SERVICE STATE: server;Load;UNKNOWN;HARD;1;I don't know what to say...
def raise_initial_state(self):
if self.__class__.log_initial_states:
naglog_result('info', 'CURRENT SERVICE STATE: %s;%s;%s;%s;%d;%s'
% (self.host.get_name(), self.get_name(),
self.state, self.state_type, self.attempt, self.output))
# Add a log entry with a Freshness alert like:
# Warning: The results of host 'Server' are stale by 0d 0h 0m 58s (threshold=0d 1h 0m 0s).
# I'm forcing an immediate check of the host.
def raise_freshness_log_entry(self, t_stale_by, t_threshold):
logger.warning("The results of service '%s' on host '%s' are stale "
"by %s (threshold=%s). I'm forcing an immediate check "
"of the service.",
self.get_name(), self.host.get_name(),
format_t_into_dhms_format(t_stale_by),
format_t_into_dhms_format(t_threshold))
# Raise a log entry with a Notification alert like
# SERVICE NOTIFICATION: superadmin;server;Load;OK;notify-by-rss;no output
def raise_notification_log_entry(self, n):
contact = n.contact
command = n.command_call
if n.type in ('DOWNTIMESTART', 'DOWNTIMEEND', 'DOWNTIMECANCELLED',
'CUSTOM', 'ACKNOWLEDGEMENT', 'FLAPPINGSTART',
'FLAPPINGSTOP', 'FLAPPINGDISABLED'):
state = '%s (%s)' % (n.type, self.state)
else:
state = self.state
if self.__class__.log_notifications:
naglog_result('critical', "SERVICE NOTIFICATION: %s;%s;%s;%s;%s;%s"
% (contact.get_name(),
self.host.get_name(), self.get_name(), state,
command.get_name(), self.output))
# Raise a log entry with a Eventhandler alert like
# SERVICE EVENT HANDLER: test_host_0;test_ok_0;OK;SOFT;4;eventhandler
def raise_event_handler_log_entry(self, command):
if self.__class__.log_event_handlers:
naglog_result('critical', "SERVICE EVENT HANDLER: %s;%s;%s;%s;%s;%s"
% (self.host.get_name(), self.get_name(),
self.state, self.state_type,
self.attempt, command.get_name()))
# Raise a log entry with a Eventhandler alert like
# SERVICE SNAPSHOT: test_host_0;test_ok_0;OK;SOFT;4;eventhandler
def raise_snapshot_log_entry(self, command):
if self.__class__.log_event_handlers:
naglog_result('critical', "SERVICE SNAPSHOT: %s;%s;%s;%s;%s;%s"
% (self.host.get_name(), self.get_name(),
self.state, self.state_type, self.attempt, command.get_name()))
# Raise a log entry with FLAPPING START alert like
# SERVICE FLAPPING ALERT: server;LOAD;STARTED;
# Service appears to have started flapping (50.6% change >= 50.0% threshold)
def raise_flapping_start_log_entry(self, change_ratio, threshold):
naglog_result('critical', "SERVICE FLAPPING ALERT: %s;%s;STARTED; "
"Service appears to have started flapping "
"(%.1f%% change >= %.1f%% threshold)"
% (self.host.get_name(), self.get_name(),
change_ratio, threshold))
# Raise a log entry with FLAPPING STOP alert like
# SERVICE FLAPPING ALERT: server;LOAD;STOPPED;
# Service appears to have stopped flapping (23.0% change < 25.0% threshold)
def raise_flapping_stop_log_entry(self, change_ratio, threshold):
naglog_result('critical', "SERVICE FLAPPING ALERT: %s;%s;STOPPED; "
"Service appears to have stopped flapping "
"(%.1f%% change < %.1f%% threshold)"
% (self.host.get_name(), self.get_name(),
change_ratio, threshold))
# If there is no valid time for next check, raise a log entry
def raise_no_next_check_log_entry(self):
logger.warning("I cannot schedule the check for the service '%s' on "
"host '%s' because there is not future valid time",
self.get_name(), self.host.get_name())
# Raise a log entry when a downtime begins
# SERVICE DOWNTIME ALERT: test_host_0;test_ok_0;STARTED;
# Service has entered a period of scheduled downtime
def raise_enter_downtime_log_entry(self):
naglog_result('critical', "SERVICE DOWNTIME ALERT: %s;%s;STARTED; "
"Service has entered a period of scheduled "
"downtime" % (self.host.get_name(), self.get_name()))
# Raise a log entry when a downtime has finished
# SERVICE DOWNTIME ALERT: test_host_0;test_ok_0;STOPPED;
# Service has exited from a period of scheduled downtime
def raise_exit_downtime_log_entry(self):
naglog_result('critical', "SERVICE DOWNTIME ALERT: %s;%s;STOPPED; Service "
"has exited from a period of scheduled downtime"
% (self.host.get_name(), self.get_name()))
# Raise a log entry when a downtime prematurely ends
# SERVICE DOWNTIME ALERT: test_host_0;test_ok_0;CANCELLED;
# Service has entered a period of scheduled downtime
def raise_cancel_downtime_log_entry(self):
naglog_result(
'critical', "SERVICE DOWNTIME ALERT: %s;%s;CANCELLED; "
"Scheduled downtime for service has been cancelled."
% (self.host.get_name(), self.get_name()))
# Is stalking?
# Launch if check is waitconsume==first time
# and if c.status is in self.stalking_options
def manage_stalking(self, c):
need_stalk = False
if c.status == 'waitconsume':
if c.exit_status == 0 and 'o' in self.stalking_options:
need_stalk = True
elif c.exit_status == 1 and 'w' in self.stalking_options:
need_stalk = True
elif c.exit_status == 2 and 'c' in self.stalking_options:
need_stalk = True
elif c.exit_status == 3 and 'u' in self.stalking_options:
need_stalk = True
if c.output == self.output:
need_stalk = False
if need_stalk:
logger.info("Stalking %s: %s", self.get_name(), c.output)
# Give data for checks's macros
def get_data_for_checks(self):
return [self.host, self]
# Give data for event handlers's macros
def get_data_for_event_handler(self):
return [self.host, self]
# Give data for notifications'n macros
def get_data_for_notifications(self, contact, n):
return [self.host, self, contact, n]
# See if the notification is launchable (time is OK and contact is OK too)
def notification_is_blocked_by_contact(self, n, contact):
return not contact.want_service_notification(self.last_chk, self.state,
n.type, self.business_impact, n.command_call)
def get_duration_sec(self):
return str(int(self.duration_sec))
def get_duration(self):
m, s = divmod(self.duration_sec, 60)
h, m = divmod(m, 60)
return "%02dh %02dm %02ds" % (h, m, s)
def get_ack_author_name(self):
if self.acknowledgement is None:
return ''
return self.acknowledgement.author
def get_ack_comment(self):
if self.acknowledgement is None:
return ''
return self.acknowledgement.comment
def get_check_command(self):
return self.check_command.get_name()
# Check if a notification for this service is suppressed at this time
def notification_is_blocked_by_item(self, type, t_wished=None):
if t_wished is None:
t_wished = time.time()
# TODO
# forced notification
# pass if this is a custom notification
# Block if notifications are program-wide disabled
if not self.enable_notifications:
return True
# Does the notification period allow sending out this notification?
if self.notification_period is not None \
and not self.notification_period.is_time_valid(t_wished):
return True
# Block if notifications are disabled for this service
if not self.notifications_enabled:
return True
# Block if the current status is in the notification_options w,u,c,r,f,s
if 'n' in self.notification_options:
return True
if type in ('PROBLEM', 'RECOVERY'):
if self.state == 'UNKNOWN' and 'u' not in self.notification_options:
return True
if self.state == 'WARNING' and 'w' not in self.notification_options:
return True
if self.state == 'CRITICAL' and 'c' not in self.notification_options:
return True
if self.state == 'OK' and 'r' not in self.notification_options:
return True
if (type in ('FLAPPINGSTART', 'FLAPPINGSTOP', 'FLAPPINGDISABLED')
and 'f' not in self.notification_options):
return True
if (type in ('DOWNTIMESTART', 'DOWNTIMEEND', 'DOWNTIMECANCELLED')
and 's' not in self.notification_options):
return True
# Acknowledgements make no sense when the status is ok/up
if type == 'ACKNOWLEDGEMENT':
if self.state == self.ok_up:
return True
# When in downtime, only allow end-of-downtime notifications
if self.scheduled_downtime_depth > 1 and type not in ('DOWNTIMEEND', 'DOWNTIMECANCELLED'):
return True
# Block if host is in a scheduled downtime
if self.host.scheduled_downtime_depth > 0:
return True
# Block if in a scheduled downtime and a problem arises, or flapping event
if self.scheduled_downtime_depth > 0 and type in \
('PROBLEM', 'RECOVERY', 'FLAPPINGSTART', 'FLAPPINGSTOP', 'FLAPPINGDISABLED'):
return True
# Block if the status is SOFT
if self.state_type == 'SOFT' and type == 'PROBLEM':
return True
# Block if the problem has already been acknowledged
if self.problem_has_been_acknowledged and type != 'ACKNOWLEDGEMENT':
return True
# Block if flapping
if self.is_flapping and type not in ('FLAPPINGSTART', 'FLAPPINGSTOP', 'FLAPPINGDISABLED'):
return True
# Block if host is down
if self.host.state != self.host.ok_up:
return True
# Block if business rule smart notifications is enabled and all its
# childs have been acknowledged or are under downtime.
if self.got_business_rule is True \
and self.business_rule_smart_notifications is True \
and self.business_rule_notification_is_blocked() is True \
and type == 'PROBLEM':
return True
return False
# Get a oc*p command if item has obsess_over_*
# command. It must be enabled locally and globally
def get_obsessive_compulsive_processor_command(self):
cls = self.__class__
if not cls.obsess_over or not self.obsess_over_service:
return
m = MacroResolver()
data = self.get_data_for_event_handler()
cmd = m.resolve_command(cls.ocsp_command, data)
e = EventHandler(cmd, timeout=cls.ocsp_timeout)
# ok we can put it in our temp action queue
self.actions.append(e)
def get_short_status(self):
mapping = {
0: "O",
1: "W",
2: "C",
3: "U",
}
if self.got_business_rule:
return mapping.get(self.business_rule.get_state(), "n/a")
else:
return mapping.get(self.state_id, "n/a")
def get_status(self):
if self.got_business_rule:
mapping = {
0: "OK",
1: "WARNING",
2: "CRITICAL",
3: "UNKNOWN",
}
return mapping.get(self.business_rule.get_state(), "n/a")
else:
return self.state
def get_downtime(self):
return str(self.scheduled_downtime_depth)
# Class for list of services. It's mainly, mainly for configuration part
class Services(Items):
name_property = 'unique_key' # only used by (un)indexitem (via 'name_property')
inner_class = Service # use for know what is in items
def add_template(self, tpl):
"""
Adds and index a template into the `templates` container.
This implementation takes into account that a service has two naming
attribute: `host_name` and `service_description`.
:param tpl: The template to add
"""
objcls = self.inner_class.my_type
name = getattr(tpl, 'name', '')
hname = getattr(tpl, 'host_name', '')
if not name and not hname:
mesg = "a %s template has been defined without name nor " \
"host_name%s" % (objcls, self.get_source(tpl))
tpl.configuration_errors.append(mesg)
elif name:
tpl = self.index_template(tpl)
self.templates[tpl.id] = tpl
def add_item(self, item, index=True):
"""
Adds and index an item into the `items` container.
This implementation takes into account that a service has two naming
attribute: `host_name` and `service_description`.
:param item: The item to add
:param index: Flag indicating if the item should be indexed
"""
objcls = self.inner_class.my_type
hname = getattr(item, 'host_name', '')
hgname = getattr(item, 'hostgroup_name', '')
sdesc = getattr(item, 'service_description', '')
source = getattr(item, 'imported_from', 'unknown')
if source:
in_file = " in %s" % source
else:
in_file = ""
if not hname and not hgname:
mesg = "a %s has been defined without host_name nor " \
"hostgroups%s" % (objcls, in_file)
item.configuration_errors.append(mesg)
if index is True:
if hname and sdesc:
item = self.index_item(item)
else:
mesg = "a %s has been defined without host_name nor " \
"service_description%s" % (objcls, in_file)
item.configuration_errors.append(mesg)
return
self.items[item.id] = item
# Inheritance for just a property
def apply_partial_inheritance(self, prop):
for i in itertools.chain(self.items.itervalues(),
self.templates.itervalues()):
i.get_property_by_inheritance(prop, 0)
# If a "null" attribute was inherited, delete it
try:
if getattr(i, prop) == 'null':
delattr(i, prop)
except AttributeError:
pass
def apply_inheritance(self):
""" For all items and templates inherite properties and custom
variables.
"""
# We check for all Class properties if the host has it
# if not, it check all host templates for a value
cls = self.inner_class
for prop in cls.properties:
self.apply_partial_inheritance(prop)
for i in itertools.chain(self.items.itervalues(),
self.templates.itervalues()):
i.get_customs_properties_by_inheritance(0)
def linkify_templates(self):
# First we create a list of all templates
for i in itertools.chain(self.items.itervalues(),
self.templates.itervalues()):
self.linkify_item_templates(i)
for i in self:
i.tags = self.get_all_tags(i)
# Search for all of the services in a host
def find_srvs_by_hostname(self, host_name):
if hasattr(self, 'hosts'):
h = self.hosts.find_by_name(host_name)
if h is None:
return None
return h.get_services()
return None
# Search a service by it's name and hot_name
def find_srv_by_name_and_hostname(self, host_name, sdescr):
key = (host_name, sdescr)
return self.name_to_item.get(key, None)
# Make link between elements:
# service -> host
# service -> command
# service -> timeperiods
# service -> contacts
def linkify(self, hosts, commands, timeperiods, contacts,
resultmodulations, businessimpactmodulations, escalations,
servicegroups, triggers, checkmodulations, macromodulations):
self.linkify_with_timeperiods(timeperiods, 'notification_period')
self.linkify_with_timeperiods(timeperiods, 'check_period')
self.linkify_with_timeperiods(timeperiods, 'maintenance_period')
self.linkify_with_timeperiods(timeperiods, 'snapshot_period')
self.linkify_s_by_hst(hosts)
self.linkify_s_by_sg(servicegroups)
self.linkify_one_command_with_commands(commands, 'check_command')
self.linkify_one_command_with_commands(commands, 'event_handler')
self.linkify_one_command_with_commands(commands, 'snapshot_command')
self.linkify_with_contacts(contacts)
self.linkify_with_resultmodulations(resultmodulations)
self.linkify_with_business_impact_modulations(businessimpactmodulations)
# WARNING: all escalations will not be link here
# (just the escalation here, not serviceesca or hostesca).
# This last one will be link in escalations linkify.
self.linkify_with_escalations(escalations)
self.linkify_with_triggers(triggers)
self.linkify_with_checkmodulations(checkmodulations)
self.linkify_with_macromodulations(macromodulations)
def override_properties(self, hosts):
ovr_re = re.compile(r'^([^,]+),([^\s]+)\s+(.*)$')
ovr_hosts = [h for h in hosts if getattr(h, 'service_overrides', None)]
for host in ovr_hosts:
# We're only looking for hosts having service overrides defined
if isinstance(host.service_overrides, list):
service_overrides = host.service_overrides
else:
service_overrides = [host.service_overrides]
for ovr in service_overrides:
# Checks service override syntax
match = ovr_re.search(ovr)
if match is None:
err = "Error: invalid service override syntax: %s" % ovr
host.configuration_errors.append(err)
continue
sdescr, prop, value = match.groups()
# Checks if override is allowed
excludes = ['host_name', 'service_description', 'use',
'servicegroups', 'trigger', 'trigger_name']
if prop in excludes:
err = "Error: trying to override '%s', a forbidden property for service '%s'" % \
(prop, sdescr)
host.configuration_errors.append(err)
continue
# Looks for corresponding services
services = self.get_ovr_services_from_expression(host, sdescr)
if not services:
err = "Error: trying to override property '%s' on " \
"service identified by '%s' " \
"but it's unknown for this host" % (prop, sdescr)
host.configuration_errors.append(err)
continue
value = Service.properties[prop].pythonize(value)
for service in services:
# Pythonize the value because here value is str.
setattr(service, prop, value)
def get_ovr_services_from_expression(self, host, sdesc):
hostname = getattr(host, "host_name", "")
if sdesc == "*":
filters = [filter_service_by_host_name(hostname)]
return self.find_by_filter(filters)
elif sdesc.startswith("r:"):
pattern = sdesc[2:]
filters = [
filter_service_by_host_name(hostname),
filter_service_by_regex_name(pattern)
]
return self.find_by_filter(filters)
else:
svc = self.find_srv_by_name_and_hostname(hostname, sdesc)
if svc is not None:
return [svc]
else:
return []
# We can link services with hosts so
# We can search in O(hosts) instead
# of O(services) for common cases
def optimize_service_search(self, hosts):
self.hosts = hosts
# We just search for each host the id of the host
# and replace the name by the id
# + inform the host we are a service of him
def linkify_s_by_hst(self, hosts):
for s in self:
# If we do not have a host_name, we set it as
# a template element to delete. (like Nagios)
if not hasattr(s, 'host_name'):
s.host = None
continue
try:
hst_name = s.host_name
# The new member list, in id
hst = hosts.find_by_name(hst_name)
s.host = hst
# Let the host know we are his service
if s.host is not None:
hst.add_service_link(s)
else: # Ok, the host do not exists!
err = "Warning: the service '%s' got an invalid host_name '%s'" % \
(self.get_name(), hst_name)
s.configuration_warnings.append(err)
continue
except AttributeError, exp:
pass # Will be catch at the is_correct moment
# We look for servicegroups property in services and
# link them
def linkify_s_by_sg(self, servicegroups):
for s in self:
new_servicegroups = []
if hasattr(s, 'servicegroups') and s.servicegroups != '':
for sg_name in s.servicegroups:
sg_name = sg_name.strip()
sg = servicegroups.find_by_name(sg_name)
if sg is not None:
new_servicegroups.append(sg)
else:
err = "Error: the servicegroup '%s' of the service '%s' is unknown" %\
(sg_name, s.get_dbg_name())
s.configuration_errors.append(err)
s.servicegroups = new_servicegroups
# In the scheduler we need to relink the commandCall with
# the real commands
def late_linkify_s_by_commands(self, commands):
props = ['check_command', 'event_handler', 'snapshot_command']
for s in self:
for prop in props:
cc = getattr(s, prop, None)
if cc:
cc.late_linkify_with_command(commands)
# Delete services by ids
def delete_services_by_id(self, ids):
for id in ids:
del self[id]
# Apply implicit inheritance for special properties:
# contact_groups, notification_interval , notification_period
# So service will take info from host if necessary
def apply_implicit_inheritance(self, hosts):
for prop in ('contacts', 'contact_groups', 'notification_interval',
'notification_period', 'resultmodulations', 'business_impact_modulations',
'escalations', 'poller_tag', 'reactionner_tag', 'check_period',
'business_impact', 'maintenance_period'):
for s in self:
if not hasattr(s, prop) and hasattr(s, 'host_name'):
h = hosts.find_by_name(s.host_name)
if h is not None and hasattr(h, prop):
setattr(s, prop, getattr(h, prop))
# Create dependencies for services (daddy ones)
def apply_dependencies(self):
for s in self:
s.fill_daddy_dependency()
def set_initial_state(self):
"""
Sets services initial state if required in configuration
"""
for s in self:
s.set_initial_state()
# For services the main clean is about service with bad hosts
def clean(self):
to_del = []
for s in self:
if not s.host:
to_del.append(s.id)
for sid in to_del:
del self.items[sid]
def explode_services_from_hosts(self, hosts, s, hnames):
"""
Explodes a service based on a lis of hosts.
:param hosts: The hosts container
:param s: The base service to explode
:param hnames: The host_name list to exlode sevice on
"""
duplicate_for_hosts = [] # get the list of our host_names if more than 1
not_hosts = [] # the list of !host_name so we remove them after
for hname in hnames:
hname = hname.strip()
# If the name begin with a !, we put it in
# the not list
if hname.startswith('!'):
not_hosts.append(hname[1:])
else: # the standard list
duplicate_for_hosts.append(hname)
# remove duplicate items from duplicate_for_hosts:
duplicate_for_hosts = list(set(duplicate_for_hosts))
# Ok now we clean the duplicate_for_hosts with all hosts
# of the not
for hname in not_hosts:
try:
duplicate_for_hosts.remove(hname)
except IndexError:
pass
# Now we duplicate the service for all host_names
for hname in duplicate_for_hosts:
h = hosts.find_by_name(hname)
if h is None:
err = 'Error: The hostname %s is unknown for the ' \
'service %s!' % (hname, s.get_name())
s.configuration_errors.append(err)
continue
if h.is_excluded_for(s):
continue
new_s = s.copy()
new_s.host_name = hname
self.add_item(new_s)
def _local_create_service(self, hosts, host_name, service):
'''Create a new service based on a host_name and service instance.
:param hosts: The hosts items instance.
:type hosts: shinken.objects.host.Hosts
:param host_name: The host_name to create a new service.
:param service: The service to be used as template.
:type service: Service
:return: The new service created.
:rtype: Service
'''
h = hosts.find_by_name(host_name.strip())
if h.is_excluded_for(service):
return
# Creates concrete instance
new_s = service.copy()
new_s.host_name = host_name
new_s.register = 1
if new_s.is_duplicate():
self.add_item(new_s, index=False)
else:
self.add_item(new_s)
return new_s
def explode_services_from_templates(self, hosts, service):
"""
Explodes services from templates. All hosts holding the specified
templates are bound the service.
:param hosts: The hosts container.
:type hosts: shinken.objects.host.Hosts
:param service: The service to explode.
:type service: Service
"""
hname = getattr(service, "host_name", None)
if not hname:
return
# Now really create the services
if is_complex_expr(hname):
hnames = self.evaluate_hostgroup_expression(
hname.strip(), hosts, hosts.templates, look_in='templates')
for name in hnames:
self._local_create_service(hosts, name, service)
else:
hnames = [n.strip() for n in hname.split(',') if n.strip()]
for hname in hnames:
for name in hosts.find_hosts_that_use_template(hname):
self._local_create_service(hosts, name, service)
def explode_services_duplicates(self, hosts, s):
"""
Explodes services holding a `duplicate_foreach` clause.
:param hosts: The hosts container
:param s: The service to explode
:type s: Service
"""
hname = getattr(s, "host_name", None)
if hname is None:
return
# the generator case, we must create several new services
# we must find our host, and get all key:value we need
h = hosts.find_by_name(hname.strip())
if h is None:
err = 'Error: The hostname %s is unknown for the ' \
'service %s!' % (hname, s.get_name())
s.configuration_errors.append(err)
return
# Duplicate services
for new_s in s.duplicate(h):
if h.is_excluded_for(new_s):
continue
# Adds concrete instance
self.add_item(new_s)
def register_service_into_servicegroups(self, s, servicegroups):
"""
Registers a service into the service groups declared in its
`servicegroups` attribute.
:param s: The service to register
:param servicegroups: The servicegroups container
"""
if hasattr(s, 'service_description'):
sname = s.service_description
shname = getattr(s, 'host_name', '')
if hasattr(s, 'servicegroups'):
# Todo: See if we can remove this if
if isinstance(s.servicegroups, list):
sgs = s.servicegroups
else:
sgs = s.servicegroups.split(',')
for sg in sgs:
servicegroups.add_member([shname, sname], sg.strip())
def register_service_dependencies(self, s, servicedependencies):
"""
Registers a service dependencies.
:param s: The service to register
:param servicedependencies: The servicedependencies container
"""
# We explode service_dependencies into Servicedependency
# We just create serviceDep with goods values (as STRING!),
# the link pass will be done after
sdeps = [d.strip() for d in
getattr(s, "service_dependencies", [])]
# %2=0 are for hosts, !=0 are for service_description
i = 0
hname = ''
for elt in sdeps:
if i % 2 == 0: # host
hname = elt
else: # description
desc = elt
# we can register it (s) (depend on) -> (hname, desc)
# If we do not have enough data for s, it's no use
if hasattr(s, 'service_description') and hasattr(s, 'host_name'):
if hname == '':
hname = s.host_name
servicedependencies.add_service_dependency(
s.host_name, s.service_description, hname, desc)
i += 1
# We create new service if necessary (host groups and co)
def explode(self, hosts, hostgroups, contactgroups,
servicegroups, servicedependencies, triggers):
"""
Explodes services, from host_name, hostgroup_name, and from templetes.
:param hosts: The hosts container
:param hostgroups: The hostgoups container
:param contactgroups: The concactgoups container
:param servicegroups: The servicegoups container
:param servicedependencies: The servicedependencies container
:param triggers: The triggers container
"""
# items::explode_trigger_string_into_triggers
self.explode_trigger_string_into_triggers(triggers)
for t in self.templates.values():
self.explode_contact_groups_into_contacts(t, contactgroups)
self.explode_services_from_templates(hosts, t)
# Explode services that have a duplicate_foreach clause
duplicates = [s.id for s in self if s.is_duplicate()]
for id in duplicates:
s = self.items[id]
self.explode_services_duplicates(hosts, s)
if not s.configuration_errors:
self.remove_item(s)
# Then for every host create a copy of the service with just the host
# because we are adding services, we can't just loop in it
for s in self.items.values():
# items::explode_host_groups_into_hosts
# take all hosts from our hostgroup_name into our host_name property
self.explode_host_groups_into_hosts(s, hosts, hostgroups)
# items::explode_contact_groups_into_contacts
# take all contacts from our contact_groups into our contact property
self.explode_contact_groups_into_contacts(s, contactgroups)
hnames = getattr(s, "host_name", '')
hnames = list(set([n.strip() for n in hnames.split(',') if n.strip()]))
# hnames = strip_and_uniq(hnames)
# We will duplicate if we have multiple host_name
# or if we are a template (so a clean service)
if len(hnames) == 1:
self.index_item(s)
else:
if len(hnames) >= 2:
self.explode_services_from_hosts(hosts, s, hnames)
# Delete expanded source service
if not s.configuration_errors:
self.remove_item(s)
to_remove = []
for service in self:
host = hosts.find_by_name(service.host_name)
if host and host.is_excluded_for(service):
to_remove.append(service)
for service in to_remove:
self.remove_item(service)
# Servicegroups property need to be fullfill for got the informations
# And then just register to this service_group
for s in self:
self.register_service_into_servicegroups(s, servicegroups)
self.register_service_dependencies(s, servicedependencies)
# Will create all business tree for the
# services
def create_business_rules(self, hosts, services):
for s in self:
s.create_business_rules(hosts, services)
# Will link all business service/host with theirs
# dep for problem/impact link
def create_business_rules_dependencies(self):
for s in self:
s.create_business_rules_dependencies()
| [
"k.sh770@gmail.com"
] | k.sh770@gmail.com |
128b801964308dadff32f8c6325cc1579ff1454c | 61e2257ab27b04209ef6439e470717629de06f06 | /Ecommerce/settings.py | dc03744e14c07271f7ce516af0a9fbfd0f70397d | [] | no_license | amey1210/BookGet | a9bdf2607b4839909f503c5e8add7c06911b7a81 | cc005d19d7e98e73d71061c70270d3e617630a62 | refs/heads/master | 2020-04-08T00:33:19.506000 | 2018-11-23T16:41:32 | 2018-11-23T16:41:32 | 158,856,218 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,697 | py | """
Django settings for Ecommerce project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
#SECRET_KEY = '$l301bcac23uj*&ae8t$w^d4+(%xwu6t3@#&7+4-co+z!yj9!-'
import os
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', '$l301bcac23uj*&ae8t$w^d4+(%xwu6t3@#&7+4-co+z!yj9!-')
# SECURITY WARNING: don't run with debug turned on in production!
#DEBUG = True
DEBUG = bool(os.environ.get('DJANGO_DEBUG', True))
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts',
'shop',
'cart',
'orders',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Ecommerce.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'cart.context_processors.cart',
],
},
},
]
WSGI_APPLICATION = 'Ecommerce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'accounts.MyUser'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'products/')
CART_SESSION_ID = 'cart' | [
"noreply@github.com"
] | amey1210.noreply@github.com |
0843ca751b78db8856ca9d5205d51514773d87fe | f7e511e0dc14d2b23d2bbc1c7023d1efd346829a | /rocon_smartthings_bridge/resources/test_code.py | 29a37746cb2af0f918a643e0d87867d96cdb263d | [] | no_license | robotics-in-concert/rocon_devices | 1f6ffd0968a0cc565a1992b8423e5a497dbd3440 | 614a190cb9f531c3db83c3e3e4650e8a0971c8c1 | refs/heads/develop | 2021-01-10T21:06:25.738755 | 2015-07-28T08:29:35 | 2015-07-28T08:29:35 | 18,580,058 | 10 | 11 | null | 2018-02-19T22:24:08 | 2014-04-09T00:40:15 | Python | UTF-8 | Python | false | false | 2,280 | py | #!/usr/bin/env python
import json
import requests
def load_settings(filename="smartthings.json"):
    """Read the SmartThings OAuth settings from a JSON file.

    Returns the 3-tuple ``(api, api_location, access_token)`` taken from the
    keys of the same names.  See the documentation; briefly, the file can be
    obtained from https://iotdb.org/playground/oauthorize
    """
    with open(filename) as handle:
        settings = json.load(handle)
    api = settings['api']
    location = settings['api_location']
    token = settings['access_token']
    return api, location, token
def get_endpoint(url, api_location, access_token):
    """Resolve the full endpoint URL.

    Fetches the endpoints listing at *url* (authenticated via the
    ``access_token`` query parameter), takes the ``url`` path of the first
    entry, and prefixes it with ``http://<api_location>``.
    """
    response = requests.get(url=url, params={"access_token": access_token})
    first_path = response.json()[0]['url']
    return 'http://%s%s' % (api_location, first_path)
def request_get(url, access_token, command):
    """GET ``<url>/<command>`` with a Bearer token and pretty-print the JSON reply.

    A list response is unwrapped to its first element; a dict response is
    printed key by key, expanding one level of nested list/dict values.
    Returns None - this function only prints.
    """
    request_url = "%s/%s"%(url, command)
    params = {}
    header = {
        "Authorization": "Bearer %s" % access_token,
    }
    resp = requests.get(url=request_url, params=params, headers=header)
    r = resp.json()
    # The endpoint may wrap the payload in a single-element list.
    if type(r) == list:
        r = r[0]
    if type(r) == dict:
        for k, v in r.items():
            print("--- %s"%str(k))
            if type(v) == list or type(v) == dict:
                if v:
                    # NOTE(review): assumes each element of a list value is itself
                    # a dict; a plain dict value would iterate its keys here and
                    # fail on kk.items() - confirm against the API's actual shape.
                    for kk in v:
                        for kkk, vvv in kk.items():
                            print("\t%s : %s"%(kkk, vvv))
                    print("")
            else:
                print(str(" %s"%v))
    else:
        print(str(r))
def request_to_update_uri(url, access_token, paired_uri):
    """PUT *paired_uri* to the endpoint's ``configuration`` resource and print the response."""
    target = "%s/%s" % (url, "configuration")
    auth_header = {
        "Authorization": "Bearer %s" % access_token,
    }
    response = requests.put(url=target, params={"uri": paired_uri}, headers=auth_header)
    print(str(response))
if __name__ == '__main__':
    # Resolve the endpoint from the local OAuth settings file, then exercise
    # the REST API.
    api, api_location, access_token = load_settings()
    endpoint_url = get_endpoint(api, api_location, access_token)
    #request_devices(endpoint_url, access_token, "motion")
    #r = request_get(endpoint_url, access_token,"get_all_types")
    #r = request_get(endpoint_url, access_token,"configuration")
    # Push a dummy pairing URI, then read the configuration back to verify.
    r = request_to_update_uri(endpoint_url, access_token, "qiowjefqojiwefoqijfqwe")
    r = request_get(endpoint_url, access_token,"configuration")
| [
"jihoonlee.in@gmail.com"
] | jihoonlee.in@gmail.com |
b2c14211005aacceb7b237f92d39b72c2fba2218 | 93ed8dd9576a397912dad7693d63fc081f7651db | /tests/contracts/test_contract_estimateGas.py | 24f7f4f3f3524f15111d642ecfcfbe6c4ad24f7b | [
"MIT"
] | permissive | XertroV/web3.py | 3cf1a1265aa9225fe0391feb99bf6088ecfd1937 | 1c6ead0c271da7b648d20dba8c880b76b436a03c | refs/heads/master | 2021-01-24T20:25:36.578888 | 2016-07-16T19:00:10 | 2016-07-16T19:00:10 | 64,264,819 | 0 | 0 | null | 2016-07-27T00:46:25 | 2016-07-27T00:46:24 | null | UTF-8 | Python | false | false | 1,476 | py | import pytest
from web3.providers.rpc import TestRPCProvider
from web3.utils.abi import (
function_abi_to_4byte_selector,
)
@pytest.fixture(autouse=True)
def wait_for_first_block(web3, wait_for_block):
    """Autouse fixture: block each test until the chain has produced a block."""
    wait_for_block(web3)
@pytest.fixture()
def math_contract(web3, MATH_ABI, MATH_CODE, MATH_RUNTIME, MATH_SOURCE,
                  wait_for_transaction):
    """Deploy the Math contract and return a contract object bound to its address."""
    MathContract = web3.eth.contract(
        abi=MATH_ABI,
        code=MATH_CODE,
        code_runtime=MATH_RUNTIME,
        source=MATH_SOURCE,
    )
    deploy_txn = MathContract.deploy({'from': web3.eth.coinbase})
    # Wait until the deployment transaction is mined before reading the receipt.
    deploy_receipt = wait_for_transaction(deploy_txn)
    assert deploy_receipt is not None
    contract_address = deploy_receipt['contractAddress']
    # NOTE(review): the isAddress() result is discarded - this looks like it was
    # meant to be asserted.
    web3.isAddress(contract_address)
    _math_contract = MathContract(address=contract_address)
    return _math_contract
def test_needs_skipping(web3):
    """Under the TestRPC provider, estimateGas with an empty transaction must raise."""
    if not isinstance(web3.currentProvider, TestRPCProvider):
        pytest.skip("N/A")
    with pytest.raises(ValueError):
        web3.eth.estimateGas({})
def test_contract_estimateGas(web3, math_contract):
    """Estimate gas for calling increment() and check it is within 200 of 21272."""
    if isinstance(web3.currentProvider, TestRPCProvider):
        pytest.skip("The testrpc server doesn't implement `eth_estimateGas`")
    increment_abi = math_contract.find_matching_abi('increment', [])
    # NOTE(review): call_data is computed but never used below.
    call_data = function_abi_to_4byte_selector(increment_abi)
    gas_estimate = math_contract.estimateGas().increment()
    assert abs(gas_estimate - 21272) < 200
| [
"pipermerriam@gmail.com"
] | pipermerriam@gmail.com |
d3a52cd97d2d1792cbadfe6f6aac0c69fa2586b7 | 51e64cc09dd5593a4bd4ae47f4546c877ab0f04e | /code.py | dec97557aeb66fa380a73751293ec8097810be4a | [
"MIT"
] | permissive | suyashphatak23/Simple-Face-Detection | 7c0e22f0ed7bb6e4a394627abebce5cf26a7a73d | cc8bc6a9e3ac152c99614e6414fd2f0fb596ad13 | refs/heads/master | 2022-11-12T05:44:48.637562 | 2020-07-03T09:26:32 | 2020-07-03T09:26:32 | 276,311,085 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | '''
Simple Face detector
@author: Suyash Phatak
Date: 30/06/2020
'''
# Importing the required libraries
import cv2
# Loading the face or eye cascade or xml file for face
# (a pre-trained Haar cascade read from the working directory)
face_cascade = cv2.CascadeClassifier('face_detector.xml')
# Input image
# To change the image change the below filename and extension of the image
img = cv2.imread('test4.png')
# Detecting Faces
# 1.1 is the per-pass scale factor, 4 the minimum neighbour count per detection.
faces = face_cascade.detectMultiScale(img, 1.1, 4)
# Creating rectangle around the detected faces
# (x, y) is the top-left corner, (w, h) the box size; colour is BGR blue.
for(x, y, w, h) in faces:
	cv2.rectangle(img, (x,y), (x+w, y+h), (255,0,0), 3)
# Output Image
# NOTE(review): the printed message is misspelled ("Succesfully").
cv2.imwrite("face_detected.png", img)
print("Succesfully Saved")
| [
"noreply@github.com"
] | suyashphatak23.noreply@github.com |
96ccc37d3e8cd2be9c9d931b1c7a8fb3d1694a0d | 37534e6aabfd60b3c3b94074e31c061c66128cb8 | /.venv/bin/pip3 | 02153aba0214f06e0483b4cdafe48451b556adf1 | [] | no_license | ymsk861/group-f | aa7aa0c9ee9d8101edad90e9f23958cf8c1b6784 | 58412f7c4571299d96a4185b792cfeac86e0d696 | refs/heads/master | 2022-12-16T16:51:38.244564 | 2020-09-20T04:54:23 | 2020-09-20T04:54:23 | 293,081,772 | 1 | 0 | null | 2020-09-20T04:38:06 | 2020-09-05T13:30:46 | Python | UTF-8 | Python | false | false | 260 | #!/Users/yamasakishun/Desktop/haitlab/group-f/.venv/bin/python3
# -*- coding: utf-8 -*-
# Auto-generated console-script shim for this virtualenv's `pip3` executable.
import re
import sys
from pip._internal import main
if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" suffix from argv[0] (Windows
    # launcher convention) before handing control to pip's CLI entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"yamasaki-shun219@g.ecc.u-tokyo.ac.jp"
] | yamasaki-shun219@g.ecc.u-tokyo.ac.jp | |
2f4acca4d952c98e362de5605247860fb695630f | eccb79e51d0805d5643e4bfb37d8bf771a33d07d | /day19/test94.py | 659856390305d1c1b8922cd7d45cfe04017ee076 | [] | no_license | kkc272104568/python_test | a05d1f07f240f9f82410a3ac283f1766591013b3 | 75ca1e937c830114a6df84d0340c438ce9a01c91 | refs/heads/master | 2020-03-26T13:45:28.831829 | 2020-02-25T09:48:45 | 2020-02-25T09:48:45 | 144,955,227 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,351 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
Problem: timing-function example 4 - a number-guessing game that judges how
quickly a person reacts.
Analysis: none.
Source code (Python 2: raw_input / print statements / time.clock):
'''
if __name__ == '__main__':
    import time
    import random
    play_it = raw_input('do you want to play it.(\'y\' or \'n\')')
    while play_it == 'y':
        # NOTE(review): `c` is read but never used afterwards.
        c = raw_input('input a character:\n')
        # Random secret number in [0, 99].
        i = random.randint(0, 2 ** 32) % 100
        print 'please input number you guess:\n'
        start = time.clock()
        a = time.time()
        guess = int(raw_input('input your guess:\n'))
        # Hint loop: keep prompting until the secret is found.
        while guess != i:
            if guess > i:
                print 'please input a little smaller'
                guess = int(raw_input('input your guess:\n'))
            else:
                print 'please input a little bigger'
                guess = int(raw_input('input your guess:\n'))
        end = time.clock()
        b = time.time()
        # Scale the elapsed CPU time into an ad-hoc "reaction" score.
        var = (end - start) / 18.2
        print var
        # print 'It took you %6.3 seconds' % time.difftime(b,a))
        if var < 15:
            print 'you are very clever!'
        elif var < 25:
            print 'you are normal!'
        else:
            print 'you are stupid!'
        print 'Congradulations'
        print 'The number you guess is %d' % i
play_it = raw_input('do you want to play it.') | [
"272104568@qq.com"
] | 272104568@qq.com |
31a9497b36cd6a4d54d6959b49c2445df08feb30 | 0adb68bbf576340c8ba1d9d3c07320ab3bfdb95e | /regexlib/python_re_test_file/regexlib_3613.py | 7fbb386b0c1adfa1f8ca79e8518568b2dc33d7fb | [
"MIT"
] | permissive | agentjacker/ReDoS-Benchmarks | c7d6633a3b77d9e29e0ee2db98d5dfb60cde91c6 | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | refs/heads/main | 2023-05-10T13:57:48.491045 | 2021-05-21T11:19:39 | 2021-05-21T11:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | # 3613
# <(?<tag>\w*|\w*\.+\w*)>+((.|[\n\t\f\r\s])*?)<\/\k<tag>>
# EXPONENT
# nums:4
# EXPONENT AttackString:"<>"+"\t"*16+"! _1◎@! _1! _1! _1!\n_SLQ_3"
import re
from time import perf_counter
# The pattern in the header above uses .NET regex syntax.  Python's `re`
# module spells a named group `(?P<tag>...)` and a backreference to it
# `(?P=tag)`, so the original literal made re.compile() raise re.error at
# startup; it is translated here with identical matching semantics.
regex = """<(?P<tag>\w*|\w*\.+\w*)>+((.|[\n\t\f\r\s])*?)<\/(?P=tag)>"""
REGEX = re.compile(regex)
for i in range(0, 150000):
    # Attack string grows linearly with i; the nested quantifiers make the
    # match time grow exponentially (the ReDoS being benchmarked).
    ATTACK = "<>" + "\t" * i * 1 + "! _1◎@! _1! _1! _1!\n_SLQ_3"
    LEN = len(ATTACK)
    BEGIN = perf_counter()
    m = REGEX.search(ATTACK)
    # m = REGEX.match(ATTACK)
    DURATION = perf_counter() - BEGIN
print(f"{i *1}: took {DURATION} seconds!") | [
"liyt@ios.ac.cn"
] | liyt@ios.ac.cn |
1680eee25e123cd65af8e484e82f821ffcef73f5 | 72fd091cf4f9ad8c1a6475a8344bb750889e3b53 | /cars/migrations/0001_initial.py | 6f561308a8ce2a3549922aa1b79b89063e822140 | [] | no_license | petrshirin/example-web-app-for-using-db | abe312ab9dee36e5f53b795a2a0bc7529fa245f3 | c625815525cc8427a6e0fc749afc14f126a90e05 | refs/heads/master | 2023-02-18T08:31:03.842057 | 2021-01-14T14:43:11 | 2021-01-14T14:43:11 | 328,006,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,449 | py | # Generated by Django 3.1.5 on 2021-01-08 18:02
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the ``cars`` app.

    Creates the Car, Client, Color, Manager, Order and ClientPassportData
    tables, then wires the Car -> Color foreign key.  Auto-generated by
    Django 3.1.5; applied migrations are content-addressed, so edit with care.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        # Rentable vehicle.
        migrations.CreateModel(
            name='Car',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('car_model', models.CharField(max_length=255)),
                ('production_date', models.DateField()),
                ('is_free', models.BooleanField(default=True)),
                ('price', models.DecimalField(decimal_places=2, max_digits=10)),
            ],
        ),
        # Customer; may currently hold several cars.
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('FIO', models.CharField(max_length=500)),
                # NOTE(review): null=True has no effect on ManyToManyField.
                ('car', models.ManyToManyField(blank=True, null=True, to='cars.Car')),
            ],
        ),
        # Lookup table of car colours.
        migrations.CreateModel(
            name='Color',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # Staff member handling orders.
        migrations.CreateModel(
            name='Manager',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('FIO', models.CharField(max_length=500)),
                ('salary', models.DecimalField(decimal_places=2, max_digits=10)),
            ],
        ),
        # Rental order linking client, manager and car.
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(blank=True, default=django.utils.timezone.now)),
                ('days_to_use', models.PositiveIntegerField()),
                ('total_price', models.DecimalField(decimal_places=2, max_digits=10)),
                ('closed', models.BooleanField(default=False)),
                ('car', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.car')),
                ('client', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.client')),
                ('manager', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.manager')),
            ],
        ),
        # One-to-one passport record for a client.
        migrations.CreateModel(
            name='ClientPassportData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('series', models.CharField(max_length=4, null=True)),
                ('number', models.CharField(max_length=6, null=True)),
                ('issued_by_whom', models.CharField(max_length=255, null=True)),
                ('client', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='cars.client')),
            ],
        ),
        # Added after Color exists to avoid a forward reference.
        migrations.AddField(
            model_name='car',
            name='color',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.color'),
        ),
    ]
| [
"p.e.shirin@gmail.com"
] | p.e.shirin@gmail.com |
fab4e484e4788c8cdc5b3d600f1bcd29168b91b8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02897/s211396554.py | 103568fcd8071619233fcea4b16b1f438fe3b2be | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | #A
import math
n = int(input())
# There are ceil(n / 2) odd integers among 1..n, so this prints the
# probability that a uniformly random integer from 1..n is odd.
print((math.ceil(n / 2)) / n)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
26b47d93b9f098125c860638325e4732ca39e0d5 | f1ee253ad14b75c2afcbe01ee09c762f96bed8d3 | /django_mxonline/mxonline/apps/operation/models.py | f1768798737ac34ef587bb853bb55d787a0ba142 | [] | no_license | MengGuoJian/mxonline | 326d0059eb8e1c09ffdaf7d32a7ecf012e28f6a3 | 2835e24e41ea350cef9e164e6aa818aee1a78da4 | refs/heads/master | 2022-11-30T19:25:39.220884 | 2019-06-28T16:52:25 | 2019-06-28T16:52:25 | 194,286,291 | 1 | 1 | null | 2022-11-22T01:22:36 | 2019-06-28T14:23:00 | Python | UTF-8 | Python | false | false | 2,582 | py | # -*- coding=utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime
from django.db import models
from users.models import UserProfile
from course.models import Course
# Create your models here.
class UserAsk(models.Model):
    """A course consultation request submitted through the site's enquiry form."""
    user = models.CharField(max_length=20, verbose_name=u'姓名')  # applicant name
    mobile = models.CharField(max_length=11, verbose_name=u'手机')  # 11-digit phone number
    course_name = models.CharField(max_length=50, verbose_name=u'课程名称')  # course asked about
    add_time = models.DateTimeField(default=datetime.now, verbose_name=u'添加时间')  # creation time
    class Meta:
        verbose_name = u'用户咨询'
        verbose_name_plural = verbose_name
class CourseComments(models.Model):
    """A user's comment on a course."""
    user = models.ForeignKey(UserProfile, verbose_name=u'用户')  # comment author
    course = models.ForeignKey(Course, verbose_name=u'课程')  # commented course
    comment = models.CharField(max_length=200, verbose_name=u'评论')  # comment text
    add_time = models.DateTimeField(default=datetime.now, verbose_name=u'添加时间')
    class Meta:
        verbose_name = u'用户评论'
        verbose_name_plural = verbose_name
    def __unicode__(self):
        # Python 2 display form: "<username>'s comment: <text>".
        return self.user.username+u'的评论:'+self.comment
class UserFavorite(models.Model):
    """Generic favourite: fav_id is the pk of a course (type 1), organization (2) or teacher (3)."""
    user = models.ForeignKey(UserProfile, verbose_name=u'用户')
    fav_id = models.IntegerField(default=0, verbose_name=u'数据id')  # pk of the favourited object
    fav_type = models.IntegerField(choices=((1, '课程'), (2, '课程机构'), (3, '讲师')), default=1, verbose_name=u'收藏类型')
    add_time = models.DateTimeField(default=datetime.now, verbose_name=u'添加时间')
    class Meta:
        verbose_name = u'用户收藏'
        verbose_name_plural = verbose_name
class UserMessage(models.Model):
    """A site message delivered to a user."""
    # Recipient's user id stored as a plain int, not a FK; default 0 -
    # presumably a broadcast marker, TODO confirm against sending code.
    user = models.IntegerField(default=0, verbose_name=u'接受用户')
    message = models.CharField(max_length=500, verbose_name=u'消息内容')  # message body
    has_read = models.BooleanField(default=False, verbose_name=u'是否已读')  # read flag
    add_time = models.DateTimeField(default=datetime.now, verbose_name=u'发送时间')
    class Meta:
        verbose_name = u'用户消息'
        verbose_name_plural = verbose_name
    def __unicode__(self):
        return self.message
class UserCourse(models.Model):
user = models.ForeignKey(UserProfile, verbose_name=u'用户')
course = models.ForeignKey(Course, verbose_name=u'课程')
add_time = models.DateTimeField(default=datetime.now, verbose_name=u'添加时间')
class Meta:
verbose_name = u'用户课程'
verbose_name_plural = verbose_name
def __unicode__(self):
return self.course.name | [
"1026006639@qq.com"
] | 1026006639@qq.com |
340a571d6e8d98598158064fceab0255ca1fc886 | f19896ff3a1016d4ae63db6e9345cfcc4d0a2967 | /Topics/Topic/Data Structures and Alogrithms (Topic)/Grokking the Coding Interview/14(subrarrays with product less than a target).py | 003b8322ed8eb2534e4c82f9b758bb6c3169aa06 | [] | no_license | Akshay199456/100DaysOfCode | 079800b77a44abe560866cf4750dfc6c7fe01a59 | b4ed8a6793c17bcb71c56686d98fcd683af64841 | refs/heads/master | 2023-08-08T07:59:02.723675 | 2023-08-01T03:44:15 | 2023-08-01T03:44:15 | 226,718,143 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,736 | py | """
Problem statement:
Given an array with positive numbers and a positive target number, find all of its contiguous subarrays whose product is less than the target number.
Example 1:
Input: [2, 5, 3, 10], target=30
Output: [2], [5], [2, 5], [3], [5, 3], [10]
Explanation: There are six contiguous subarrays whose product is less than the target.
Example 2:
Input: [8, 2, 6, 5], target=50
Output: [8], [2], [8, 2], [6], [2, 6], [5], [6, 5]
Explanation: There are seven contiguous subarrays whose product is less than the target.
"""
"""
------------------------- My Approaches:
1.
Start: 8:43 pm
End:
	we can use the two-pointer approach here in spite of the list not being sorted. At each iteration,
	we check whether the product times arr[end] is still below the target. If it is, then
	we can keep expanding the window: we multiply arr[end] into our running product and
	advance end. If it reaches the target, we must generate all sublists of the current window,
	divide arr[beg] out of the product, and push beg forward.
	At the very end, we collect the remaining subarrays that would otherwise be missed when
	end hits the end of the array while beg is still advancing.
	Note that lists are mutable, so appending to a list modifies that list in place:
	even though the list is rebound once per loop iteration, every reference continues
	to see the same underlying object for the whole program.
Time complexity: O(n^3)
Space complexity: O(n^2)
*/
"""
"""
------------------------- Other Approaches
1.
Start:
End:
Time complexity: O()
Space complexity: O()
"""
"""
------------------------- Notes
problem follows the sliding window and two-pointers patterns, and shares similarities with
"triplets with smaller sum", with two differences:
	the input array is not sorted
	instead of finding triplets with sum less than a target, we need to find all subarrays having
	a product less than the target
"""
# My approaches(1)
def generate_subarrays(start, end, result, arr, all_tuples_set):
    """Append every not-yet-seen slice arr[i:j] (start <= i < j <= end) to result.

    ``all_tuples_set`` records the slices (as tuples) emitted so far, so the
    same subarray is never appended twice across calls.
    """
    for lo in range(start, end + 1):
        for hi in range(lo + 1, end + 1):
            window = arr[lo:hi]
            key = tuple(window)
            if key in all_tuples_set:
                continue
            all_tuples_set.add(key)
            result.append(window)
def get_all_subarrays(arr, target, result):
    """Sliding-window driver: collect all distinct contiguous subarrays of
    ``arr`` whose product stays below ``target`` into ``result``.

    Grows ``end`` while the running product remains under the target;
    otherwise emits the subarrays of the current window and advances ``beg``.
    """
    beg = 0
    end = 0
    prod = 1
    seen = set()
    n = len(arr)
    while end < n:
        grown = prod * arr[end]
        if grown < target:
            prod = grown
            end += 1
        else:
            generate_subarrays(beg, end, result, arr, seen)
            prod /= arr[beg]
            beg += 1
    # Flush whatever window remains once end reaches the array bound.
    generate_subarrays(beg, end, result, arr, seen)
def find_subarrays(arr, target):
    """Return all distinct contiguous subarrays of ``arr`` with product below ``target``."""
    collected = []
    if arr:
        get_all_subarrays(arr, target, collected)
    return collected
# Other Approaches(1)
from collections import deque
def find_subarrays(arr, target):
    """Sliding-window enumeration of contiguous subarrays with product < target.

    For each ``right`` bound, the window is shrunk from the left until its
    product drops below ``target``; every subarray ending at ``right`` inside
    that window is then valid.  Starting from ``[arr[right]]`` and growing
    leftwards avoids emitting duplicates.
    """
    found = []
    running = 1
    left = 0
    n = len(arr)
    for right in range(n):
        running = running * arr[right]
        while running >= target and left < n:
            running = running / arr[left]
            left += 1
        window = deque()
        idx = right
        while idx >= left:
            window.appendleft(arr[idx])
            found.append(list(window))
            idx -= 1
    return found
def main():
    """Print the results for the two examples from the problem statement."""
    print(find_subarrays([2, 5, 3, 10], 30))
    print(find_subarrays([8, 2, 6, 5], 50))
main()
| [
"akshay.kum94@gmail.com"
] | akshay.kum94@gmail.com |
222aba3bdc6078a2bbd2e01c79d319ab45d44737 | 30dea47f44695f3eeacb8270496cdced39485cbd | /tonedetect/tones.py | b72f95f4783db9396b689fd9cd31df1dc93ba559 | [] | no_license | cheind/py-tonedetect | 67490d9b6f238226486e0cfa2831c4855e079c07 | 662b5d335ba9e830914cc0d0d2a1515f832f743b | refs/heads/master | 2021-01-10T23:36:59.131599 | 2016-10-29T06:52:37 | 2016-10-29T06:52:37 | 70,419,207 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py |
import json
import itertools
import math
import numpy as np
class Tones(object):
    """A collection of tones; each tone is a dict with frequency list 'f' and symbol 'sym'."""
    def __init__(self):
        # Entries have the shape {'f': [frequencies...], 'sym': symbol}.
        self.items = []
    def add_tone(self, frequencies, sym=None):
        """Register a tone; when *sym* is omitted the current tone count is used."""
        if sym is None:
            sym = len(self.items)
        self.items.append({'f': frequencies, 'sym': sym})
    def all_tone_frequencies(self):
        """Return the distinct frequencies used across all tones (unordered)."""
        collected = []
        for tone in self.items:
            collected.extend(tone['f'])
        return list(set(collected))
    def minimum_frequency_step(self):
        """Return the smallest absolute gap between any two distinct frequencies."""
        freqs = self.all_tone_frequencies()
        gaps = [math.fabs(a - b) for a, b in itertools.combinations(freqs, 2)]
        return np.min(gaps)
    @staticmethod
    def from_json_file(filename):
        """Build a Tones instance whose items are loaded from a JSON file."""
        tones = Tones()
        with open(filename) as handle:
            tones.items = json.load(handle)
        return tones
| [
"christoph.heindl@gmail.com"
] | christoph.heindl@gmail.com |
ad892533d941d44cbd1f217d430b5ea20d849611 | 676f98bd490e61727e8801bbda542507e6522d5b | /chap4/cross_entropy_error.py | ba6b74717de6799a083916b428f8f678bd194ba6 | [] | no_license | Glico621/DeepLearning_from_Scratch | 8045c65e0a2087f1d0c3a87d90893c36bd88156f | dc2eb3bb3c56b5e0e1f91b0ae4573b6281227aa8 | refs/heads/master | 2023-08-20T11:04:27.295875 | 2021-10-19T03:30:36 | 2021-10-19T03:30:36 | 369,708,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | #p91 交差エントロピー誤差
import numpy as np
def cross_entropy_error(y, t):
    """Cross-entropy error between prediction ``y`` and one-hot target ``t``.

    A tiny epsilon (1e-7) is added inside the log so that a predicted
    probability of exactly zero does not produce -inf.
    """
    eps = 1e-7
    weighted_log = t * np.log(y + eps)
    return -np.sum(weighted_log)
# One-hot label: the correct class is index 2.
t = [0, 0, 1, 0, 0, 0, 0, 0, 0, 0]
# Prediction that puts most probability (0.6) on the correct class.
y1 = [0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0]
a1 = cross_entropy_error(np.array(y1), np.array(t))
print(a1)
# Prediction that favours the wrong class (index 7).
y2 = [0.1, 0.05, 0.1, 0.0, 0.05, 0.1, 0.0, 0.6, 0.0, 0.0]
a2 =cross_entropy_error(np.array(y2), np.array(t))
print(a2)
#出力される値が小さいほど誤差が少なく、より正解に近い | [
"82133884+Glico621@users.noreply.github.com"
] | 82133884+Glico621@users.noreply.github.com |
281d0127dbb5560633ec0da580a15c81be6ba978 | 24f29f50988c59785011f3bc2645fa5d2a7a7d97 | /wlct/cogs/ladders.py | 333e6289dac1c32fa28dd01d4fe5d125622fc1c7 | [] | no_license | h-shan/wzclot | d1c0c0f83b0b2916e0352c7cc0bfd25775a632d9 | 88d0e57c053a69a212af43f52168e234f41f6351 | refs/heads/master | 2021-01-07T20:17:04.065980 | 2020-02-19T22:44:18 | 2020-02-19T22:44:18 | 241,809,955 | 0 | 0 | null | 2020-02-20T06:25:12 | 2020-02-20T06:25:11 | null | UTF-8 | Python | false | false | 5,329 | py | import discord
from wlct.models import Clan, Player
from wlct.tournaments import Tournament, TournamentTeam, TournamentPlayer, MonthlyTemplateRotation, get_games_finished_for_team_since, find_tournament_by_id, get_team_data_no_clan, RealTimeLadder, get_real_time_ladder, TournamentGame
from discord.ext import commands, tasks
from wlct.cogs.common import is_admin
from django.utils import timezone
from traceback import print_exc
class Ladders(commands.Cog, name="ladders"):
    """Discord cog exposing the real-time-ladder (RTL) commands."""
    def __init__(self, bot):
        self.bot = bot
    @commands.command(brief="Lists all real-time ladders hosted by this bot and their IDs",
                      usage='''
                      109 -j : joins ladder 109
                      109 -l : leaves ladder 109
                      109 -t : displays all templates on the ladder
                      109 -p : displays all players currently on the ladder
                      109 -r : displays full ladder rankings
                      109 -g : displays all in progress games
                      109 -v templateid: vetoes a template or displays the current one if no template id is passed
                      ''')
    async def rtl(self, ctx, arg_id="invalid_id", arg_cmd="invalid_cmd", arg_cmd2="invalid_cmd2"):
        """Dispatch the ``rtl`` command.

        With no arguments, lists every RealTimeLadder.  With a numeric ladder
        id, ``arg_cmd`` selects the action (-j/-l/-t/-p/-r/-g/-v, plus the
        admin-only -ta/-tr); ``arg_cmd2`` carries a template id where needed.
        Replies either as plain text or as an embed.
        """
        print("Arguments for RTL id: {} command: {}".format(arg_id, arg_cmd))
        invalid_cmd_text = "You've entered an invalid command. Please correct it and try again."
        retStr = ""
        do_embed = False
        # NOTE(review): do_all_channels and embed_name are set but never read.
        do_all_channels = False
        embed_name = ""
        if arg_id != "invalid_id":
            # Pre-build the embed; it is only sent when do_embed becomes True.
            emb = discord.Embed(color=self.bot.embed_color)
            emb.set_author(icon_url=ctx.message.author.avatar_url, name=ctx.message.author)
            emb.set_footer(text="Bot created and maintained by -B#0292")
            if arg_id.isnumeric():
                ladder = get_real_time_ladder(int(arg_id))
                discord_id = ctx.message.author.id
                if ladder is not None:
                    if arg_cmd == "-p":
                        # display current players in the ladder
                        retStr = ladder.get_current_joined()
                    elif arg_cmd == "-j":
                        retStr = ladder.join_ladder(discord_id)
                        retStr += "\n\n" + ladder.get_current_joined()
                        do_all_channels = True
                    elif arg_cmd == "-l":
                        retStr = ladder.leave_ladder(discord_id)
                        retStr += "\n\n" + ladder.get_current_joined()
                        do_all_channels = True
                    elif arg_cmd == "-t":
                        retStr = ladder.get_current_templates()
                        do_embed = True
                        emb.title = "Current Templates - Ladder {}".format(ladder.name)
                        emb.add_field(name="Templates", value=retStr)
                    elif arg_cmd == "-r":
                        retStr = ladder.get_current_rankings()
                    elif arg_cmd == "-g":
                        do_embed = True
                        retStr = ladder.get_current_games()
                        emb.title = "Current Games - Ladder {}".format(ladder.name)
                        emb.add_field(name="In Progress", value=retStr)
                    elif arg_cmd == "-v":
                        if arg_cmd2 != "invalid_cmd2":
                            retStr = ladder.veto_template(discord_id, arg_cmd2)
                        else:
                            # display the users current veto
                            retStr = ladder.get_current_vetoes(discord_id)
                    elif arg_cmd == "-ta":
                        if arg_cmd2 != "invalid_cmd2":
                            # check to make sure the author has access here
                            # NOTE(review): a non-admin supplying a template id
                            # gets an empty reply (retStr stays "").
                            if is_admin(ctx.message.author.id):
                                retStr = ladder.add_template(arg_cmd2)
                        else:
                            retStr = invalid_cmd_text
                    elif arg_cmd == "-tr":
                        if arg_cmd2 != "invalid_cmd2":
                            # check for access
                            if is_admin(ctx.message.author.id):
                                retStr = ladder.remove_template(arg_cmd2)
                        else:
                            retStr = invalid_cmd_text
                    else:
                        retStr = invalid_cmd_text
                else:
                    retStr = "You've entered an invalid ladder ID."
            else:
                retStr = "You've entered an invalid ladder ID."
        elif arg_id == "invalid_id":
            retStr += "__**Current Real-Time Ladders**__\n"
            ladders = RealTimeLadder.objects.all()
            if not ladders or ladders.count() == 0:
                retStr += "There are no real-time ladders created yet."
            else:
                for ladder in ladders:
                    retStr += "{} | Id: {}".format(ladder.name, ladder.id)
        else:
            # NOTE(review): unreachable - the two branches above already cover
            # every possible value of arg_id.
            retStr = "You have entered an invalid command. Please correct it and try again."
        if do_embed:
            await ctx.send(embed=emb)
        else:
            await ctx.send(retStr)
def setup(bot):
bot.add_cog(Ladders(bot)) | [
"brendanflynn@outlook.com"
] | brendanflynn@outlook.com |
232015e79c33abe633a6a6a36a1bb34843158437 | 5aa98c0e9fae69521d28ef3cb5ca9e4aefb0478a | /venv/Scripts/pip3-script.py | 4b872a6d286f41e1b984795022f5834341d27232 | [] | no_license | Ghazan1/portfolio | f5f99b9f819e17d45ebbdbb41aa2e4609c2e5d3d | 1cc41b23721ad99d022f5807cb47ba7e6f0076f2 | refs/heads/master | 2020-04-13T16:36:06.144843 | 2018-12-27T19:00:55 | 2018-12-27T19:00:55 | 163,325,515 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | #!C:\Users\Ghazan\PycharmProjects\portfolio-project\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
# Setuptools-generated console-script wrapper: resolves pip's `pip3` entry
# point via pkg_resources and runs it.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" suffix from argv[0] (Windows
    # launcher convention) before invoking the entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
    )
| [
"meetghz@outlook.com"
] | meetghz@outlook.com |
5b83bbca055be0745bf5a1b74558a9d27384af32 | 290d98f0a09c3420eecc21e21129196f0f731648 | /time_series_clustering/run_time_series_clustering.py | 10c4627f90aac74bc8f4e768521d720b63c61098 | [] | no_license | bouguerram/newman-research | 8afbe9d12ceee718e9a28172d8bed6cdfd79091b | 0b4b4dbe786da9b3e8a61749f778658b487a04cf | refs/heads/master | 2020-03-22T16:01:24.079566 | 2016-09-22T18:08:18 | 2016-09-22T18:08:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,948 | py | import numpy as np
import matplotlib.pyplot as plt
# import rpy2.robjects.numpy2ri
# from rpy2.robjects.packages import importr
import kmedoids_minibatch_dist_matrix
import csv
def convert_y_labels_to_positive_definite_interval(data_y):
    """Remap class labels in place: -1 becomes 1, every other label becomes 2.

    Mutates ``data_y`` and returns the same list object.
    """
    for idx, label in enumerate(data_y):
        data_y[idx] = 1 if label == -1 else 2
    return data_y
def get_rss_of_model(centroid_labels_vec, data_y):
    """Count points whose true class disagrees with their cluster's majority class.

    centroid_labels_vec -- cluster assignment for each data point
    data_y              -- true class label per point; assumed to be integers
                           in 1..num_classes (NOTE(review): a -1 label, as in
                           the 'wafer' set, would index class_counts_vec[-2]
                           unless remapped first - see the commented-out
                           convert call in the driver script).
    Returns the number of misclassified points.
    """
    # find majority label of each centroid:
    assigned_centroid_labels_set = set(centroid_labels_vec)
    print 'CENTROID LABELS VEC IS:'
    print centroid_labels_vec
    print assigned_centroid_labels_set
    num_classes = len(set(data_y))
    centroid_class_vec = []
    for i in assigned_centroid_labels_set:
        class_counts_vec = np.array([0]*num_classes)
        for j in range(len(centroid_labels_vec)):
            if centroid_labels_vec[j] == i:
                class_for_pt = data_y[j]
                # print class_for_pt
                class_counts_vec[class_for_pt-1] += 1
        # chosen_class = arg_max(class_counts_vec) + 1
        chosen_class = np.argmax(class_counts_vec) + 1
        centroid_class_vec.append(chosen_class)
    # Find error between y and y_hat (true labels vs generated labels):
    num_misclass = 0
    m = 0
    print 'Centroid class vec is:'
    print centroid_class_vec
    # Relies on iterating the SAME set object in the same order as the loop
    # above, so that index m lines up with centroid_class_vec.
    for i in assigned_centroid_labels_set:
        for j in range(len(centroid_labels_vec)):
            if centroid_labels_vec[j] == i and centroid_class_vec[m] != data_y[j]:
                num_misclass += 1
        m += 1
    return num_misclass
# time_series_name = 'wafer'
time_series_name = 'yoga'
# time_series_data_file = '/home/chris/Time_Series_Datasets/UCR_TS_Archive_2015/'+time_series_name+'/'+time_series_name+'_TRAIN'
time_series_data_file = 'UCR_time_series_archive_2015/'+time_series_name+'/'+time_series_name+'_TRAIN'
fd = open(time_series_data_file)
# UCR format: each CSV row is "<class label>,<feature 1>,...,<feature m>".
data_x = []
data_y = []
# data_y.astype(int)
i = 0
# for line in fd:
reader = csv.reader(fd)
for line in reader:
    # print type(line)
    line = np.asarray(line)
    arr = line.astype(np.float)
    # First column is the class label; the remaining columns are the series.
    temp_x = arr[1::]
    print 'NUM FEATURES == %s' %len(temp_x)
    data_x.append(temp_x)
    data_y.append(int(arr[0]))
    # print data_y[i]
    # print line[0]
    i += 1
# data_y = convert_y_labels_to_positive_definite_interval(data_y)
# data_y = data_y[:50]
# data_x = data_x[:50]
# Initialization of centroid_labels_vector:
centroid_labels_vec = []
for i in range(len(data_x)):
    centroid_labels_vec.append(0)
# k = 5
# Number of medoids: 10% of the training-set size.
k = int(0.1*len(data_x))
# NOTE(review): window_frac is assigned but not passed to the clustering call.
window_frac = 0.1
# Clustering of time series data:
centroid_labels_vec, curr_medoids = kmedoids_minibatch_dist_matrix.k_medoids_dtw_cluster(data_x, centroid_labels_vec, k)
rss_of_model = get_rss_of_model(centroid_labels_vec, data_y)
# Fraction of points not matching their cluster's majority class.
error_rate = rss_of_model/float(len(data_y))
print rss_of_model
print error_rate
print data_y
# plt.plot(data_x[14] )
# plt.show(data_x)
| [
"cschulze@schulze-pc.PFI.LOCAL"
] | cschulze@schulze-pc.PFI.LOCAL |
23cc696455e578661687ff0eb2c8e466a0ecca7b | 492d3e666b87eff971628a74fe13facde01e2949 | /htmlcov/_python_Django_My Projects_student-portal_Lib_site-packages_PIL_GifImagePlugin_py.html.py | 1dc887ff9dbe11ce042ec22d45bfd7fc4da3fc41 | [] | no_license | OmarFateh/Student-Portal | 42050da15327aa01944dc79b5e00ca34deb51531 | 167ffd3a4183529c0cbc5db4ab232026711ea915 | refs/heads/master | 2023-06-13T01:03:16.475588 | 2021-07-08T11:09:09 | 2021-07-08T11:09:09 | 382,895,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251,069 | py | XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XX
XXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXX XXXXXXXXXX XX
XXX XXXXXXXXXXXXXX
XXX XXXXXXXXXX XXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXX XXXXXXXXXX XX
XX XXXXXXXXXXXXXXXXXXXXXXX XX XXXX XXXXXXXX
XXXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXX XXXXXXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXX XXXXXXXXXXX XXXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXX XX XXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX XXXXX XXXXXXXXXXX XXXXX
XXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXX XXXXXXXX XXXXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXX XXXX XX XXXX XXXXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXX XXX XXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXX XXXXXXXXX XX XXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXX XXXXXXX XXXXX XX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXXXXX XXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXXXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XX XXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXX XXXXXX XXXX XXX XXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXX XXX XXX XXXXXXX XXXX XXXXXX XXXXXXXX XXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX X XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XX XXXXXXX XXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXX XX XXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXX XXXXXX XX XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXX XXX XXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXX XX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXXXXX X XXXX XXX XXXXX XX XXXX X X XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXX XXX XXXXXXX XX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXX XXX XXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXX XX XXXXX XX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXX XXXXXXX XXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXXXX XXX XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XXXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXX XXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXX XXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXX XX XXXXX XXX XXXXXXX XXXXXXX XX XXXXX XX X XXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXXX XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXX XXXXXX XXX XXXXXXXX XXXXXX XX XX XXX XXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXX XX XXX XXXXX XX XXXXXXX XXXX XXX XXXXXXX XXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXXXX XXXX XXXXXXXXX XXXX X XXXXXXXX XX XX XXXXX XX X XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXXXXX XX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXX XXX XX XXXX XXX X XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXX XXX XXXXXXX XX XXX XXXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXXXXX XXXX XXXXXXX X XXXXXXX XXX X XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXXXXXXX XXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXX XXXXX XXXXXX XXXXXXXXXX XXX XXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XXXX XX XXXXXXXX XXXX XXXXX XXXX XXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXX XX XXXXXXXXX XX XXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXX XXXX XXX XX XXXXXXXX XXX XXXXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXX XXXXX XXXX XX XXXXXXXXXXX XXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXXX XXXXXXXXX XXX XXXXXXXXXXXXX XXXX XX XXX XXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XXXX XXXX XXX XXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXX XXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXX XX XXX XX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXXX XXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XX X XXXXXXXX X XXX X XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XX XXXXXXX XXXXXXX XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXX XX XXXX XX XXX XXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXX XX XXXX XXXX XX XXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXXXXXXXXX XX X XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXXXXXXX XXXXXXXXXX XX XXX XXXXXXX XXXXXX XX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXXXXXXXXXX XXXX XXXXXXX XXX XXXX XX XXXXXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XX XXXXXXX XX XXXXXXX XXXXXXX XX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXXX XXXXX X XXXXX XXX XXXXX XXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXX XXXXXXXXXX XX XXXXXXXX XXXXXX XXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XX X XXXXX XX XX XXXX XXX XXX XXX XXXX XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XXXXX XXXX XXX XXXXXXX XX XXXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XXXXX XXXXXX XXX XXXXXX XXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XX XXXX X XXXXXXX XXXXXX XXX XXXXXXX XX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXX XXX XXXXXXX XX XXX XXXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXX XXXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXX XXXXXXXX XXXX XXXXXX XX XXX XXXX XXXXX XX X XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX XXX XXXXXX XXXXXXXXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXX XXXXXXXX XXXXXXX XXXXXX XX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXXX XXX XX XX XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXX XXXXXXX XXX XXXXXXXXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXX XXX XXXXXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XX XXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XX XXXX XX XXXXXXXXX XX XXX XXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX X X XXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX X XXXX XX XXXXXXX XXXXXXXXXXXX X XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XX XXXXXX XXXXX XXXXX X XXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXX XX XXX XXX XXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXX XXXXX XXXXXX XXXXXXXXXX XXX XXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXX XX XXXXXX XXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XX XXXXXXX XXXXXXXXX XXX XXX XXXX XX XXXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX X XXXX XX XXXXXXX XXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXX XXXXXX XX X XXXXX XXXXX XXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXX XXXXX XX XXX XX XXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXX XXXXXXXX XX XXXXX XXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XX XXXXX XXXXXXXXXX XXX XXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXX XXXXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XXXX XX XXX XXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XX XXX XXXXXXXX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXX
XXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXX XXXXXX XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXX XX XXXXXXXXXX XXXXX XXXXX
XXXX
XXXXXX
XXXXXX
XXXXXXX
XXXXXXX
| [
"66747309+OmarFateh@users.noreply.github.com"
] | 66747309+OmarFateh@users.noreply.github.com |
c6b54b508c8164b78490f5ffa11a8276d1a9fe2a | e9750bc41b9a3f55907fa6d6a6faebcd5bdaf020 | /stableGroups.py | a6f4251b93a01aeeda20a954001ee65b692dfb9c | [] | no_license | jamesdhope/teaching-lecturing-resources | 4fe8b6997eaa5dd109367d5cb592ee98ff71ceb6 | 18cd42104bd658e9a737b364637eb47bafd9288b | refs/heads/master | 2021-01-16T19:28:10.113669 | 2018-05-18T13:51:54 | 2018-05-18T13:51:54 | 100,172,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,616 | py | # Author : James Hope
# Date : 13 April 2018
# Stable-matching demo (Gale-Shapley style): proposers propose round by
# round; acceptors keep only their most-preferred engagement so far.
import sys
import numpy as np

# Module-wide switch for verbose tracing inside the matching loop.
debug = False
class Pool:
def __init__(self, acceptors):
"""
Construct an array which will hold the engagements. Instatiate each maximum preference number that
"""
self.engagements = np.empty(shape=len(acceptors))
self.engagements.fill(np.nan)
def new_engagement(self,acceptor,proposer):
"""
Update (replace) the engagement in the pool
"""
if proposer in self.engagements:
print(proposer, "in position", self.engagements.tolist().index(proposer)+1, "set to NaN")
self.engagements[self.engagements.tolist().index(proposer)] = np.nan
self.engagements[acceptor-1] = proposer
def is_complete(self):
"""
Return True if complete
"""
if (np.isnan(self.engagements).any()):
return False
else:
return True
def get_current_engagement(self,acceptor):
"""
Return the current engagement for a acceptor
"""
return self.engagements[acceptor-1]
def get_all_engagements(self):
"""
Return all the current engagements
"""
return self.engagements
# Acceptor Class :: holds the acceptor preferences
class Acceptor:
    """Preference table for acceptors plus the acceptance rule."""

    def __init__(self, values):
        """*values*: one ordered preference list of proposers per acceptor."""
        self.values = values

    def get_preference_number(self, acceptor, proposer):
        """Return the 1-based rank of *proposer* for *acceptor*, or 0 if unranked."""
        preferences = self.values[acceptor-1]
        if proposer not in preferences:
            return 0
        return preferences.index(proposer) + 1

    def is_proposal_accepted(self, acceptor, proposer):
        """Accept when the acceptor is unengaged (and ranks the proposer), or
        strictly prefers the new proposer over the current engagement.

        NOTE(review): reads the module-level ``pool_object`` and ``debug``
        globals. An unranked proposer has rank 0, which compares as better
        than any ranked engagement -- presumably a quirk of the original
        rules; verify against the intended matching behavior.
        """
        new_rank = self.get_preference_number(acceptor, proposer)
        current = pool_object.get_current_engagement(acceptor)
        current_rank = self.get_preference_number(acceptor, current)
        if debug:
            print("acceptor preference of proposal", new_rank)
        if debug:
            print("acceptor currently engaged to", current)
        if debug:
            print("acceptor preference of current engagement", current_rank)
        if np.isnan(current) and new_rank != 0:
            return True
        if new_rank < current_rank:
            return True
        return False
# Proposer Class :: holds the proposer preferences
class Proposer:
    """Preference table for proposers."""

    def __init__(self, values):
        """*values*: one ordered preference list of acceptors per proposer."""
        self.values = values

    def get_proposal(self, proposer, iteration):
        """Return the acceptor that *proposer* (0-based index) proposes to in
        round *iteration*."""
        preference_row = self.values[proposer]
        return preference_row[iteration]
# Create dummy data
# Row i of acceptors_table is acceptor i+1's preference list of proposers;
# row i of proposers_table is proposer i+1's preference list of acceptors.
acceptors_table = [[1,2,3,4],[3,4,1,2],[4,2,3,1],[3,2,1,4]]
proposers_table = [[2,1,3,4],[4,1,2,3],[1,3,2,4],[2,3,1,4]]

# Instantiate the Acceptor and Proposer class
accepter_object = Acceptor(acceptors_table)
proposer_object = Proposer(proposers_table)
print("Acceptors Table:", accepter_object.values)
print("Proposers Table:", proposer_object.values)

# Instantiate the pool class
# One engagement slot per distinct acceptor id found in the table.
pool_object = Pool(np.unique(acceptors_table))
if debug: print("Pool Object:", pool_object.get_all_engagements())

def stable_marriage():
    """Run proposal rounds until every acceptor is engaged; returns the
    engagement array (index i = acceptor i+1, value = proposer id)."""
    for iteration in range(len(proposers_table)):
        print("\n Round:", iteration+1)
        for proposer in range(len(proposers_table[iteration])):
            # Each proposer tries the acceptor ranked at this round's position.
            print("PROPOSAL:", proposer+1, "---->", proposers_table[proposer][iteration])
            if accepter_object.is_proposal_accepted(proposer_object.get_proposal(proposer,iteration),proposer+1): #if proposal is accepter
                if debug: print("PROPOSAL ACCEPTED")
                pool_object.new_engagement(proposer_object.get_proposal(proposer,iteration),proposer+1)
            else:
                if debug: print("PROPOSAL FAILED")
        print("ENGAGEMENTS:", pool_object.get_all_engagements())
        if pool_object.is_complete():
            return pool_object.get_all_engagements()

print("\n FINAL ENGAGEMENTS:", stable_marriage())
| [
"noreply@github.com"
] | jamesdhope.noreply@github.com |
e9e6d1aac73d06c35eb53081c986af7d039a7fc4 | 52102c804996dd30f4248f586e34b575516bf261 | /setup/setup.py | 6bf9f389a3647cc0253c081e8c0675197abd2696 | [] | no_license | Anima879/Voiture_autonome | 895510e975d88562d3b368efb0c761e46ebc60b0 | 21c1a2970a4be09659b839656c047219e4e3ac39 | refs/heads/master | 2020-07-04T18:39:01.860932 | 2019-09-11T09:09:26 | 2019-09-11T09:09:26 | 202,377,479 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,178 | py | import tkinter as tk
from tkinter import messagebox, filedialog
from setup.create_track import *
class Setup(tk.Frame):
def __init__(self, root, **kwargs):
tk.Frame.__init__(self, root, **kwargs)
self.grid()
tk.Label(self, text="Forme du véhicule :").grid(column=0, row=0)
list_options = ('Rond', '')
self.vehicle_shape = tk.StringVar()
self.vehicle_shape.set(list_options[0])
tk.OptionMenu(self, self.vehicle_shape, *list_options).grid(column=0, row=1)
self.fov_value = tk.Scale(self, orient='horizontal', from_=0, to=360, resolution=5, length=100,
label='FOV')
self.fov_value.grid(column=0, row=2)
self.btn_create_new_track = tk.Button(self, text="Nouveau circuit", command=self.create)
self.btn_create_new_track.grid(column=1, row=0, padx=10)
self.btn_open_track = tk.Button(self, text="Ouvrir un circuit", command=self.load)
self.btn_open_track.grid(column=2, row=0, padx=10)
self.track_name = tk.StringVar(value="None")
self.label_name_track = tk.Label(self, textvariable=self.track_name)
self.label_name_track.grid(column=1, row=1, columnspan=2)
self.btn_launch = tk.Button(self, text="Confirmer", command=self.launch)
self.btn_launch.grid(column=2, row=2)
def launch(self):
"""
Launch simulation according to settings.
:return:
"""
if messagebox.askokcancel('Confirmer ?', 'Voulez-vous lancer la simulation ? \n'):
self.quit()
else:
return
def load(self):
"""
Open window for loading a track file.
:return:
"""
path = filedialog.askopenfilename(
initialdir='C:/Users/eloim/Documents/Programmation/Python/raycasting/setup/tracks')
self.track_name.set(path)
def create(self):
"""
Open window for creating a new track file and save it.
:return:
"""
root = tk.Tk()
root.title("Création d'un circuit")
wd = TrackCreator(root)
wd.mainloop()
wd.destroy()
| [
"eloi.mahe@reseau.eseo.fr"
] | eloi.mahe@reseau.eseo.fr |
25ca27d0a43551073e356f08f1bbdcb27e6788b4 | a5185ad8b0d33d3141ecf1f37e1edaf05564e38f | /tools/eval_crnn_ctc.py | 539fa9365bd8f45fc305b2aa4533a9562d3bcea8 | [] | no_license | lyk595/CRNN_CTC_OCR_TensorFlow | 529682833e52c334b53ed2201bb85506028ce0a5 | 703317f4f4b01f6367b952dae327adabf32cdc55 | refs/heads/master | 2022-01-10T13:45:07.338497 | 2019-04-17T12:49:56 | 2019-04-17T12:49:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,459 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import json
import tensorflow as tf
import numpy as np
from crnn_model import model
os.environ["CUDA_VISIBLE_DEVICES"]="0"
# ------------------------------------Basic prameters------------------------------------
tf.app.flags.DEFINE_string(
'data_dir', './tfrecords/', 'Path to the directory containing data tf record.')
tf.app.flags.DEFINE_string(
'model_dir', './model/', 'Base directory for the model.')
tf.app.flags.DEFINE_integer(
'num_threads', 4, 'The number of threads to use in batch shuffling')
tf.app.flags.DEFINE_integer(
'batch_size', 32, 'The number of samples in each batch.')
# ------------------------------------LSTM prameters------------------------------------
tf.app.flags.DEFINE_integer(
'lstm_hidden_layers', 2, 'The number of stacked LSTM cell.')
tf.app.flags.DEFINE_integer(
'lstm_hidden_uints', 256, 'The number of units in each LSTM cell')
# ------------------------------------Char dictionary------------------------------------
tf.app.flags.DEFINE_string(
'char_map_json_file', './char_map/char_map.json', 'Path to char map json file')
FLAGS = tf.app.flags.FLAGS
def _sparse_matrix_to_list(sparse_matrix, char_map_dict=None):
    """Convert a sparse label tensor value into one decoded string per row.

    The dense fill value len(char_map_dict) is the CTC blank note; '*'
    placeholders produced by the char map are dropped from the output.
    """
    if char_map_dict is None:
        char_map_dict = json.load(open(FLAGS.char_map_json_file, 'r'))
    assert(isinstance(char_map_dict, dict))

    # the last index in sparse_matrix is ctc blanck note
    blank_index = len(char_map_dict.keys())
    dense_matrix = blank_index * np.ones(sparse_matrix.dense_shape, dtype=np.int32)
    for position, value in zip(sparse_matrix.indices, sparse_matrix.values):
        dense_matrix[position[0], position[1]] = value

    string_list = []
    for row in dense_matrix:
        chars = [_int_to_string(value, char_map_dict) for value in row]
        string_list.append(''.join(c for c in chars if c != '*'))

    return string_list
def _int_to_string(value, char_map_dict=None):
    """Map an integer class index back to its character.

    Args:
        value: integer (or int-convertible) class index.
        char_map_dict: char -> index mapping; loaded from
            FLAGS.char_map_json_file when None.

    Returns:
        The character for *value*, or "" for the CTC blank index, which is
        len(char_map_dict).

    Raises:
        ValueError: if *value* matches neither a character nor the blank.
    """
    if char_map_dict is None:
        char_map_dict = json.load(open(FLAGS.char_map_json_file, 'r'))
    assert(isinstance(char_map_dict, dict))

    value = int(value)
    for key, index in char_map_dict.items():
        if index == value:
            return str(key)
    # Bug fix: the blank check used to sit inside the loop as an elif, so it
    # was re-evaluated on every key and never fired at all for an empty map.
    if value == len(char_map_dict):
        return ""

    raise ValueError('char map dict not has {:d} value. convert index to char failed.'.format(value))
def _read_tfrecord(tfrecord_path, num_epochs=None):
    """Build a TF1 queue-based reader over one tfrecord file.

    Returns per-example tensors: decoded image (height fixed to 32, dynamic
    width, 3 channels, float32), sparse int32 label, sequence length
    (image width / 4 -- presumably the CNN's width downsampling factor,
    TODO confirm against the model) and the image name.
    """
    if not os.path.exists(tfrecord_path):
        raise ValueError('cannott find tfrecord file in path: {:s}'.format(tfrecord_path))

    filename_queue = tf.train.string_input_producer([tfrecord_path], num_epochs=num_epochs)
    reader = tf.TFRecordReader()
    _, serialized_example = reader.read(filename_queue)

    features = tf.parse_single_example(serialized_example,
                                       features={
                                           'images': tf.FixedLenFeature([], tf.string),
                                           'labels': tf.VarLenFeature(tf.int64),
                                           'imagenames': tf.FixedLenFeature([], tf.string),
                                       })
    images = tf.image.decode_jpeg(features['images'])
    # Width stays None so variable-width images can be dynamically padded later.
    images.set_shape([32, None, 3])
    images = tf.cast(images, tf.float32)
    labels = tf.cast(features['labels'], tf.int32)
    # tf.shape(images)[-2] is the image width.
    sequence_length = tf.cast(tf.shape(images)[-2] / 4, tf.int32)
    imagenames = features['imagenames']
    return images, labels, sequence_length, imagenames
def _eval_crnn_ctc():
    """Evaluate the CRNN+CTC model on the validation tfrecord.

    Restores the latest checkpoint from FLAGS.model_dir, runs beam-search
    CTC decoding batch by batch, prints every ground-truth/prediction pair
    and finally the mean per-character accuracy.
    """
    tfrecord_path = os.path.join(FLAGS.data_dir, 'validation.tfrecord')
    images, labels, sequence_lengths, imagenames = _read_tfrecord(tfrecord_path=tfrecord_path)

    # decode the training data from tfrecords
    batch_images, batch_labels, batch_sequence_lengths, batch_imagenames = tf.train.batch(
        tensors=[images, labels, sequence_lengths, imagenames], batch_size=FLAGS.batch_size, dynamic_pad=True,
        capacity=1000 + 2*FLAGS.batch_size, num_threads=FLAGS.num_threads)

    input_images = tf.placeholder(tf.float32, shape=[FLAGS.batch_size, 32, None, 3], name='input_images')
    input_labels = tf.sparse_placeholder(tf.int32, name='input_labels')
    input_sequence_lengths = tf.placeholder(dtype=tf.int32, shape=[FLAGS.batch_size], name='input_sequence_lengths')

    char_map_dict = json.load(open(FLAGS.char_map_json_file, 'r'))

    # initialise the net model (+1 output class for the CTC blank)
    crnn_net = model.CRNNCTCNetwork(phase='test',
                                    hidden_num=FLAGS.lstm_hidden_uints,
                                    layers_num=FLAGS.lstm_hidden_layers,
                                    num_classes=len(char_map_dict.keys()) + 1)

    with tf.variable_scope('CRNN_CTC', reuse=False):
        net_out = crnn_net.build_network(images=input_images, sequence_length=input_sequence_lengths)

    ctc_decoded, ct_log_prob = tf.nn.ctc_beam_search_decoder(net_out, input_sequence_lengths, merge_repeated=False)

    # set checkpoint saver
    saver = tf.train.Saver()
    save_path = tf.train.latest_checkpoint(FLAGS.model_dir)

    # Count records to know how many full batches fit (the remainder is dropped).
    test_sample_count = 0
    for record in tf.python_io.tf_record_iterator(tfrecord_path):
        test_sample_count += 1
    step_nums = test_sample_count // FLAGS.batch_size

    sess_config = tf.ConfigProto()
    sess_config.gpu_options.allow_growth = True

    with tf.Session(config=sess_config) as sess:
        # restore all variables
        saver.restore(sess=sess, save_path=save_path)

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)

        accuracy = []
        for _ in range(step_nums):
            imgs, lbls, seq_lens, names = sess.run([batch_images, batch_labels, batch_sequence_lengths, batch_imagenames])
            preds = sess.run(ctc_decoded, feed_dict={input_images:imgs, input_labels:lbls, input_sequence_lengths:seq_lens})

            preds = _sparse_matrix_to_list(preds[0])
            lbls = _sparse_matrix_to_list(lbls)
            #print(preds)
            #print(lbls)

            # Per-sample accuracy = matching characters / ground-truth length.
            for index, lbl in enumerate(lbls):
                pred = preds[index]
                total_count = len(lbl)
                correct_count = 0
                try:
                    for i, tmp in enumerate(lbl):
                        if tmp == pred[i]:
                            correct_count += 1
                except IndexError:
                    # Prediction shorter than the label: score what was counted
                    # so far (the finally block still appends).
                    continue
                finally:
                    try:
                        accuracy.append(correct_count / total_count)
                    except ZeroDivisionError:
                        # Empty ground truth: perfect only if prediction is empty too.
                        if len(pred) == 0:
                            accuracy.append(1)
                        else:
                            accuracy.append(0)

            for index, img in enumerate(imgs):
                print('Predict {:s} image with gt label: {:s} <--> predict label: {:s}'.format(names[index], lbls[index], preds[index]))

        accuracy = np.mean(np.array(accuracy).astype(np.float32), axis=0)
        print('Mean test accuracy is {:5f}'.format(accuracy))

        # stop file queue
        coord.request_stop()
        coord.join(threads=threads)
def main(unused_argv):
    """Entry point for tf.app.run(); the parsed-argv parameter is unused."""
    _eval_crnn_ctc()

if __name__ == '__main__':
    tf.app.run()
| [
"514202874@qq.com"
] | 514202874@qq.com |
abc3821860d6f8d7c15a8cffb45965e0db355334 | d0d416c558bd495a0d5f71c9d12c198d5065a51b | /yolact/utils/timer.py | baeb2470daa41e6553e31e104e3d245640188cef | [
"MIT"
] | permissive | masszhou/YOLACT_masszhou | a007ef55d6cd848c87237d606b26524f536f0e49 | bc4a658674fccaaf0ed545403892c3c3d8816d24 | refs/heads/main | 2023-03-01T23:26:00.632304 | 2021-02-02T14:25:00 | 2021-02-02T14:25:00 | 334,150,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,904 | py | import time
from collections import defaultdict

# Accumulated elapsed seconds per timer name.
_total_times = defaultdict(lambda: 0)
# perf_counter() reading when each timer was last started (-1 = never started).
_start_times = defaultdict(lambda: -1)
# Names excluded from print_stats and total_time.
_disabled_names = set()
# Timers paused by a nested start(use_stack=True), oldest first.
_timer_stack = []
# Name of the timer currently running in stack mode, if any.
_running_timer = None
# Global kill-switch: when True, start/stop become no-ops.
_disable_all = False

def disable_all():
    """ Globally disables all timers: start/stop become no-ops. """
    global _disable_all
    _disable_all = True

def enable_all():
    """ Re-enables timers after disable_all. """
    global _disable_all
    _disable_all = False

def disable(fn_name):
    """ Disables the given function name from being considered for the average or outputted in print_stats. """
    _disabled_names.add(fn_name)

def enable(fn_name):
    """ Enables function names disabled by disable. """
    _disabled_names.remove(fn_name)

def reset():
    """ Resets the current timer. Call this at the start of an iteration. """
    global _running_timer

    _total_times.clear()
    _start_times.clear()
    _timer_stack.clear()
    _running_timer = None
def start(fn_name, use_stack=True):
    """
    Start timing the specific function.
    Note: If use_stack is True, only one timer can be active at a time.
    Once you stop this timer, the previous one will start again.
    """
    global _running_timer, _disable_all

    if _disable_all:
        return

    if use_stack:
        if _running_timer is not None:
            # Pause the active timer and remember it so stop() can resume it.
            stop(_running_timer, use_stack=False)
            _timer_stack.append(_running_timer)
        start(fn_name, use_stack=False)
        _running_timer = fn_name
    else:
        # Raw mode: just record the wall-clock start time for this name.
        _start_times[fn_name] = time.perf_counter()
def stop(fn_name=None, use_stack=True):
    """
    If use_stack is True, this will stop the currently running timer and restore
    the previous timer on the stack if that exists. Note if use_stack is True,
    fn_name will be ignored.

    If use_stack is False, this will just stop timing the timer fn_name.
    """
    global _running_timer, _disable_all

    if _disable_all:
        return

    if use_stack:
        if _running_timer is not None:
            # Finalize the active timer, then resume whatever it had paused.
            stop(_running_timer, use_stack=False)
            if len(_timer_stack) > 0:
                _running_timer = _timer_stack.pop()
                start(_running_timer, use_stack=False)
            else:
                _running_timer = None
        else:
            print('Warning: timer stopped with no timer running!')
    else:
        # A start time of -1 means start() was never called for this name.
        if _start_times[fn_name] > -1:
            _total_times[fn_name] += time.perf_counter() - _start_times[fn_name]
        else:
            print('Warning: timer for %s stopped before starting!' % fn_name)
def print_stats():
    """ Prints the current timing information into a table. """
    print()

    names = [fn for fn in _total_times if fn not in _disabled_names]

    # Column is wide enough for the longest name (min 4), rounded up to even.
    width = max([4] + [len(fn) for fn in names])
    if width % 2 == 1:
        width += 1

    row_fmt = ' {:>%d} | {:>10.4f} ' % width
    header = (' {:^%d} | {:^10} ' % width).format('Name', 'Time (ms)')
    print(header)

    bar_pos = header.find('|')
    separator = ('-' * bar_pos) + '+' + '-' * (len(header) - bar_pos - 1)
    print(separator)

    for fn in names:
        print(row_fmt.format(fn, _total_times[fn] * 1000))

    print(separator)
    print(row_fmt.format('Total', total_time() * 1000))
    print()
def total_time():
    """ Returns the total amount accumulated across all functions in seconds. """
    total = 0
    for name, elapsed_time in _total_times.items():
        if name not in _disabled_names:
            total += elapsed_time
    return total
class env():
    """
    A class that lets you go:

    with timer.env(fn_name):
        # (...)

    That automatically manages a timer start and stop for you.
    """

    def __init__(self, fn_name, use_stack=True):
        # fn_name: name the elapsed time is accumulated under.
        # use_stack: if True, pauses/resumes any already-running timer.
        self.fn_name = fn_name
        self.use_stack = use_stack

    def __enter__(self):
        start(self.fn_name, use_stack=self.use_stack)

    def __exit__(self, e, ev, t):
        # Exception info (e, ev, t) is ignored; the timer always stops.
        stop(self.fn_name, use_stack=self.use_stack)
| [
"zhouzhiliang@gmail.com"
] | zhouzhiliang@gmail.com |
8fbd6c02b72607b5cd55525e6fb33f0ce4082fef | a964f0f3f93a84d5195042d3c1bb2288e8b62161 | /muddery/server/utils/utils.py | a69f6d5ace3c6d1dc5892332ebc02a3c67a0e928 | [
"BSD-3-Clause"
] | permissive | nobodxbodon/muddery | 474433791b75d2f2130e6b758fb3126e2d56230b | 4b4c6c0dc5cc237a5df012a05ed260fad1a793a7 | refs/heads/master | 2023-06-19T19:28:39.252340 | 2021-07-14T15:07:47 | 2021-07-14T15:07:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,890 | py | """
General helper functions that don't fit neatly under any given category.
They provide some useful string and conversion methods that might
be of use when designing your own game.
"""
import os, re, inspect
from importlib import import_module
from pkgutil import iter_modules
from django.conf import settings
from evennia.utils import search, logger
from muddery.launcher import configs
from muddery.server.dao.localized_strings import LocalizedStrings
def get_muddery_version():
    """
    Get muddery's version.

    Returns the package's __version__ string; imported lazily to avoid a
    circular import at module load time.
    """
    import muddery
    return muddery.__version__
def set_obj_data_key(obj, key):
    """
    Set data key. Put it info into an object's attributes.

    Args:
        obj: (object) object to be set
        key: (string) key of the data.
    """
    # Stored as a string attribute under the dedicated data-key category so
    # search_obj_data_key can find it later.
    obj.attributes.add("key", key, category=settings.DATA_KEY_CATEGORY, strattr=True)
def search_obj_data_key(key):
    """
    Search objects which have the given key.

    Args:
        key: (string) Data's key.

    Returns:
        The attribute-search result, or None when *key* is empty/falsy.
        NOTE(review): callers receive None here but a (possibly empty)
        result set otherwise -- verify they handle both shapes.
    """
    if not key:
        return None

    return search.search_object_attribute(key="key", strvalue=key, category=settings.DATA_KEY_CATEGORY)
def search_db_data_type(key, value, typeclass):
    """
    Search objects of the given typeclass which have the given value.
    """
    candidates = search.search_object_attribute(key=key, value=value)
    matches = []
    for candidate in candidates:
        # Non-exact match: subclasses of the typeclass count as well.
        if candidate.is_typeclass(typeclass, exact=False):
            matches.append(candidate)
    return matches
def set_obj_unique_type(obj, type):
    """
    Set unique object's type.

    Args:
        obj: (object) object to be set
        type: (string) unique object's type.
    """
    # NOTE(review): the parameter name 'type' shadows the builtin.
    obj.attributes.add("type", type, category=settings.DATA_KEY_CATEGORY, strattr=True)
def search_obj_unique_type(type):
    """
    Search objects which have the given unique type.

    Args:
        type: (string) unique object's type.

    Returns:
        Objects whose stored "type" attribute equals *type*.
    """
    obj = search.search_object_attribute(key="type", strvalue=type, category=settings.DATA_KEY_CATEGORY)
    return obj
def is_child(child, parent):
"""
Check if the child class is inherited from the parent.
Args:
child: child class
parent: parent class
Returns:
boolean
"""
for base in child.__bases__:
if base is parent:
return True
for base in child.__bases__:
if is_child(base, parent):
return True
return False
def file_iterator(file, erase=False, chunk_size=512):
while True:
c = file.read(chunk_size)
if c:
yield c
else:
# remove temp file
file.close()
if erase:
os.remove(file.name)
break
def get_unlocalized_py_strings(filename, filter):
"""
Get all unlocalized strings.
Args:
file_type: (string) type of file.
filter: (boolean) filter exits strings or not.
Returns:
(set): a list of tuple (string, category).
"""
re_func = re.compile(r'_\(\s*".+?\)')
re_string = re.compile(r'".*?"')
re_category = re.compile(r'category.*=.*".*?"')
strings = set()
# search in python files
with open(filename, "r") as file:
lines = file.readlines()
for line in lines:
# parse _() function
for func in re_func.findall(line):
str = ""
cate = ""
str_search = re_string.search(func)
if str_search:
str = str_search.group()
#remove quotations
str = str[1:-1]
cate_search = re_category.search(func)
if cate_search:
group = cate_search.group()
cate = re_string.search(group).group()
#remove quotations
cate = cate[1:-1]
if str or cate:
if filter:
# check database
try:
LocalizedStrings.get(str, cate)
continue
except Exception as e:
pass
strings.add((str, cate,))
return strings
def all_unlocalized_py_strings(filter):
"""
Get all unlocalized strings.
Args:
file_type: (string) type of file.
filter: (boolean) filter exits strings or not.
Returns:
(set): a list of tuple (string, category).
"""
rootdir = configs.MUDDERY_LIB
strings = set()
ext = ".py"
# get all _() args in all files
for parent, dirnames, filenames in os.walk(rootdir):
for filename in filenames:
file_ext = os.path.splitext(filename)[1].lower()
if file_ext == ext:
full_name = os.path.join(parent, filename)
strings.update(get_unlocalized_py_strings(full_name, filter))
return strings
def get_unlocalized_js_strings(filename, filter_set):
"""
Get all unlocalized strings.
Args:
file_type: (string) type of file.
filter_set: (set) current localized stings set.
Returns:
(set): a list of strings.
"""
re_func = re.compile(r'_\(\s*".+?\)')
re_string = re.compile(r'".*?"')
strings = set()
# search in python files
with open(filename, "r") as file:
lines = file.readlines()
for line in lines:
# parse _() function
for func in re_func.findall(line):
str = ""
cate = ""
str_search = re_string.search(func)
if str_search:
str = str_search.group()
#remove quotations
str = str[1:-1]
if str:
if filter_set:
# check dict
if str not in filter_set:
strings.add(str)
else:
strings.add(str)
return strings
def all_unlocalized_js_strings(filter):
"""
Get all unlocalized strings.
Args:
file_type: (string) type of file.
filter: (boolean) filter exits strings or not.
Returns:
(set): a list of tuple (string, category).
"""
rootdir = configs.MUDDERY_LIB
strings = set()
ext = ".js"
filter_set = set()
# get filter
if filter:
local_string_filename = os.path.join(configs.MUDDERY_LIB, "web", "webclient", "webclient",
"lang", settings.LANGUAGE_CODE, "strings.js")
with open(local_string_filename, "r") as file:
re_dict = re.compile(r'".+?"\s*:\s*".+?"')
re_string = re.compile(r'".*?"')
lines = file.readlines()
for line in lines:
# find localization dict
dict_search = re_dict.search(line)
if dict_search:
word_dict = dict_search.group()
str_search = re_string.search(word_dict)
str = str_search.group()
#remove quotations
str = str[1:-1]
filter_set.add(str)
# get all _() args in all files
for parent, dirnames, filenames in os.walk(rootdir):
for filename in filenames:
file_ext = os.path.splitext(filename)[1].lower()
if file_ext == ext:
full_name = os.path.join(parent, filename)
strings.update(get_unlocalized_js_strings(full_name, filter_set))
return strings
def load_modules(path):
"""
Load all modules ans sub modules in the path.
Args:
path: (string) modules' path
"""
modules = []
m = import_module(path)
if hasattr(m, '__path__'):
for _, subpath, ispkg in iter_modules(m.__path__):
fullpath = path + '.' + subpath
if ispkg:
modules += load_modules(fullpath)
else:
modules.append(import_module(fullpath))
return modules
def classes_in_path(path, cls):
"""
Load all classes in the path.
Args:
path: (string) classes' path
cls: (class) classes' base class
"""
modules = load_modules(path)
for module in modules:
for name, obj in vars(module).items():
if inspect.isclass(obj) and issubclass(obj, cls) and obj is not cls:
yield obj
def get_module_path(path):
"""
Transform a normal path to a python module style path.
"""
root, name = os.path.split(path)
if not name:
return
root = get_module_path(root)
if root:
return root + "." + name
else:
return name
| [
"luyijun999@gmail.com"
] | luyijun999@gmail.com |
58d9f299b2be3ac5b35e101e9d797493adbbab9e | 502fc0002d5575d0a37b4f13706c7072f860033c | /Chapter06/cyclegan/datasets.py | c851a615e497b6d1583f95c8955550f2b29adf88 | [
"MIT"
] | permissive | PacktPublishing/Hands-On-Generative-Adversarial-Networks-with-PyTorch-1.x | 665d9364af54d7fd44787d0753400d7625ac8b82 | beee21343078b607f393bbb1321ac49cf17ffb5f | refs/heads/master | 2023-02-10T22:12:08.980700 | 2023-01-30T09:26:20 | 2023-01-30T09:26:20 | 227,829,701 | 66 | 50 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | import glob
import random
import os
import torchvision
from torch.utils.data import Dataset
from PIL import Image
class ImageDataset(Dataset):
def __init__(self, root_dir, transform=None, unaligned=False, mode='train'):
self.transform = torchvision.transforms.Compose(transform)
self.unaligned = unaligned
self.train = (mode == 'train')
self.files_A = sorted(glob.glob(os.path.join(root_dir, '%sA' % mode) + '/*.*'))
self.files_B = sorted(glob.glob(os.path.join(root_dir, '%sB' % mode) + '/*.*'))
def __getitem__(self, index):
item_A = self.transform(Image.open(self.files_A[index % len(self.files_A)]))
if self.unaligned:
item_B = self.transform(Image.open(self.files_B[random.randint(0, len(self.files_B) - 1)]))
else:
item_B = self.transform(Image.open(self.files_B[index % len(self.files_B)]))
if self.train:
return {'trainA': item_A, 'trainB': item_B}
else:
return {'testA': item_A, 'testB': item_B}
def __len__(self):
return max(len(self.files_A), len(self.files_B)) | [
"dineshchaudhary@packtpub.com"
] | dineshchaudhary@packtpub.com |
edf9a1aa2884f4004f8f64b165c1bbdd45947b4e | 021dbfe468381785fb358e5099e49a8d8dff38cf | /simplification/lightls.py | 0fbf9e9dbfe28a077e8eca6c97f0a251b0ceacea | [] | no_license | codogogo/lightls | 742833c9f353bc6ac16cff4b860034df4f426f78 | 38c0cd06efde539b8dda70d1d1ba19f2b72ad256 | refs/heads/master | 2020-03-08T14:22:35.898906 | 2018-04-05T10:26:08 | 2018-04-05T10:26:08 | 128,183,674 | 9 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,593 | py | import math
import numpy as np
from helpers import string_helper
class LightLS(object):
"""description of class"""
def __init__(self, embeddings, word_freqs, parameters, stopwords = None, lang = "default"):
self.stopwords = stopwords
self.params = parameters
self.embeddings = embeddings
self.lang = lang
self.complexities = {x : 1.0 / math.log2(word_freqs[x] + 2) for x in word_freqs }
max_freq = max(word_freqs.values())
min_freq = min(word_freqs.values())
min_complexity = 1.0 / math.log2(max_freq + 2)
max_complexity = 1.0 / math.log2(min_freq + 2)
self.complexities = {x : (self.complexities[x] - min_complexity) / (max_complexity - min_complexity) for x in self.complexities}
def fix_token(self, token):
punctuation = [".", ",", "!", ":", "?", ";", "-", ")", "(", "[", "]", "{", "}", "...", "/", "\\", "''", "\"", "'"]
if token[0] in punctuation and token[-1] in punctuation:
return token[1:-1]
elif token[0] in punctuation:
return token[1:]
elif token[-1] in punctuation:
return token[:-1]
else:
return token
def fix_token_inverse(self, token, simp_token):
punctuation = [".", ",", "!", ":", "?", ";", "-", ")", "(", "[", "]", "{", "}", "...", "/", "\\", "''", "\"", "'"]
if token[0] in punctuation and token[-1] in punctuation:
return token[0] + simp_token + token[-1]
elif token[0] in punctuation:
return token[0] + simp_token
elif token[-1] in punctuation:
return simp_token + token[-1]
else:
return simp_token
def simplify_text(self, text):
simplifications = []
tokens = text.split()
for i in range(len(tokens)):
res = self.try_simplify_token(tokens, i)
if res:
simplifications.append((i, res))
tokens_simple = []
tokens_simple.extend(tokens)
replacements = []
for s in simplifications:
tokens_simple[s[0]] = self.fix_token_inverse(tokens[s[0]], s[1])
replacements.append((s[0], self.fix_token(tokens[s[0]]), s[1]))
simplified_text = ' '.join(tokens_simple)
return (simplified_text, replacements)
def try_simplify_token(self, tokens, index):
target = self.fix_token(tokens[index])
# Not simplifying proper names
if str.isupper(target) or str.istitle(target) or str.isnumeric(target):
return None
complexity_target = self.complexities[target] if target in self.complexities else (self.complexities[target.lower()] if target.lower() in self.complexities else 1.0)
# If the word is simple enough, no need to consider replacing it
if complexity_target <= self.params["complexity_threshold"]:
return None
# we're not "simplifying" stopwords
if self.stopwords and target.lower() in self.stopwords:
return None
tvec = self.embeddings.get_vector(self.lang, target)
if tvec is None:
tvec = self.embeddings.get_vector(self.lang, target.lower())
if tvec is not None:
candidates = self.embeddings.most_similar_fast_cosine(tvec, self.lang, num = self.params["num_cand"], without_first = True)
simpler_candidates = {}
for c in candidates:
# we discard candidates that are derivational morphological variations of the target word
if c in target or target in c:
continue
lcses = string_helper.longest_common_subsequence(c, target)
if len(lcses) > 0:
lcs = lcses.pop()
if len(target) >= 6 and len(c) >= 6 and len(lcs) >= (min(len(c), len(target)) - 3):
continue
# don't allow the target word to be replaced by a stopword
if self.stopwords is not None and c.lower() in self.stopwords:
continue
complexity_cand = self.complexities[c] if c in self.complexities else 1.0
if (complexity_cand < complexity_target) and ((complexity_target - complexity_cand) >= self.params["complexity_drop_threshold"]):
simpler_candidates[c] = { "complexity_drop" : complexity_target - complexity_cand }
if len(simpler_candidates) == 0:
return None
context_vecs = self.get_context_vectors(tokens, index)
self.compute_features(tokens, index, tvec, simpler_candidates, context_vecs)
feats = ["sim", "complexity_drop"] if len(context_vecs) == 0 else ["sim", "complexity_drop", "context"]
ranks = {}
for c in simpler_candidates:
ranks[c] = []
for f in feats:
feat_sorted = sorted({c : simpler_candidates[c][f] for c in simpler_candidates}.items(), key=lambda x:x[1])
for fs in feat_sorted:
ranks[fs[0]].append(feat_sorted.index(fs))
ranked_candidates = sorted({c : sum(ranks[c]) for c in ranks}.items(), key=lambda x:x[1])
best_candidate = ranked_candidates[-1][0]
if simpler_candidates[best_candidate]["sim"] >= self.params["similarity_threshold"]:
return best_candidate
else:
return None
else:
return None
def compute_features(self, tokens, index, tvec, candidates, context_vecs):
for c in candidates:
candidates[c]["sim"] = np.dot(self.embeddings.get_vector(self.lang, c), tvec)
if len(context_vecs) > 0:
csim = 0.0
for cv in context_vecs:
csim += np.dot(self.embeddings.get_vector(self.lang, c), cv)
candidates[c]["context"] = csim
def get_context_vectors(self, tokens, index):
window_size = self.params["context_window_size"]
start = index - window_size if (index - window_size >= 0) else 0
end = index + window_size if (index + window_size <= len(tokens)) else len(tokens)
cvecs = []
for i in range(start, end):
if i != index:
cword = self.fix_token(tokens[i])
cvec = self.embeddings.get_vector(self.lang, cword)
if cvec is None:
cvec = self.embeddings.get_vector(self.lang, cword.lower())
if cvec is not None:
cvecs.append(cvec)
return cvecs
| [
"gogo.glavas@gmail.com"
] | gogo.glavas@gmail.com |
cb7860ee2b88086961192dfd83a266f4ee32a4b6 | f2e17f6266b2ab9d26cef4980d074fabd004dec3 | /myweb/myapp/migrations/0028_classifiedtext.py | f0584c6b2d349ba5b0760f64f032bb10b169fb59 | [] | no_license | kearron/Sentiment-Analysis-and-News-Classification | ecea59d35a3919367839bc99707b7538502f6859 | f0ecc74750da3d902adcf2e8ad4ad47120e28058 | refs/heads/master | 2021-07-21T09:48:34.979523 | 2021-05-31T05:04:46 | 2021-05-31T05:04:46 | 121,771,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-04 06:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myapp', '0027_search'),
]
operations = [
migrations.CreateModel(
name='ClassifiedText',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=200)),
],
),
]
| [
"kearon99@gmail.com"
] | kearon99@gmail.com |
84b89e0386fc0b8c715ac49bec238a0e63ddb575 | 6497f1f7739e97ea21ac5f8d6c12c4bb57cfd871 | /setup.py | 749eefe5582e3c33d98d53f2d6dc931d87f42f30 | [
"MIT"
] | permissive | RahmanTeamDevelopment/RefSeqDB | cb2e78b67a4775deb96c4dd95919615ef34cbcd4 | fef8c4ba363fc4f405dd3f2e5613f99613f8ac71 | refs/heads/master | 2021-01-23T00:44:32.097909 | 2017-11-22T16:50:33 | 2017-11-22T16:50:33 | 92,837,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | from setuptools import setup
setup(
name= 'RefSeqDB',
version = '0.4.0',
description = 'A tool for creating RefSeq transcript databases',
url = 'https://github.com/RahmanTeamDevelopment/RefSeqDB',
author = 'RahmanTeam',
author_email = 'rahmanlab@icr.ac.uk',
license = 'MIT',
packages=['refseqdb'],
scripts=['bin/RefSeqDB.py','bin/refseqdb'],
zip_safe=False
)
| [
"Marton.Muenz@icr.ac.uk"
] | Marton.Muenz@icr.ac.uk |
f6d005bf620b16a84ee81a39616dfe874307c8e9 | 2d6c22f3b363e62c4020d11bdbfc8542243b6c5d | /scripts/occupancy.py | 7e16c5330e1fc84b3856607f6d3d9bb9a1525afb | [] | no_license | CharlesxrWu/CUDA_gemm | e72ed313f8618427747e698ef11010f75e1346a6 | 95ab0d229082aa05d9a2b0036a30986d92d303f7 | refs/heads/master | 2023-04-01T03:18:52.955425 | 2021-04-14T05:24:55 | 2021-04-14T05:24:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,272 | py | import sys
from gpu_data import GPU_data
def calculator(Threads_Per_Block, Registers_Per_Thread, Shared_Memory_Per_Block, Compute_Capability):
# cal active warps per SM
Data = GPU_data[Compute_Capability]
Registers_Per_Block = Registers_Per_Thread * Threads_Per_Block
Block_Num_Bound_By_Reg = Data["Register File Size / SM (32-bit registers)"] // Registers_Per_Block
Block_Num_Bound_By_Shared_Memory = Data["Register File Size / SM (32-bit registers)"] // Shared_Memory_Per_Block
Active_Block_Num = min(Block_Num_Bound_By_Reg, Block_Num_Bound_By_Shared_Memory)
Warps_Per_Block = (Threads_Per_Block + Data["Thread / Warp"] - 1) // Data["Thread / Warp"]
Active_Warps = Active_Block_Num * Warps_Per_Block
Occupancy = Active_Warps / Data["Warps / SM"]
return Occupancy
if __name__ == "__main__":
if len(sys.argv) != 5:
print ("usage: python occupancy.py [a] [b] [c] [d]")
exit(0)
Threads_Per_Block = int(sys.argv[1])
Registers_Per_Thread = int(sys.argv[2])
Shared_Memory_Per_Block = int(sys.argv[3])
Compute_Capability = sys.argv[4]
Occupancy = calculator(Threads_Per_Block, Registers_Per_Thread, Shared_Memory_Per_Block, Compute_Capability) * 100
print("Occupancy = {}".format(Occupancy)) | [
"shanbinke@gmail.com"
] | shanbinke@gmail.com |
9e1c74b5b640913e6bc975e144610281b32930ae | 1df7f153a42b7e2fcbf37511ae7829b5101b32f0 | /codetodo.py | 8b8111cf7b28177e6db5e9fcf22cddff14a1ba62 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | marcioweck/codetodo | 6af0bf0e689442bd17525fc5890ebbae2b8615fd | e4cf0ed2adee46926a4d9948e6039106473bb5bd | refs/heads/master | 2020-04-06T04:19:15.880642 | 2016-08-16T12:46:11 | 2016-08-16T12:46:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,650 | py | #! /usr/bin/env python
from __future__ import print_function
import subprocess
import os
import sys
import re
from tabulate import tabulate
from termcolor import colored
import argparse
from glob import glob
from rglob import rglob
from fnmatch import fnmatch
import multiprocessing as mp
import mimetypes
if sys.stdout.encoding == "UTF-8":
CHECK_MARK = u"\u2714".encode("utf-8")
CROSS_MARK = u"\u2718".encode("utf-8")
else:
CHECK_MARK = "[OK]"
CROSS_MARK = "[ ]"
keywords = [
"@TODO",
"@FIXME",
]
blacklist = [
"*.swp",
"*cache"
]
def glob_pattern(p):
return rglob(os.getcwd(), p)
# for f in files:
# print(f)
# return files
if sys.stdout.isatty():
red = lambda s: colored(s, "red", attrs=["bold"])
green = lambda s: colored(s, "green")
yellow = lambda s: colored(s, "yellow", attrs=["bold"])
bold = lambda s: colored(s, attrs=["bold"])
else:
identity = lambda s:s
red = identity
green = identity
yellow = identity
bold = identity
def find_in_file(filename):
with open(filename, "r") as f:
lines = f.read().strip().split("\n")
matched = []
for i, l in enumerate(lines):
if any(k in l for k in keywords):
matched.append( ( filename, i+1, l ) )
return matched
# return filter(lambda l: any(k in l for k in keywords), lines)
def get_grep(args, pool):
# cmd = "grep -rnw {} -e '@FIXME' -e '@TODO'".format(os.getcwd())
# p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr = subprocess.PIPE)
# out, err = p.communicate()
# with open("cache") as f:
# out = f.read()
# lines = out.split("\n")[:-1]
# lines = filter(lambda l: "Binary file" not in l, lines)
files = []
if len(args.allow) > 0:
results = pool.map(glob_pattern, args.allow)
for r in results:
for f in r:
files.append(f)
else:
for dirpath, dirname, filenames in os.walk(os.getcwd()):
for f in filenames:
files.append(os.path.join(dirpath, f))
files = filter(lambda f: not any(fnmatch(f, b) for b in blacklist), files)
line_results = pool.map(find_in_file, files)
lines = []
for r in line_results:
for l in r:
lines.append(l)
return lines
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--allow", "-a", action="append", default=[])
args = parser.parse_args()
pool = mp.Pool(processes=mp.cpu_count())
lines = get_grep(args, pool)
rows = []
for line in lines:
filename, fileline, comment_line = line
# filename, fileline, comment_line = line.split(":", 2)
# print(red(filename), red(comment_line))
filename = os.path.relpath(filename, os.getcwd())
comment_type = re.search('@(.*):', comment_line)
if not comment_type:
continue
comment_type = comment_type.group(1)
comment_type = comment_type.replace("(", "").replace(")", "")
prio = comment_type.count("!")
comment_type = comment_type.replace("!", "").strip()
rest, comment = comment_line.split(":", 1)
done = "DONE" in comment
comment = comment.replace("DONE", "")
comment = comment.strip()
# print(filename, fileline, comment_type, comment)
rows.append((comment_type, prio, filename, fileline, comment, done))
type_prios = {
"TODO": 10,
"FIXME": 50
}
def mimefilter(r):
mime = mimetypes.guess_type(r[2])
return mime[0] and "text" in mime[0]
rows = filter(mimefilter, rows)
try:
if len(args.allow) > 0:
rows = filter(lambda r: any([fnmatch(r[2], p) for p in args.allow]), rows)
rows = reversed(sorted(rows, key=lambda r: (0 if r[5] else 1, r[2], type_prios[r[0]], r[1] ) ))
except:
print("error")
# print(tabulate(rows, headers=["type", "prio", "file", "comment"]))
for row in rows:
ttype, prio, filename, line, comment, done = row
if ttype == "TODO":
cprint = yellow
else:
cprint = red
if done:
cprint = green
stat = CHECK_MARK if done else CROSS_MARK
prio_line = (" (" if prio > 0 else "") + prio*"!" + (")" if prio > 0 else "")
print(cprint("{stat} {type}{prio}: {cm}".format(stat=stat, prio=prio_line, type=(ttype), cm=comment)))
print("{}:{}".format(filename, bold(line)))
print("")
if __name__ == "__main__":
main()
| [
"hello@paulgessinger.com"
] | hello@paulgessinger.com |
61e0018f668d7500563f0938b91dcaa0cfac1aa9 | d404779e9c385899ac0a3eb199190f80cce54658 | /ch5/koch_snowflake.py | edf867a1cd8130f9471b88ff2179424832a905d3 | [] | no_license | pbscrimmage/thinkPy | 8209be55e48a310a73342de7bd34c15a219dfbc0 | 06efa33a29c20c35e793a806d0f063ed2fd8623c | refs/heads/master | 2021-01-23T11:55:45.302599 | 2015-05-12T11:30:48 | 2015-05-12T11:30:48 | 34,089,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | '''
koch_snowflake.py
Author: Patrick Rummage
[patrickbrummage@gmail.com]
Objective:
Write a function that uses koch() to draw a three-sided
polygon of Koch curves, also known as a Koch snowflake.
http://en.wikipedia.org/Koch_snowflake
Note:
Requires Tkinter as well as the swampy package created for use with the book.
Info here: http://www.thinkpython.com/swampy
'''
from swampy.TurtleWorld import *
world = TurtleWorld()
trtl = Turtle()
def koch(t, length):
if length < 10:
fd(t, length)
return
koch(t, length/3)
lt(t, 60) # turn left 60 degrees
koch(t, length/3)
rt(t, 120) # turn right 120 degrees
koch(t, length/3)
lt(t, 60) # turn left 60 desgrees
koch(t, length/3)
def snowflake(t):
koch(t, 500)
rt(t, 120) # turn right 120 degrees
koch(t, 500)
rt(t, 120) # turn right 120 degrees
koch(t, 500)
snowflake(trtl)
wait_for_user()
| [
"patrickbrummage@gmail.com"
] | patrickbrummage@gmail.com |
a5e07a5a9d645cf3dae7bb95b442ba124c80d5bb | e92ccaa18e6fd3bc0595db6213dc2621569b2efe | /digitsData1.py | 6756ba17ffa03e9814bc63fc0a48876d2b7d9207 | [] | no_license | wakanak2/python-practice | 4dfc7fa4e362f4a43670541bb1190caae2f9d634 | 75f4795d542553c76dbe1a2d087eb3d0f23909b8 | refs/heads/master | 2022-12-07T18:19:43.399462 | 2020-09-03T12:50:47 | 2020-09-03T12:50:47 | 289,915,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | import sklearn.datasets
digits = sklearn.datasets.load_digits()
print("データの個数=",len(digits.images))
print("画像データ=",digits.images[0])
print("何の数字か=",digits.target[0])
| [
"w.koba.0214@gmail.com"
] | w.koba.0214@gmail.com |
3e295f508d6e7221e1d8f8ab587dacf31e01f117 | bff19e9433ea962e912cc2470413572b6a63892d | /message.py | 91e7362bb9b1095aa8ec6459c744c6eeed0c94b0 | [] | no_license | wy-hm/IMS | d83ab2d7f456a99a9727d3c9686335a9857e76d1 | 320e6207ad41ad0f83fc8a9fbca542b79ed22440 | refs/heads/master | 2023-03-16T11:17:06.221980 | 2019-04-10T05:19:19 | 2019-04-10T05:19:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,621 | py | from abc import ABC
from utility import MessageType
class GenericMessage(ABC):
""" generic message exchanged among agents """
def __init__(self, auction_id, msg_type):
super(GenericMessage, self).__init__()
self.msg_type = msg_type
self.auction_id = auction_id
class AnnouncementMessage(GenericMessage):
""" Task Announcement message """
def __init__(self, auction_id, task):
super(AnnouncementMessage, self).__init__(auction_id, MessageType.ANNOUNCEMENT)
self.task = task
class BidMessage(GenericMessage):
""" Bid message sent by agent toward auctioneer """
def __init__(self, auction_id, agent_id, value):
super(BidMessage, self).__init__(auction_id, MessageType.BID)
self.value = value
self.agent_id = agent_id
class RenewalMessage(GenericMessage):
""" Renewal message sent by auctioneer toward the winner agent """
def __init__(self,auction_id, winner_id, renewal_id):
super(RenewalMessage, self).__init__(auction_id, MessageType.RENEWAL)
self.winner_id = winner_id
self.renewal_id = renewal_id
class CloseMessage(GenericMessage):
""" Close message, sent by the auctioneer towards all other agents """
def __init__(self, auction_id, winner_id):
super(CloseMessage, self).__init__(auction_id, MessageType.CLOSE)
self.winner_id = winner_id
class AcknowledgementMessage(GenericMessage):
def __init__(self, auction_id, ack_id):
super(AcknowledgementMessage, self).__init__(auction_id, MessageType.ACKNOWLEDGEMENT)
self.ack_id = ack_id | [
"lampa9559@gmail.com"
] | lampa9559@gmail.com |
9aa00de86b197df4d513d205f46f4cab244ca9b7 | 532bc42753923d61cfffc4fa02483a978e156954 | /CLClone/my_app/migrations/0001_initial.py | 6d0865b23e2a6287d1501de2af09cd515448bd86 | [] | no_license | chasegarsee/Python-Django | 600ab278ef5efe9a5b645b9f63686bd195a22148 | cde6d9dab4eb816b269c86295dc874bf62371808 | refs/heads/master | 2022-12-15T15:42:26.649476 | 2020-02-22T23:27:43 | 2020-02-22T23:27:43 | 242,187,354 | 0 | 0 | null | 2022-12-08T03:40:01 | 2020-02-21T16:50:37 | Python | UTF-8 | Python | false | false | 553 | py | # Generated by Django 3.0.3 on 2020-02-22 17:46
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Search',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search', models.CharField(max_length=500)),
('create', models.DateTimeField(auto_now=True)),
],
),
]
| [
"chasegarsee@gmail.com"
] | chasegarsee@gmail.com |
8c0deb13652980b813955659659716bcd1001fd7 | d359fe1fb1e3824b9411d86d33e91eeb6325aa93 | /agent_bee_v4_5_3.py | a32d687a32ee0e5c46f53fca0fce39deb945189a | [] | no_license | flynnwang/kaggle_halite4 | 9c14e82899ee3ae469ac1ed47e095ab6635b5b21 | 453ca80a6fec5a346629e21e9449af571f52e5ef | refs/heads/master | 2023-03-18T19:43:59.920892 | 2020-09-16T02:41:25 | 2020-09-16T02:41:25 | 275,327,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,974 | py | # -*- coding: utf-8 -*-
"""
v4_5_3 <- v4_5_2
* grow halite steps = 15 (more aggresive)
"""
import random
import timeit
import logging
from collections import Counter
from enum import Enum, auto
import numpy as np
import scipy.optimize
from kaggle_environments.envs.halite.helpers import *
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Mute print.
# def print(*args, **kwargs):
# pass
# Sentinel weight marking an infeasible pairing in the assignment problems.
MIN_WEIGHT = -99999

# Step boundaries that split the game into opening / middle / ending phases.
BEGINNING_PHRASE_END_STEP = 60
ENDING_PHRASE_STEP = 360

# If my halite is less than this, do not build ship or shipyard anymore.
MIN_HALITE_TO_BUILD_SHIPYARD = 1000
MIN_HALITE_TO_BUILD_SHIP = 1000

# Controls the number of ships.
MAX_SHIP_NUM = 100

# Threshold for attack enemy nearby my shipyard
TIGHT_ENEMY_SHIP_DEFEND_DIST = 5
LOOSE_ENEMY_SHIP_DEFEND_DIST = 7
AVOID_COLLIDE_RATIO = 0.95
HOME_YARD_COVER_DIST = 2

# Threshold used to send bomb to enemy shipyard
# NOTE(review): the comment above appears orphaned -- no bomb-threshold
# constant is defined here; verify against earlier versions.

# The five candidate moves for a ship: stay, north, south, east, west.
POSSIBLE_MOVES = [
    Point(0, 0),
    Point(0, 1),
    Point(0, -1),
    Point(1, 0),
    Point(-1, 0)
]

# Lookup table of optimal mining-turn counts used by optimal_mining_steps():
# rows index the (clamped) log-ratio of carried halite to cell halite,
# columns index the round-trip travel distance.
TURNS_OPTIMAL = np.array(
    [[0, 2, 3, 4, 4, 5, 5, 5, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8],
     [0, 1, 2, 3, 3, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 7, 7, 7],
     [0, 0, 2, 2, 3, 3, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 7],
     [0, 0, 1, 2, 2, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6],
     [0, 0, 0, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6],
     [0, 0, 0, 0, 0, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5],
     [0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4],
     [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3],
     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2],
     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])

# Cached lookup tables, populated by init_globals() once the board size and
# game configuration are known.
HALITE_RETENSION_BY_DIST = []  # fraction of cell halite left after d mining steps
HALITE_GROWTH_BY_DIST = []  # halite regeneration factor after d steps
MANHATTAN_DISTS = None  # flattened pairwise wrap-around distance table
def get_quadrant(p: Point):
  """Return the quadrant index (1-4) containing point `p`.

  The axes are split between quadrants so that every non-origin point maps
  to exactly one quadrant; the origin itself maps to 0.
  """
  x, y = p.x, p.y
  if x > 0 and y >= 0:
    return 1
  if x <= 0 and y > 0:
    return 2
  if x < 0 and y <= 0:
    return 3
  if x >= 0 and y < 0:
    return 4
  assert p == Point(0, 0), "not exist quadrant: %s %s" % (x, y)
  return 0
def optimal_mining_steps(C, H, rt_travel):
  """Return the optimal number of mining turns from the TURNS_OPTIMAL table.

  C: halite carried by the ship; H: halite on the target cell;
  rt_travel: steps to reach the cell and return to a shipyard.
  """
  n_rows, n_cols = TURNS_OPTIMAL.shape
  if C == 0:
    row = 0
  elif H == 0:
    row = n_rows - 1  # no halite on cell: fall to the last row
  else:
    # Row is driven by the log-ratio of cargo to cell halite, clamped
    # into the table's range.
    row = int(np.log(C / H) * 2.5 + 5.5)
    row = min(max(row, 0), n_rows - 1)
  col = int(min(max(rt_travel, 0), n_cols - 1))
  return TURNS_OPTIMAL[row, col]
class Timer:
  """Context manager measuring wall-clock time of the enclosed block.

  When `logging_text` is provided, the elapsed time is also logged on exit.
  """

  def __init__(self, logging_text=None):
    self._label = logging_text
    self._t0 = None
    self._t1 = None
    self._elapsed = None

  def __enter__(self):
    self._t0 = timeit.default_timer()
    return self

  def __exit__(self, *args):
    self._t1 = timeit.default_timer()
    self._elapsed = self._t1 - self._t0
    if self._label is not None:
      logger.info("Took %.3f seconds for %s", self._elapsed, self._label)

  @property
  def interval(self):
    """Elapsed seconds, or None before the `with` block has exited."""
    return self._elapsed
def cargo(player):
  """Return the total halite currently carried by all of `player`'s ships."""
  return sum(ship.halite for ship in player.ships)
def axis_manhattan_dists(a: Point, b: Point, size):
  """Wrap-around distances between `a` and `b` along the x and y axes.

  The board is a torus of edge length `size`, so each axis distance is the
  shorter of going directly or wrapping around the edge.
  """

  def wrap_dist(u, v):
    raw = abs(u - v)
    return min(raw, size - raw)

  return wrap_dist(a.x, b.x), wrap_dist(a.y, b.y)
def manhattan_dist(a: Point, b: Point, size):
  """Torus L1 distance between `a` and `b`.

  Uses the precomputed MANHATTAN_DISTS table when init_globals() has filled
  it in; otherwise falls back to the per-axis computation.
  """
  if MANHATTAN_DISTS:
    return MANHATTAN_DISTS[a.x * size + a.y][b.x * size + b.y]
  return sum(axis_manhattan_dists(a, b, size))
def has_enemy_ship(cell, me):
  """Return True when `cell` holds a ship that belongs to an opponent of `me`."""
  if not cell.ship_id:
    return False
  return cell.ship.player_id != me.id
def direction_to_ship_action(position, next_position, board_size):
  """Return the ShipAction moving a ship from `position` to the adjacent
  `next_position` (wrapping on the torus), or None when staying put."""
  if position == next_position:
    return None
  step_to_action = (
      (Point(0, 1), ShipAction.NORTH),
      (Point(1, 0), ShipAction.EAST),
      (Point(0, -1), ShipAction.SOUTH),
      (Point(-1, 0), ShipAction.WEST),
  )
  for step, action in step_to_action:
    if (position + step) % board_size == next_position:
      return action
  assert False, '%s, %s' % (position, next_position)
def make_move(position, move, board_size):
  """Apply `move` to `position`, wrapping around the torus of edge `board_size`."""
  next_position = (position + move) % board_size
  return next_position
def get_neighbor_cells(cell, include_self=False):
  """Return the 4-neighborhood of `cell` in N, S, E, W order.

  When `include_self` is True, `cell` itself is prepended to the list.
  """
  neighbors = [cell.north, cell.south, cell.east, cell.west]
  if include_self:
    return [cell] + neighbors
  return neighbors
def init_globals(board):
  """Precompute board-dependent lookup tables used throughout the agent.

  Fills the module-level caches:
  * HALITE_GROWTH_BY_DIST[d]: halite regeneration factor after d steps.
  * HALITE_RETENSION_BY_DIST[d]: fraction of a cell's halite left after
    mining for d steps.
  * MANHATTAN_DISTS: flattened size^2 x size^2 table of pairwise torus
    distances, indexed by x * size + y.

  Fix: MANHATTAN_DISTS was rebuilt on every call (an O(size^4) loop) while
  the two halite tables were cached; it is now guarded the same way.
  """
  growth_factor = board.configuration.regen_rate + 1.0
  retention_rate = 1.0 - board.configuration.collect_rate
  size = board.configuration.size

  global HALITE_GROWTH_BY_DIST
  if not HALITE_GROWTH_BY_DIST:
    HALITE_GROWTH_BY_DIST = [growth_factor**d for d in range(size**2 + 1)]

  global HALITE_RETENSION_BY_DIST
  if not HALITE_RETENSION_BY_DIST:
    HALITE_RETENSION_BY_DIST = [retention_rate**d for d in range(size**2 + 1)]

  global MANHATTAN_DISTS
  if MANHATTAN_DISTS is None:
    # While MANHATTAN_DISTS is still None, manhattan_dist() falls back to
    # the per-axis computation, so it is safe to call it in this loop.
    dists = np.zeros((size**2, size**2), dtype=int)
    with Timer("Init manhattan_dist"):
      for c1 in board.cells.values():
        for c2 in board.cells.values():
          a = c1.position
          b = c2.position
          d = manhattan_dist(a, b, size)
          dists[a.x * size + a.y][b.x * size + b.y] = d
    MANHATTAN_DISTS = dists.tolist()
class ShipTask(Enum):
  """Kinds of per-turn assignments a ship can carry (set via assign_task)."""

  UNKNOWN_TASK = auto()

  # Default task, to stay on current cell.
  STAY = auto()

  # Send a ship to a halite.
  GOTO_HALITE = auto()

  # Continuing collecting halite on current cell.
  COLLECT = auto()

  # Finish collecting halite and go back to shipyard.
  RETURN = auto()

  # Send a ship to attack enemy's shipyard.
  ATTACK_SHIPYARD = auto()

  # Attack enemy ship.
  ATTACK_SHIP = auto()

  # Send the first ship to a location to build the initial shipyard.
  INITIAL_SHIPYARD = auto()

  # Make one ship stay on the shipyard to protect it from enemy next to it.
  GUARD_SHIPYARD = auto()
class StrategyBase:
  """Base class exposing board-derived shortcuts shared by all strategies.

  Subclasses must call update(board) once per turn before using any other
  member. The get_nearest_* helpers memoize their result as attributes on
  the Cell objects themselves, so they are valid for one turn only.
  """

  @property
  def me(self):
    # The player this agent controls.
    return self.board.current_player

  @property
  def c(self):
    # Shorthand for the game configuration.
    return self.board.configuration

  @property
  def step(self):
    return self.board.step

  @property
  def num_ships(self):
    return len(self.me.ship_ids)

  @property
  def sz(self):
    # Board side length.
    return self.c.size

  @property
  def num_shipyards(self):
    return len(self.me.shipyard_ids)

  @property
  def tight_defend_dist(self):
    # return 4 + max((self.num_ships - 15) // 5, 0)
    return TIGHT_ENEMY_SHIP_DEFEND_DIST

  @property
  def loose_defend_dist(self):
    return LOOSE_ENEMY_SHIP_DEFEND_DIST

  @property
  def home_grown_cell_dist(self):
    # Radius around my shipyards considered "home" territory.
    return self.tight_defend_dist

  @property
  def is_beginning_phrase(self):
    return self.step <= BEGINNING_PHRASE_END_STEP

  @property
  def my_idle_ships(self):
    """All ships without task assignment."""
    for ship in self.ships:
      if ship.next_action or ship.has_assignment:
        continue
      yield ship

  @property
  def enemy_shipyards(self):
    # Generator over every opponent shipyard.
    for e in self.board.opponents:
      for y in e.shipyards:
        yield y

  @property
  def enemy_ships(self):
    # Generator over every opponent ship.
    for e in self.board.opponents:
      for s in e.ships:
        yield s

  @staticmethod
  def assign_task(ship, target_cell: Cell, task_type: ShipTask, enemy=None):
    """Add a task to a ship.

    Marks the target cell as taken so no other ship picks it this turn.
    """
    ship.has_assignment = True
    ship.target_cell = target_cell
    ship.task_type = task_type
    ship.target_cell.is_targetd = True
    ship.target_enemy = enemy

  def manhattan_dist(self, p, q):
    # Wrapped-board distance between two positioned objects (ship/cell/yard).
    return manhattan_dist(p.position, q.position, self.c.size)

  def nearest_shipyards(self, cell: Cell, shipyards):
    """Returns (dist, shipyard) pairs sorted by distance to |cell|."""
    dist_yards = [(self.manhattan_dist(y, cell), y) for y in shipyards]
    dist_yards = sorted(dist_yards, key=lambda x: x[0])
    return dist_yards

  def find_nearest_enemy(self, cell: Cell, enemy_ships):
    """Nearest enemy with least halite.

    Returns (dist, ship), or (9999, None) when |enemy_ships| is empty.
    Ties on distance are broken by lower halite.
    """
    if not isinstance(enemy_ships, list):
      enemy_ships = list(enemy_ships)
    enemy_ships = sorted(enemy_ships,
                         key=lambda s: (self.manhattan_dist(cell, s), s.halite))
    for enemy in enemy_ships:
      return self.manhattan_dist(cell, enemy), enemy
    return 9999, None

  def get_nearest_home_yard(self, cell):
    """Memoized (dist, yard) of the nearest friendly shipyard to |cell|.

    Also caches the full sorted list on cell.nearest_home_yards. Cached on
    the Cell object, so valid only within the current turn.
    """
    if not hasattr(cell, 'home_yard_info'):
      cell.nearest_home_yards = self.nearest_shipyards(cell, self.shipyards)
      cell.home_yard_info = (9999, None)
      if cell.nearest_home_yards:
        cell.home_yard_info = cell.nearest_home_yards[0]
    return cell.home_yard_info

  def get_nearest_enemy_yard(self, cell):
    """Memoized (dist, yard) of the nearest enemy shipyard to |cell|."""
    if not hasattr(cell, 'enemy_yard_info'):
      cell.nearest_enemy_yards = self.nearest_shipyards(cell,
                                                        self.enemy_shipyards)
      cell.enemy_yard_info = (9999, None)
      if cell.nearest_enemy_yards:
        cell.enemy_yard_info = cell.nearest_enemy_yards[0]
    return cell.enemy_yard_info

  def update(self, board):
    """Refreshes per-turn state; must be called once per turn."""
    self.board = board
    # Cache it to eliminate repeated list constructor.
    self.shipyards = self.me.shipyards
    self.ships = self.me.ships

  def execute(self):
    # Overridden by subclasses with the actual per-turn logic.
    pass

  def __call__(self):
    self.execute()
class FollowerDetector(StrategyBase):
  """Tracks enemy ships that chase my ships across turns.

  An enemy sitting on one of my ship's previous cells with strictly less
  halite counts as a follower; FOLLOW_COUNT consecutive sightings of the
  same follower marks that ship as followed.
  """

  # >= 2 is considered as following.
  FOLLOW_COUNT = 2

  def __init__(self):
    self.board = None
    self.ship_index = {}  # ship id => my ship as of the previous turn.
    self.follower = {}  # ship id => the enemy ship chasing it.
    self.follow_count = Counter()  # ship id => consecutive chase turns.

  def clear(self, ship_id):
    """Drops all follow state kept for |ship_id|."""
    if ship_id in self.follower:
      del self.follower[ship_id]
      del self.follow_count[ship_id]

  def add(self, ship_id, follower: Ship):
    """Records |follower| (halite < followed ship's) chasing |ship_id|."""
    previous = self.follower.get(ship_id)
    same_follower = previous is not None and previous.id == follower.id
    if same_follower:
      self.follow_count[ship_id] += 1
    else:
      # First sighting of this particular chaser.
      self.follow_count[ship_id] = 1
    self.follower[ship_id] = follower

  def update(self, board):
    """Updates follow info with the latest board state."""
    super().update(board)
    current = {s.id: s for s in self.ships}
    # Whatever now occupies a ship's previous cell is a follower candidate.
    for ship_id, last_turn_ship in self.ship_index.items():
      ship = current.get(ship_id)
      if ship is None:
        # The ship no longer exists.
        self.clear(ship_id)
        continue
      chaser = board[last_turn_ship.position].ship
      if chaser is None or chaser.halite >= ship.halite:
        # Not a threat: nothing there, or it cannot profitably collide.
        self.clear(ship_id)
        continue
      assert chaser and chaser.halite < ship.halite
      self.add(ship_id, chaser)
    # Remember current positions for next turn's comparison.
    self.ship_index = current

  def is_followed(self, ship: Ship):
    """Returns true if |ship| has been chased for FOLLOW_COUNT+ turns."""
    chaser = self.follower.get(ship.id)
    assert not chaser or chaser.halite < ship.halite
    return self.follow_count.get(ship.id, 0) >= self.FOLLOW_COUNT

  def get_follower(self, ship: Ship):
    return self.follower.get(ship.id)
class InitializeFirstShipyard(StrategyBase):
  """Picks the location of, and converts, the very first shipyard."""

  def __init__(self):
    super().__init__()
    self.first_shipyard_set = False
    self.initial_yard_position = None
    self.initial_ship_position = None

  def estimate_cell_halite(self, candidate_cell):
    """Scores |candidate_cell| as a shipyard site by nearby halite."""
    expected_halite = 0
    current_halite = 0
    num_halite_cells = 0
    for cell in self.halite_cells:
      # shipyard will destory the halite under it.
      if candidate_cell.position == cell.position:
        continue
      dist = self.manhattan_dist(cell, candidate_cell)
      # TODO(wangfei): try larger value?
      if dist <= self.home_grown_cell_dist and cell.halite > 0:
        expected_halite += self.halite_per_turn(None, cell, dist, dist)
        current_halite += cell.halite
        num_halite_cells += 1
    # NOTE(review): |dist| here is whatever the *last* loop iteration left
    # behind (and raises NameError if halite_cells is empty) — it is not a
    # meaningful aggregate. |num_halite_cells| is computed but never
    # returned; callers sort on this tuple, so the third field acts as an
    # arbitrary tie-breaker. Confirm intent before changing.
    return expected_halite, current_halite, dist

  def select_initial_cell(self):
    """Yields (score, cell) for cells inside the allowed starting window."""

    def get_coord_range(v):
      # Maps a starting coordinate (5 or 15 on a 21x21 board) to the band
      # of coordinates considered for the yard on that axis.
      DELTA = 0
      MARGIN = 5
      if v == 5:
        v_min, v_max = MARGIN, 5 + DELTA
      else:
        v_min, v_max = 15 - DELTA, 20 - MARGIN
      return v_min, v_max

    position = self.initial_ship_position
    x_min, x_max = get_coord_range(position.x)
    y_min, y_max = get_coord_range(position.y)
    for cell in self.board.cells.values():
      position = cell.position
      if (x_min <= position.x <= x_max and y_min <= position.y <= y_max):
        yield self.estimate_cell_halite(cell), cell

  def convert_first_shipyard(self):
    """Strategy for convert the first shipyard.

    On the first call, fixes the ship's start position and selects the best
    yard cell; on every call, steers the (single) ship toward that cell and
    converts once it arrives.
    """
    assert self.num_ships == 1, self.num_ships
    ship = self.ships[0]
    if not self.initial_ship_position:
      self.initial_ship_position = ship.position
      candidate_cells = list(self.select_initial_cell())
      if candidate_cells:
        candidate_cells.sort(key=lambda x: x[0], reverse=True)
        value, yard_cell = candidate_cells[0]
        self.initial_yard_position = yard_cell.position
        print(
            "Ship initial:", self.initial_ship_position, 'dist=',
            manhattan_dist(self.initial_ship_position,
                           self.initial_yard_position, self.c.size),
            'selected yard position:', self.initial_yard_position, 'value=',
            value)
    self.assign_task(ship, self.board[self.initial_yard_position],
                     ShipTask.INITIAL_SHIPYARD)
    if ship.position == self.initial_yard_position:
      ship.next_action = ShipAction.CONVERT
      self.first_shipyard_set = True
class GradientMap(StrategyBase):
  """Computes danger gradients spread around cells of interest."""

  def __init__(self):
    self.board = None
    self.enemy_gradient = None

  def get_nearby_cells(self, center: Cell, max_dist):
    """Returns every cell within |max_dist| of |center| (DFS order)."""
    seen = set()
    collected = []

    def visit(cell: Cell):
      if cell.position in seen:
        return
      seen.add(cell.position)
      if self.manhattan_dist(cell, center) > max_dist:
        return
      collected.append(cell)
      for neighbor in get_neighbor_cells(cell):
        visit(neighbor)

    visit(center)
    return collected

  def compute_gradient(self, center_cells, max_dist, value_func):
    """Accumulates value_func(center, cell) onto cells near each center."""
    gradient = np.zeros((self.sz, self.sz))
    for center in center_cells:
      for cell in self.get_nearby_cells(center, max_dist):
        pos = cell.position
        gradient[pos.x, pos.y] += value_func(center, cell)
    return gradient

  def get_enemy_gradient(self,
                         center_cell,
                         max_dist=2,
                         broadcast_dist=1,
                         halite=999999):
    """The amount enemy can hurt me around |center_cell|.

    Only enemies carrying less than |halite| contribute.
    """

    def nearby_enemy_cells():
      for cell in self.get_nearby_cells(center_cell, max_dist):
        if has_enemy_ship(cell, self.me):
          yield cell

    def enemy_cost(dist):
      # Cost decays with distance and vanishes past |broadcast_dist|.
      return self.c.spawn_cost / (dist or 1) if dist <= broadcast_dist else 0

    def enemy_value(enemy_cell, nb_cell):
      if enemy_cell.ship.halite < halite:
        return enemy_cost(self.manhattan_dist(nb_cell, enemy_cell))
      return 0

    return self.compute_gradient(nearby_enemy_cells(), max_dist, enemy_value)

  def get_full_map_enemy_gradient(self, max_dist=4, min_halite=10):
    """Board-wide danger gradient contributed by every enemy ship."""

    def all_enemy_cells():
      for enemy in self.enemy_ships:
        yield enemy.cell

    def enemy_value(enemy_cell, nb_cell):
      dist = self.manhattan_dist(nb_cell, enemy_cell)
      load = enemy_cell.ship.halite
      if load <= min_halite:
        load = 0
      load = min(50, load)
      # Lighter ships are more dangerous: they can ram anything heavier.
      hurt_factor = 1 - (load / 50)
      return hurt_factor * self.c.spawn_cost / (dist + 1)

    return self.compute_gradient(all_enemy_cells(), max_dist, enemy_value)
class Stage(Enum):
  """Coarse game phase, used to tune strategy aggressiveness."""
  # step <= 60
  OPENING = auto()
  # Normal behaviour to grow home cell as planned
  GROW_HALITE = auto()
  # Collect halite for more ship.
  HARVEST = auto()
  # step >= 300, save as much as I can.
  SAVING = auto()
  # step >= 360 (TODO: maybe 370?)
  ENDING = auto()
class ShipStrategy(InitializeFirstShipyard, StrategyBase):
"""Sends every ships to the nearest cell with halite.
cell:
|is_targetd|: prevent more than 1 alley ships choose the same halite.
ship:
|next_cell|: next cell location of the ship.
|has_assignment|: has already has task assignment for this ship.
|target_cell|: send ship to this cell, may be the same of current cell.
|task_type|: used to rank ship for moves.
"""
  def __init__(self, simulation=False):
    """Creates the strategy.

    Args:
      simulation: presumably flags a simulated (non-live) run — not used in
        this block; confirm usage elsewhere in the file.
    """
    super().__init__()
    self.board = None
    self.simulation = simulation
    # Detects enemy ships chasing mine across turns.
    self.follower_detector = FollowerDetector()
    # Shared helper for enemy-danger gradient computations.
    self.gradient_map = GradientMap()
@property
def stage(self):
step = self.board.step
if step <= BEGINNING_PHRASE_END_STEP:
return Stage.OPENING
if step >= 360:
return Stage.ENDING
elif step >= 300:
return Stage.SAVING
ROUND_STEP_NUM = 30
GROW_HALITE_STEP_IN_ROUND = 15
step -= BEGINNING_PHRASE_END_STEP
round_id = step // ROUND_STEP_NUM
step_in_round = step - round_id * ROUND_STEP_NUM
if step_in_round < GROW_HALITE_STEP_IN_ROUND:
return Stage.GROW_HALITE
return Stage.HARVEST
  def update(self, board):
    """Updates board state at each step.

    Resets per-turn accumulators, rebuilds the halite cell list, clears
    every ship's assignment to the default STAY, and refreshes the
    follower detector and gradient map.
    """
    # Lazily build the module-level lookup tables on the very first turn.
    if self.board is None:
      init_globals(board)
    super().update(board)
    self.board = board
    # Halite committed this turn (spawns/conversions) but not yet deducted.
    self.cost_halite = 0
    self.halite_ratio = -1
    self.num_home_halite_cells = 0
    self.mean_home_halite = 100
    self.init_halite_cells()
    # Default ship to stay on the same cell without assignment.
    for ship in self.ships:
      ship.has_assignment = False
      ship.target_cell = ship.cell
      ship.next_cell = ship.cell
      ship.task_type = ShipTask.STAY
    self.follower_detector.update(board)
    self.gradient_map.update(board)
  def init_halite_cells(self):
    """Collects halite cells and computes per-cell keep thresholds.

    Each halite cell gets a |keep_halite_value|: mining below that value is
    discouraged so home cells can regrow. Also annotates each cell with the
    shipyards covering it (|convering_shipyards|).
    """
    HOME_GROWN_CELL_MIN_HALITE = 80

    def home_cell_halite_value(cell):
      # Halite floor for a home cell; scales with fleet size and how many
      # shipyards cover the cell.
      stage_factor = 1.0
      # if self.stage == Stage.GROW_HALITE and self.num_ships >= 20:
      # stage_factor = 1.1
      if self.stage == Stage.SAVING:
        stage_factor = 1.1
      num_covered = len(cell.convering_shipyards)
      cover_factor = num_covered / 3
      ship_factor = self.num_ships / 10
      keep_halite = HOME_GROWN_CELL_MIN_HALITE * (ship_factor + cover_factor)
      keep_halite *= stage_factor
      # if self.stage == Stage.HARVEST:
      # keep_halite = HOME_GROWN_CELL_MIN_HALITE
      # NOTE(review): HOME_YARD_COVER_DIST is a distance constant; using it
      # as a halite floor looks unintended — confirm.
      keep_halite = max(keep_halite, HOME_YARD_COVER_DIST)
      return keep_halite

    def is_home_grown_cell(cell):
      # Home if double-covered, or single-covered at very close range.
      num_covered = len(cell.convering_shipyards)
      return (num_covered >= 2 or
              (num_covered > 0 and
               cell.convering_shipyards[0][0] <= HOME_YARD_COVER_DIST))

    def keep_halite_value(cell):
      threshold = self.mean_halite_value * 0.7
      if self.step >= ENDING_PHRASE_STEP:
        # Endgame: mine everything down to well below the mean.
        return min(self.mean_halite_value * 0.5, threshold)
      if is_home_grown_cell(cell):
        threshold = max(home_cell_halite_value(cell), threshold)
      # Do not go into enemy shipyard for halite.
      enemy_yard_dist, enemy_yard = self.get_nearest_enemy_yard(cell)
      if (enemy_yard and enemy_yard_dist <= 4):
        ally_yard_dist, alley_yard = self.get_nearest_home_yard(cell)
        if (alley_yard and enemy_yard_dist < ally_yard_dist):
          # if the cell is nearer to the enemy yard.
          return 1000
      return min(threshold, 400)

    # Init halite cells
    self.halite_cells = []
    for cell in self.board.cells.values():
      cell.is_targetd = False
      if cell.halite > 0:
        self.halite_cells.append(cell)
    # Initialize covered cells by shipyards.
    for cell in self.halite_cells:
      # Populate cache
      self.get_nearest_home_yard(cell)
      home_yards = [
          x for x in cell.nearest_home_yards
          if x[0] <= self.home_grown_cell_dist
      ]
      cell.convering_shipyards = home_yards
    self.mean_halite_value = 0
    if self.halite_cells:
      halite_values = [c.halite for c in self.halite_cells]
      self.mean_halite_value = np.mean(halite_values)
      self.std_halite_value = np.std(halite_values)
    for cell in self.halite_cells:
      cell.keep_halite_value = keep_halite_value(cell)
  @property
  def me_halite(self):
    """Halite still spendable this turn (bank minus already-committed cost)."""
    return self.me.halite - self.cost_halite
def collect_game_info(self):
# Computes neighbour cells mean halite values.
# TODO: reuse
def cell_to_yard_dist(cell):
min_dist, _ = self.get_nearest_home_yard(cell)
return min_dist
self.mean_home_halite = 100
home_cells = [
cell.halite
for cell in self.halite_cells
if cell_to_yard_dist(cell) <= self.home_grown_cell_dist
]
if home_cells:
self.mean_home_halite = np.mean(home_cells)
# Player info
self.me.total_halite = self.me.halite + cargo(self.me)
self.max_enemy_halite = -1
self.max_enemy = None
self.total_enemy_ship_num = 0
for p in self.board.opponents:
p.total_halite = p.halite + cargo(p)
if p.total_halite >= self.max_enemy_halite:
self.max_enemy_halite = p.halite
self.max_enemy = p
self.total_enemy_ship_num += len(p.ship_ids)
  def bomb_enemy_shipyard(self):
    """Having enough farmers, let's send ghost to enemy shipyard.

    Picks the nearest zero-halite idle ship for the nearest eligible enemy
    shipyard and assigns one ATTACK_SHIPYARD task per turn at most.
    """

    # NOTE(review): defined but never used in this method — dead code?
    def estimate_halite(player):
      h = player.halite
      s = len(player.ship_ids) * self.c.spawn_cost
      return h + s

    MIN_ENEMY_YARD_TO_MY_YARD = 4

    def max_bomb_dist(enemy_yard):
      # Don't use bomb if ship group is small.
      if self.num_ships <= 16:
        return 0
      # Elimination program.
      if (self.num_ships >= 50 and
          self.num_ships >= self.total_enemy_ship_num + 10):
        return self.sz * 2
      # Only attack nearby enemy yard.
      return MIN_ENEMY_YARD_TO_MY_YARD

    def is_near_my_shipyard(enemy_yard):
      for yard in self.shipyards:
        dist = self.manhattan_dist(yard, enemy_yard)
        if dist <= max_bomb_dist(enemy_yard):
          return True
      return False

    def non_targeted_enemy_shipyards():
      for y in self.enemy_shipyards:
        if y.cell.is_targetd:
          continue
        if not is_near_my_shipyard(y):
          continue
        yield y

    def select_bomb_ship(enemy_yard):
      # Nearest idle ship carrying nothing (so nothing is gifted on impact).
      min_dist = 99999
      bomb_ship = None
      for ship in self.my_idle_ships:
        # Don't send halite to enemy.
        if ship.halite > 0:
          continue
        dist = self.manhattan_dist(enemy_yard, ship)
        if dist < min_dist:
          min_dist = dist
          bomb_ship = ship
      return min_dist, bomb_ship, enemy_yard

    if self.step < BEGINNING_PHRASE_END_STEP:
      return
    enemy_shipyards = (
        select_bomb_ship(y) for y in non_targeted_enemy_shipyards())
    enemy_shipyards = [x for x in enemy_shipyards if x[1]]
    enemy_shipyards.sort(key=lambda x: x[0])
    for _, bomb_ship, enemy_yard in enemy_shipyards:
      self.assign_task(bomb_ship, enemy_yard.cell, ShipTask.ATTACK_SHIPYARD)
      # One bomb at a time
      break
  def convert_shipyard(self):
    """Builds shipyard to maximize the total number of halite covered within
    |home_grown_cell_dist|.

    Flow: decide whether another shipyard is warranted (by fleet size or by
    the halite-cell-per-ship ratio), nominate candidate cells satisfying
    distance constraints from existing yards, score them by covered halite,
    then convert a nearby ship in place or send one toward the best cell.
    Side effects: sets self.halite_ratio, self.num_home_halite_cells and
    self.save_for_converting.
    """
    MAX_SHIPYARD_NUM = 20
    MANHATTAN_DIST_RANGE = range(6, 7 + 1)
    AXIS_DIST_RANGE1 = range(3, 5 + 1)
    AXIS_DIST_RANGE2 = range(1, 5 + 1)
    MAX_SHIP_TO_SHIPYARD_DIST = 8
    HALITE_CELL_PER_SHIP = 2.5 if self.is_beginning_phrase else 2.9
    self.halite_ratio = -1
    # No ship left.
    if not self.num_ships:
      return

    def shipyard_num_by_ship_num():
      # One yard until 12 ships; roughly +1 yard per 6 ships afterwards.
      if self.num_ships >= 12:
        return min(2 + max((self.num_ships - 12) // 6, 0), MAX_SHIPYARD_NUM)
      return 1

    def shipyard_num_by_halite_ratio():
      # Add a yard when covered halite cells per ship drops too low.
      num_halite_cells = 0
      for cell in self.halite_cells:
        min_dist, _ = self.get_nearest_home_yard(cell)
        if min_dist <= self.home_grown_cell_dist:
          num_halite_cells += 1
      num_yards = self.num_shipyards
      halite_ratio = num_halite_cells / (self.num_ships or 1)
      self.num_home_halite_cells = num_halite_cells
      self.halite_ratio = halite_ratio
      if halite_ratio < HALITE_CELL_PER_SHIP and self.num_ships >= 15:
        num_yards += 1
        print('more ship: halite cell / ship =', halite_ratio)
      return num_yards

    def max_shipyard_num():
      return max(shipyard_num_by_ship_num(), shipyard_num_by_halite_ratio())

    # Reach max shipyard num.
    if self.num_shipyards >= max_shipyard_num():
      return

    def convert_threshold():
      threshold = MIN_HALITE_TO_BUILD_SHIPYARD
      # Use as much as I can.
      if (self.num_shipyards == 0 or
          self.board.step <= BEGINNING_PHRASE_END_STEP or
          self.num_ships <= MAX_SHIP_NUM):
        threshold = self.c.convert_cost
      return max(self.c.convert_cost, threshold)

    def has_enough_halite(ship):
      return ship.halite + self.me.halite >= convert_threshold()
      # return self.me_halite >= convert_threshold()

    def has_enemy_shipyard_nearby(cell):
      if self.num_ships >= 30:
        return False
      min_dist, min_yard = self.get_nearest_enemy_yard(cell)
      if min_yard and min_dist <= 3:
        return True
      return False

    def has_enemy_nearby(cell):
      return any(
          has_enemy_ship(c, self.me)
          for c in get_neighbor_cells(cell, include_self=True))

    def within_predefined_range(cell):
      # New yard must sit 6-7 manhattan away from its 2 nearest home yards,
      # with per-axis offsets inside the allowed band.
      if not self.me.shipyard_ids:
        return True
      self.get_nearest_home_yard(cell)  # populate cache
      for dist, yard in cell.nearest_home_yards[:2]:
        if dist not in MANHATTAN_DIST_RANGE:
          return False
        dist_x, dist_y = axis_manhattan_dists(cell.position, yard.position,
                                              self.c.size)
        axis_dist_range = (AXIS_DIST_RANGE1
                           if self.num_shipyards == 1 else AXIS_DIST_RANGE2)
        # That satisfy some axis distance constraints to make me feel safe.
        if dist_x not in axis_dist_range or dist_y not in axis_dist_range:
          return False
      return True

    def compute_convert_score(candidate_cell):
      MAX_COVER_HALITE = 2
      # Maximize the total value of halite when converting ship.
      total_cell = 0
      total_halite = 0
      # NOTE(review): total_halite2 is computed but not returned.
      total_halite2 = 0
      shipyards = self.shipyards + [candidate_cell
                                   ]  # Fake the cell as shipyard.
      for cell in self.halite_cells:
        if cell.position == candidate_cell.position:
          continue
        covered = 0
        dist_yards = [(self.manhattan_dist(y, cell), y) for y in shipyards]
        dist_yards = sorted(dist_yards, key=lambda x: x[0])
        for dist, yard in dist_yards[:MAX_COVER_HALITE]:
          if dist <= self.home_grown_cell_dist:
            # Repeat count halite if recovered.
            total_halite2 += cell.halite / np.sqrt(dist)
            total_halite += cell.halite / dist
            # total_halite += 1.0 / dist
            covered = 1
        total_cell += covered
      # print("convert score for %s, total=%s, s1=%s, s2=%s" %
      # (candidate_cell.position, total_cell, total_halite, total_halite2))
      return total_halite, total_cell

    def nominate_shipyard_positions():
      for cell in self.board.cells.values():
        # Exclude existing shipyard position (including enemy ones).
        if cell.shipyard_id:
          continue
        # Not convert too near enemy shipyard.
        if has_enemy_shipyard_nearby(cell):
          continue
        if not within_predefined_range(cell):
          continue
        # Have a nearby ship.
        dist_to_yard, _ = self.find_nearest_enemy(cell, self.ships)
        if dist_to_yard > MAX_SHIP_TO_SHIPYARD_DIST:
          continue
        cell.convert_score = compute_convert_score(cell)
        yield cell

    def convert_ship(ship):
      # Commit the conversion cost this turn (ship cargo offsets it).
      self.cost_halite += (self.c.convert_cost - ship.halite)
      ship.next_action = ShipAction.CONVERT
      ship.has_assignment = True
      ship.cell.is_targetd = True

    def call_for_ship(cell):
      # Convert in place if a funded ship is already there (and safe);
      # otherwise route the nearest funded ship toward the cell.
      ships = sorted(self.ships, key=lambda s: self.manhattan_dist(s, cell))
      for ship in ships:
        if not has_enough_halite(ship):
          continue
        dist_to_yard = self.manhattan_dist(ship, cell)
        # Annoy nearby enemy.
        min_enemy_to_yard_dist, min_enemy = self.find_nearest_enemy(
            cell, self.enemy_ships)
        if (min_enemy and min_enemy_to_yard_dist <= dist_to_yard and
            min_enemy.halite < ship.halite):
          continue
        if ship.position == cell.position and not has_enemy_nearby(ship.cell):
          convert_ship(ship)
          return True
        if ship.position != cell.position:
          print("Send ship(%s %s) to shipyard position (%s), dist=%s" %
                (ship.id, ship.position, cell.position, dist_to_yard))
          # Let's use GOTO_HALITE for now.
          self.assign_task(ship, cell, ShipTask.INITIAL_SHIPYARD)
          return True
      return False

    # Pre cash money for shipyard conversion when moving towards.
    self.save_for_converting = self.c.convert_cost
    candidate_cells = list(nominate_shipyard_positions())
    if not candidate_cells:
      return
    candidate_cells.sort(key=lambda c: c.convert_score, reverse=True)
    for cell in candidate_cells:
      if call_for_ship(cell):
        # One shipyard at a time.
        return
  def compute_ship_moves(self):
    """Computes ship moves to its target.
    Maximize total expected value.
    * prefer the move to the target (distance is shorter)
    * not prefer move into enemy with lower halite (or equal)

    Builds a ships x next-positions weight matrix and solves it as an
    assignment problem so no two ships pick the same next cell.
    """
    spawn_cost = self.board.configuration.spawn_cost
    convert_cost = self.board.configuration.convert_cost
    # NOTE(review): collect_rate is never used in this method.
    collect_rate = self.board.configuration.collect_rate

    def compute_weight(ship, next_position):
      # Scores moving |ship| onto |next_position| given its assigned task.
      ignore_neighbour_cell_enemy = False
      target_cell = ship.target_cell
      next_cell = self.board[next_position]
      # If a non-followed ship's next move is to alley SPAWNING shipyard, skip
      yard = next_cell.shipyard
      if (yard and yard.player_id == self.me.id and
          yard.next_action == ShipyardAction.SPAWN and
          not hasattr(ship, "follower")):
        return MIN_WEIGHT
      # If stay at current location, prefer not stay...
      dist = manhattan_dist(next_position, target_cell.position, self.c.size)
      ship_dist = self.manhattan_dist(ship, target_cell)
      # Base reward: progress toward the target cell.
      wt = ship_dist - dist
      # if (ship.task_type == ShipTask.STAY and ship.position == next_position):
      # wt -= 10
      # If collecting halite
      if ((ship.task_type == ShipTask.GOTO_HALITE or
           ship.task_type == ShipTask.COLLECT) and target_cell.halite > 0):
        ship_to_poi = dist
        poi_to_yard, min_yard = self.get_nearest_home_yard(next_cell)
        if min_yard is None:
          poi_to_yard = 1
        expect_return = self.halite_per_turn(ship, target_cell, ship_to_poi,
                                             poi_to_yard)
        if expect_return < 0:
          return MIN_WEIGHT
        wt += expect_return
      # If go back home
      if ship.task_type == ShipTask.RETURN:
        wt += ship.halite / (dist + 1)
        if hasattr(ship, 'follower'):
          # Being chased: returning home is worth much more.
          wt += self.c.spawn_cost
      # If goto enemy yard.
      if ship.task_type == ShipTask.ATTACK_SHIPYARD:
        # TODO: use what value as weight for destory enemy yard?
        wt += convert_cost / (dist + 1)
      # Do not step on shipyard
      if (ship.task_type != ShipTask.ATTACK_SHIPYARD and
          next_cell.shipyard_id and next_cell.shipyard.player_id != self.me.id):
        wt -= convert_cost / (dist + 1)
      if ship.task_type == ShipTask.ATTACK_SHIP:
        wt += convert_cost / (dist + 1)
        enemy = ship.target_enemy
        enemy_dist = manhattan_dist(next_position, enemy.position, self.c.size)
        wt += (enemy.halite + enemy.cell.halite) / (enemy_dist + 1)
      if ship.task_type == ShipTask.GUARD_SHIPYARD:
        wt += 1 / (dist + 1)
        # Only ignore enemy when the ship is on the yard.
        if next_position == target_cell.position:
          ignore_neighbour_cell_enemy = True

      def move_away_from_enemy(enemy, ship, avoid_collision=True):
        """Collides with enemy if my ship has less halite."""
        if ship.halite < enemy.halite:
          return False
        elif ship.halite > enemy.halite:
          return True
        # enemy.halite == ship.halite
        assert enemy.halite == ship.halite
        if ship.halite > 0:
          return True
        if avoid_collision:
          return True
        # Empty vs empty: sometimes accept the trade.
        return random.random() < AVOID_COLLIDE_RATIO

      # If there is an enemy in next_position with lower halite
      if has_enemy_ship(next_cell, self.me):
        # If there is an enemy sitting on its shipyard, collide with him.
        if (ship.task_type == ShipTask.ATTACK_SHIPYARD and
            next_cell.position == target_cell.position):
          pass
        elif move_away_from_enemy(next_cell.ship, ship):
          wt -= (spawn_cost + ship.halite)
      # If there is an enemy in neighbor next_position with lower halite
      if not ignore_neighbour_cell_enemy:
        for nb_cell in get_neighbor_cells(next_cell):
          if has_enemy_ship(nb_cell, self.me):
            if move_away_from_enemy(nb_cell.ship, ship, avoid_collision=False):
              wt -= (spawn_cost + ship.halite)
      return wt

    # Skip only convert ships.
    ships = [s for s in self.ships if not s.next_action]
    next_positions = {
        make_move(s.position, move, self.c.size)
        for s in ships
        for move in POSSIBLE_MOVES
    }
    # Iterating the same set object twice yields the same order, so
    # position_to_index and index_to_position below stay consistent.
    position_to_index = {pos: i for i, pos in enumerate(next_positions)}
    C = np.ones((len(ships), len(next_positions))) * MIN_WEIGHT
    for ship_idx, ship in enumerate(ships):
      for move in POSSIBLE_MOVES:
        next_position = make_move(ship.position, move, self.c.size)
        poi_idx = position_to_index[next_position]
        C[ship_idx, poi_idx] = compute_weight(ship, next_position)
    rows, cols = scipy.optimize.linear_sum_assignment(C, maximize=True)
    index_to_position = list(next_positions)
    for ship_idx, poi_idx in zip(rows, cols):
      ship = ships[ship_idx]
      next_position = index_to_position[poi_idx]
      ship.next_cell = self.board[next_position]
      ship.next_action = direction_to_ship_action(ship.position, next_position,
                                                  self.c.size)
      # print(ship.id, 'at', ship.position, 'goto', next_position)
    # Debug dump when the matcher failed to move every ship.
    if len(rows) != len(ships):
      matched_ship_ids = set()
      for ship_idx in rows:
        matched_ship_ids.add(ships[ship_idx].id)
      for ship in ships:
        print('ship %s (matchd=%s), at %s, has_assignment=%s, task=%s' %
              (ship.id, ship.id in matched_ship_ids, ship.position,
               ship.has_assignment, ship.task_type))
        for move in POSSIBLE_MOVES:
          next_position = make_move(ship.position, move, self.c.size)
          wt = compute_weight(ship, next_position)
          print(' to %s, wt=%.2f' % (next_position, wt))
    assert len(rows) == len(ships), "match=%s, ships=%s" % (len(rows),
                                                            len(ships))
def spawn_ships(self):
"""Spawns farmer ships if we have enough money and no collision with my own
ships."""
SHIP_NUM_HARD_LIMIT = 100
# When leading, convert as much as possible.
def max_ship_num():
by_cash = max(0, (self.me_halite - 3000) // 1000) + MAX_SHIP_NUM
by_enemy_halite = 0
if self.me.total_halite > self.max_enemy_halite + 6 * self.c.spawn_cost:
by_enemy_halite = SHIP_NUM_HARD_LIMIT
return min(SHIP_NUM_HARD_LIMIT, max(by_cash, by_enemy_halite))
def spawn(yard):
self.cost_halite += self.c.spawn_cost
yard.next_action = ShipyardAction.SPAWN
def spawn_threshold():
threshold = self.save_for_converting
if (self.step <= BEGINNING_PHRASE_END_STEP or
self.num_ships <= MAX_SHIP_NUM):
threshold += self.c.spawn_cost
else:
threshold += MIN_HALITE_TO_BUILD_SHIP
return threshold
# Too many ships.
mx = max_ship_num()
if self.num_ships >= max_ship_num():
return
# TODO(wangfei): use stage
# No more ships after ending.
if self.num_ships >= 3 and self.step >= 280:
return
random.shuffle(self.shipyards)
for shipyard in self.shipyards:
# Only skip for the case where I have any ship.
if self.num_ships and self.me_halite < spawn_threshold():
continue
spawn(shipyard)
# One ship at a time
break
def final_stage_back_to_shipyard(self):
MARGIN_STEPS = 7
MIN_HALITE_TO_YARD = 10
def ship_and_dist_to_yard():
for ship in self.my_idle_ships:
if ship.halite <= MIN_HALITE_TO_YARD:
continue
_, yard = self.get_nearest_home_yard(ship.cell)
if yard:
dist = self.manhattan_dist(ship, yard)
yield dist, ship, yard
if not self.me.shipyard_ids:
return
ship_dists = list(ship_and_dist_to_yard())
if not ship_dists:
return
for min_dist, ship, min_dist_yard in ship_dists:
if self.step + min_dist + MARGIN_STEPS > self.c.episode_steps:
self.assign_task(ship, min_dist_yard.cell, ShipTask.RETURN)
def spawn_if_shipyard_in_danger(self):
"""Spawn ship if enemy nearby my shipyard and no ship's next_cell on this
shipyard."""
if self.step >= ENDING_PHRASE_STEP:
return
ship_next_positions = {
ship.next_cell.position
for ship in self.ships
if ship.next_action != ShipAction.CONVERT
}
def is_shipyard_in_danger(yard):
# If there is one of my ship will be the on yard in the next round.
if yard.position in ship_next_positions:
return False
return yard.is_in_danger
def spawn(yard):
self.cost_halite += self.c.spawn_cost
yard.next_action = ShipyardAction.SPAWN
for yard in self.shipyards:
# Skip shipyard already has action.
if yard.next_action:
continue
if is_shipyard_in_danger(yard) and self.me_halite >= self.c.spawn_cost:
print('spawn for danger: y=', yard.position, 'in_danger=',
is_shipyard_in_danger(yard))
spawn(yard)
  def print_info(self):
    """Prints a per-step debug summary: halite stats, players, task mix."""

    def mean_cargo(player):
      num_ships = len(player.ship_ids)
      if num_ships == 0:
        return 0
      return int(cargo(player) / num_ships)

    def print_player(player, end='\n'):
      # s=ships, y=yards, h=bank halite, c=cargo, mc=mean cargo per ship.
      num_ships = len(player.ship_ids)
      num_shipyards = len(player.shipyard_ids)
      print(' p[%s](s=%s, y=%s, h=%s, c=%s, mc=%s)' %
            (player.id, num_ships, num_shipyards, player.halite, cargo(player),
             mean_cargo(player)),
            end=end)

    def print_ship_task_type():
      # Histogram of current ship task assignments.
      task = Counter()
      for ship in self.ships:
        task[ship.task_type] += 1
      items = sorted(task.items(), key=lambda x: x[0].name)
      print(", ".join("%s=%s(%.0f%%)" % (k.name, v, v / self.num_ships * 100)
                      for k, v in items))

    print(
        '#%s' % self.step, 'halite(n=%s, mean=%s, std=%s)' %
        (len(self.halite_cells), int(
            self.mean_halite_value), int(self.std_halite_value)),
        'home_halite=(d=%s, cover=%.0f%%, n=%s, m=%s, n/s=%.2f)' %
        (self.home_grown_cell_dist, self.num_home_halite_cells /
         len(self.halite_cells) * 100, self.num_home_halite_cells,
         int(self.mean_home_halite), self.halite_ratio))
    print_player(self.me, end=' ')
    print("stage = %s" % self.stage)
    # Show the opponent with the largest fleet for comparison.
    enemy = sorted(self.board.opponents, key=lambda x: -(len(x.ship_ids)))[0]
    print_player(enemy, end='\n')
    print_ship_task_type()
    print()
  def halite_per_turn(self,
                      ship,
                      poi: Cell,
                      ship_to_poi,
                      poi_to_yard,
                      min_mine=1):
    """Computes the expected return for mining with optimial steps.

    Returns halite-per-turn for sending |ship| to mine cell |poi| and then
    returning home: (cargo + enemy loot + mined amount) over travel plus
    mining steps (return leg discounted by 7). Returns MIN_WEIGHT when an
    enemy sitting on |poi| would out-carry us by arrival.

    TODO(wangfei): we could use small panelty for return home dist
    to mimic the we don't want back home.
    """
    enemy_carry = 0
    if ship and has_enemy_ship(poi, self.me):
      # Dist to move to neighour cell of POI.
      dist = max(0, ship_to_poi - 1)
      # Halite will decrease if there is ship sitting on it.
      halite_left = poi.halite * HALITE_RETENSION_BY_DIST[dist]
      # Give up if my ship has more halite then enemy.
      enemy = poi.ship
      enemy_halite = poi.ship.halite + int(poi.halite - halite_left)
      # NOTE(review): the `ship and` here is redundant — the enclosing
      # branch already guarantees ship is truthy.
      if ship and ship.halite >= enemy_halite:
        return MIN_WEIGHT
      enemy_carry = enemy.halite
    # ship may be None (e.g. when scoring a candidate yard site).
    carry = ship and ship.halite or 0
    travel = ship_to_poi + poi_to_yard
    opt_steps = optimal_mining_steps(carry, poi.halite, travel)
    if opt_steps < min_mine:
      opt_steps = min_mine
    total_halite = (carry + enemy_carry +
                    (1 - HALITE_RETENSION_BY_DIST[opt_steps]) * poi.halite)
    return total_halite / (ship_to_poi + opt_steps + poi_to_yard / 7)
  def get_trapped_enemy_ships(self, max_attack_num):
    """A enemy is trapped if there're at least one ship in each quadrant.

    Yields enemy ships surrounded from enough quadrants, annotated with
    |within_home_boundary|, |quadrant_num| and up to |max_attack_num|
    |attack_ships| (lighter ships near the enemy).
    """
    # Do not attack enemy during ending.
    if self.step >= ENDING_PHRASE_STEP:
      return
    adjust = 0
    if self.num_ships >= 20:
      adjust += 1
    MAX_ATTACK_DIST = 3 + adjust
    # Larger fleets may attack with one fewer surrounding quadrant.
    MIN_ATTACK_QUADRANT_NUM = 3 - int(self.num_ships >= 35)
    # Be aggresive when grow halite.
    # if self.stage in (Stage.GROW_HALITE, Stage.SAVING):
    if self.stage in (Stage.GROW_HALITE,):
      if self.num_ships >= 17:
        MIN_ATTACK_QUADRANT_NUM = max(1, MIN_ATTACK_QUADRANT_NUM - 1)
        MIN_ATTACK_QUADRANT_NUM = max(2, MIN_ATTACK_QUADRANT_NUM)

    def is_enemy_within_home_boundary(enemy):
      """1. Within distance of 2 of any shipyard
      2. double covered by multiple shipyards.
      """
      covered = 0
      self.get_nearest_home_yard(enemy.cell)  # populate cache.
      for dist, yard in enemy.cell.nearest_home_yards:
        if dist <= HOME_YARD_COVER_DIST:
          return True
        if dist <= self.home_grown_cell_dist:
          covered += 1
      return covered >= 2

    def get_attack_ships(enemy):
      # Extra attack distance for enemy within home boundary.
      max_attack_dist = MAX_ATTACK_DIST
      if enemy.within_home_boundary:
        max_attack_dist = max(5, max_attack_dist + 1)
      for ship in self.my_idle_ships:
        # Only send ship with no halite for attack enemy outside.
        # if (self.stage == Stage.GROW_HALITE
        # and (not enemy.within_home_boundary)
        # and ship.halite >= 10):
        # continue
        dist = self.manhattan_dist(ship, enemy)
        if dist <= max_attack_dist and ship.halite < enemy.halite:
          yield dist, ship

    def annotate_by_quadrant(dist_ships, enemy):
      """Sort to make sure at least one ship is selected in each quadrant."""
      quadrants = set()
      for dist, ship in dist_ships:
        q = get_quadrant(ship.position - enemy.position)
        # q_exist=0 for the first ship seen in a quadrant, so sorting puts
        # one ship per quadrant ahead of duplicates.
        q_exist = int(q in quadrants)
        quadrants.add(q)
        yield (q_exist, dist), ship

    for enemy in self.enemy_ships:
      enemy.within_home_boundary = is_enemy_within_home_boundary(enemy)
      dist_ships = get_attack_ships(enemy)
      dist_ships = list(annotate_by_quadrant(dist_ships, enemy))
      dist_ships.sort(key=lambda x: x[0])
      quadrant_num = len({
          get_quadrant(ship.position - enemy.position) for _, ship in dist_ships
      })
      # Reduce quadrant_num for home boundary enemy.
      min_attack_quadrant_num = MIN_ATTACK_QUADRANT_NUM
      # if enemy.within_home_boundary and self.stage == Stage.GROW_HALITE:
      # min_attack_quadrant_num -= 1
      if quadrant_num >= min_attack_quadrant_num:
        enemy.quadrant_num = quadrant_num
        enemy.attack_ships = [ship for _, ship in dist_ships][:max_attack_num]
        yield enemy
def get_ship_halite_pairs(self, ships, halites):
CHECK_TRAP_DIST = 4
enemy_gradient = self.gradient_map.get_full_map_enemy_gradient(
min_halite=10)
for poi_idx, cell in enumerate(halites):
for ship_idx, ship in enumerate(ships):
# Do not go to halite with too many enemy around.
dist = self.manhattan_dist(ship, cell)
if dist <= CHECK_TRAP_DIST:
if enemy_gradient[cell.position.x, cell.position.y] >= 350:
continue
yield ship_idx, poi_idx
def optimal_assignment(self):
    """Assign every idle ship to exactly one target via bipartite matching.

    Candidate targets (POIs) are concatenated in column order: mineable
    halite cells, shipyard cells (duplicated so several ships may return to
    one yard), cells of trapped enemy ships (duplicated per allowed
    attacker), and offended shipyard cells.  A value matrix C[ship, poi] is
    filled and scipy's solver picks the maximum-weight assignment; each
    matched pair is then turned into a ShipTask via assign_task().
    """
    ATTACK_PER_ENEMY = 6  # max attackers matched to one trapped enemy
    SHIPYARD_DUPLICATE_NUM = 4  # ships allowed to share one shipyard target

    def shipyard_duplicate_num():
        # In the ending phase only one ship may target each shipyard.
        if self.step >= ENDING_PHRASE_STEP:
            return 1
        return SHIPYARD_DUPLICATE_NUM

    ships = list(self.my_idle_ships)
    halites = [c for c in self.halite_cells if c.halite >= c.keep_halite_value]
    # Allowed (ship_idx, halite_idx) edges; pairs absent here get MIN_WEIGHT.
    ship_halite_pairs = set(self.get_ship_halite_pairs(ships, halites))

    # Shipyards is duplicated to allow multiple ships having a same target.
    shipyards = [y.cell for y in self.shipyards] * shipyard_duplicate_num()

    # Attack enemy.
    trapped_enemy_ships = list(self.get_trapped_enemy_ships(ATTACK_PER_ENEMY))
    enemy_cells = [e.cell for e in trapped_enemy_ships] * ATTACK_PER_ENEMY
    attack_pairs = {
        (s.id, e.id) for e in trapped_enemy_ships for s in e.attack_ships
    }

    # Guard shipyard.
    offended_shipyards = list(self.get_offended_shipyard())
    offended_cells = [y.cell for y, _ in offended_shipyards]
    # NOTE: 'ships' inside this comprehension is the yard's defend-ship list,
    # not the outer idle-ship list (comprehension scope keeps it local).
    guard_paris = {
        (s.id, y.id) for y, ships in offended_shipyards for s in ships
    }

    pois = halites + shipyards + enemy_cells + offended_cells

    # Column-range predicates: since pois is a concatenation, the column
    # index alone tells which kind of target a POI is.
    def is_halite_column(x):
        return x < len(halites)

    def is_shipyard_column(x):
        return len(halites) <= x < len(halites) + len(shipyards)

    def is_enemy_column(x):
        return (len(halites) + len(shipyards) <= x and
                x < len(halites) + len(shipyards) + len(enemy_cells))

    # Value matrix for ship target assignment
    # * row: ships
    # * column: halite cells + shipyards with duplicates.
    # TODO(wangfei): can we add enemy to this matrix?
    C = np.zeros((len(ships), len(pois)))
    for i, ship in enumerate(ships):
        for j, poi in enumerate(pois):
            # Init distances: from ship to POI and POI to the nearest yard.
            ship_to_poi = manhattan_dist(ship.position, poi.position, self.c.size)
            poi_to_yard, min_yard = self.get_nearest_home_yard(poi)
            if min_yard is None:
                poi_to_yard = 1
            if is_halite_column(j):
                if (i, j) not in ship_halite_pairs:
                    v = MIN_WEIGHT
                else:
                    # If the target is a halite cell, with enemy considered.
                    v = self.halite_per_turn(ship, poi, ship_to_poi, poi_to_yard)
            elif is_shipyard_column(j):
                # If the target is a shipyard: value is cargo per step.
                if ship_to_poi > 0:
                    v = ship.halite / ship_to_poi
                else:
                    # The ship is on a shipyard.
                    v = 0
                # Encourage ship to go home to prepare attack.
                # if self.stage == Stage.GROW_HALITE:
                if (self.stage == Stage.GROW_HALITE and ship_to_poi <= 3 and
                        self.num_ships >= 17):
                    v *= 3
                # If have follower, let the followed ship back.
                if hasattr(ship, 'follower'):
                    v += self.c.spawn_cost
            elif is_enemy_column(j):
                # If attack enemy
                enemy = poi.ship
                v = MIN_WEIGHT  # not exists edge.
                if (ship.id, enemy.id) in attack_pairs:
                    v = (self.c.spawn_cost + enemy.halite +
                         enemy.cell.halite) / ship_to_poi
            else:
                # If shipyard is offended.
                yard = poi.shipyard
                v = MIN_WEIGHT
                if (ship.id, yard.id) in guard_paris:
                    v = (self.c.spawn_cost + self.c.convert_cost +
                         ship.halite) / (ship_to_poi or 1)
                    # If selected as guard ship, the followed ship has priority.
                    if hasattr(ship, 'follower'):
                        v += self.c.spawn_cost
            C[i, j] = v

    rows, cols = scipy.optimize.linear_sum_assignment(C, maximize=True)
    # assert len(rows) == len(ships), "ships=%s, halites=%s" % (len(ships),
    # len(halites))
    for ship_idx, poi_idx in zip(rows, cols):
        ship = ships[ship_idx]
        poi_cell = pois[poi_idx]
        # print('send ship(id=%s, p=%s, h=%s)' % (ship.id, ship.position,
        # ship.halite),
        # 'to poi_cell(p=%s, h=%s)' % (poi_cell.position,
        # poi_cell.halite))
        enemy = None
        if is_halite_column(poi_idx):
            # Already standing on the cell -> collect; otherwise travel first.
            if ship.position == poi_cell.position:
                task_type = ShipTask.COLLECT
            else:
                task_type = ShipTask.GOTO_HALITE
        elif is_shipyard_column(poi_idx):
            task_type = ShipTask.RETURN
        elif is_enemy_column(poi_idx):
            task_type = ShipTask.ATTACK_SHIP
            enemy = poi_cell.ship
        else:
            # Matched to an offended shipyard: guard it against its attacker.
            task_type = ShipTask.GUARD_SHIPYARD
            shipyard = poi_cell.shipyard
            shipyard.is_in_danger = False
            enemy = shipyard.offend_enemy
            # print('guide task: ', ship.position, poi_cell.position,
            # shipyard.offend_enemy.position)
        self.assign_task(ship, poi_cell, task_type, enemy=enemy)
def get_offended_shipyard(self):
    """Yield (shipyard, defend_ships) for each yard threatened by an enemy.

    Also resets/maintains yard.is_in_danger and records the attacker on
    yard.offend_enemy for yards that get defenders.
    """
    def shipyard_defend_dist():
        # Radius within which an enemy counts as a threat; it shrinks when
        # we have many ships or enough halite to just spawn a blocker.
        has_enough_halite = (self.me_halite >=
                             self.num_shipyards * self.c.spawn_cost)
        if self.num_ships >= 27 or has_enough_halite:
            return 2
        if len(self.me.shipyard_ids) > 1 or self.me_halite >= self.c.spawn_cost:
            return 3
        return 4

    def offend_enemy_ships(yard):
        # Enemies near this yard that we actually have to react to.
        not_enough_halite_to_spawn = self.me_halite < self.c.spawn_cost
        for enemy in self.enemy_ships:
            if self.manhattan_dist(enemy, yard) > shipyard_defend_dist():
                continue
            # If the enemy has money, then I'll just let it send it for me.
            if not_enough_halite_to_spawn or enemy.halite == 0:
                yield enemy

    def get_defend_ships(yard, enemy, enemy_to_yard_dist):
        # Ships that reach the yard strictly earlier than the enemy, or tie
        # while being lighter than a laden enemy (or both being empty).
        for ship in self.my_idle_ships:
            dist_to_yard = self.manhattan_dist(ship, yard)
            if (dist_to_yard < enemy_to_yard_dist or
                    (dist_to_yard == enemy_to_yard_dist and
                     (enemy.halite > 0 and ship.halite < enemy.halite or
                      enemy.halite == 0 and ship.halite == 0))):
                # print('defend enemy(%s) by ship(%s, %s)' % (enemy.position, ship.id, ship.position))
                yield ship

    for yard in self.shipyards:
        yard.is_in_danger = False
        min_enemy_dist, enemy = self.find_nearest_enemy(yard.cell,
                                                        offend_enemy_ships(yard))
        if enemy is None:
            continue

        # No need guard shipyard if enemy has halite (by turn order, spawn comes
        # before collision)
        if yard.next_action == ShipyardAction.SPAWN and enemy.halite > 0:
            continue

        defend_ships = list(get_defend_ships(yard, enemy, min_enemy_dist))
        for ship in defend_ships:
            dist_to_yard = self.manhattan_dist(ship, yard)
            # If my move away is still more near than enemy, not in danger.
            if dist_to_yard + 1 <= min_enemy_dist - 1:
                continue
            yard.is_in_danger = True

        if defend_ships:
            yard.offend_enemy = enemy
            yield yard, defend_ships
def update_ship_follower(self):
    """Tag every tailed idle ship with its pursuer so it can be sent home."""
    detector = self.follower_detector
    for ship in self.my_idle_ships:
        if not detector.is_followed(ship):
            continue
        _, home_yard = self.get_nearest_home_yard(ship.cell)
        if home_yard:
            # Record the pursuer; optimal_assignment boosts the return-home
            # value for any ship that carries a 'follower' attribute.
            ship.follower = detector.get_follower(ship)
def clear_spawn_ship(self):
    """Cancel SPAWN on any of my shipyards that one of my ships will move onto."""
    my_id = self.me.id
    for ship in self.ships:
        target = ship.next_cell
        if not target:
            continue
        yard_spawning = (target.shipyard_id and
                         target.shipyard.player_id == my_id and
                         target.shipyard.next_action == ShipyardAction.SPAWN)
        if yard_spawning:
            # Free the yard so the incoming ship (e.g. one returning home
            # with a follower) can dock instead of a fresh spawn.
            target.shipyard.next_action = None
def convert_trapped_ship_to_shipyard(self):
    """Convert a cargo-heavy, surrounded ship into a shipyard as a last resort."""
    MIN_TRAPPED_HALITE = 240  # only worth converting with this much cargo at stake

    def is_ship_trapped(ship):
        # Count neighbour cells occupied by a lighter enemy, and (separately)
        # neighbour cells whose enemy gradient is at least one spawn cost.
        # Trapped = 4 lighter enemies, or 3 plus exactly one danger cell.
        enemy_nearby_count = 0
        danger_cell_count = 0
        enemy_gradient = self.gradient_map.get_enemy_gradient(ship.cell,
                                                              halite=ship.halite,
                                                              broadcast_dist=1,
                                                              max_dist=2)
        for cell in get_neighbor_cells(ship.cell):
            if has_enemy_ship(cell, self.me):
                enemy = cell.ship
                if enemy.halite < ship.halite:
                    enemy_nearby_count += 1
                    continue
            if enemy_gradient[cell.position.x,
                              cell.position.y] >= self.c.spawn_cost:
                danger_cell_count += 1
        return (enemy_nearby_count == 4 or
                (enemy_nearby_count == 3 and danger_cell_count == 1))

    def has_enough_halite(ship):
        # The ship's cargo counts toward the conversion cost.
        return ship.halite + self.me_halite >= self.c.convert_cost

    for ship in self.ships:
        if (ship.halite >= MIN_TRAPPED_HALITE and
                ship.next_action != ShipAction.CONVERT and is_ship_trapped(ship) and
                has_enough_halite(ship)):
            ship.next_action = ShipAction.CONVERT
            # Track only the halite spent beyond what the cargo covers.
            self.cost_halite += (self.c.convert_cost - ship.halite)
            print("Convert ship in danger %s at %s h=%s for trapped." %
                  (ship.id, ship.position, ship.halite))
def execute(self):
    """Run one full turn of decision making, in dependency order."""
    self.save_for_converting = 0
    self.collect_game_info()
    if self.first_shipyard_set:
        # Normal turn: convert/spawn/attack first, then match ships to targets.
        self.convert_shipyard()
        self.update_ship_follower()
        self.spawn_ships()
        self.bomb_enemy_shipyard()
        self.final_stage_back_to_shipyard()
        self.optimal_assignment()
    else:
        # Opening: nothing else matters until the first shipyard exists.
        self.convert_first_shipyard()
    # Resolve concrete moves, then post-process spawn/convert decisions.
    self.compute_ship_moves()
    self.spawn_if_shipyard_in_danger()
    self.clear_spawn_ship()
    self.convert_trapped_ship_to_shipyard()
    if not self.simulation:
        self.print_info()
# Module-level singleton: the strategy object keeps its state across turns.
STRATEGY = ShipStrategy()


@board_agent
def agent(board):
    # Entry point invoked by the game runner once per turn.
    STRATEGY.update(board)
    STRATEGY.execute()
| [
"wang.flynn@gmail.com"
] | wang.flynn@gmail.com |
e65cbf5b64816e289ebbda33044e7070ef649a39 | a5e591dc09e11e88af56fb5a881fae064fb9c495 | /recruitment/recruitment/doctype/sds/test_sds.py | c6cce9a11e7909f1af24d4e5044701cb9cfd6ede | [
"MIT"
] | permissive | barathprathosh/recruitment | 6b61dd1ee9c0b9d7851b0b3e5bab307f7ee2d1b5 | 9660944856e72288e47960e6802ec97a220a656d | refs/heads/master | 2020-04-29T03:03:51.722972 | 2019-03-15T08:58:32 | 2019-03-15T08:58:32 | 175,794,797 | 0 | 0 | NOASSERTION | 2019-03-15T10:00:32 | 2019-03-15T10:00:31 | null | UTF-8 | Python | false | false | 247 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, VHRS and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('SDS')
class TestSDS(unittest.TestCase):
    # Placeholder TestCase for the SDS doctype; no assertions implemented yet.
    pass
| [
"abdulla.pi@voltechgroup.com"
] | abdulla.pi@voltechgroup.com |
1c0bbd4904def21b4a63324dbea15faf22ec77eb | 4bf3f5e1400bdd1f0727ead74c88b6dd09a8dc03 | /Leoncmdb/urls.py | 2c829c4da837f40778a10c9d76bba2c0d4ffc732 | [] | no_license | LeonYanwj/LeonCMDB | d913d3066c5968270724c77e8cea8a31247370c3 | bbab2cb11687c342c46a261647ce8afc1913dd8d | refs/heads/master | 2021-06-19T06:39:09.468203 | 2020-07-28T07:41:43 | 2020-07-28T07:41:43 | 201,918,797 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,063 | py | """Leoncmdb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from asset import urls as asset_urls
urlpatterns = [
    # Django admin site.
    url(r'^admin/', admin.site.urls),
    # All asset-app routes live under /asset/.
    url(r'^asset/',include(asset_urls)),
    # Auth and landing pages served directly by the asset app's views.
    url(r'^login.html/$',asset_urls.views.login),
    url(r'^logout/$',asset_urls.views.logout),
    url(r'^index.html/$',asset_urls.views.index),
    url(r'^$',asset_urls.views.root),
]
| [
"1908175252@qq.com"
] | 1908175252@qq.com |
a697dce54965d918f7b330653707e0336ac916cc | 4bd4bacecee33cada173e427b5ecb1d758bafaad | /src/scalarizr/storage2/filesystems/ext3.py | db6c14a7eb67d5266be90987c0ab7e3c2a861102 | [] | no_license | kenorb-contrib/scalarizr | 3f2492b20910c42f6ab38749545fdbb79969473f | 3cc8b64d5a1b39c4cf36f5057f1a6a84a9a74c83 | refs/heads/master | 2022-11-26T10:00:58.706301 | 2017-11-02T16:41:34 | 2017-11-02T16:41:34 | 108,550,233 | 0 | 2 | null | 2020-07-24T11:05:36 | 2017-10-27T13:33:46 | Python | UTF-8 | Python | false | false | 1,840 | py | """
Created on Aug 29, 2012
@author: marat
"""
from scalarizr import storage2
from scalarizr.storage2 import filesystems
E2LABEL_EXEC = "/sbin/e2label"
RESIZE2FS_EXEC = "/sbin/resize2fs"
E2FSCK_EXEC = "/sbin/e2fsck"
MAX_LABEL_LENGTH = 16
class ExtFileSystem(filesystems.FileSystem):
features = filesystems.FileSystem.features.copy()
features['umount_on_resize'] = True
error_messages = filesystems.FileSystem.error_messages.copy()
error_messages['fsck'] = 'Error occured during filesystem check on device %s'
os_packages = ('e2fsprogs', )
def mkfs(self, device, *short_args):
short_args = list(short_args)
short_args += list(opt for opt in ('-F', '-q') if opt not in short_args)
super(ExtFileSystem, self).mkfs(device, *short_args)
def resize(self, device, size=None, *short_args, **long_kwds):
cmd = (E2FSCK_EXEC, '-fy', device)
rcode = filesystems.system(cmd, raise_exc=False,
error_text=self.error_messages['fsck'] % device)[2]
if rcode not in (0, 1):
raise storage2.StorageError('Fsck failed to correct file system errors')
cmd = (RESIZE2FS_EXEC, device)
filesystems.system(cmd, error_text=self.error_messages['resize'] % device)
def set_label(self, device, label):
cmd = (E2LABEL_EXEC, device, label[:MAX_LABEL_LENGTH])
filesystems.system(cmd, error_text=self.error_messages['set_label'] % device)
def get_label(self, device):
cmd = (E2LABEL_EXEC, device)
return filesystems.system(cmd, error_text=self.error_messages['get_label'] % device)[0].strip()
class Ext3FileSystem(ExtFileSystem):
type = 'ext3'
storage2.filesystem_types[Ext3FileSystem.type] = Ext3FileSystem
| [
"kenorb@users.noreply.github.com"
] | kenorb@users.noreply.github.com |
3a06d395571c615334aaecec346f30c1be3640f5 | 424dcf9cdc6d564f1af94882ecbea0d9491214d1 | /models/report.py | cf34ae5c6fcca6f153500c7c00c80552a7813a2b | [] | no_license | nzaniela/ratecard | 9780896fb2f8fbc12a7088ce7aa777073f2df5fa | d5f108ff95ea9877803e925cc1a05476f0faa079 | refs/heads/master | 2021-07-06T23:44:40.322101 | 2017-10-01T10:06:57 | 2017-10-01T10:06:57 | 105,432,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,290 | py | from openerp import models, fields
class ir_actions_report(models.Model):
    """Extend report actions so a report can be restricted by receipt book
    (journal) and voucher type, and looked up with sensible fallbacks."""
    _inherit = 'ir.actions.report.xml'

    # Receipt books (journals) this report configuration applies to.
    journal_ids = fields.Many2many(
        'account.journal', 'report_configuration_receiptbook_rel',
        'report_configuration_id', 'receiptbook_id', 'ReceiptBooks')
    # Which voucher kind the report is meant for.
    voucher_type = fields.Selection(
        [('payment', 'Payment'), ('receipt', 'Receipt')], 'Voucher Type', )

    def get_domains(self, cr, model, record, context=None):
        """Return search domains used to pick a report for *record*.

        For 'ratecard.multiple' records, domains are appended from most to
        least specific: exact type+journal, journal only, type only, and
        finally a catch-all with neither set.
        """
        domains = super(ir_actions_report, self).get_domains(
            cr, model, record, context=context)
        if model == 'ratecard.multiple':
            # Search for specific report
            domains.append([('voucher_type', '=', record.type),
                            ('journal_ids', '=', record.journal_id.id)])
            # Search without type
            domains.append(
                [('voucher_type', '=', False), ('journal_ids', '=', record.journal_id.id)])
            # Search without journal and with type
            domains.append(
                [('voucher_type', '=', record.type), ('journal_ids', '=', False)])
            # Search without journal and without type
            domains.append(
                [('voucher_type', '=', False), ('journal_ids', '=', False)])
        return domains
| [
"naistechnologies.com@gmail.com"
] | naistechnologies.com@gmail.com |
e2dd3fc6ed5023653573ada2255327ccf464b401 | d694a99c910ce36c8d6981e126548fc91e74046e | /Regression/regression.py | 60bd820898953a66d0998f401bded14011439700 | [] | no_license | YiddishKop/ml_src_adam_compare | 08ac23cf1fb02222da1f04e833e296b1b75ae9af | cfeadebd41f802686828958068c15bcfdfea0be9 | refs/heads/master | 2020-03-25T22:16:19.581690 | 2018-08-09T23:37:31 | 2018-08-09T23:37:31 | 144,213,684 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,177 | py | import numpy as np
import tensorflow as tf
from math import sqrt
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, normalization
from keras.backend.tensorflow_backend import set_session
# Limit TensorFlow to half of the GPU memory so the GPU can be shared.
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.5
set_session(tf.Session(config = config))
class Model(object):
    """Feed-forward regression model over the saved training arrays.

    Loads x_train.npy / y_train.npy, expands the raw features with squares
    and pairwise cross terms, then trains a Keras dense network and writes
    per-sample prediction errors to the 'result' file.  (Python 2 syntax.)
    """
    def __init__(self):
        # Raw feature matrix and targets produced by the preprocessing step.
        self.x = np.load("x_train.npy")
        self.y = np.load("y_train.npy")

    def select_training_data(self):
        """Expand features, then split rows past index 18000 off as validation."""
        self.x_train, self.y_train = np.array(self.x), np.array(self.y)
        self.add_x()
        n = 18000  # first n rows train, the remainder validates
        self.x_valid = self.x[n:]
        self.y_valid = self.y[n:]
        self.x = self.x[:n]
        self.y = self.y[:n]
        print "x_train =", self.x.shape, self.x_valid.shape
        print "y_train =", self.y.shape, self.y_valid.shape

    def add_x(self):
        """Append engineered columns to self.x: squares of selected columns,
        all pairwise cross terms, and a trailing bias column of ones."""
        grade0 = [0, 1, 6, 7]  # columns squared (and cubed) below
        grade1 = [2, 3, 8, 9, 10]  # NOTE(review): grade1 is never used
        square = np.square(self.x[:, grade0])
        sqrt = np.sqrt(self.x[:, :])  # NOTE(review): sqrt and cube are computed but never concatenated
        n = self.x_train.shape[1]
        cross_term = np.empty([self.x.shape[0], n*(n-1)/2])
        s = 0
        # Fill one column per unordered pair (i, j), i < j.
        for i in range(n-1):
            for j in range(i+1, n):
                cross_term[:, s] = self.x[:, i] * self.x[:, j]
                s += 1
        cube = np.power(self.x[:, grade0], 3)
        self.x = np.concatenate([self.x, square], 1)
        self.x = np.concatenate([self.x, cross_term], 1)
        self.x = np.concatenate([self.x, np.ones([self.x.shape[0], 1])], 1)
        print self.x.shape

    def build_nn_model(self):
        """Build/train the dense network, save it, and report errors on a
        training slice and on the validation split."""
        nn_model = Sequential()
        nn_model.add(Dense(input_dim = self.x.shape[1], output_dim = 1000))
        nn_model.add(Activation('relu'))
        nn_model.add(Dense(output_dim = 1000))
        nn_model.add(Activation('relu'))
        nn_model.add(Dense(output_dim = 1000))
        nn_model.add(Dense(output_dim = 1))
        nn_model.summary()
        opt = optimizers.Adam(lr = 0.0001, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay=0.0)
        #opt = optimizers.SGD(lr = 10E-5)
        nn_model.compile(loss = 'mean_squared_error', optimizer = opt, metrics = ['accuracy'])
        # NOTE(review): nb_epoch = 0 means fit() performs no training passes.
        nn_model.fit(self.x, self.y, batch_size = 100, nb_epoch = 0, shuffle = True, validation_data = (self.x_valid, self.y_valid))
        nn_model.save('model.h5')
        fout = open("result", 'w')
        self.result = nn_model.predict(self.x[:5000])
        self.output_result(fout, self.y[:5000])
        self.result = nn_model.predict(self.x_valid)
        self.output_result(fout, self.y_valid)

    def output_result(self, fout, y_true): # write file
        """Write per-sample prediction vs. truth to *fout* and print the
        average relative error and RMS error over the kept samples."""
        fout.write("y_pred, y_train, error, rms_error\n")
        ave_error = 0
        rms_error = 0
        count = self.result.shape[0]
        for i in range(self.result.shape[0]):
            # NOTE(review): filter uses self.y[i], not y_true[i] — likely
            # unintended for the validation call; confirm before relying on it.
            if self.y[i] > 0:
                err1 = np.abs((self.result[i][0] - y_true[i]))/y_true[i]#self.y[i][0]
                ave_error += err1
                err2 = np.square((self.result[i][0] - y_true[i]))
                rms_error += err2
                fout.write("%.2f" %(self.result[i][0]) + " - " + "%.2f" %(y_true[i]) + " - ")
                fout.write("%.2f" %(err1*100) + ", %.2f" %(err2) + "\n")
            else:
                count -= 1
        ave_error = ave_error / float(count)
        rms_error = sqrt(rms_error / float(count))
        print "Number =", count
        print "Ave error = %.3f" %(ave_error * 100), "%"
        print "RMS error = %.3f" %(rms_error)
# Script entry: load the data, engineer features, then build and evaluate.
model = Model()
model.select_training_data()
model.build_nn_model()
| [
"yiddishkop@163.com"
] | yiddishkop@163.com |
e7a36e27e6d971147dc3a9e2f484d56baf29118a | 93ced759b43be7aad6b0fe55fc7752b6db2bf0e8 | /forecast-ml/ml_code/ml_process/ml_core.py | ad066f97da6370281036971c72c038ed207bcb6a | [] | no_license | indrajitkumar/Forcast-ML | f13b2e064fd32ecd24a6fa9a2410c1721edc55a5 | 154312df0e744bbf852146aa7cbe28074b80c89d | refs/heads/master | 2020-09-05T19:39:30.469526 | 2019-11-27T09:51:56 | 2019-11-27T09:51:56 | 220,195,198 | 0 | 0 | null | 2019-11-07T09:15:03 | 2019-11-07T09:09:58 | HTML | UTF-8 | Python | false | false | 3,650 | py | # Libraries
import numpy as np
import pandas as pd
import pickle
from matplotlib import pyplot as plt
from sklearn import metrics
from sklearn import model_selection
#from sklearn import preprocessing
from sklearn.cluster import KMeans
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
#from sklearn.linear_model import Ridge
from sklearn.externals import joblib
# Importing Dataset
data = pd.read_csv('ml_code/ml_process/dataset.csv')
f1 = data['back_camera'].values
f2 = data['front_camera'].values
f3 = data['resolution_1'].values
f4 = data['resolution_2'].values
f5 = data['screen_size'].values
f6 = data['battery'].values
f7 = data['price'].values
f8 = data['prebook'].values
f9 = data['sales'].values
X = np.array(list(zip(f1, f2, f3, f4, f5, f6, f7, f8, f9)))
X2 = np.array(list(zip(f1, f2, f3, f4, f5, f6, f7, f8)))
# K-Means clustering
k = 2
clf = KMeans(init='k-means++', n_clusters=k, n_init=10)
clf = clf.fit(X)
labels = clf.predict(X)
C = clf.cluster_centers_
print(C)
# Plotting Results
colors = ['r', 'c']
fig, ax = plt.subplots()
for i in range(k):
points = np.array([X[j] for j in range(len(X)) if labels[j] == i])
ax.scatter(points[:, 7], points[:, 8], s=7, c=colors[i])
ax.scatter(C[:, 7], C[:, 8], marker='*', s=300, c='#050505')
print("Silhouette Score: %.7f" % (metrics.silhouette_score(X, labels, metric='euclidean')))
y = clf.labels_
# print(y)
data2 = data.drop('sales', axis=1)
data2['y'] = y
# Logistic Regression
X_train, X_test, y_train, y_test = train_test_split(X2, y, test_size=0.3, random_state=0)
logreg = LogisticRegression()
logreg.fit(X_train, y_train)
print('Accuracy of logistic regression on training set: {:.2f}'.format(logreg.score(X_train, y_train)))
print('Accuracy of logistic regression on test set: {:.2f}'.format(logreg.score(X_test, y_test)))
kfold = model_selection.KFold(n_splits=10, random_state=7)
results = model_selection.cross_val_score(logreg, X_train, y_train, cv=kfold, scoring="accuracy")
print("10-fold cross validation average accuracy: %.3f" % (results.mean()))
X_new = [[5, 5, 800, 800, 5 ,3000, 300, 350]]
y_pred = logreg.predict(X_new)
print("Cluster Assigned: %d" % y_pred)
data3 = data2.loc[data2['y'] == y_pred[0]]
data3 = data3.drop('y', axis=1)
data3['sales'] = data['sales']
# Linear Regression
X3 = data3.iloc[:, :-1].values
y2 = data3.iloc[:, 8].values
# Ridge Regularization
'''
ridge = Ridge(alpha=1.0)
ridge.fit(X3, y2)
'''
X2_train, X2_test, y2_train, y2_test = train_test_split(X3, y2, test_size=0.3, random_state=0)
# Scaler
'''
scaler = preprocessing.StandardScaler().fit(X2_train)
scaler.transform(X2_train)
scaler.transform(X2_test)
'''
lreg = LinearRegression(fit_intercept=True, normalize = True)
lreg.fit(X2_train, y2_train)
print('Accuracy of linear regression on training set: {:.2f}'.format(lreg.score(X2_train, y2_train)))
print('Accuracy of linear regression on test set: {:.2f}'.format(lreg.score(X2_test, y2_test)))
y_pred2 = lreg.predict(X_new)
print("Predicted Sales: %.3f" % (y_pred2[0]))
#fig,ax = plt.subplots()
#ax.scatter(data2['quarter'], data2['sales'], marker='*', s=300, c='#050505')
# Saving the Logistic Regression Model
classifier_model = pickle.dumps(clf)
logistic_regression_model = pickle.dumps(logreg)
linear_regression_model = pickle.dumps(lreg)
# Saving the model to a file
joblib.dump(classifier_model, 'ml_code/clustering_model.pkl')
joblib.dump(logistic_regression_model, 'ml_code/logistic_regression_model.pkl')
joblib.dump(linear_regression_model, 'ml_code/linear_regression_model.pkl') | [
"noreply@github.com"
] | indrajitkumar.noreply@github.com |
5497665447fb033386b2092a63fbef7149fd845b | dddd89637373f455a476431f4fcb7e17b4e9dd57 | /py/display.py | 85bcfe46e59c909fad72f4b04abaf084a127d399 | [] | no_license | DhirManish/Python | 35304eb47dea61934426fb6fc5094e1a83517cf3 | 10df7245d0964340d6c8d14cf26a9cf8f93ecf5d | refs/heads/master | 2020-06-05T07:09:41.856780 | 2015-03-07T12:53:10 | 2015-03-07T12:53:10 | 20,372,496 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 991 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# display.py
#
# Copyright 2014 Ajay Bhatia <ajay@dumb-box>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import sys
def main(fname):
    """Print the contents of the file at *fname* to standard output.

    The file is opened in a ``with`` block so the handle is always closed
    (the original opened the file and never closed it), and lines are
    written verbatim — they keep their own newlines — which behaves the
    same on Python 2 and 3, unlike the old ``print line,`` statement.
    """
    with open(fname) as fh:
        for line in fh:
            sys.stdout.write(line)
if __name__ == '__main__':
    # Usage: display.py <file>; prints the named file to stdout.
    main(sys.argv[1])
| [
"prof.ajaybhatia@gmail.com"
] | prof.ajaybhatia@gmail.com |
51d3cd83c17924f57928febd4c77f7e11a693a64 | ac42f1d918bdbd229968cea0954ed75250acd55c | /admin/dashboard/openstack_dashboard/test/integration_tests/tests/test_credentials.py | 45c7f9956f3bb90d941ff841ff21f9390cc0aa7a | [
"Apache-2.0"
] | permissive | naanal/product | 016e18fd2f35608a0d8b8e5d2f75b653bac7111a | bbaa4cd60d4f2cdda6ce4ba3d36312c1757deac7 | refs/heads/master | 2020-04-03T22:40:48.712243 | 2016-11-15T11:22:00 | 2016-11-15T11:22:00 | 57,004,514 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,250 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.test import firefox_binary
from openstack_dashboard.test.integration_tests import helpers
from os import listdir
from os.path import join
from os import remove
class TestDownloadRCFile(helpers.AdminTestCase):
_directory = firefox_binary.WebDriver.TEMPDIR
_openrc_template = "-openrc.sh"
def setUp(self):
super(TestDownloadRCFile, self).setUp()
username = self.TEST_USER_NAME
tenant_name = self.HOME_PROJECT
projects_page = self.home_pg.go_to_identity_projectspage()
tenant_id = projects_page.get_project_id_from_row(tenant_name)
self.actual_dict = {'OS_USERNAME': username,
'OS_TENANT_NAME': tenant_name,
'OS_TENANT_ID': tenant_id}
def test_download_rc_v2_file(self):
"""This is a basic scenario test:
Steps:
1) Login to Horizon Dashboard as admin user
2) Navigate to Project > Compute > Access & Security > API Access tab
3) Click on "Download OpenStack RC File v2.0" button
4) File named by template "<tenant_name>-openrc.sh" must be downloaded
5) Check that username, tenant name and tenant id correspond to current
username, tenant name and tenant id
"""
api_access_page = self.home_pg.\
go_to_compute_accessandsecurity_apiaccesspage()
api_access_page.download_openstack_rc_file(
2, self._directory, self._openrc_template)
cred_dict = api_access_page.get_credentials_from_file(
2, self._directory, self._openrc_template)
self.assertEqual(cred_dict, self.actual_dict)
def test_download_rc_v3_file(self):
"""This is a basic scenario test:
Steps:
1) Login to Horizon Dashboard as admin user
2) Navigate to Project > Compute > Access & Security > API Access tab
3) Click on "Download OpenStack RC File v3" button
4) File named by template "<tenant_name>-openrc.sh" must be downloaded
5) Check that username, project name and project id correspond to
current username, tenant name and tenant id
"""
api_access_page = self.home_pg.\
go_to_compute_accessandsecurity_apiaccesspage()
api_access_page.download_openstack_rc_file(
3, self._directory, self._openrc_template)
cred_dict = api_access_page.get_credentials_from_file(
3, self._directory, self._openrc_template)
self.assertEqual(cred_dict, self.actual_dict)
def tearDown(self):
super(TestDownloadRCFile, self).tearDown()
remove(join(self._directory, listdir(self._directory)[0]))
| [
"rajagopalx@gmail.com"
] | rajagopalx@gmail.com |
618f9bcd1ac00d548731eda3fafa12277cda04f6 | 6265e456db89ef370b694a39579cf948663254e6 | /share/qt/extract_strings_qt.py | 0b0e92289975c7b8a76e9d798ccdf9e7423af19a | [
"MIT"
] | permissive | greencoincc/greencoin | ebc4a25c785fd72debd9b3db6704463ce651d460 | 74e40729dd2af31351da0ce00572accfd90ea984 | refs/heads/master | 2020-09-26T16:31:02.769967 | 2019-12-06T09:27:21 | 2019-12-06T09:27:21 | 226,291,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,170 | py | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from __future__ import division,print_function,unicode_literals
from subprocess import Popen, PIPE
import glob
import operator
import os
import sys
OUT_CPP="qt/greencoinstrings.cpp"  # generated C++ file picked up by Qt linguist
EMPTY=['""']  # the PO representation of an empty msgid
def parse_po(text):
    """Parse the 'po' output of xgettext.

    Returns a list of (msgid, msgstr) pairs, where each side is the list of
    raw quoted lines making up that entry (continuation lines included).
    """
    entries = []
    cur_id, cur_str = [], []
    state = None  # None (outside), 'id', or 'str'
    for raw in text.split('\n'):
        raw = raw.rstrip('\r')
        if raw.startswith('msgid '):
            # A new entry begins; flush the previous one if it completed.
            if state == 'str':
                entries.append((cur_id, cur_str))
            state = 'id'
            cur_id = [raw[6:]]
        elif raw.startswith('msgstr '):
            state = 'str'
            cur_str = [raw[7:]]
        elif raw.startswith('"'):
            # Quoted continuation line belongs to whichever part is open.
            if state == 'id':
                cur_id.append(raw)
            elif state == 'str':
                cur_str.append(raw)
    # Flush the trailing entry, if the text ended inside a msgstr.
    if state == 'str':
        entries.append((cur_id, cur_str))
    return entries
files = sys.argv[1:]

# Equivalent shell command: xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
    print('Cannot extract strings: xgettext utility is not installed or not configured.',file=sys.stderr)
    print('Please install package "gettext" and re-run \'./configure\'.',file=sys.stderr)
    exit(1)

# Run xgettext over the given sources and capture its PO output from stdout.
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))

# Emit the extracted strings as a C array so Qt linguist can pick them up.
f = open(OUT_CPP, 'w')
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *greencoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
    if msgid != EMPTY:
        # A msgid may span several quoted lines; join them verbatim.
        f.write('QT_TRANSLATE_NOOP("greencoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
| [
"drandrewbullock@gmail.com"
] | drandrewbullock@gmail.com |
2114d60c3df5d390370a9aaff6d8ab17a9d1172a | 5665a5bb81cf43a0eb83195e9413c05e4dd3497d | /iris-ml-app.py | 5006a156a83e106e87c402509c01a3efc3de86e2 | [] | no_license | jeaggo/tc3068 | e25e39df2312304dbd9e308fa94d0953f5f6ff61 | 99c77aed1b36758f2cc505bf99770928dd023aa4 | refs/heads/master | 2022-05-01T08:12:57.007820 | 2022-04-26T22:00:06 | 2022-04-26T22:00:06 | 169,940,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,234 | py | import streamlit as st
import pandas as pd
from sklearn import datasets
from sklearn.ensemble import RandomForestClassifier
st.write("""
# Simple Iris Flower Prediction App
This app predicts the Iris flower type!
""")
st.sidebar.header('User Input Parameters')
def user_input_features():
    """Collect the four iris measurements from sidebar sliders.

    Returns a one-row pandas DataFrame whose columns are the slider names,
    in the same order the sliders are rendered.
    """
    slider_specs = (
        ('sepal_length', 4.3, 7.9, 5.4),
        ('sepal_width', 2.0, 4.4, 3.4),
        ('petal_length', 1.0, 6.9, 1.3),
        ('petal_width', 0.1, 2.5, 0.2),
    )
    values = {name: st.sidebar.slider(name, low, high, default)
              for name, low, high, default in slider_specs}
    return pd.DataFrame(values, index=[0])
# Echo the sidebar-selected measurements back to the user.
df = user_input_features()
st.subheader('User Input Parameters')
st.write(df)

# Fit a random forest on the full iris dataset.
iris = datasets.load_iris()
X = iris.data
Y = iris.target
clf = RandomForestClassifier()
clf.fit(X, Y)

# Predict class and class probabilities for the user's single input row.
prediction = clf.predict(df)
prediction_proba = clf.predict_proba(df)

st.subheader('Class labels and their corresponding index number')
st.write(iris.target_names)

st.subheader('Prediction')
st.write(iris.target_names[prediction])

st.subheader('Prediction Probability')
st.write(prediction_proba)
| [
"noreply@github.com"
] | jeaggo.noreply@github.com |
e75026675bdaf7c8eed5eec20b6c5393a22dc913 | 51bd3c0135f514d9e90d3b215192e3e02fc713b2 | /MajorProject/venv/bin/pyreverse | d344e34993f8d64b848f6b5f7cc870ec3ea9d74a | [] | no_license | ayush-ray/Consultadd | c843d35fb73f4d9c115667adebf8ea0ef3e17761 | f129c3bc5ab39c68ebbd23f417330fd90cd050dc | refs/heads/master | 2023-08-07T20:09:05.256140 | 2020-07-06T00:20:43 | 2020-07-06T00:20:43 | 276,488,466 | 0 | 0 | null | 2021-09-22T19:23:41 | 2020-07-01T21:45:41 | Python | UTF-8 | Python | false | false | 272 | #!/Users/rishti/Desktop/Consultadd/MajorProject/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_pyreverse
if __name__ == '__main__':
    # Strip setuptools wrapper suffixes from argv[0], then hand off to pylint.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pyreverse())
| [
"rishti@amazon.com"
] | rishti@amazon.com | |
8654ed8796db644dd4805d8b68137f4e06de7879 | f2b44af5372c6318a941015f64b279ccf9099a18 | /rest130/wsgi.py | 5726a659e41211d23cf13244551e776d53d36061 | [] | no_license | yuansuixin/Rest-Framework-page-view | c459ca54c1998cde4c0fe207ba6464353471cbdf | a8663c09a00ce4f4d055ca96e3132ae0a4ddea54 | refs/heads/master | 2020-03-13T06:58:12.512163 | 2018-04-25T14:09:15 | 2018-04-25T14:09:15 | 131,015,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | """
WSGI config for rest130 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rest130.settings")
application = get_wsgi_application()
| [
"cyss428@163.com"
] | cyss428@163.com |
ca44774613f769beee533722ed754a97f9cc33ad | 7c5e36ef60fe9134c74d6c790fabb52894958a38 | /implementations/trainDeepFace_torch_otherGroup.py | 511b5179d04c9ecd13ebf72641c95e3d2e20e885 | [] | no_license | tkawchak/MATH597_FacialRecognition | 63c9a7341a05ab6e1e7747674d468b8771aecabe | 93b7e41b001ac7faa1569850f3ba26e0407cdd60 | refs/heads/master | 2021-08-23T14:28:10.776641 | 2017-12-05T05:08:36 | 2017-12-05T05:08:36 | 103,861,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,865 | py | import glob
import unicodedata
import string
import torch
import torch.nn as nn
from torch.autograd import Variable
import time
import math
import random
import torch.nn.functional as F
import os
import random
import io
from PIL import Image
import torchvision.transforms as transforms
#https://stackoverflow.com/questions/33330779/whats-the-triplet-loss-back-propagation-gradient-formula
# class TripletLoss(torch.nn.Module):
#
# def __init__(self, margin=2.0):
# super(TripletLoss, self).__init__()
# self.margin = margin
#
# def forward(self, output_anchor, output_negative, output_positive, label):
# alpha = 1
# euclidean_distance_positive = F.pairwise_distance(output_anchor, output_positive)
# euclidean_distance_negative = F.pairwise_distance(output_anchor, output_negative)
# triplet_loss = torch.mean((1-label) * torch.pow(euclidean_distance, 2) +
# (label) * torch.pow(torch.clamp(self.margin - euclidean_distance, min=0.0), 2))
# Define DeepFace Class
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(3, 64, 3, padding = 1)
self.conv2 = nn.Conv2d(64, 64, 3, padding = 1)
self.conv3 = nn.Conv2d(64, 128, 3, padding = 1)
self.conv4 = nn.Conv2d(128, 128, 3, padding = 1)
self.conv5 = nn.Conv2d(128, 256, 3, padding = 1)
self.conv6 = nn.Conv2d(256, 256, 3, padding = 1)
self.conv7 = nn.Conv2d(256, 256, 3, padding = 1)
self.conv8 = nn.Conv2d(256, 512, 3, padding = 1)
self.conv9 = nn.Conv2d(512, 512, 3, padding = 1)
self.conv10 = nn.Conv2d(512, 512, 3, padding = 1)
self.conv11 = nn.Conv2d(512, 512, 3, padding = 1)
self.conv12 = nn.Conv2d(512, 512, 3, padding = 1)
self.conv13 = nn.Conv2d(512, 512, 3, padding = 1)
self.pool = nn.MaxPool2d(2, 2)
self.fc1 = nn.Conv2d(512, 4096, 3, padding = 0)
self.fc2 = nn.Conv2d(4096, 4096, 3, padding = 0)
self.fc3 = nn.Conv2d(4096, 2622, 3, padding = 0)
def forward(self, inputdata):
hidden = F.relu(self.conv1(inputdata))
hidden = self.pool(F.relu(self.conv2(hidden)))
hidden = F.relu(self.conv3(hidden))
hidden = self.pool(F.relu(self.conv4(hidden)))
hidden = F.relu(self.conv5(hidden))
hidden = F.relu(self.conv6(hidden))
hidden = self.pool(F.relu(self.conv7(hidden)))
hidden = F.relu(self.conv8(hidden))
hidden = F.relu(self.conv9(hidden))
hidden = self.pool(F.relu(self.conv10(hidden)))
hidden = F.relu(self.conv11(hidden))
hidden = F.relu(self.conv12(hidden))
hidden = self.pool(F.relu(self.conv13(hidden)))
hidden = F.relu(self.fc1(hidden))
hidden = F.relu(self.fc2(hidden))
out = F.softmax(self.fc3(hidden))
return out
def load_dataToDict(data_path):
    """Index a dataset directory laid out as <data_path>/<person>/<image>.

    Returns a pair of dicts mapping person-directory name to the list of
    file names inside it:
      * the first covers every person directory;
      * the second covers only people with more than one image (its lists
        are the *same* list objects as in the first dict).
    Non-directory entries directly under `data_path` are ignored.
    """
    everyone = {}
    multi_image = {}
    for entry in os.listdir(data_path):
        person_dir = os.path.join(data_path, entry)
        if not os.path.isdir(person_dir):
            continue
        images = list(os.listdir(person_dir))
        everyone[entry] = images
        if len(images) > 1:
            # Shared list object, matching the original aliasing behaviour.
            multi_image[entry] = images
    return everyone, multi_image
# Generates triplets such that first two elements are same and third is different (anchor, positive, negative)
def generateRandomTriplets(data_path, data_dict, data_multiInstances, size):
    """Build `size` random (anchor, positive, negative) image-path triplets.

    anchor/positive are two *different* images of the same identity, sampled
    from `data_multiInstances` (identities with >1 image); negative is an
    image of a *different* identity sampled from `data_dict`.  The chosen
    file names are also logged to 'Dataset.txt', one triplet per line.

    Requires at least one multi-image identity and at least two identities
    overall; raises ValueError otherwise (as random.randint would anyway).
    """
    # Materialise key lists once: dict views are not indexable on Python 3,
    # and this also avoids rebuilding them on every iteration.
    multi_keys = list(data_multiInstances.keys())
    all_keys = list(data_dict.keys())
    dataset = []
    # Context manager ensures the log file is flushed and closed
    # (the original opened it and never closed it).
    with open('Dataset.txt', 'w') as out:
        for _ in range(size):
            person = multi_keys[random.randint(0, len(multi_keys) - 1)]
            images = data_multiInstances[person]
            im1 = random.randint(0, len(images) - 1)
            im2 = random.randint(0, len(images) - 2)
            if im2 >= im1:  # skip-one trick: guarantees im2 != im1
                im2 = im2 + 1
            a = os.path.join(data_path, person, images[im1])
            p = os.path.join(data_path, person, images[im2])
            # BUG FIX: the original compared the negative index against the
            # anchor's index in the *multi-image* key list while indexing the
            # *full* key list, so the "negative" could be the same identity.
            # Skip over the anchor's position in the full key list instead.
            anchor_pos = all_keys.index(person)
            neg_pos = random.randint(0, len(all_keys) - 2)
            if neg_pos >= anchor_pos:
                neg_pos = neg_pos + 1
            neg_person = all_keys[neg_pos]
            neg_images = data_dict[neg_person]
            im_neg = random.randint(0, len(neg_images) - 1)
            n = os.path.join(data_path, neg_person, neg_images[im_neg])
            out.write(images[im1] + ', ' + images[im2] + ', ' + neg_images[im_neg] + '\n')
            dataset.append([a, p, n])
    return dataset
# Preprocessing pipeline applied to every image before it enters the network.
loader = transforms.Compose([
    #transforms.Scale(448), # scale imported image
    transforms.ToTensor()]) # transform it into a torch tensor

# SGD step size used by train()'s manual parameter update.
learning_rate = 0.01
def train(dataset):
    """Run one SGD pass of triplet-loss training over `dataset`.

    `dataset` is a list of [anchor_path, positive_path, negative_path]
    triplets (see generateRandomTriplets).  Images are loaded with the
    module-level `loader` transform, pushed through a fresh `Net`, and the
    parameters are updated in place with the module-level `learning_rate`.

    Returns the trained network (callers that ignored the previous None
    return are unaffected).
    """
    neural_net = Net()
    triplet_loss = nn.TripletMarginLoss(margin=1.0)
    for anchor_path, pos_path, neg_path in dataset:
        # py3-compatible print (the original used a Python-2 print statement,
        # which is a syntax error on Python 3).
        print(anchor_path, pos_path, neg_path)
        a = Variable(loader(Image.open(anchor_path))).unsqueeze(0)
        p = Variable(loader(Image.open(pos_path))).unsqueeze(0)
        n = Variable(loader(Image.open(neg_path))).unsqueeze(0)
        # Forward passes; drop the trailing 1x1 spatial dims -> (batch, classes).
        anc = neural_net(a).squeeze(3).squeeze(2)
        pos = neural_net(p).squeeze(3).squeeze(2)
        neg = neural_net(n).squeeze(3).squeeze(2)
        loss = triplet_loss(anc, pos, neg)
        # BUG FIX: gradients were never cleared between samples, so every
        # backward() accumulated onto the previous ones.
        neural_net.zero_grad()
        loss.backward()
        # Manual SGD step.  Loop variable renamed: the original reused `p`,
        # shadowing the positive-image tensor above.
        for param in neural_net.parameters():
            param.data.add_(param.grad.data, alpha=-learning_rate)
    return neural_net
data, data_simiar = load_dataToDict('lfw')
dataset = generateRandomTriplets('lfw', data, data_simiar, 10)
train(dataset)
#print data.keys()
| [
"tkawchak@gmail.com"
] | tkawchak@gmail.com |
7b1fbc5fa287f2e6a697c45a64cc5cd9b9be745d | ed3b4ed46656813573e2f5a90ef5cf1b36a724ee | /Class notes.py | 0cb0fc4b2e492aa8b5e8d41af95b8d4b4a397c71 | [] | no_license | ok6j/CSE7 | 6e4bc739fa4efad98bdda6be76fe2c1717922495 | 62eb5b7ac305f8dcb00fa500084fdec570e6e400 | refs/heads/master | 2021-09-14T23:11:02.273856 | 2018-05-21T23:23:58 | 2018-05-21T23:23:58 | 112,399,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,586 | py | # Defining a class
class Cat(object):
    """A toy cat with an appearance (color/pattern) and a mood (`state`)."""

    # TWO underscores before and after
    def __init__(self, color, pattern):
        # Appearance, then mood / needs.
        self.color, self.pattern = color, pattern
        self.state, self.hungry = "happy", False

    def jump(self):
        """A jump startles the cat."""
        self.state = "Scared"
        print("The cat jumps in he air")

    def play(self):
        """Playing calms the cat back down."""
        self.state = "happy"
        print("You can play with the cat")
# Instantiating (creating) two cats
cute_cat = Cat("brown", "spots")
cute_cat2 = Cat("grey", "no spots")
# Getting information about the cats
print(cute_cat.color)
print(cute_cat2.state)
print(cute_cat2.color)
cute_cat.jump()
print(cute_cat.state)
print(cute_cat2.state)
cute_cat.play()
print(cute_cat.state)
class Car(object):
    """A toy car whose actions depend on whether the engine is running."""

    def __init__(self, color, brand, num_of_cylinders):
        self.color = color
        # NOTE: the brand is stored under the attribute name `pattern`
        # (carried over from the Cat example); kept as-is so any caller
        # reading car.pattern keeps working.
        self.pattern = brand
        self.cylinders = num_of_cylinders
        self.engineOn = False

    def turn_on(self):
        """Start the engine; does nothing if it is already running."""
        if not self.engineOn:
            print("The engine turns on")
            self.engineOn = True
        else:
            print("Nothing Happens")  # original capitalisation preserved

    def move_forward(self):
        """Drive forward, but only while the engine is running."""
        print("You move forward" if self.engineOn else "Nothing happens")

    def turn_off(self):
        """Stop the engine; does nothing if it is already off."""
        if not self.engineOn:
            print("Nothing happens")
        else:
            print("The engine turns off")
            self.engineOn = False
mycar = Car(4, "Subaru", "Blue")
mycar.turn_on()
mycar.move_forward()
mycar.turn_off()
| [
"31050508+ok6j@users.noreply.github.com"
] | 31050508+ok6j@users.noreply.github.com |
12bc9d01f76a0e97ddab25e13875cb929b890d50 | 516157e6e97e1c122e2efeba1ee9d765c88d4ca9 | /fi2.py | 5525f1997a613005f5413fa688c0defcfc2fe1d8 | [] | no_license | AashiqRamachandran/hackers-playground | b2d699d67f66ed06f969089c46a7c3b16e606fc4 | 3cc390d4de536455459b4a2615f662699a6223b0 | refs/heads/master | 2022-11-09T00:40:19.799571 | 2020-07-04T11:14:27 | 2020-07-04T11:14:27 | 245,553,630 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,175 | py | from twitter import Twitter, OAuth, TwitterHTTPError
OAUTH_TOKEN = '1262017120717533185-HpXoDfrrKSuCCiInVaXXdnI6hNm1Ia'
OAUTH_SECRET = '6vJSvudVE3kBqZvWr6FiKllWuJzWIGXSHpP6vZURC0ulE'
CONSUMER_KEY = 'HPdeLBBAhcaGuCcN6MKd5fwk6'
CONSUMER_SECRET = 'BzQgSPProiCApnkeuB7dMsRMcOqFgE3xXsDNLnsl1qxsIBJk5s'
t = Twitter(auth=OAuth(OAUTH_TOKEN, OAUTH_SECRET, CONSUMER_KEY, CONSUMER_SECRET))
def search_tweets(q, count=100, max_id=None):
    """Search recent English-language tweets matching `q`.

    Returns the raw API response dict; `max_id` pages backwards from a
    given tweet id when provided.
    """
    return t.search.tweets(q=q, result_type='recent', count=count, lang="en", max_id=max_id)
def favorites_create(tweet):
    """Favorite (like) a single tweet via the API.

    Returns the API response dict on success, or None if Twitter rejects
    the request (e.g. already favorited, rate limited).
    """
    try:
        result = t.favorites.create(_id=tweet['id'])
        print("Favorited: %s, %s" % (result['text'], result['id']))
        return result
    except TwitterHTTPError as e:
        # Deliberate best-effort: log and swallow so one failed favorite
        # does not abort a whole batch.
        print("Error: ", e)
        return None
def search_and_fav(q, count=100, max_id=None):
    """Search for recent tweets matching `q` and favorite each result.

    Returns the number of tweets successfully favorited (the original
    computed this count but discarded it, and also built unused
    first/last-id locals -- both cleaned up here).
    """
    result = search_tweets(q, count, max_id)
    success = 0
    # Loop variable renamed: the original reused `t`, shadowing the
    # module-level Twitter client of the same name.
    for tweet in result['statuses']:
        if favorites_create(tweet) is not None:
            success += 1
    return success
def main():
    # Favorite up to 1000 recent tweets mentioning "infosec".
    # NOTE(review): the search endpoint's `count` is presumably capped
    # server-side (Twitter documents a 100-per-request limit) -- confirm
    # whether 1000 is clamped or rejected.
    search_and_fav("infosec", 1000)
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | AashiqRamachandran.noreply@github.com |
f80eb0ee75c7c6f6668996e9f7d6ded968c8c196 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-937.py | 1cb8651f581c5c2f244b40205a24f459938ff392 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,291 | py | # Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:$Type = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
43eeefacfcf0e69ed91cff1e139d21ea5ca3dcb0 | 55b57d64ec547869835334318f3059fbb507558c | /Fred2/Data/pssms/tepitopepan/mat/DRB3_0303_9.py | 9a0ad63b34e9c15470b38ea93c1c9309344d6602 | [
"BSD-3-Clause"
] | permissive | FRED-2/Fred2 | 9845f6678d4011cb746c7a5a6f283eea68077a02 | b3e54c8c4ed12b780b61f74672e9667245a7bb78 | refs/heads/master | 2021-07-12T05:05:54.515427 | 2020-05-25T06:56:25 | 2020-05-25T06:56:25 | 16,275,425 | 42 | 35 | null | 2021-07-07T12:05:11 | 2014-01-27T10:08:11 | Python | UTF-8 | Python | false | false | 2,168 | py | DRB3_0303_9 = {0: {'A': -999.0, 'E': -999.0, 'D': -999.0, 'G': -999.0, 'F': -0.004754, 'I': -0.99525, 'H': -999.0, 'K': -999.0, 'M': -0.99525, 'L': -0.99525, 'N': -999.0, 'Q': -999.0, 'P': -999.0, 'S': -999.0, 'R': -999.0, 'T': -999.0, 'W': -0.004754, 'V': -0.99525, 'Y': -0.004754}, 1: {'A': 0.0, 'E': 0.1, 'D': -1.3, 'G': 0.5, 'F': 0.8, 'I': 1.1, 'H': 0.8, 'K': 1.1, 'M': 1.1, 'L': 1.0, 'N': 0.8, 'Q': 1.2, 'P': -0.5, 'S': -0.3, 'R': 2.2, 'T': 0.0, 'W': -0.1, 'V': 2.1, 'Y': 0.9}, 2: {'A': 0.0, 'E': -1.2, 'D': -1.3, 'G': 0.2, 'F': 0.8, 'I': 1.5, 'H': 0.2, 'K': 0.0, 'M': 1.4, 'L': 1.0, 'N': 0.5, 'Q': 0.0, 'P': 0.3, 'S': 0.2, 'R': 0.7, 'T': 0.0, 'W': 0.0, 'V': 0.5, 'Y': 0.8}, 3: {'A': 0.0, 'E': -0.74263, 'D': 1.9303, 'G': 0.15152, 'F': -0.88385, 'I': 0.54359, 'H': 0.066424, 'K': -1.101, 'M': 0.17312, 'L': 0.14265, 'N': 0.23879, 'Q': 0.081257, 'P': -1.1015, 'S': 0.61335, 'R': -1.1477, 'T': -0.82428, 'W': -1.0004, 'V': -0.079372, 'Y': -1.0353}, 4: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 5: {'A': 0.0, 'E': -1.6108, 'D': -2.1332, 'G': -0.86057, 'F': -1.3415, 'I': 0.51964, 'H': -0.37968, 'K': 0.51952, 'M': -0.94544, 'L': -0.079917, 'N': -0.21194, 'Q': -0.55568, 'P': 0.39251, 'S': 0.1234, 'R': 0.28561, 'T': 1.0129, 'W': -1.3229, 'V': 1.1232, 'Y': -1.4223}, 6: {'A': 0.0, 'E': -0.19258, 'D': -0.78133, 'G': -1.1317, 'F': -0.44204, 'I': 0.17504, 'H': -0.15349, 'K': -0.5888, 'M': 0.72971, 'L': 0.52181, 'N': 0.49952, 'Q': -0.021692, 'P': -0.56978, 'S': -0.010359, 'R': -0.88875, 'T': 0.036589, 'W': 
-0.93873, 'V': 0.10742, 'Y': -0.61142}, 7: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 8: {'A': 0.0, 'E': -0.61422, 'D': -0.86927, 'G': -0.56068, 'F': -0.1483, 'I': 0.46799, 'H': 0.34784, 'K': -0.63915, 'M': 0.36782, 'L': 0.11655, 'N': -0.68387, 'Q': 0.44053, 'P': -1.049, 'S': 0.51357, 'R': -0.9464, 'T': -0.64651, 'W': -0.41564, 'V': -0.094717, 'Y': -0.29141}} | [
"schubert@informatik.uni-tuebingen.de"
] | schubert@informatik.uni-tuebingen.de |
fd0e88862a3552ff5a444410f25c478bc09b9ccc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_185/ch21_2019_09_02_16_11_34_126817.py | caeb588b2448887a21cc708aae209b3242a27c36 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | valor_conta = float(input("Digite o valor da conta"))
valor_10 = valor_conta * 1.1
print("Valor da conta com 10% {.:2f}".format(valor_10)) | [
"you@example.com"
] | you@example.com |
962e96893571f492778142b708236e00d56b680e | c54f5a7cf6de3ed02d2e02cf867470ea48bd9258 | /pyobjc/pyobjc-framework-Quartz/PyObjCTest/test_cikernel.py | b0db5ef848f5d457400c0faee426365fabfe1997 | [
"MIT"
] | permissive | orestis/pyobjc | 01ad0e731fbbe0413c2f5ac2f3e91016749146c6 | c30bf50ba29cb562d530e71a9d6c3d8ad75aa230 | refs/heads/master | 2021-01-22T06:54:35.401551 | 2009-09-01T09:24:47 | 2009-09-01T09:24:47 | 16,895 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 306 | py |
from PyObjCTools.TestSupport import *
from Quartz.QuartzCore import *
from Quartz import *
class TestCIKernel (TestCase):
    """Checks PyObjC's bridge metadata for Quartz's CIKernel class."""
    def testMethods(self):
        # Argument 0 of -[CIKernel setROISelector:] must be a SEL whose
        # method signature matches the given Objective-C type-encoding
        # string (assembled from CGRect's encoded type).
        self.failUnlessArgIsSEL(CIKernel.setROISelector_, 0, CGRect.__typestr__ + '@:i' + CGRect.__typestr__ + '@')
if __name__ == "__main__":
main()
| [
"ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25"
] | ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25 |
48263a798ad30ad427cd58b1e66ad05f0fef9263 | 9ab9123a631260903c242191d04bf452279ba544 | /experiment/draw.py | 92d33584297ac9218e26ce76aecba56642aece1f | [
"Apache-2.0"
] | permissive | peihy/PyEGT | bba58526eb6a275341fd8539250c6009f08c4337 | 9313a6d12ecb5078081c8b2bd05b1c0716f9bd87 | refs/heads/master | 2020-04-13T01:58:51.448074 | 2018-09-19T15:18:10 | 2018-09-19T15:18:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,718 | py | # -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import numpy as np
import networkx as nx
from matplotlib import cm
from numpy.random import randn
import collections
import math
def degree_histogram():
    """Plot the degree-rank distribution of a random G(n, p) graph.

    Degrees are drawn on a log-log scale in descending rank order, and the
    graph's largest connected component is rendered as an inset axes.
    Blocks on plt.show().
    """
    G = nx.gnp_random_graph(100, 0.02)
    degree_sequence=sorted(nx.degree(G).values(),reverse=True) # degrees, largest first
    # print "Degree sequence", degree_sequence
    # dmax=max(degree_sequence)
    plt.loglog(degree_sequence, 'b-', marker='o')
    plt.title("Degree rank plot")
    plt.ylabel("degree")
    plt.xlabel("rank")
    # draw graph in inset (axes rectangle in figure-fraction coordinates)
    plt.axes([0.45, 0.45, 0.45, 0.45])
    # Largest connected component by node count.
    gcc=sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)[0]
    pos=nx.spring_layout(gcc)
    plt.axis('off')
    nx.draw_networkx_nodes(gcc, pos, node_size=20)
    nx.draw_networkx_edges(gcc, pos, alpha=0.4)
    # plt.savefig("degree_histogram.png")
    plt.show()
def imshow_interpolation():
    """
    https://matplotlib.org/gallery/images_contours_and_fields/interpolation_methods
    .html#sphx-glr-gallery-images-contours-and-fields-interpolation-methods-py
    https://matplotlib.org/gallery/subplots_axes_and_figures/subplot_toolbar
    .html#sphx-glr-gallery-subplots-axes-and-figures-subplot-toolbar-py
    """
    interpolation_modes = [None, 'none', 'nearest', 'bilinear', 'bicubic', 'spline16',
                           'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric',
                           'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos']
    # Fixed seed keeps the demo image reproducible between runs.
    np.random.seed(19680801)
    binary_grid = np.random.randint(2, size=(50, 50))
    figure, axis_grid = plt.subplots(3, 6, figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []})
    figure.subplots_adjust(hspace=0.3, wspace=0.05)
    # One panel per interpolation mode, titled with the mode's name.
    for panel, mode in zip(axis_grid.flat, interpolation_modes):
        panel.imshow(binary_grid, interpolation=mode, cmap='bwr')
        panel.set_title(mode)
    plt.show()
def colorbar():
    """
    https://matplotlib.org/gallery/ticks_and_spines/colorbar_tick_labelling_demo
    .html#sphx-glr-gallery-ticks-and-spines-colorbar-tick-labelling-demo-py
    """
    # First figure: vertical (default) colorbar with custom tick labels.
    figure, axis = plt.subplots()
    noise = np.clip(randn(250, 250), -1, 1)
    image = axis.imshow(noise, interpolation='nearest', cmap=cm.coolwarm)
    axis.set_title('Gaussian noise with vertical colorbar')
    # Tick locations are given explicitly so they line up with the labels.
    bar = figure.colorbar(image, ticks=[-1, 0, 1])
    bar.ax.set_yticklabels(['< -1', '0', '> 1'])  # vertically oriented colorbar
    # Second figure: same kind of data with a horizontal colorbar.
    figure, axis = plt.subplots()
    noise = np.clip(randn(250, 250), -1, 1)
    image = axis.imshow(noise, interpolation='nearest', cmap=cm.afmhot)
    axis.set_title('Gaussian noise with horizontal colorbar')
    bar = figure.colorbar(image, ticks=[-1, 0, 1], orientation='horizontal')
    bar.ax.set_xticklabels(['Low', 'Medium', 'High'])  # horizontal colorbar
    plt.show()
def degree_distr_logbined():
    """Scatter a BA graph's degree distribution plus log-binned bin averages.

    Raw (degree, frequency) points are drawn as blue crosses; red squares
    show per-bin averages over logarithmically spaced bins.
    http://stackoverflow.com/questions/16489655/plotting-log-binned-network-degree-distributions
    """
    g = nx.barabasi_albert_graph(1000, 3)
    # degree -> number of nodes with that degree
    raw_data = dict(collections.Counter([k for _, k in g.degree]))
    degrees = [float(i) for i in raw_data.keys()]
    # list() so numpy/matplotlib see a real sequence on Python 3 as well,
    # where dict.values() is a view.
    frequencies = list(raw_data.values())
    plt.scatter(degrees, frequencies, c='b', marker='x')
    max_x = math.log10(max(degrees))
    max_y = math.log10(max(frequencies))
    max_base = max([max_x, max_y])
    min_x = math.log10(min(filter(lambda z: z > 0, degrees)))
    bins = np.logspace(min_x, max_base, num=50)
    # Per-bin averages; the bin population count is computed once and reused.
    bin_counts = np.histogram(degrees, bins)[0]
    mean_degree = np.histogram(degrees, bins, weights=degrees)[0] / bin_counts
    mean_frequency = np.histogram(degrees, bins, weights=frequencies)[0] / bin_counts
    # BUGFIX: the original assigned the two averaged arrays to transposed
    # names, plotting mean frequency on the x-axis and mean degree on the
    # y-axis — the opposite of the raw scatter and the axis labels below.
    plt.scatter(mean_degree, mean_frequency, c='r', marker='s', s=50)
    plt.xscale('log')
    plt.yscale('log')
    plt.xlabel('Degree')
    plt.ylabel('Frequency')
    plt.show()
def degree_distr():
    """Plot in- and out-degree distributions of a BA graph on log-log axes."""
    def plot(data):
        # Scatter frequency against degree on log-log axes.
        plt.plot(range(len(data)), data, 'bo')
        plt.yscale('log')
        plt.xscale('log')
        plt.ylabel('Freq')
        plt.xlabel('Degree')
        plt.show()
    # Load graph and convert it to a sparse adjacency matrix.
    G = nx.barabasi_albert_graph(1000, 3)
    M = nx.to_scipy_sparse_matrix(G)
    # Column/row sums of the adjacency matrix give in-/out-degrees
    # (identical here, since the BA generator returns an undirected graph).
    indegrees = M.sum(0).A[0]
    outdegrees = M.sum(1).T.A[0]
    indegree_distribution = np.bincount(indegrees)
    outdegree_distribution = np.bincount(outdegrees)
    # BUGFIX: was the Python 2-only statement `print indegree_distribution`,
    # which is a SyntaxError on Python 3; the call form works on both.
    print(indegree_distribution)
    plot(indegree_distribution)
    plot(outdegree_distribution)
# Demo selector: uncomment exactly one of the plotting demos below to run it.
# degree_histogram()
# imshow_interpolation()
colorbar()
# degree_distr()
# degree_distr_logbined()
| [
"shaodan.cn@gmail.com"
] | shaodan.cn@gmail.com |
fafc0ffe23e94a43e1dd908611b6ef8ab622feeb | b4eefdd65750e6437b764a90b380155c43ef886e | /betonreddit/history/urls.py | 2d0143c49267f829b32a5e6e8bbbe8989755b56a | [
"Apache-2.0"
] | permissive | sengv/betonreddit | 9a74682351bc8f33451dace362a850eb16595720 | faee136b0cf0382487bac8d8cb3ffa0051ec0123 | refs/heads/master | 2021-01-22T11:58:29.252973 | 2016-05-30T08:09:41 | 2016-05-30T08:09:41 | 58,032,675 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | from django.conf.urls import url
from .views import history_main
# Route the app's root URL to the main history view.
urlpatterns = [
    url(r'^$', history_main, name="history_main"),
]
| [
"padimkj@gmail.com"
] | padimkj@gmail.com |
de0839089f56fdbd89a3d5e3b05d99c0d7a11b20 | 2cc20f40f1d420dd5e933b00622a792bd22b3cc0 | /Dice GAME.py | 2ba8a91b4a78ab4ae601686b519cf6d633c126c1 | [] | no_license | HemantDeshmukh96/Dise-Game | 27b03a665316d8e7fa792b3ccbae608dc9b37875 | edf8e756463239c625c5e0b80c5ababf753e0ed1 | refs/heads/master | 2020-11-25T06:56:49.804445 | 2019-12-17T06:33:42 | 2019-12-17T06:33:42 | 228,548,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,030 | py | import time
import random

# Interactive dice game: the player guesses a number from 1-6, the program
# "rolls" a die, and play repeats until the player declines another round.
key = 'Y'
print("%76s" % ("*" * 32))
time.sleep(1)
print("%76s" % ("** WELCOME TO THE DICE GAME **"))
time.sleep(1)
print("%76s" % ("*" * 32))
time.sleep(1)
print("Try out your luck!!")
time.sleep(1)
print("Select a number between 1-6 and lets see in how many trial you will guess the correct number")
time.sleep(1)
while key == 'Y':
    num = int(input("Enter a number of your choice (1-6): "))
    print("\nROLLING", end="")
    # Suspense animation: six dots, half a second apart.
    for n in range(1, 7):
        time.sleep(0.5)
        print(".", end="")
    y = random.randint(1, 6)
    print('\n\nNumber generated by dice is %d' % (y))
    if y == num:
        print("%73s \n %72s \n %72s" % ("*" * 23, "** CONGRATULATIONS **", "*" * 23))
        print("%77s \n %76s \n %76s" % ("*" * 32, "** YOU ARE A GOOD GUESSER **", "*" * 32))
    else:
        print("Sorry !")
        print("Better Luck Next Time..")
    # BUGFIX: accept lower-case 'y' as well (the loop test compares against
    # 'Y'); also dropped the useless `sleep = time.sleep(1)` / `x = ...`
    # result assignments (time.sleep returns None).
    key = input("Wanna try your Luck Again (Y/N) : ").strip().upper()
else:
    print("Thank You For Playing")
"noreply@github.com"
] | HemantDeshmukh96.noreply@github.com |
bdbb08709eafee58b0e2298380f07b3701a79bd1 | 605356250c655a7f98d5f1158e0ffc94175de4f7 | /devel/lib/python2.7/dist-packages/pal_vision_msgs/msg/_FaceRecognitionFeedback.py | 463e9865ee7b4e52e18bc66246629867f1f725f5 | [] | no_license | MatthewCallery/msc-tiago-project | 4d3dcf07b7bc6915d2f203bbff46f6c11720ff9f | 8c9e987c45d6152192ba36bb27781e961e7900c3 | refs/heads/master | 2020-11-30T04:17:53.649839 | 2017-07-11T14:38:47 | 2017-07-11T14:38:47 | 96,903,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | /home/mc16766/tiago_public_ws/devel/.private/pal_vision_msgs/lib/python2.7/dist-packages/pal_vision_msgs/msg/_FaceRecognitionFeedback.py | [
"mc16766@it051534"
] | mc16766@it051534 |
ecfab7aad233063a8f80f703890b5e2ac2aaf80f | cff3ba19e4627afa1aa5178c148d9d169163c225 | /learning_logs/forms.py | 102b1adfb08ff65925c328c05764f25b2284d32b | [] | no_license | HarryWang-3/learning_log | f6087fd4ea72122b75eab8e1e1fb4ebfdaeff699 | 35edb263cc07d9da1993499b68191c929a2045ba | refs/heads/master | 2023-06-08T14:42:55.083277 | 2021-06-13T06:12:55 | 2021-06-13T06:12:55 | 376,456,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | from django import forms
from .models import Topic,Entry
class TopicForm(forms.ModelForm):
    """Form for creating/editing a Topic; exposes only its text field."""
    class Meta:
        model = Topic
        fields = ['text']
        # Empty label: render the field without visible label text.
        labels = {'text': ''}
class EntryForm(forms.ModelForm):
    """Form for adding an Entry under a Topic; exposes only its text field."""
    class Meta:
        model = Entry
        fields = ['text']
        # NOTE(review): label here is a single space while TopicForm uses
        # '' — confirm the difference is intended.
        labels = {'text': ' '}
        # Widen the entry textarea to 80 columns.
        widgets = {'text': forms.Textarea(attrs={'cols': 80})}
| [
"3150665355@qq.com"
] | 3150665355@qq.com |
707305a90e1834f2ba69595ca2d350f8ce033c35 | 9ae45d9917983d255a2fcfa09bff3d5395734dfb | /OpenCV_contour_retrieval_mode.py | 0a13a1464054b3793646ca8c9de846b25d771cac | [] | no_license | ganzerkim/OpenCV_basic_examples | 88aeeb5f5b8bf0fae4594bea5fd8f640a02fe3b5 | 18d9cfcf6673478c9a82dce7161c32a3fd51fa68 | refs/heads/master | 2021-10-22T03:00:04.839651 | 2019-03-07T15:38:05 | 2019-03-07T15:38:05 | 172,716,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 6 23:47:30 2019
@author: ganze
"""
import cv2 as cv
import numpy as np
# Load the test image and produce a binary mask for contour detection.
img_color = cv.imread('y.png')
img_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY)
ret, img_binary = cv.threshold(img_gray, 127, 255, 0)
# There are 4 contour retrieval modes in total:
# RETR_TREE, RETR_LIST, RETR_EXTERNAL, RETR_CCOMP
# each hierarchy entry is laid out as [Next, Previous, First_Child, Parent]
# NOTE(review): the two-value unpacking of findContours matches OpenCV 4.x
# (and 2.x); OpenCV 3.x returns three values — confirm the installed version.
contours, hierarchy = cv.findContours(img_binary, cv.RETR_LIST, cv.CHAIN_APPROX_SIMPLE)
# Draw every detected contour onto the original colour image.
for cnt in contours:
    cv.drawContours(img_color, [cnt], 0, (255, 0, 0), 3) # blue
cv.imshow("result", img_color)
cv.waitKey(0)
| [
"noreply@github.com"
] | ganzerkim.noreply@github.com |
df5d39c58929100b51c6e381b7857341c5650f11 | 15082a212458e7cb1ea88fc40e0d954d44d22f31 | /src/posts/migrations/0003_auto_20180624_0135.py | 104e6dff6560f76652722b0bb6df1587d28382de | [] | no_license | stopamusa/trydjango | 85734312a3444b506aeaa35dbf1dce38cc44b956 | 525c4ec1cdce8ba018529e8445b5719a12fc4c5e | refs/heads/master | 2020-03-21T06:45:01.058344 | 2018-06-23T18:58:50 | 2018-06-23T18:58:50 | 138,239,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 736 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-23 17:35
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Post.draft and Post.publish fields."""

    dependencies = [
        ('posts', '0002_post_user'),
    ]

    operations = [
        migrations.AddField(
            model_name='post',
            name='draft',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='post',
            name='publish',
            # The default is the fixed timestamp captured when the migration
            # was generated; preserve_default=False means it is only used to
            # back-fill existing rows, not kept as the field default.
            field=models.DateField(default=datetime.datetime(2018, 6, 23, 17, 35, 59, 278000, tzinfo=utc)),
            preserve_default=False,
        ),
    ]
| [
"31841917+stopamusa@users.noreply.github.com"
] | 31841917+stopamusa@users.noreply.github.com |
65e0291639ebec067e0dcd604abd5aaedb3f2f2f | 38900ad30dbf214a2dcb3f6f2fb1187200cea7f1 | /hero_rpgpreinheritance.py | 2d1794d133dddedf4cfacfde7977ae6850f216cb | [
"MIT"
] | permissive | ahmermalik/hero-rpg | c5ee86f7b753204bd1ff2199c0211b0903688e67 | 09185a4187fda7d2ad766fd366276db7d334c26c | refs/heads/master | 2021-07-05T22:37:07.648804 | 2017-09-25T00:08:18 | 2017-09-25T00:08:18 | 104,235,237 | 0 | 0 | null | 2017-09-20T15:42:41 | 2017-09-20T15:42:41 | null | UTF-8 | Python | false | false | 2,046 | py | #!/usr/bin/env python
# In this simple RPG game, the hero fights the goblin. He has the options to:
# 1. fight goblin
# 2. do nothing - in which case the goblin will attack him anyway
# 3. flee
class Hero:
    """A player character with hit points and attack power.

    BUGFIX: a stray bare `class` keyword preceded this definition and made
    the whole module a SyntaxError; it has been removed.
    """

    def __init__(self, health, power):
        self.health = health
        self.power = power

    def attack(self, enemy):
        """Reduce *enemy*'s health by this hero's power and report it."""
        enemy.health -= self.power
        print("You do {} SUPER-DUPER damage to the goblin.".format(self.power))

    def alive(self):
        """Return True while health is above zero (implicitly None otherwise)."""
        if self.health > 0:
            return True

    def print_status(self):
        """Print current health and power."""
        print("You have {} health and {} power.".format(self.health, self.power))
class Goblin:
    """The hero's adversary: tracks hit points and damage output."""

    def __init__(self, health, power):
        self.health, self.power = health, power

    def attack(self, enemy):
        # Deal this goblin's power as damage to the target.
        enemy.health = enemy.health - self.power
        print("The goblin does {} damage to you.".format(self.power))

    def alive(self):
        # Truthy (True) while any health remains, None once it runs out.
        return True if self.health > 0 else None

    def print_status(self):
        status_template = "The goblin has {} health and {} power.\n"
        print(status_template.format(self.health, self.power))
def main():
    """Run the fight loop until the hero or the goblin dies, or the player flees."""
    hero = Hero(10, 5)
    goblin = Goblin(6, 2)
    while goblin.alive() and hero.alive():
        hero.print_status()
        goblin.print_status()
        print("What do you want to do?\n")
        print("1. fight goblin")
        print("2. do nothing")
        print("3. flee")
        print("> ", end=' ')
        # Renamed from `raw_input`, which shadowed the Python 2 builtin name.
        choice = input()
        if choice == "1":
            # Hero attacks goblin; attack() itself reports the damage dealt
            # (BUGFIX: the old extra "You do {} damage" print here produced
            # a duplicate damage message every round).
            hero.attack(goblin)
            if goblin.health <= 0:
                print("The goblin is dead.")
        elif choice == "2":
            pass
        elif choice == "3":
            print("Goodbye.")
            break
        else:
            print("Invalid input {}".format(choice))
        if goblin.health > 0:
            # Goblin attacks hero
            goblin.attack(hero)
            if hero.health <= 0:
                print("You are dead.")
main() | [
"ahmer9800@gmail.com"
] | ahmer9800@gmail.com |
b2ef089382ce1cb9f95bf36c276ac46473b3e651 | 1abd183f85f4d6e3654af22c1a306c6579502214 | /student/forms.py | 749f213630bf334c2f5d66ecee3d8aabe2cd9399 | [] | no_license | datrix/infs3605 | 81afc443534e87323bb8934d73881ff4dc0e7c9a | 5986b9dceeb13138705f9da9aad0b24271036c17 | refs/heads/master | 2022-07-26T01:43:57.509621 | 2016-10-26T03:19:48 | 2016-10-26T03:19:48 | 68,784,231 | 0 | 0 | null | 2022-07-07T22:53:01 | 2016-09-21T05:46:42 | Python | UTF-8 | Python | false | false | 3,596 | py | from django import forms
from django.views.generic import View
from .models import student
from .models import degree
from .models import enrol, coopPlacement
from django.contrib.admin import widgets
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, ButtonHolder, Submit, Div, Reset, Button, Row
from crispy_forms.bootstrap import InlineField, FormActions, InlineRadios
from django.forms.formsets import BaseFormSet
from django.forms.models import inlineformset_factory
from django.contrib.auth.models import User
# TODO: the form layout styling still needs work; the approach described at
# the link below may help:
# https://kuanyui.github.io/2015/04/13/django-crispy-inline-form-layout-with-bootstrap/
import django_filters
class StudentForm(forms.ModelForm):
    """Create/edit form for a student record, laid out with crispy-forms."""
    # Advisor/owner picker drawn from all auth users; excluded from Meta
    # below, so it is not saved directly by the ModelForm machinery.
    ugc = forms.ModelChoiceField(queryset=User.objects.all())

    def __init__(self, *args, **kwargs):
        super(StudentForm, self).__init__(*args, **kwargs)
        # crispy-forms helper: identity fields on one row, then the rest,
        # finished with a submit button.
        self.helper = FormHelper(form=self)
        self.helper.layout = Layout(
            Div('zID', 'f_name', 'l_name'),
            'email',
            'startYear',
            'degreeCode',
            ButtonHolder(
                Submit('submit', 'Submit', css_class="btn btn-success"),
            )
        )

    class Meta: #information about the class
        model = student
        fields = ['zID', 'f_name', 'l_name',
                  'email',
                  'startYear',
                  'degreeCode']
        # ugc and url are not editable through this form.
        exclude = ['ugc', 'url']
class EnrolForm (forms.ModelForm):
    """Form for recording a course enrolment (student, course, term, grade)."""
    class Meta:
        model = enrol
        fields = ['zID', 'course', 'sem_taken', 'year', 'grade']

    def __init__(self, *args, **kwargs):
        super(EnrolForm, self).__init__(*args, **kwargs)
        # Bootstrap grid: course/semester/year/grade share one row.
        self.helper = FormHelper()
        self.helper.layout = Layout(
            Div('zID'),
            Div(
                Div('course', css_class="col-sm-5"),
                Div('sem_taken', css_class="col-sm-3"),
                Div('year', css_class="col-sm-2"),
                Div('grade', css_class="col-sm-2"),
                css_class = 'row'
            ),
            ButtonHolder(
                Submit('submit', 'Save', css_class='button white'),
                Submit('submit', 'Save and Add Another', css_class='btn-default', onclick="window.location.reload()")
            )
        )
class UpdateEnrolForm (forms.ModelForm):
    """Edit form for an existing enrolment record."""
    class Meta:
        model = enrol
        fields = [ 'course', 'sem_taken', 'year', 'grade']

    #zID = forms.CharField(disabled=True)

    def __init__(self, *args, **kwargs):
        super(UpdateEnrolForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        # NOTE(review): the layout references 'zID', but it is neither in
        # Meta.fields nor declared (the disabled field above is commented
        # out) — confirm this renders as intended.
        self.helper.layout = Layout(
            Div('zID'),
            Div(
                Div('course', css_class="col-sm-5"),
                Div('sem_taken', css_class="col-sm-3"),
                Div('year', css_class="col-sm-2"),
                Div('grade', css_class="col-sm-2"),
                css_class = 'row'
            ),
            ButtonHolder(
                Submit('submit', 'Update', css_class='button white'),
                Button('cancel', 'Cancel', css_class='btn-default', onclick="window.location.back()")
            )
        )
class coopPlacementForm (forms.ModelForm):
    """Form capturing a student's three co-op placement preferences."""
    def __init__(self, *args, **kwargs):
        super(coopPlacementForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        # Three preference pickers side by side, then a submit button.
        self.helper.layout = Layout(
            Div('zID'),
            Div(
                Div('firstPref', css_class='col-md-4'),
                Div('secondPref', css_class='col-md-4'),
                Div('thirdPref', css_class='col-md-4'),
                css_class='row',
            ),
            ButtonHolder(
                Submit('submit', 'Submit', css_class='button white')
            )
        )

    class Meta:
        model = coopPlacement
        fields = ['zID', 'firstPref', 'secondPref', 'thirdPref']
| [
"sworden9@gmail.com"
] | sworden9@gmail.com |
6595fb71c70fc74783a8cd72a479c17a5a5f438a | c48c1f0dcffe38966b599250f02c202bc961cb34 | /aws/api/S3.py | 0b2b48c33f1e8eab90c69fb27bfabe0976791048 | [
"Apache-2.0"
] | permissive | mikhailadvani/aws-security-test | 503a9412058b88f2deefba3805a72c0b1bc173aa | f751661e4b9a7a2c09c7a0464700e078820cf384 | refs/heads/master | 2021-06-21T22:15:31.318681 | 2017-08-21T18:31:41 | 2017-08-21T18:31:41 | 72,627,142 | 9 | 13 | Apache-2.0 | 2018-08-27T06:35:41 | 2016-11-02T10:03:04 | Python | UTF-8 | Python | false | false | 407 | py | import boto3
import ast
class S3:
    """Thin wrapper around the boto3 S3 client for bucket-metadata lookups."""

    def __init__(self):
        self.s3 = boto3.client('s3')

    def getBucketAcl(self, bucket):
        """Return the access-control list of *bucket*."""
        return self.s3.get_bucket_acl(Bucket=bucket)

    def getBucketPolicy(self, bucket):
        """Return the bucket policy of *bucket* parsed into a dict.

        The API returns the policy as a JSON document string. BUGFIX: parse
        it with json.loads — the old ast.literal_eval call fails on the JSON
        literals true/false/null, which are not valid Python literals.
        """
        import json  # local import keeps the module's import block unchanged
        return json.loads(self.s3.get_bucket_policy(Bucket=bucket)['Policy'])

    def getBucketLogging(self, bucket):
        """Return the logging configuration of *bucket*."""
        return self.s3.get_bucket_logging(Bucket=bucket)
| [
"mikhail.advani@gmail.com"
] | mikhail.advani@gmail.com |
dab5d3cfa57ce5c42b986123e0df58c8cfea27fe | 9431070f08eb587e00225b98cf27cf2f1494e519 | /The-Python-Workbook/2-Decision-Making/exercise37.py | 5d3f327186b021de31438f086ae8d1c3a8090701 | [] | no_license | emilianot04/Exercise_Python | 94908fd2612da077717de8907a4b9a39b9de9480 | abc29498f4c7efe1c4e42ad24e3850ad2f330615 | refs/heads/main | 2023-06-24T12:37:02.167480 | 2021-07-21T16:29:07 | 2021-07-21T16:29:07 | 377,550,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | """
In this exercise you will create a program that reads a letter of the alphabet from the user. If the user enters a, e, i, o or u then your program should display a message indicating that the entered letter is a vowel. If the user enters y then your program should display a message indicating that sometimes y is a vowel, and sometimes y is a consonant. Otherwise your program should display a message indicating that the
letter is a consonant.
"""
letter = str(input('Insert a letter of alphabet:'))
if(letter == 'a' or letter =='e' or letter =='i' or letter =='o' or letter =='u' ):
print (letter + ' is a vowel')
elif(letter == 'y'):
print (letter + ' Sometimes is a vowel, and sometimes is a consonant.')
else:
print( letter + ' is a consonant')
| [
"Emiliano@iMac-Emiliano.fritz.box"
] | Emiliano@iMac-Emiliano.fritz.box |
fdfeb9760cd25f62a5213d0e45ce9bdc30e836f3 | f1cb311afbca051b66205c1e4bbe570ed37cfe02 | /src/memberships/admin.py | f236cedfff84ef2e074d9e6ce1e134b48a52cd18 | [] | no_license | defcon1983/Video-Membership-Django | 9eb430450f4980594f885bf3179be66b94614ab4 | 49444d5d0626ac102e596f5e13f9a996dc137706 | refs/heads/master | 2023-03-26T22:05:12.393506 | 2020-06-03T22:31:48 | 2020-06-03T22:31:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.contrib import admin
from .models import Membership, UserMembership, Subscription
# Expose the membership models for management through the Django admin site.
admin.site.register(Membership)
admin.site.register(UserMembership)
admin.site.register(Subscription) | [
"shahzaibakash@gmail.com"
] | shahzaibakash@gmail.com |
00525d1b1729de186c4e477e804e3f2aca3d64c8 | 10d9de608a65d9b589248eb0c34fb04aa4f9af3e | /docs/conf.py | e7d970ebb413f9ec2d52d3b219eb47f3fb5e9150 | [
"BSD-2-Clause"
] | permissive | metatooling/sphinxcontrib-editable | 69c27ef35081ab56b3dcc48d43f48b6454e07184 | 5d0cb42d40505d21516b5c6f3b9daa4bf4434b2c | refs/heads/master | 2020-08-07T01:59:29.805596 | 2019-10-08T23:32:34 | 2019-10-08T23:32:34 | 213,251,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,490 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
# Sphinx extensions used to build the documentation.
# NOTE(review): "sphinx.ext.napoleon" is listed twice.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx.ext.autosummary",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.extlinks",
    "sphinx.ext.ifconfig",
    "sphinx.ext.napoleon",
    "sphinx.ext.todo",
    "sphinx.ext.viewcode",
]
# Optional spell-checking pass, enabled through the SPELLCHECK env var.
if os.getenv("SPELLCHECK"):
    extensions += ("sphinxcontrib.spelling",)
    spelling_show_suggestions = True
    spelling_lang = "en_US"

source_suffix = ".rst"
master_doc = "index"
project = "sphinxcontrib-editable"
year = "2019"
author = "sphinxcontrib-editable contributors"
copyright = "{0}, {1}".format(year, author)
version = release = "0.1.0"

pygments_style = "trac"
templates_path = ["."]
# Shorthand roles for linking to the project's GitHub issues and PRs.
extlinks = {
    "issue": ("https://github.com/metatooling/sphinxcontrib-editable/issues/%s", "#"),
    "pr": ("https://github.com/metatooling/sphinxcontrib-editable/pull/%s", "PR #"),
}
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd:  # only set the theme if we're building docs locally
    html_theme = "sphinx_rtd_theme"

html_use_smartypants = True
html_last_updated_fmt = "%b %d, %Y"
html_split_index = False
html_sidebars = {"**": ["searchbox.html", "globaltoc.html", "sourcelink.html"]}
html_short_title = "%s-%s" % (project, version)

napoleon_use_ivar = True
napoleon_use_rtype = False
napoleon_use_param = True

autoapi_dirs = ["../sphinxcontrib"]
| [
"interdoc@cordaz.com"
] | interdoc@cordaz.com |
04b4a536ba96e89e25da1c88e6fb100ff5ea651c | ab8c9c355264d623c1c53b611c88db0b3dd5b9b5 | /begginning_cerv/dictionaries.py | bd5982bab43c5e395fd915bb64adcbcf06256a2f | [] | no_license | cervthecoder/scratch_code | a815f867dc8d0f65cf49af08490c5e5829463534 | de521cd5f4a03aed3eda8f1ee00bb43aca3ade8f | refs/heads/master | 2022-11-18T22:46:18.094857 | 2020-07-15T15:40:22 | 2020-07-15T15:40:22 | 232,979,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py |
# Maps three-letter month abbreviations to full English month names.
monthConversions = {
    "Jan": "January",
    "Feb": "February",
    "Mar": "March",
    "Apr": "April",
    "May": "May",
    "Jun": "June",
    "Jul": "July",
    "Aug": "August",
    "Sep": "September",
    "Oct": "October",
    "Nov": "November",
    "Dec": "December",
}

# Direct indexing raises KeyError for unknown keys...
print(monthConversions["Nov"])
# ...while .get() falls back to the supplied default instead.
print(monthConversions.get("Luv", "Invalid key"))
# (Removed the unused leftover `string = ""` assignment.)
| [
"matej.cervenka03@gmail.com"
] | matej.cervenka03@gmail.com |
e1d3416783dbee5d2d2c6dda0278ee13e91866bf | 1e6afcbff258481c5b0f700eb3f4c26a8df05ee1 | /conf.py | 29357b4aac34c415f7299d4f4b6f4afa4c85ff00 | [] | no_license | lacus577/resys | b92f42b0fbb457809d1acca0d09be2c91b8e3989 | 39758ebd61669f7a519bf36e67caccdad9f7cb6e | refs/heads/master | 2022-12-08T22:19:18.154495 | 2020-09-20T05:15:37 | 2020-09-20T05:15:37 | 295,891,580 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,821 | py | import os
# Proportion of the data held out as the validation set.
valid_rate = 0.2
# Sampling rate applied to the raw samples.
sampling_rate = 0.05
# Number of items recalled by each recall strategy (itemcf matching).
itemcf_matching_num = 1000

# -------------------------- features --------------------------
# Negative-to-positive sample ratio.
neg_pos_rate = 20
# Feature columns for the GBDT+LR baseline model:
baseline_features_columns = [
    'UserCate1', 'UserCate2', 'UserGender', 'UserAge', 'UserComsumptionLevel1',
    'UserComsumptionLevel2', 'UserWorking', 'UserGeo',
    'ItemCateID', 'ItemShopID', 'ItemBrandID',
    'ComposeCateAndActions', 'ComposeShopAndActions', 'ComposeBrandAndActions', 'SceneID'
]

# -------------------------- data paths --------------------------
# Root directories of the raw train/test data.
train_root_path = r'../../data/tb/sample_train'
test_root_path = r'../../data/tb/sample_test'
# Raw data file names.
raw_train_common_features_file_name = 'common_features_train.csv'
raw_train_sample_skeleton_file_name = 'sample_skeleton_train.csv'
raw_test_common_features_file_name = r'common_features_test.csv'
raw_test_sample_skeleton_file_name = r'sample_skeleton_test.csv'
# Intermediate files produced after sampling.
raw_train_sampled_common_features_file_name = r'sampled_common_features_train_{}.csv'
raw_train_sampled_skeleton_file_name = r'sampled_skeleton_train_{}.csv'
raw_test_sampled_common_features_file_name = r'sampled_common_features_test_{}.csv'
raw_test_sampled_skeleton_file_name = r'sampled_skeleton_test_{}.csv'
# Different sampling rates yield datasets with a corresponding file-name
# prefix, where the prefix is the sampling rate (e.g. 05_item.csv).
item_file_name = r'{}item.csv'
user_file_name = r'{}user.csv'
action_file_name = r'{}action.csv'
# Cache directory for intermediate results.
root_caching_path = r'./caching/'
# itemcf similarity matrix cache name.
itemcf_sim_mat = 'itemcf_sim_mat'
# Cached model file names.
gbdt_model_save_file_name = 'gbdt_model'
lr_model_save_file_name = 'lr_model'
| [
"jiatao577@163.com"
] | jiatao577@163.com |
5a3124f99109b12cba535fb69171824dc8d287be | 90c677965e6429bd9d5453943df7275be1cb5b8f | /Week_2.0/Assignment_2.py | 2d787e9d6f22439e07eb2a8718f6a6f2b8e57ec0 | [] | no_license | TASNUBA-HOSSAIN/Python-Assignment-2.0 | 17e2e705f22136f48cc54378aa9873d84b659bcd | e64ae3087591c33232b0dcf140694b18781a895e | refs/heads/master | 2020-04-08T09:53:06.541480 | 2018-11-26T23:04:22 | 2018-11-26T23:04:22 | 159,244,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,744 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 23 01:32:33 2018
@author: TASNUBA
"""
## Part 1
# Two-dimensional gradient descent; the gradients correspond to
# f(x, y) = x**2 + 12*x + y**2 + 4*y, whose minimum is at (-6, -2).
gradient_func_x = lambda x: 2*x + 12
gradient_func_y = lambda y: 2*y + 4

X_0 = 3
Y_0 = 3

#initially X_old is X_init, that's the point we start from
X_old = X_0
Y_old = Y_0

#Learning rate to do the shifting of slope
learning_rate = 0.1

#Diff between X_old and X_new (and Y_old and Y_new)
threshold = 100
threshold1 = 100

iteration_no = 0
limit = 500

# BUGFIX: stop only once BOTH coordinates move less than the tolerance —
# the original tested `threshold` alone and never used threshold1.
while max(threshold, threshold1) > 0.000001:
    X_new = X_old - (learning_rate * gradient_func_x(X_old))
    Y_new = Y_old - (learning_rate * gradient_func_y(Y_old))
    threshold = abs(X_new - X_old)
    threshold1 = abs(Y_new - Y_old)
    X_old = X_new
    Y_old = Y_new
    iteration_no += 1
    if iteration_no == limit:
        break

print("Iteration no: "+str(iteration_no)+", Value of X = "+str(X_new) +", Value of Y = "+str(Y_new))

# When iteration no. is set to 1000, the code runs upto 600 iterations only. So I set the limit of iterations to 500.
# The most convenient learning rate for me is 0.1 since this value generates the lowest values of x and y.
## PART 2:
# Collect two lists of five integers from the user and hand them to the
# helper module for averaging and median computation.
list_1 = []
print ('Enter five integer numbers to generate first list (list_1)')
# BUGFIX: was range(0, 4), which only ran 4 times despite the prompt (and
# the old comment) promising five numbers.
for i in range(5):
    number = int(input('Please enter a number: '))
    list_1.append(number)
print ('list_1 = '+str(list_1))

list_2 = []
print ('Enter five integer numbers to generate second list (list_2)')
for i in range(5):
    number_2 = int(input('Please enter a number: '))
    list_2.append(number_2)
print ('list_2 = '+str(list_2))

import modules as mods
mods.Average(list_1)
mods.Average(list_2)
mods.median(list_1)
mods.median(list_2)
| [
"tasnubahossain99@gmail.com"
] | tasnubahossain99@gmail.com |
628273600b3b177e554c810b42d93f065917c1c4 | 70fd5649bdde3de3f845589f3dbb8282a7ced7b2 | /mbt.py | d26ebbb79891273b4a252776936c5b34a0b2c169 | [] | no_license | VanRitzOwen/ML-Foundations | 7c9c54d48d152e93e3d962f2b63a5c33be7a1074 | 8f31b05828daabba7ac20eed3ec660fcee6f5d16 | refs/heads/master | 2022-11-29T11:11:22.211838 | 2020-08-05T22:37:30 | 2020-08-05T22:37:30 | 285,418,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,549 | py | import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.externals import joblib
# Load the wine-quality dataset (semicolon-separated UCI CSV).
#data = pd.read_csv('./dataset/winequality-red.csv', sep=';')
data = pd.read_csv('./dataset/winequality-white.csv', sep=';')
# Quick look at the raw data.
print(data.head())
print(data.shape)
print(data.describe())
# Features are every column except the target 'quality'.
X = data.drop(['quality'], axis=1)
y = data.quality
# Hold out 20% for testing, stratified on the target so class proportions
# match between the splits.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=123, stratify=y)
'''
X_train_scaled = preprocessing.scale(X_train)
print(X_train_scaled.mean(axis=0), X_train_scaled.std(axis=0))
'''
'''
scaler = preprocessing.StandardScaler().fit(X_train)
X_train_scaled = scaler.transform(X_train)
print(X_train_scaled.mean(axis=0), X_train_scaled.std(axis=0))
X_test_scaled = scaler.transform(X_test)
print(X_test_scaled.mean(axis=0), X_test_scaled.std(axis=0))
'''
# Pipeline: standardise the features, then fit a 100-tree random forest.
pipeline = make_pipeline(preprocessing.StandardScaler(), RandomForestRegressor(n_estimators=100))
# Grid over forest hyper-parameters, evaluated with 10-fold cross-validation.
hyperparameters = {'randomforestregressor__max_features': ['auto', 'sqrt', 'log2'], 'randomforestregressor__max_depth': [None, 5, 3, 1]}
clf = GridSearchCV(pipeline, hyperparameters, cv=10)
clf.fit(X_train, y_train)
# Evaluate the refit best model on the held-out test set.
y_pred = clf.predict(X_test)
print(r2_score(y_test, y_pred))
print(mean_squared_error(y_test, y_pred))
#joblib.dump(clf, 'model.pkl') | [
"zbwen406@yeah.net"
] | zbwen406@yeah.net |
cd0f02b2b7910833b1a04e12120f795ef04b78d9 | 8c322f98385c11965f7820bc6b99546556b4fb63 | /mms_pair/api/serializers.py | 910d17307104fd814fc1c38351c69bebee10b873 | [
"MIT"
] | permissive | mvr-garcia/mb_challenge | e6b723eaa1d57fa870a0b74dce408a85fc7f6f4f | 5c1d36e35693cf3b81a4b51a821b0d9688b1d0e6 | refs/heads/main | 2023-04-06T10:38:58.865855 | 2021-04-12T22:55:07 | 2021-04-12T22:55:07 | 356,965,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | from rest_framework import serializers
from mms_pair.models import Coin
class CoinSerializer(serializers.ModelSerializer):
    """Serialises a Coin's timestamp together with all three moving averages."""
    class Meta:
        model = Coin
        fields = ['timestamp', 'mms_20', 'mms_50', 'mms_200']
class CoinMms20Serializer(serializers.ModelSerializer):
    """Serialises only the timestamp and the 20-period moving average."""
    class Meta:
        model = Coin
        fields = ['timestamp', 'mms_20']
class CoinMms50Serializer(serializers.ModelSerializer):
    """Serialises only the timestamp and the 50-period moving average."""
    class Meta:
        model = Coin
        fields = ['timestamp', 'mms_50']
class CoinMms200Serializer(serializers.ModelSerializer):
    """Serialises only the timestamp and the 200-period moving average."""
    class Meta:
        model = Coin
        fields = ['timestamp', 'mms_200']
| [
"mvrgarcia05@gmail.com"
] | mvrgarcia05@gmail.com |
73c7def96b73ffe9624d1c69ed41988bf8ec96b6 | fe2a3482a3752bfb123182ef0d4e6607b6b169cd | /scripts/pytext/split-extended.py | 7d75259af3f1169033bf3a7e7fbcecb5c762909d | [] | no_license | MANASLU8/VoiceIoT | 4c4fb123081cdcbe06f9e0839c0edea9327b56df | 5d5dd3ed37f8815fe51ea204596676b88447f600 | refs/heads/master | 2022-12-11T13:51:41.440661 | 2019-11-29T15:49:54 | 2019-11-29T15:49:54 | 143,786,995 | 1 | 0 | null | 2022-12-08T06:09:17 | 2018-08-06T21:48:38 | Python | UTF-8 | Python | false | false | 2,346 | py | import json, random, sys
from .. import file_operators as fo, utils

# Split the extended pytext dataset into train/validate/test portions,
# sampling `test-percentage` percent of each label's samples for testing.
config = utils.load_config(utils.parse_args().config)

data = fo.read_lines(config['paths']['datasets']['pytext']['data-extended'])

test_samples = {}
validate_samples = []

NO_SLOT_MARK = "NoLabel"

# group samples by labels
samples = {}
for sample in data:
    label = sample.split('\t')[0]
    if label in samples:
        samples[label].append(sample)
    else:
        samples[label] = [sample]

print("Samples per label:")
for label in samples.keys():
    print(f"{label:80s}: {len(samples[label])}")
    quantity_for_test = len(samples[label]) * config['test-percentage'] / float(100)
    if quantity_for_test < 1:
        continue
    test_samples[label] = []
    counter = 0
    while counter < quantity_for_test:
        choice = random.choice(samples[label])
        text = choice.split('\t')[-3]
        slots = [slot.split(':') for slot in choice.split('\t')[1].split(',')]
        # (start, end) character span -> slot label
        all_slots = {tuple(map(int, slot[0:2])): slot[2] for slot in slots}
        # Compute the (start, end) character span of every space-separated
        # word. BUGFIX: the original indexed space_indices[-1]
        # unconditionally and crashed with IndexError on single-word
        # utterances (texts containing no spaces).
        words_indices = []
        word_start = 0
        for position, character in enumerate(text):
            if character == ' ':
                words_indices.append((word_start, position - 1))
                word_start = position + 1
        words_indices.append((word_start, len(text) - 1))
        # Per-word slot labels; words without an annotated span get the
        # NO_SLOT_MARK placeholder.
        enriched_slots = [all_slots.get(pair, NO_SLOT_MARK) for pair in words_indices]
        print(f"Text: {text}")
        print(f"Slots: {slots}")
        print(f"Word indices: {words_indices}")
        print(f"All slots: {all_slots}")
        print(f"Enriched slots: {enriched_slots}")
        test_samples[label].append({'text': choice.split('\t')[-3], 'slots': enriched_slots})
        validate_samples.append(choice)
        samples[label].remove(choice)
        counter += 1

print(f"{len([sample for label in samples.keys() for sample in samples[label]])} Train samples; {len(test_samples)} Test samples")
fo.write_lines(config['paths']['datasets']['pytext']['train-extended'], [sample for label in samples.keys() for sample in samples[label]])
fo.write_lines(config['paths']['datasets']['pytext']['validate-extended'], validate_samples)
fo.write_json(config['paths']['datasets']['pytext']['test-extended'], test_samples)
| [
"zeionara@niuitmo.ru"
] | zeionara@niuitmo.ru |
9547ea1f783a6dde9eba4d725d96cbc515750330 | 8680c0fa3cdfeb177d4e32f623e1b24ef9faccc1 | /Improved Lottery.py | ba67943c1c057b951f26cb951f3f9cf7e4bf0a2d | [] | no_license | santiagoprietoar/PythonBasicPrograms | e26ee6365a65c4629c3e90ae05c724590b184472 | 0cdb0435f68e308b05f5dc76f67d2ee25342e58a | refs/heads/master | 2022-12-18T15:28:55.605108 | 2020-09-26T20:24:35 | 2020-09-26T20:24:35 | 296,893,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | import random
# Draw a set of 6 distinct random numbers from range(22) for the lottery.
lottery_numbers = set(random.sample(range(22), 6))

# Here are your players; find out who has the most numbers matching lottery_numbers!
players = [
    {'name': 'Rolf', 'numbers': {1, 3, 5, 7, 9, 11}},
    {'name': 'Charlie', 'numbers': {2, 7, 9, 22, 10, 5}},
    {'name': 'Anna', 'numbers': {13, 14, 15, 16, 17, 18}},
    {'name': 'Jen', 'numbers': {19, 20, 12, 7, 3, 5}}
]

# Pick the player with the largest overlap; on a tie max() keeps the
# earliest entry, exactly like the original strict-greater loop did.
top_player = max(players, key=lambda entry: len(entry['numbers'] & lottery_numbers))

# The winnings follow the formula 100 ** (numbers matched).
winnings = 100 ** len(top_player['numbers'] & lottery_numbers)

# Announce the winner, e.g. "Jen won 1000.".
print(f"{top_player['name']} won {winnings}")
| [
"noreply@github.com"
] | santiagoprietoar.noreply@github.com |
cbc6351fd46ad8ea36dc9847027121c21c9f0537 | c81d7dfef424b088bf2509a1baf406a80384ea5a | /venv/Lib/site-packages/pandas/tests/indexes/period/test_period_range.py | 49d34248207919814f02e980ff00b963e21dcdd9 | [] | no_license | Goutham2591/OMK_PART2 | 111210d78fc4845481ed55c852b8f2f938918f4a | cb54fb21ebf472bffc6ee4f634bf1e68303e113d | refs/heads/master | 2022-12-10T01:43:08.213010 | 2018-04-05T02:09:41 | 2018-04-05T02:09:41 | 124,828,094 | 0 | 1 | null | 2022-12-07T23:43:03 | 2018-03-12T03:20:14 | Python | UTF-8 | Python | false | false | 3,740 | py | import pytest
import pandas.util.testing as tm
from pandas import date_range, NaT, period_range, Period, PeriodIndex
class TestPeriodRange(object):
@pytest.mark.parametrize('freq', ['D', 'W', 'M', 'Q', 'A'])
def test_construction_from_string(self, freq):
# non-empty
expected = date_range(start='2017-01-01', periods=5,
freq=freq, name='foo').to_period()
start, end = str(expected[0]), str(expected[-1])
result = period_range(start=start, end=end, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=start, periods=5, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=5, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
# empty
expected = PeriodIndex([], freq=freq, name='foo')
result = period_range(start=start, periods=0, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=0, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=end, end=start, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
def test_construction_from_period(self):
# upsampling
start, end = Period('2017Q1', freq='Q'), Period('2018Q1', freq='Q')
expected = date_range(start='2017-03-31', end='2018-03-31', freq='M',
name='foo').to_period()
result = period_range(start=start, end=end, freq='M', name='foo')
tm.assert_index_equal(result, expected)
# downsampling
start, end = Period('2017-1', freq='M'), Period('2019-12', freq='M')
expected = date_range(start='2017-01-31', end='2019-12-31', freq='Q',
name='foo').to_period()
result = period_range(start=start, end=end, freq='Q', name='foo')
tm.assert_index_equal(result, expected)
# empty
expected = PeriodIndex([], freq='W', name='foo')
result = period_range(start=start, periods=0, freq='W', name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=0, freq='W', name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=end, end=start, freq='W', name='foo')
tm.assert_index_equal(result, expected)
def test_errors(self):
# not enough params
msg = ('Of the three parameters: start, end, and periods, '
'exactly two must be specified')
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(end='2017Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(periods=5)
with tm.assert_raises_regex(ValueError, msg):
period_range()
# too many params
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1', end='2018Q1', periods=8, freq='Q')
# start/end NaT
msg = 'start and end must not be NaT'
with tm.assert_raises_regex(ValueError, msg):
period_range(start=NaT, end='2018Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1', end=NaT)
# invalid periods param
msg = 'periods must be a number, got foo'
with tm.assert_raises_regex(TypeError, msg):
period_range(start='2017Q1', periods='foo')
| [
"amatar@unomaha.edu"
] | amatar@unomaha.edu |
820d4e8465a9ca575fe26c9092050f29834c8f99 | 5891051796778cfb44a255248ce38789bfef9e70 | /DjangoLearn/apps/test_django/models.py | 8820b1517c116ec9de6dbc52b1199d2373a5e12e | [] | no_license | Faithlmy/Python_base | cc546a5d86b123e102a69df1227cde9b6e567493 | 5a43557e6375dc9dbe5f6701d7c10e549873a5ab | refs/heads/master | 2021-01-01T17:07:04.097978 | 2018-03-31T16:44:01 | 2018-03-31T16:44:01 | 98,000,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,296 | py | from django.db import models
# Create your models here.
class TbCustomInfo(models.Model):
c_name = models.CharField(max_length=255, blank=True, null=True)
c_name_short = models.CharField(max_length=255, blank=True, null=True)
c_num = models.CharField(max_length=255, blank=True, null=True)
c_type = models.CharField(max_length=255, blank=True, null=True)
c_site = models.CharField(max_length=255, blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True, auto_now_add=True)
modify_time = models.DateTimeField(blank=True, null=True, auto_now_add=True)
delete_flag = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_custom_info'
class TbCustomerPaper(models.Model):
# mm = models.Manager
ecn_no = models.CharField(db_column='ECN_NO', max_length=255, blank=True, null=True) # Field name made lowercase.
version = models.CharField(max_length=255, blank=True, null=True)
p_name = models.CharField(max_length=255, blank=True, null=True)
st_site = models.CharField(db_column='ST_site', max_length=255, blank=True, null=True) # Field name made lowercase.
drawing_type = models.CharField(max_length=255, blank=True, null=True)
definer_name = models.CharField(max_length=255, blank=True, null=True)
c_name = models.CharField(max_length=255, blank=True, null=True)
c_type = models.CharField(max_length=255, blank=True, null=True)
c_site = models.CharField(max_length=255, blank=True, null=True)
uploader = models.CharField(max_length=255, blank=True, null=True)
custters_spec = models.CharField(max_length=255, blank=True, null=True)
case_name = models.CharField(max_length=255, blank=True, null=True)
priority = models.IntegerField(blank=True, null=True)
alter_cause = models.TextField(blank=True, null=True)
alter_front = models.TextField(blank=True, null=True)
alter_later = models.TextField(blank=True, null=True)
desc_file = models.TextField(blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True)
modify_time = models.DateTimeField(blank=True, null=True)
upload_time = models.DateTimeField(blank=True, null=True)
c_confirm = models.IntegerField(blank=True, null=True)
customer_info = models.ForeignKey('TbCustomInfo', on_delete=models.DO_NOTHING, db_column='customer_info', blank=True, null=True)
copy_id = models.IntegerField(blank=True, null=True)
drawingtype = models.IntegerField(blank=True, null=True)
modify_draft = models.IntegerField(blank=True, null=True)
valid = models.IntegerField(blank=True, null=True)
create_people = models.CharField(max_length=255, blank=True, null=True)
current = models.IntegerField(blank=True, null=True)
enabled = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_customer_paper'
class TbDrawingFile(models.Model):
file_name = models.CharField(max_length=255, blank=True, null=True)
file_size = models.IntegerField(blank=True, null=True)
file_path = models.CharField(max_length=255, blank=True, null=True)
uploader = models.CharField(max_length=255, blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True)
customer = models.ForeignKey('TbCustomerPaper', on_delete=models.DO_NOTHING, blank=True, null=True)
file_type = models.IntegerField(blank=True, null=True)
# project = models.ForeignKey('TbProjectPaper', on_delete=models.DO_NOTHING, blank=True, null=True)
# drawing_type = models.ForeignKey('TbDrawing', on_delete=models.DO_NOTHING, blank=True, null=True)
cus = models.IntegerField(blank=True, null=True)
drawing_type_name = models.CharField(max_length=255, blank=True, null=True)
dev_type = models.CharField(max_length=255, blank=True, null=True)
# id_dev_type = models.ForeignKey('TbTypeDevice', on_delete=models.DO_NOTHING, db_column='id_dev_type', blank=True, null=True)
b_all = models.IntegerField(blank=True, null=True)
sign_filepath = models.CharField(max_length=255, blank=True, null=True)
seal_filepath = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_drawing_file'
| [
"lmengyy@126.com"
] | lmengyy@126.com |
a6e8c059df5232f59ffca3ef37f34122a8a96dc7 | ceef1c00e694edf4bd407f6e7e8849abd9226791 | /apps/cm.py | 694830e70599823bc95b56e7c70e00cb4e4c025b | [] | no_license | wyh33200/Dash_Go | 6efaae39c8b15cc595b5e9a7715bb92459a33e38 | ee3ac430558e2e9f6fff0be7bce6486e5455e7b5 | refs/heads/master | 2023-04-21T08:10:26.906933 | 2021-05-10T01:17:31 | 2021-05-10T01:17:31 | 344,755,704 | 0 | 0 | null | 2021-03-05T10:25:36 | 2021-03-05T09:14:09 | Python | UTF-8 | Python | false | false | 1,386 | py | import datetime
import logging
import dash_core_components as dcc
import dash_html_components as html
from sqlalchemy import create_engine
#
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", )
calendar = html.Div([
dcc.DatePickerRange(
id='my-date-picker-range',
min_date_allowed=datetime.date(2020, 9, 18),
max_date_allowed=datetime.date(2021, 12, 31),
display_format='Y-MM-DD',
month_format='Y-MM',
start_date=datetime.date.today() - datetime.timedelta(days=7),
end_date=datetime.date.today() - datetime.timedelta(days=1)
),
html.Div(id='output-container-date-picker-range')
])
cnn_xxz_log = create_engine("mysql+mysqlconnector://xxzlog:xxz@log@192.168.2.6:3306/xxz_log")
cnn_xxz_base = create_engine("mysql+mysqlconnector://xxzdata:chitone@xxzdata@192.168.2.225:3306/xxz_base")
cnn_51job_base = create_engine("mysql+mysqlconnector://M5156BSQL:&,R}sH_F{g5!$w](+V8')-8gJ@192.168.0.51:3306/5156base")
cnn_51job_log = create_engine("mysql+mysqlconnector://db180:db180@job5156@192.168.0.98:3306/LogDB")
cnn_51job_action = create_engine("mysql+mysqlconnector://M5156ASQL:aPvM!)*^%~cdwc@7*^1@192.168.0.52:3306/5156action")
cnn_root = create_engine("mysql+mysqlconnector://root:root@localhost:3306/wyh")
| [
"18770038017@163.com"
] | 18770038017@163.com |
ade0436e9ac1a8be7232fb44c22733e9d08427f7 | 1a3bf9848f364ea842315bda5342600f26356577 | /Zajecia12 -Python/Napis/gameobjects/vector3.py | be1d015796b8dfbb9e18bca655b1a0a7c4a25e8e | [] | no_license | Forestf90/Grafika | 54fbc71129bb78b43be33b32b5c687b25779daed | 95d82c75eafe93bc5fbc746a2530e51556c48c1d | refs/heads/master | 2021-04-27T00:27:05.928873 | 2018-06-10T21:39:26 | 2018-06-10T21:39:26 | 123,817,040 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,678 | py | from math import *
from gameobjects.util import format_number
class Vector3(object):
__slots__ = ('_v')
def __init__(self, *args):
"""Creates a Vector3 from 3 numeric values or a list-like object
containing at least 3 values. No arguments result in a null vector.
"""
if len(args) == 3:
self._v = map(float, args[:3])
return
if not args:
self._v = [0., 0., 0.]
elif len(args) == 1:
self._v = map(float, args[0][:3])
else:
raise ValueError("Vector3.__init__ takes 0, 1 or 3 parameters")
def _get_0(self):
return self._v[0]
def _get_1(self):
return self._v[1]
def _get_2(self):
return self._v[2]
def _set_0(self, value):
self._v[0] = value
def _set_1(self, value):
self._v[1] = value
def _set_2(self, value):
self._v[2] = value
_getters = (_get_0, _get_1, _get_2)
_setters = (_set_0, _set_1, _set_2)
@classmethod
def from_points(cls, p1, p2):
v = cls.__new__(cls, object)
ax, ay, az = p1
bx, by, bz = p2
v._v = [bx-ax, by-ay, bz-az]
return v
@classmethod
def from_floats(cls, *args):
"""Creates a Vector3 from individual float values.
Warning: There is no checking for efficiency here: x, y, z _must_ be
floats.
"""
v = cls.__new__(cls, object)
v._v = list(args)
return v
@classmethod
def from_iter(cls, iterable):
"""Creates a Vector3 from an iterable containing at least 3 values."""
it = iter(iterable)
v = cls.__new__(cls, object)
v._v = [ float(it.next()), float(it.next()), float(it.next()) ]
return v
def copy():
"""Returns a copy of this vector."""
v = cls.__new__(sel.__class__, object)
v._v = self._v[:]
return v
#return self.from_floats(self._v[0], self._v[1], self._v[2])
__copy__ = copy
def _get_x(self):
return self._v[0]
def _set_x(self, x):
assert isinstance(x, float), "Must be a float"
self._v[0] = x
x = property(_get_x, _set_x, None, "x component.")
def _get_y(self):
return self._v[1]
def _set_y(self, y):
assert isinstance(y, float), "Must be a float"
self._v[1] = y
y = property(_get_y, _set_y, None, "y component.")
def _get_z(self):
return self._v[2]
def _set_z(self, z):
assert isinstance(z, float), "Must be a float"
self._v[2] = z
z = property(_get_z, _set_z, None, "z component.")
def _get_length(self):
x, y, z = self._v
return sqrt(x*x + y*y +z*z)
def _set_length(self, length):
v = self._v
try:
x, y, z = v
l = length / sqrt(x*x + y*y +z*z)
except ZeroDivisionError:
v[0] = 0.
v[1] = 0.
v[2] = 0.
return self
v[0] = x*l
v[1] = y*l
v[2] = z*l
length = property(_get_length, _set_length, None, "Length of the vector")
def unit(self):
"""Returns a unit vector."""
x, y, z = self._v
l = sqrt(x*x + y*y + z*z)
return self.from_floats(x/l, y/l, z/l)
def set(self, x, y, z):
"""Sets the components of this vector.
x -- x component
y -- y component
z -- z component
"""
assert ( isinstance(x, float) and
isinstance(y, float) and
isinstance(z, float) ), "x, y, z must be floats"
v = self._v
v[0] = x
v[1] = y
v[2] = z
return self
def __str__(self):
return "(%s, %s, %s)" % (format_number(self._v[0]), format_number(self._v[1]), format_number(self._v[2]))
def __repr__(self):
return "Vector3(%s, %s, %s)" % (self._v[0], self._v[1], self._v[2])
def __len__(self):
return 3
def __iter__(self):
return iter(self._v)
def __getitem__(self, index):
try:
return self._v[index]
except IndexError:
raise IndexError
def __setitem__(self, index, value):
try:
assert isinstance(value, float), "Must be a float"
self._v[index] = value
except IndexError:
raise IndexError
def __add__(self, rhs):
"""Returns the result of adding a vector (or collection of 3 numbers) from this vector."""
x, y, z = self._v
ox, oy, oz = rhs
return self.from_floats(x+ox, y+oy, z+oz)
def __iadd__(self, rhs):
"""Adds another vector (or a collection of 3 numbers) to this vector."""
x, y, z = self._v
ox, oy, oz = rhs
v = self._v
v[0] = x+ox
v[1] = y+oy
v[2] = z+oz
return self
def __radd__(self, lhs):
x, y, z = self._v
ox, oy, oz = lhs[:3]
return self.from_floats(x+ox, y+oy, z+oz)
def __sub__(self, rhs):
"""Returns the result of subtracting a vector (or collection of 3 numbers) from this vector."""
x, y, z = self._v
ox, oy, oz = rhs[:3]
return self.from_floats(x-ox, y-oy, z-oz)
def _isub__(self, rhs):
"""Subtracts another vector (or a collection of 3 numbers) from this vector."""
x, y, z = self._v
ox, oy, oz = rhs
v = self._v
v[0] = x-ox
v[1] = y-oy
v[2] = z-oz
return self
def __rsub__(self, lhs):
x, y, z = self._v
ox, oy, oz = lhs[:3]
return self.from_floats(x-ox, y-oy, z-oz)
def __mul__(self, rhs):
"""Return the result of multiplying this vector by another vector, or a scalar (single number)."""
x, y, z = self._v
try:
return self.from_floats(x*rhs, y*rhs, z*rhs)
except TypeError:
ox, oy, oz = rhs
return self.from_floats(x*ox, y*oy, z*oz)
def __imul__(self, rhs):
"""Multiply this vector by another vector, or a scalar (single number)."""
v = self._v
try:
x, y, z = v
v[0] = x * rhs
v[1] = y * rhs
v[2] = z * rhs
#self._v = [x*rhs, y*rhs, z*rhs]
except TypeError:
ox, oy, oz = rhs
v[0] = x * ox
v[1] = y * oy
v[2] = z * oz
return self
def __div__(self, rhs):
"""Return the result of dividing this vector by another vector, or a scalar (single number)."""
x, y, z = self._v
try:
return self.from_floats(x/rhs, y/rhs, z/rhs)
except TypeError:
ox, oy, oz = rhs._v
return self.from_floats(x/ox, y/oy, z/oz)
def __idiv__(self, rhs):
"""Divide this vector by another vector, or a scalar (single number)."""
v = self._v
try:
x, y, z = v
v[0] = x/rhs
v[1] = y/rhs
v[2] = z/rhs
except TypeError:
ox, oy, oz = rhs
v[0] = x/ox
v[1] = y/oy
v[2] = z/oz
return self
def __neg__(self):
"""Returns the negation of this vector (a vector pointing in the opposite direction.
eg v1 = Vector(1,2,3)
print -v1
>>> (-1,-2,-3)
"""
x, y, z = self._v
return self.from_floats(-x, -y, -z)
def __pos__(self):
return self
def __nonzero__(self):
x, y, z = self._v
return x and y and z
def __call__(self, keys):
"""Returns a tuple of the values in a vector
keys -- An iterable containing the keys (x, y or z)
eg v = Vector3(1.0, 2.0, 3.0)
v('zyx') -> (3.0, 2.0, 1.0)
"""
ord_x = ord('x')
_v = self._v
return tuple( _v[ord(c)-ord_x] for c in keys )
def as_tuple(self):
"""Returns a tuple of the x, y, z components. A little quicker than
iter(vector)."""
return tuple(self._v)
def scale(self, scale):
"""Scales the vector by onther vector or a scalar. Same as the
*= operator.
scale -- Value to scale the vector by
"""
v = self._v
try:
x, y, z = v
v[0] = x*rhs
v[1] = y*rhs
v[2] = z*rhs
except TypeError:
ox, oy, oz = rhs
v[0] = x*ox
v[1] = y*oy
v[2] = z*oz
return self
def get_length(self):
"""Calculates the length of the vector."""
x, y, z = self._v
return sqrt(x*x + y*y +z*z)
get_magnitude = get_length
def set_length(self, new_length):
"""Sets the length of the vector. (Normalises it then scales it)
new_length -- The new length of the vector.
"""
try:
x, y, z = self._v
l = length / sqrt(x*x + y*y + z*z)
except ZeroDivisionError:
self.v[:] = [0., 0., 0.]
return self
v = self._v
v[0] = x*l
v[1] = y*l
v[2] = z*l
return self
def get_distance_to(self, p):
"""Returns the distance of this vector to a point.
p -- A position as a vector, or collection of 3 values.
"""
ax, ay, az = self._v
bx, by, bz = p
return sqrt( (ax-bx)**2 + (bx-by)**2 + (cx-cy)**2 )
def get_distance_squared(self, p):
"""Returns the squared distance of this vector to a point.
p -- A position as a vector, or collection of 3 values.
"""
ax, ay, az = self._v
bx, by, bz = p
return ( (ax-bx)**2 + (bx-by)**2 + (cx-cy)**2 )
def normalise(self):
"""Scales the vector to be length 1."""
x, y, z = self._v
l = sqrt(x*x + y*y + z*z)
v = self._v
v[0] = x/l
v[1] = y/l
v[2] = z/l
return self
normalize = normalise
def get_normalised(self):
x, y, z = self._v
l = sqrt(x*x + y*y + z*z)
return self.from_floats(x/l, y/l, z/l)
get_normalized = get_normalised
def in_sphere(self, sphere):
"""Returns true if this vector (treated as a position) is contained in
the given sphere.
"""
return distance3d(sphere.position, self) <= sphere.radius
def dot(self, other):
"""Returns the dot product of this vector with another.
other -- A vector or tuple
"""
x, y, z = self._v
ox, oy, oz = other
return x*ox + y*oy + z*oz
def cross(self, other):
"""Returns the cross product of this vector with another.
other -- A vector or tuple
"""
x, y, z = self._v
bx, by, bz = other
return self.from_floats( y*bz - by*z,
z*bx - bz*x,
x*by - bx*y )
def distance3d_squared(p1, p2):
return (p2[0]-p1[0])**2 + (p2[1]-p1[1])**2 + (p2[2]-p1[2])**2
def distance3d(p1, p2):
return sqrt( (p2[0]-p1[0])**2 + (p2[1]-p1[1])**2 + (p2[2]-p1[2])**2 )
def centre_point3d(points):
return sum( Vector3(p) for p in points ) / len(points)
if __name__ == "__main__":
v1 = Vector3(2.2323, 3.43242, 1.)
print (1, 2, 3)+v1
print (v1('xxxyyyzzz'))
print (v1[2])
print (v1.z)
v1[2]=5.
print (v1)
v2= Vector3(1.2, 5, 10)
print (v2)
v1 += v2
print (v1.get_length())
print (repr(v1))
print (v1[1])
p1 = Vector3(1,2,3)
print (p1)
print (repr(p1))
for v in p1:
print (v)
#print p1[6]
ptest = Vector3( [1,2,3] )
print (ptest)
z = Vector3()
print (z)
file("test.txt", "w").write( "\n".join(str(float(n)) for n in range(20)) )
f = file("test.txt")
v1 = Vector3.from_iter( f )
v2 = Vector3.from_iter( f )
v3 = Vector3.from_iter( f )
print (v1, v2, v3)
print ("--")
print (v1)
print (v1 + (10,20,30))
print (v1('xz'))
print (-v1)
#print tuple(ptest)
#p1.set( (4, 5, 6) )
#print p1
print (Vector3(10,10,30)+v1)
| [
"michalsliwa96@wp.pl"
] | michalsliwa96@wp.pl |
717bd41fe9b212c3ecf9faf925de10324dd12ab9 | ef48efff7b9022f9745145509193d551f4084376 | /novedades/models.py | f731d1dcb004d9927fff918f8e0a6360dcc718a1 | [] | no_license | ljarufe/incamotors | b5ace5cfb2f5208a31859f06da3e6cf46867b35c | 79926654e286e9fd496bb1be9ce8d03ca218d654 | refs/heads/master | 2020-05-06T12:20:00.996574 | 2013-10-21T16:09:20 | 2013-10-21T16:09:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py | # -*- coding: utf-8 -*-
from django.db import models
from sorl.thumbnail.fields import ImageWithThumbnailsField
class Novedad(models.Model):
"""
Clase abstracta para todas las novedades
"""
nombre = models.CharField(max_length=100)
descripcion = models.TextField(verbose_name=u"descripción")
def __unicode__(self):
return '%s' % self.nombre
class Meta:
abstract = True
class Evento(Novedad):
"""
Eventos de la página de inicio
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Evento_home(Novedad):
"""
Eventos para la página de inicio
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
foto_pie = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 220),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Meta:
verbose_name = u"Evento en home"
verbose_name_plural = u"Eventos en home"
class Promocion(Novedad):
"""
Promociones de incamotors, iguales a los eventos
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/promociones',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Meta:
verbose_name = u"promoción"
verbose_name_plural = u"promociones"
class Noticia(Novedad):
"""
Noticias del menu superior
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/noticias',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Enlace(models.Model):
"""
Enlaces a otros sitios web
"""
url = models.URLField()
descripcion = models.TextField(verbose_name=u"descripción")
def __unicode__(self):
return u"%s" % self.url
| [
"luisjarufe@gmail.com"
] | luisjarufe@gmail.com |
b030b9b1532f3d3f32963e6690b69eb4fbe89b78 | 4aba06fd9cb6cd61dc1f06b7195b588f60670ffe | /0x0E-python-object_relational_mapping/11-model_state_insert.py | cf85f9ea815e7ce1e71c8a106a766fb1e4f5361b | [] | no_license | Uchennaore/holbertonschool-higher_level_programming | b9e71e4d289bcf16ce1adff0706260e474cea727 | 5433662047d967438982d7d2e50b8e5c96855dae | refs/heads/master | 2023-03-17T10:33:00.273768 | 2017-06-27T23:27:07 | 2017-06-27T23:27:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | #!/usr/bin/python3
from sys import argv
import sqlalchemy
from model_state import State, Base
import MySQLdb
if __name__ == '__main__':
mysql = "mysql+mysqldb://{}:{}@localhost/{}".format(argv[1], argv[2],
argv[3])
database = sqlalchemy.create_engine(mysql)
Base.metadata.create_all(database)
session_fake = sqlalchemy.orm.sessionmaker(bind=database)
session = session_fake()
new_state = State(name="Louisiana")
session.add(new_state)
session.commit()
state_added = session.query(State).filter_by(name="Louisiana").first()
print("{}".format(state_added.id))
| [
"corbincol@hotmail.com"
] | corbincol@hotmail.com |
afcf7aead3f79f4e7a67eb293cca02a727211e47 | 20c42cb2be000dc5b6f33a953e98e4d75c425d45 | /study_code/study_python/using_file.py | ccba9de55a4c183dc8d8ec5ca89c12e3c36c3724 | [] | no_license | lufb/code | ac5e9b056f54baf4f8fb3e955ac71b7c5339d39c | cf72556e77a45030e266dd1ea6e34b17219f3aba | refs/heads/master | 2020-12-18T18:40:49.388650 | 2016-06-15T03:11:07 | 2016-06-15T03:11:07 | 28,075,225 | 0 | 0 | null | 2014-12-17T07:20:56 | 2014-12-16T07:14:11 | C | UTF-8 | Python | false | false | 492 | py | poem='''\
Programming is funWhen the work is doneif you wanna make your work also fun: use
python
'''
f=open("poem.txt","w") # open for 'w'riting
f.write(poem) # write text to file
f.close() # close the file
f=open('poem.txt')# if no mode is specified, 'r'ead mode is assumed by default
while True:
line=f.readline()
if len(line)==0: # Zero length indicates EOF
break
print (line) # Notice comma to avoid automatic newline added by Python
f.close() # close the file | [
"1164830775@qq.com"
] | 1164830775@qq.com |
0c5fbf27a5bb0c76633e2b3542dec3778c015070 | 05fd2d51a83c93316cd50bf31a1961f8e6629ce0 | /Salary.py | 790bd4faaa9d69195bd8831b164d7244e1ce3104 | [] | no_license | amberkakkar01/Machine-Learning-Projects | 225e94313bfd305ed1b18718fa07e683e125cbaa | fa78e2f0a041ecdf36706f93a1ffba793e2d5163 | refs/heads/master | 2022-12-01T20:13:13.920117 | 2020-08-11T14:44:42 | 2020-08-11T14:44:42 | 283,111,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,260 | py | #Load the dataset
import pandas as pd
df = pd.read_csv('Salary_Data.csv')
print(df)
print(df.shape)
#outliars
print(df.isna().sum())
#Genral info
print(df.describe())
#X = df['YearExperience'].values.reshape(-1,1)
X = df.iloc[:,0:1].values
Y = df.iloc[:,-1:].values
print("Feature Data",X)
print("Target Data",Y)
import matplotlib.pyplot as plt
plt.xlabel("Year of experience")
plt.ylabel("Salary")
plt.title("Salary Predication")
plt.scatter(X,Y)
#plt.show()
#training
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,test_size=0.3)
plt.scatter(X_train,Y_train,color ='g',marker='*',label='Training')
plt.scatter(X_test,Y_test,color ='r',marker='+',label='Testing')
plt.legend()
#plt.show()
from sklearn.linear_model import LinearRegression
model = LinearRegression()
model.fit(X_train,Y_train)
print("Y-intercept:",model.intercept_)
print("Slope:",model.coef_)
print("Model Score:",model.score(X_test,Y_test))
Y_pred = model.predict(X)
plt.plot(X,Y_pred,color='k',label='Best fit line')
plt.legend()
plt.show()
x_years_input = eval(input("Enter no of yr for prediction"))
predicated_value = model.predict([[x_years_input]])
print(predicated_value) | [
"noreply@github.com"
] | amberkakkar01.noreply@github.com |
2756828f3761fcc61cdd0cb33462e66e7e043c25 | 38b54f8d36dcacd1d32427306d23ffb7599b737b | /ps4/ps4.py | 93cdef26ddd7cb4c7915d0bf970a80454580e3ac | [] | no_license | qiuyang57/6.00sc-problem-sets | 213c6ebd7bfe5bda41d345597d0f773d5e863577 | 8ff90797fb31855db48062c096d3f62e978d2271 | refs/heads/master | 2021-01-21T13:29:53.262797 | 2016-05-08T15:21:58 | 2016-05-08T15:21:58 | 50,235,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,489 | py | # 6.00 Problem Set 4
#
# Caesar Cipher Skeleton
#
import string
import random
WORDLIST_FILENAME = "words.txt"
# -----------------------------------
# Helper code
# (you don't need to understand this helper code)
def load_words():
    """
    Returns a list of valid words. Words are strings of lowercase letters.

    Depending on the size of the word list, this function may
    take a while to finish.

    NOTE: only the FIRST line of words.txt is read -- the file is
    expected to hold every word on a single space-separated line.
    Requires words.txt to exist in the current working directory.
    """
    print "Loading word list from file..."
    # inFile: file, opened unbuffered (third argument 0, Python 2 open())
    inFile = open(WORDLIST_FILENAME, 'r', 0)
    # line: string -- the single line containing all words
    line = inFile.readline()
    # wordlist: list of strings, split on whitespace
    wordlist = line.split()
    print "  ", len(wordlist), "words loaded."
    return wordlist
wordlist = load_words()
def is_word(wordlist, word):
    """
    Determines whether `word` is a valid dictionary word.

    The comparison is case-insensitive and ignores surrounding
    punctuation (quotes, brackets, arithmetic symbols, etc.).

    wordlist: list of lowercase dictionary words
    word: candidate string
    returns: True if the cleaned-up word appears in wordlist

    Example:
    >>> is_word(wordlist, 'bat')
    True
    >>> is_word(wordlist, 'asdf')
    False
    """
    cleaned = word.lower().strip(" !@#$%^&*()-_+={}[]|\:;'<>?,./\"")
    return cleaned in wordlist
def random_word(wordlist):
    """
    Picks one word uniformly at random.

    wordlist: non-empty list of words
    returns: a randomly chosen element of wordlist
    """
    chosen = random.choice(wordlist)
    return chosen
def random_string(wordlist, n):
    """
    Builds a string of n randomly chosen words separated by single spaces.

    wordlist: non-empty list of words
    n: number of words to pick
    returns: the n random words joined with ' ' (empty string when n == 0)
    """
    picks = []
    for _ in range(n):
        # random_word is just random.choice; call it directly here.
        picks.append(random.choice(wordlist))
    return " ".join(picks)
def random_scrambled(wordlist, n):
    """
    Generates a test string by generating an n-word random string
    and encrypting it with a sequence of random shifts.

    wordlist: list of words
    n: number of random words to generate and scamble
    returns: a scrambled string of n random words

    NOTE:
    This function will ONLY work once you have completed your
    implementation of apply_shifts!
    """
    s = random_string(wordlist, n) + " "
    # NOTE(review): `da` looks like a debugging leftover that exposes the
    # plaintext at module level -- confirm before removing.
    global da
    da = s[:]
    # Start a fresh random shift at every word boundary.  For i == 0,
    # s[i-1] is s[-1] -- the trailing space appended above -- so position
    # 0 always receives a shift and every character is covered.
    shifts = [(i, random.randint(0, 26)) for i in range(len(s)) if s[i-1] == ' ']
    # Drop the trailing space that was added before encryption.
    return apply_shifts(s, shifts)[:-1]
def get_fable_string():
    """
    Returns a fable in encrypted text.

    Reads the entire contents of fable.txt from the current working
    directory.  Uses a `with` block so the file handle is closed even
    if the read raises (the previous version leaked the handle on error).

    returns: the encrypted fable as a string
    """
    with open("fable.txt", "r") as f:
        return f.read()
# (end of helper code)
# -----------------------------------
#
# Problem 1: Encryption
#
def build_coder(shift):
    """
    Returns a dict implementing a Caesar cipher over a 27-symbol
    alphabet: the 26 letters plus the space character.  Uppercase maps
    to uppercase, lowercase to lowercase, and the space character always
    maps into the lowercase alphabet.  Characters outside the alphabet
    (punctuation, digits) are simply absent from the dict.

    shift: -27 < int < 27 (negative shifts build a decoder)
    returns: dict mapping each plain character to its shifted character

    Example: build_coder(3) maps 'A'->'D', 'x'->' ', ' '->'c', etc.
    """
    lower = string.ascii_lowercase + ' '
    upper = string.ascii_uppercase + ' '
    mapping = {}
    # Process uppercase first, then lowercase: the lowercase pass
    # deliberately overwrites the ' ' entry so that space always
    # encodes to a lowercase letter.
    for alphabet in (upper, lower):
        rotated = alphabet[shift:] + alphabet[:shift]
        for plain, cipher in zip(alphabet, rotated):
            mapping[plain] = cipher
    return mapping
def build_encoder(shift):
    """
    Returns a dict that encrypts plain text with a forward Caesar shift,
    e.g.:

        encoder = build_encoder(shift)
        encrypted_text = apply_coder(plain_text, encoder)

    The cipher covers the 27-symbol alphabet (letters plus space);
    punctuation and digits are ignored.

    shift: 0 <= int < 27
    returns: dict mapping plain characters to shifted characters

    HINT: an encoder is simply the coder with the shift applied forward.
    """
    # Encoding uses the shift as-is; build_decoder negates it instead.
    return build_coder(shift)
def build_decoder(shift):
    """
    Returns a dict that undoes the encoder built with the same shift,
    e.g.:

        encoder = build_encoder(shift)
        encrypted_text = apply_coder(plain_text, encoder)
        decrypted_text = apply_coder(encrypted_text, build_decoder(shift))

    The cipher covers the 27-symbol alphabet (letters plus space);
    punctuation and digits are ignored.

    shift: 0 <= int < 27
    returns: dict mapping shifted characters back to plain characters

    HINT: a decoder is simply the coder built with the opposite shift.
    """
    # Negating the shift inverts the forward mapping of build_encoder.
    reverse_shift = -shift
    return build_coder(reverse_shift)
def apply_coder(text, coder):
    """
    Applies the coder to the text. Returns the encoded text.

    Characters absent from the coder (punctuation, digits) pass
    through unchanged.  Handles the empty string.

    text: string
    coder: dict with mappings of characters to shifted characters
    returns: text after mapping coder chars to original text

    Example:
    >>> apply_coder("Hello, world!", build_encoder(3))
    'Khoor,czruog!'
    >>> apply_coder("Khoor,czruog!", build_decoder(3))
    'Hello, world!'
    """
    # ''.join over a generator avoids the quadratic cost of the previous
    # character-by-character string concatenation.
    return ''.join(coder.get(char, char) for char in text)
def apply_shift(text, shift):
    """
    Given a text, returns a new text Caesar shifted by the given shift
    offset. The empty space counts as the 27th letter of the alphabet,
    so spaces should be replaced by a lowercase letter as appropriate.
    Otherwise, lower case letters should remain lower case, upper case
    letters should remain upper case, and all other punctuation should
    stay as it is.
    text: string to apply the shift to
    shift: amount to shift the text
    returns: text after being shifted by specified amount.
    Example:
    >>> apply_shift('This is a test.', 8)
    'Apq hq hiham a.'
    """
    # Build the substitution table for this shift, then map it over the text.
    return apply_coder(text, build_coder(shift))
#
# Problem 2: Codebreaking.
#
def find_best_shift(wordlist, text):
    """
    Decrypts the encoded text and returns the plaintext.
    Tries every possible shift (0..26) and keeps the one whose decoding
    produces the most valid words from the wordlist.
    text: string
    returns: 0 <= int 27
    Example:
    >>> s = apply_coder('Hello, world!', build_encoder(8))
    >>> s
    'Pmttw,hdwztl!'
    >>> find_best_shift(wordlist, s) returns
    8
    >>> apply_coder(s, build_decoder(8)) returns
    'Hello, world!'
    """
    best_shift = 0
    best_count = 0
    for candidate in range(27):
        decoded_words = apply_shift(text, -candidate).split()
        valid = sum(1 for word in decoded_words if is_word(wordlist, word))
        if valid > best_count:
            best_count = valid
            best_shift = candidate
    return best_shift
#
# Problem 3: Multi-level encryption.
#
def apply_shifts(text, shifts):
    """
    Applies a sequence of shifts to an input text.
    text: A string to apply the Ceasar shifts to
    shifts: A list of tuples containing the location each shift should
    begin and the shift offset. Each tuple is of the form (location,
    shift) The shifts are layered: each one is applied from its
    starting position all the way through the end of the string.
    returns: text after applying the shifts to the appropriate
    positions
    Example:
    >>> apply_shifts("Do Androids Dream of Electric Sheep?", [(0,6), (3, 18), (12, 16)])
    'JufYkaolfapxQdrnzmasmRyrpfdvpmEurrb?'
    """
    shifted = text
    for position, offset in shifts:
        # Re-encode only the suffix that starts at this shift's position.
        shifted = shifted[:position] + apply_shift(shifted[position:], offset)
    return shifted
#
# Problem 4: Multi-level decryption.
#
def find_best_shifts(wordlist, text):
    """
    Given a scrambled string, returns a shift key that will decode the text to
    words in wordlist, or None if there is no such key.
    Hint: Make use of the recursive function
    find_best_shifts_rec(wordlist, text, start)
    wordlist: list of words
    text: scambled text to try to find the words for
    returns: list of tuples. each tuple is (position in text, amount of shift)
    Examples:
    >>> s = random_scrambled(wordlist, 3)
    >>> s
    'eqorqukvqtbmultiform wyy ion'
    >>> shifts = find_best_shifts(wordlist, s)
    >>> shifts
    [(0, 25), (11, 2), (21, 5)]
    >>> apply_shifts(s, shifts)
    'compositor multiform accents'
    >>> s = apply_shifts("Do Androids Dream of Electric Sheep?", [(0,6), (3, 18), (12, 16)])
    >>> s
    'JufYkaolfapxQdrnzmasmRyrpfdvpmEurrb?'
    >>> shifts = find_best_shifts(wordlist, s)
    >>> print apply_shifts(s, shifts)
    Do Androids Dream of Electric Sheep?
    """
    # Delegate to the recursive helper, starting the search at position 0.
    return find_best_shifts_rec(wordlist, text, 0)
def find_best_shifts_rec(wordlist, text, start):
    """
    Given a scrambled string and a starting position from which
    to decode, returns a shift key that will decode the text to
    words in wordlist, or None if there is no such key.

    For each candidate shift we decode the whole remaining text; if the
    first word (up to the next space) is valid, we recurse on the rest.

    wordlist: list of words
    text: scambled text to try to find the words for
    start: where to start looking at shifts
    returns: list of tuples. each tuple is (position in text, amount of shift)
    """
    # Cleanup: removed commented-out debug prints, the redundant
    # `index != -1` test inside the elif, and `!= None` comparison.
    for shift in range(27):
        s_shifted = apply_shift(text, shift)
        # Position of the first space after `start`; -1 means the rest of
        # the string is the final word.
        index = s_shifted.find(' ', start)
        if index == -1:
            # Base case: the whole remainder must be a valid word.
            if is_word(wordlist, s_shifted[start:]):
                return [(start, shift)]
        elif is_word(wordlist, s_shifted[start:index]):
            # This shift decodes one word; try to decode the rest.
            result = find_best_shifts_rec(wordlist, s_shifted, index + 1)
            if result is not None:
                return [(start, shift)] + result
    return None
def decrypt_fable(wordlist):
    """
    Using the methods you created in this problem set,
    decrypt the fable given by the function get_fable_string().
    Once you decrypt the message, be sure to include as a comment
    at the end of this problem set how the fable relates to your
    education at MIT.
    returns: string - fable in plain text
    """
    # Bug fix: the parameter was misspelled "wordlsit", so the body silently
    # relied on a same-named module-level global instead of its argument.
    fable_string = get_fable_string()
    return apply_shifts(fable_string, find_best_shifts(wordlist, fable_string))
#What is the moral of the story?
#
#
#
#
#
##for _ in range(1):
## text = random_scrambled(wordlist,9)
## k = find_best_shifts_rec(wordlist, text, 0)
## print 'answer',da
## print k
## print apply_shifts(text, k)
print decrypt_fable(wordlist)
| [
"qiuyang57@gmail.com"
] | qiuyang57@gmail.com |
86e33fe91cf2430565d4221890f6bbe6a320c21e | 19f7e1cbb73d8b6ab818ce36356ae6385881a4b0 | /Helix - Bootstrap/error_progress.py | b7c8e1deb5516f6a1ebe5ba996b4ccb93ee10322 | [] | no_license | DTRobson/SimulationProject | 0d51a9148af9cab1b973e86bd7900b5f80a7006a | fa76ba9de0d72df3d494d19dfc069488f6f1aca0 | refs/heads/master | 2020-07-17T21:09:34.872843 | 2019-09-03T15:13:16 | 2019-09-03T15:13:16 | 206,100,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,699 | py | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt

# Bootstrap iteration counts for which a bsResult<n>.csv file exists.
ns = np.array([10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 150, 200, 250])

# For each result file, average column 2 (the per-sample standard deviation).
# Replaces 26 copy-pasted loadtxt/average statements with a single loop.
avs = np.array([
    np.average(np.loadtxt('bsResult%d.csv' % n, skiprows=12, delimiter='\t')[:, 2])
    for n in ns
])

plt.scatter(ns, avs)
plt.xlabel('Number of Bootstrap Iterations')
plt.ylabel('Average Standard Deviation (kcal mol$^{-1}$)')
plt.savefig('BootstrapSDev')
plt.show()
| [
"noreply@github.com"
] | DTRobson.noreply@github.com |
10ec8a0e3e819daa85adc891d23fc2fe9c11803f | fad866361aa4940369761123a8e5e841a69ad1c3 | /soubory_ukoly_na_procviceni.py | f6cf322a963eefa0495d7908fceb1c9c7c998689 | [] | no_license | libusepoustkova/Pyladies_lekce5 | 0ae83664fee0391f33d9c84d20e74623ea158683 | 157278580197276094c4d932b89b1cf991249fee | refs/heads/master | 2020-08-12T22:37:35.959144 | 2019-10-16T19:01:37 | 2019-10-16T19:01:37 | 214,856,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | """
Self-study:
https://naucse.python.cz/2019/plzen-podzim-2019/beginners/files/
Tasks:
Open one file for reading, a second one for writing.
1. Copy the contents of the first file into the second one - using a Python program, of course
2. Copy the contents of the first file into the second one, but convert all letters to upper case
3. Copy every even-numbered line into the second file.
4. Write into the second file how many characters each line has.
5. Write into the second file how many non-whitespace characters each line has
6. Write into the second file the number of vowels in the text of each line
7. How many paragraphs does the file contain?
Self-study:
https://naucse.python.cz/2019/brno-podzim-pondeli/intro/pathlib/
"""
| [
"noreply@github.com"
] | libusepoustkova.noreply@github.com |
d0da70c270dd71617fd4d3449d199f1a9f94549a | 8004cc465359aecb7a1890617646ea5b49f11d9a | /cnn_models/cnn_dropout.py | 4034ce3be9e2c7aad18f0a543a39bd23928ad07f | [] | no_license | jhyang12345/facial-recognition | 661e34d874986943f50b5b9691d186760957b594 | 2f2ba5cfcbd58efbc9de5b9f0bafc7bc640d9c26 | refs/heads/master | 2020-04-06T09:52:20.932793 | 2018-12-13T08:15:25 | 2018-12-13T08:15:25 | 157,360,076 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,747 | py | from keras.layers import Input, Convolution2D, SeparableConvolution2D, \
GlobalAveragePooling2D, GlobalMaxPooling2D, MaxPooling2D, \
Dense, Activation, BatchNormalization, Dropout
from keras.models import Sequential, Model
from keras.callbacks import ModelCheckpoint
class CNNDropout:
    """
    Convolutional binary image classifier regularised with dropout.

    Builds a Keras model of stacked Conv2D -> BatchNorm -> ReLU groups with
    max-pooling and dropout between groups, ending in a single sigmoid unit,
    and wires up a ModelCheckpoint that keeps only the best weights.
    """
    def __init__(self, input_shape=(128, 128, 3), summarize=True):
        """Store configuration, build the network, optionally print a summary.

        input_shape: (width, height, channels) of the input images.
        summarize: if True, print the Keras model summary after building.
        """
        self.image_width = input_shape[0]
        self.image_height = input_shape[1]
        self.channels = input_shape[2]
        self.input_shape = input_shape
        # Width multiplier applied to every convolution's filter count.
        self.alpha = 1
        self.name = "cnn_dropout"
        self.model = None
        self.checkpoint_path = 'models/cnn_dropout.best.hdf5'
        # Persist only weights that improve on the best score seen so far.
        self.checkpointer = ModelCheckpoint(filepath=self.checkpoint_path, verbose=1,
                                            save_best_only=True)
        self.build_model()
        if summarize: self.model.summary()
    def build_model(self):
        """Assemble and compile the convolutional network into self.model."""
        model_input = Input(shape=self.input_shape)
        alpha = self.alpha
        activation_type = 'relu'
        # applying dropout factor to prevent overfitting
        dropout_factor = 0.4
        # input format will usually be 128 or 2^7
        # strides of 2 halfs input shape
        # usually kernel sizes are in odd numbers
        # kernel strides alternate between 1 and 2 so that we don't miss out
        x = Convolution2D(int(32 * alpha), (3, 3), strides=(1, 1), padding='same')(model_input)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = Convolution2D(int(64 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = Convolution2D(int(64 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        # kernel size of 3 halfs the input dimensions
        x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
        x = Dropout(dropout_factor)(x)
        x = Convolution2D(int(128 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = Convolution2D(int(128 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
        x = Dropout(dropout_factor)(x)
        x = Convolution2D(int(256 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = Convolution2D(int(256 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
        x = Dropout(dropout_factor)(x)
        x = Convolution2D(int(512 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        x = Convolution2D(int(512 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation(activation_type)(x)
        # basically flattens a dimension
        x = GlobalMaxPooling2D()(x)
        # maybe add another dense layer in between
        out = Dense(1, activation='sigmoid')(x)
        self.model = Model(model_input, out, name='cnn_dropout')
        self.model.compile(loss='binary_crossentropy', optimizer='adam',
                           metrics=['accuracy'])
    def load_model(self):
        """Restore the best saved weights from the checkpoint file."""
        self.model.load_weights(self.checkpoint_path)
if __name__ == '__main__':
    # Bug fix: the original called CNNPool(), which is not defined anywhere
    # in this module and raised a NameError; the class here is CNNDropout.
    CNNDropout()
| [
"jhyang12345@naver.com"
] | jhyang12345@naver.com |
841ec70cff7a6255202feda2d3fa3c674009b94a | a8fac714701a9216b47ef43da8436fa1d0f8fbef | /hansards/apps.py | 5f5e17a8c96b84effbe57b91d76a1e35183497dd | [] | no_license | quiksilv/scrutiny | 8460fae584972c2300cef48dcceb03efbdc5ef62 | a77607b4a6ce932285b00bb4dff5acf82fe45694 | refs/heads/master | 2023-02-09T15:31:14.331617 | 2020-12-31T08:04:11 | 2020-12-31T08:04:11 | 303,297,953 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | from django.apps import AppConfig
class HansardsConfig(AppConfig):
    """Django application configuration for the hansards app."""
    name = 'hansards'
| [
"mark@squarepotato.com"
] | mark@squarepotato.com |
17e2dbb5e0b9a0c021700f1bd2e39ce0b59e032a | ea5bdac57f23d099a35db4dc5b2e4baf5baaa86c | /wxsample.py | f180e7c14a3770075ed9573a5162cf496f5fe7b0 | [] | no_license | codeHeavy/vortex | 2fa29f0cc36188aace1518e7048e7870041d9ec3 | b7befe3deccfd3f0c434c03d903840ea37e9eb08 | refs/heads/master | 2021-01-10T13:10:51.646912 | 2016-03-08T18:38:26 | 2016-03-08T18:38:26 | 52,667,403 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,674 | py | #----------------------------------------------------------------------
# A very simple wxPython example. Just a wx.Frame, wx.Panel,
# wx.StaticText, wx.Button, and a wx.BoxSizer, but it shows the basic
# structure of any wxPython application.
#----------------------------------------------------------------------
import wx
class MyFrame(wx.Frame):
    """
    This is MyFrame. It just shows a few controls on a wxPanel,
    and has a simple menu.
    """
    def __init__(self, parent, title):
        """Build the frame: menu bar, status bar, and a panel with controls.

        parent: parent window or None for a top-level frame
        title: text shown in the frame's title bar
        """
        wx.Frame.__init__(self, parent, -1, title,
                          pos=(150, 150), size=(350, 200))
        # Create the menubar
        menuBar = wx.MenuBar()
        # and a menu
        menu = wx.Menu()
        # add an item to the menu, using \tKeyName automatically
        # creates an accelerator, the third param is some help text
        # that will show up in the statusbar
        menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample")
        # bind the menu event to an event handler
        self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT)
        # and put the menu on the menubar
        menuBar.Append(menu, "&File")
        self.SetMenuBar(menuBar)
        self.CreateStatusBar()
        # Now create the Panel to put the other controls on.
        panel = wx.Panel(self)
        # and a few controls
        text = wx.StaticText(panel, -1, "Hello World!")
        text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
        text.SetSize(text.GetBestSize())
        btn = wx.Button(panel, -1, "Close")
        funbtn = wx.Button(panel, -1, "Just for fun...")
        # bind the button events to handlers
        self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, btn)
        self.Bind(wx.EVT_BUTTON, self.OnFunButton, funbtn)
        # Use a sizer to layout the controls, stacked vertically and with
        # a 10 pixel border around each
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(text, 0, wx.ALL, 10)
        sizer.Add(btn, 0, wx.ALL, 10)
        sizer.Add(funbtn, 0, wx.ALL, 10)
        panel.SetSizer(sizer)
        panel.Layout()
    def OnTimeToClose(self, evt):
        """Event handler for the button click."""
        print "See ya later!"
        self.Close()
    def OnFunButton(self, evt):
        """Event handler for the button click."""
        print "Having fun yet?"
class MyApp(wx.App):
    """Application object: creates the main frame on startup."""
    def OnInit(self):
        """Called by wx at startup; create, register and show the frame."""
        frame = MyFrame(None, "Simple wxPython App")
        self.SetTopWindow(frame)
        print "Print statements go to this stdout window by default."
        frame.Show(True)
        return True
app.MainLoop()
| [
"jishnugirish@gmail.com"
] | jishnugirish@gmail.com |
784c097f29c1199560dbfea31ffdbcf228f8ffb2 | ac915c9f40e6c2c9842bcc3a415c0d90acd4475a | /oopbasics.py | 96cd497487020827a7d7d6844144e4be6526c8b7 | [] | no_license | Trietptm-on-Coding-Algorithms/Learn_Python_the_Hard_Way | 230b6bba193663d0c576ba7ebc1abc494ad1bc59 | 0020a6df6170ab8a388fd8cf7c532c0f9ef57321 | refs/heads/master | 2021-01-18T20:07:52.069366 | 2014-01-10T00:30:00 | 2014-01-10T00:30:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | """
What happened is Python's original rendition of class was broken in many serious ways. By the time they admitted the fault it was too late, and they had to support it. In order to fix the problem, they needed some "new class" style so that the "old classes" would keep working but you could use the new more correct version. This is where "class is-a object" comes in. They decided that they would use the word "object", lowercased, to be the "class" that you inherit from to make a class. Confusing right? A class inherits from the class named object to make a class but it's not an object really it's a class, but do not forget to inherit from object.
Python 3.x:
class MyClass(object): = new-style class
class MyClass: = new-style class (implicitly inherits from object)
Python 2.x:
class MyClass(object): = new-style class
class MyClass: = OLD-STYLE CLASS
"""
class pet(object): #class
    """A generic pet with a name and a leg count."""
    # Class attribute: default shared by all instances until an instance
    # assigns its own value (which shadows this one).
    number_of_legs = 0
    #second argument is turned into a local variable within the class instance called "name".
    def __init__(self,name):
        self.name = name
    def sleep(self):
        #method, this is essentially a function that always has a variable called self
        #self refers to the word preceding the method in dot notation
        #for example instance.sleep() would populate self variable with "instance"
        print "%s is sleeping. ZZzzz..." % self.name
    def count_legs(self):
        print "%s has %s legs." % (self.name, self.number_of_legs)
#dog is-a pet
class dog(pet):
    """A dog; inherits name handling and leg counting from pet."""
    #dog class inherits from pet class, meaning it has pet and dog methods available to use
    def bark(self):
        print "%s is a dog. Woof woof!" % self.name
Doug = dog("Doug")
#instance, class is useless if not intitialized in an instance
Doug.sleep()
Doug.bark()
Doug.number_of_legs = 4 #doug is a dog, lol
Doug.count_legs()
Nemo = pet("Nemo")
Nemo.count_legs()
| [
"w4rgh0st@gmail.com"
] | w4rgh0st@gmail.com |
d48f823cfcf2710e639e3a772e9a4f1da94ef685 | ba5bfa05ee16ea52300446e3644b176044b43d0e | /Django/mysite/mysite/settings.py | 67799e1794c4f18854a457a526191a8b57158170 | [] | no_license | msrahman07/Tutorials | 6b16b91599b4270571e6f41a1b49fbb1d2535507 | 5b7d92845c0230268ec610f20c1ad6dde34e7877 | refs/heads/master | 2022-11-27T21:02:20.788392 | 2020-08-16T04:33:13 | 2020-08-16T04:33:13 | 275,960,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,175 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4e*i4g=$vv4^onwoq$7f&b(l)p$3+a!togbucz$@i4m8j27gs6'
# NOTE(review): this secret key is committed to source control; rotate it and
# load it from an environment variable before any real deployment.
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'polls.apps.PollsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 'DIRS': [],
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"msrahman@sfu.ca"
] | msrahman@sfu.ca |
01e57d80ebc8e6d8e0699129b26fe8a9b7a5b6b4 | e29647acac8d23dfd97deb043b9e3f15f11d264b | /SkipperDispatcher.py | 26af40b828141ac59ce27f704936e93266c8b0ec | [] | no_license | Haerok/SkipperDataGenerator | 62d8e39521943c1e68294344d1f99cf63f5cb08a | f47538504a5561b2480a76c21164bd91ea0bd613 | refs/heads/master | 2020-05-23T23:35:13.341484 | 2019-05-16T09:04:37 | 2019-05-16T09:04:37 | 186,997,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,201 | py | import os
import os.path
import shutil
from SkipperDataGenerator import *
def is_video(extension):
    """Return True if *extension* (including the dot) is a supported
    video container extension, False otherwise."""
    # Idiom: tuple membership replaces the long chained-or if/else.
    return extension in ('.mpg', '.mxf', '.mp4', '.ts')
def is_cablelabs(extension):
    """Return True if *extension* denotes a CableLabs metadata file (.xml)."""
    # Idiom: the comparison already yields the boolean; no if/else needed.
    return extension == '.xml'
def watcher():
    """
    Scan RECEPTION_dir_path for video files that have a CableLabs .xml
    sidecar with the same basename; move each matching video into
    WORK_dir_path and run data_generator on it.

    NOTE(review): despite the `while True`, the unconditional `return 0`
    after the for-loop means exactly one scan is performed and the loop
    never repeats -- confirm whether continuous watching was intended.
    NOTE(review): only the video file is moved; the .xml sidecar is left
    behind in the reception directory -- verify that this is deliberate.
    """
    while True:
        list_of_file = os.listdir(RECEPTION_dir_path)
        # `file` shadows the builtin name; kept as-is to avoid code changes.
        for file in list_of_file:
            filename = os.path.splitext(file)
            if is_video(filename[1]):
                # Look for a sidecar sharing the same basename.
                for side_file in list_of_file:
                    side_file_name = os.path.splitext(side_file)
                    if filename[0] == side_file_name[0] \
                        and is_cablelabs(side_file_name[1]):
                        shutil.move(RECEPTION_dir_path + '/'
                                + filename[0] + filename[1],
                                WORK_dir_path + '/' + filename[0]
                                + filename[1])
                        data_generator(WORK_dir_path + '/'
                                + filename[0] + filename[1])
        return 0
watcher()
| [
"bdrapeaud@freiflpt0035736.cpgrp.root.local"
] | bdrapeaud@freiflpt0035736.cpgrp.root.local |
1d4c28ff38486b42c667dcc9ccd7c19055dd4de2 | 95337b3ad66453bae38a532e0ca349785f6b7c01 | /lico/licoweb/licoweb/asgi.py | d1e75982ca5788d05ab7f79531a8e1f49a178e87 | [] | no_license | Shanukarn11/licoweb | d4531b8873e4eace5ad03110a22c5cccf9069634 | f70f462c27ac229b1cd1501e4a0f9e46f7835178 | refs/heads/main | 2023-01-15T01:02:59.344617 | 2020-11-17T12:11:02 | 2020-11-17T12:11:02 | 313,603,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
ASGI config for licoweb project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'licoweb.settings')
application = get_asgi_application()
| [
"developer2@bluecrestsoft.com"
] | developer2@bluecrestsoft.com |
2176af9dbac7ee196c5386b09111ef6c992b1ae3 | bea2a9300ab374f55dd10066041be907b7c667ef | /migrations/versions/7cf9963ea300_.py | d2a170bc2f720cb9757b86eaadef2995c8f2bdfc | [] | no_license | Gipnologleha/chinarov | ed6aed7ff16550d00c03d6a8b23ef819a45f320b | fd540eeac54fbd437b197ce4187ae068d083d531 | refs/heads/master | 2022-12-16T17:27:29.255454 | 2020-09-28T12:06:23 | 2020-09-28T12:06:23 | 284,992,402 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,465 | py | """empty message
Revision ID: 7cf9963ea300
Revises:
Create Date: 2020-08-31 16:28:15.982609
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7cf9963ea300'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the two tables below have the same shape; cows_breeds1
    # uses English column names while cows_breeds2 uses transliterated ones.
    # Consider unifying the naming in a follow-up migration.
    op.create_table('cows_breeds1',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('longtitude1', sa.Float(), nullable=False),
    sa.Column('latitude1', sa.Float(), nullable=False),
    sa.Column('breed1', sa.String(), nullable=False),
    sa.Column('type1', sa.String(), nullable=False),
    sa.Column('yeild1', sa.Float(), nullable=False),
    sa.Column('fat1', sa.Float(), nullable=False),
    sa.Column('protein1', sa.Float(), nullable=False),
    sa.Column('weight1', sa.Float(), nullable=False),
    sa.Column('first_calving_days1', sa.Float(), nullable=False),
    sa.Column('age_in_calving1', sa.Float(), nullable=False),
    sa.Column('leaving_in_calving1', sa.Float(), nullable=False),
    sa.Column('service_period1', sa.Float(), nullable=False),
    sa.Column('dry_period1', sa.Float(), nullable=False),
    sa.Column('calving_per100_heads1', sa.Float(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('cows_breeds2',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('longtitude2', sa.Float(), nullable=False),
    sa.Column('latitude2', sa.Float(), nullable=False),
    sa.Column('breed2', sa.String(), nullable=False),
    sa.Column('type2', sa.String(), nullable=False),
    sa.Column('udoy2', sa.Float(), nullable=False),
    sa.Column('jir2', sa.Float(), nullable=False),
    sa.Column('belok2', sa.Float(), nullable=False),
    sa.Column('massa2', sa.Float(), nullable=False),
    sa.Column('perviy_otel_dney2', sa.Float(), nullable=False),
    sa.Column('v_otelah2', sa.Float(), nullable=False),
    sa.Column('vibitiya_v_otelah2', sa.Float(), nullable=False),
    sa.Column('servis_period2', sa.Float(), nullable=False),
    sa.Column('suhostoyniy_period2', sa.Float(), nullable=False),
    sa.Column('vihod_telyat_na100_golov2', sa.Float(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop in reverse creation order (no FKs, so order is not critical).
    op.drop_table('cows_breeds2')
    op.drop_table('cows_breeds1')
    # ### end Alembic commands ###
| [
"strider7777@yandex.ru"
] | strider7777@yandex.ru |
7455afa728f6af5d47411cfac7fe64159c81c7f0 | 7e20c4ad991159e46462c1ef6674340d1427452e | /mfr/version.py | 3e33c308c4918e8b00b6d51b698aa58ad76155f4 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | CenterForOpenScience/modular-file-renderer | 2758fa4142251b21a10f0f8f42fdae135dd69497 | 64a04870eb6b358e4ba65dfe275869450868a2aa | refs/heads/develop | 2023-08-09T03:27:20.875353 | 2023-04-17T18:57:47 | 2023-04-17T18:57:47 | 13,421,446 | 37 | 49 | Apache-2.0 | 2023-04-17T18:58:12 | 2013-10-08T18:11:00 | JavaScript | UTF-8 | Python | false | false | 23 | py | __version__ = '23.1.0'
| [
"fitz@cos.io"
] | fitz@cos.io |
e0e17babd741e0e150fdf4822dccc0fc24996bea | 100be61cb2136d89097d929f129e6d69f8590c81 | /image_recognition/from_scratch_neural_network/predict.py | f7bdf42fc0ad3cb913a39e5aee10055c39765e41 | [] | no_license | paulasquin/mosquito-induced-epidemics-anticipation | ee57f1b0dcb3e8cd8cf17c8bc3008bbc7bb72acf | 17a51b0a1d62376a3c9ac54ca07c9f2be957cf81 | refs/heads/master | 2020-04-10T12:41:16.325493 | 2018-12-17T14:11:28 | 2018-12-17T14:11:28 | 161,029,488 | 0 | 0 | null | 2018-12-17T09:01:40 | 2018-12-09T10:50:36 | Python | UTF-8 | Python | false | false | 3,420 | py | import image_recognition.from_scratch_neural_network.dataset as dataset
import image_recognition.from_scratch_neural_network.train as train
import numpy as np
import os
import sys, argparse
from image_recognition.from_scratch_neural_network.tools import *
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
IMG_SIZE = 150
NUM_CHANNELS = 3
def main():
    """
    Command-line entry point: load the image given as argv[1], let the user
    pick a saved TensorFlow checkpoint, and print the prediction probability
    for each label.
    """
    print("Loading the image")
    dir_path = os.path.dirname(os.path.realpath(__file__))
    image_path = sys.argv[1]
    if image_path[0] != "/":
        # Relative paths are resolved against this script's directory.
        image_path = dir_path + '/' + image_path
    image = np.array([dataset.read_image(filename=image_path, image_size=IMG_SIZE)], dtype=np.uint8)
    print("Shaping the image for the model input")
    # The input to the network is of shape [None image_size image_size num_channels]. Hence we reshape.
    x_batch = image.reshape(1, IMG_SIZE, IMG_SIZE, NUM_CHANNELS)
    print("Please choose the model to use : ")
    les_meta_path = locate_files(extension=".meta", path=os.getcwd(), dbName="meta")
    for i, meta_path in enumerate(les_meta_path):
        print("\n\n" + str(i) + " : " + str(meta_path))
        info_txt_path = str('/'.join(meta_path.split("/")[:-1]) + "/info.txt")
        try:
            with open(info_txt_path, 'r') as f:
                for line in f:
                    print("\t" + str(line.replace("\n", "")))
            print("")
        except FileNotFoundError:
            print("// No info.txt \n")
    model_num = int(input(">> "))
    try:
        meta_path = les_meta_path[model_num]
        model_dir_path = '/'.join(meta_path.split("/")[:-1]) + "/"
    # Bug fix: the original `except IndexError or TypeError:` evaluates the
    # expression `IndexError or TypeError` to IndexError, so TypeError was
    # never caught; a tuple catches both as intended.
    except (IndexError, TypeError):
        print("Wrong input")
        return -1
    print("Restoring the model", end="")
    sys.stdout.flush()
    sess = tf.Session()
    # Step-1: Recreate the network graph. At this step only graph is created.
    saver = tf.train.import_meta_graph(meta_path)
    # Step-2: Now let's load the weights saved using the restore method.
    saver.restore(sess, tf.train.latest_checkpoint(model_dir_path))
    graph = tf.get_default_graph()
    y_pred = graph.get_tensor_by_name("y_pred:0")
    print(" - Done")
    print("Feeding the image to the input")
    x = graph.get_tensor_by_name("x:0")
    y_true = graph.get_tensor_by_name("y_true:0")
    les_labels = []
    try:
        with open(model_dir_path + "labels.txt", 'r') as f:
            for line in f:
                label = line.replace("\n", "")
                if label != "":
                    les_labels.append(label)
    except Exception as e:
        # Deliberate best-effort fallback: keep running with default labels
        # when labels.txt is missing or unreadable.
        les_labels = ['Bathroom', 'Bedroom', 'Kitchen', 'Living Room']
        print("Error opening labels.txt. We are going to use default values : " + str(les_labels))
        print("***\n" + str(e) + "\n***")
    print("Using labels : " + str(les_labels))
    y_test_images = np.zeros((1, len(les_labels)))
    # Creating the feed_dict that is required to be fed to calculate y_pred
    feed_dict_testing = {x: x_batch, y_true: y_test_images}
    result = sess.run(y_pred, feed_dict=feed_dict_testing)
    print(result[0])
    # result holds one probability per label, in les_labels order.
    print("Prediction : ")
    for i in range(len(result[0])):
        print("\t" + les_labels[i] + " : " + str('{0:f}'.format(round(result[0][i] * 100, 5))) + "%")
if __name__ == '__main__':
main()
| [
"paul.asquin@gmail.com"
] | paul.asquin@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.