repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
zzzombat/precise-python-django-social-auth | social_auth/backends/contrib/instagram.py | 7 | 1683 | from urllib import urlencode
from django.utils import simplejson
from social_auth.backends import BaseOAuth2, OAuthBackend, USERNAME
from social_auth.utils import dsa_urlopen
# Instagram OAuth2 endpoints used by the auth classes below.
INSTAGRAM_SERVER = 'instagram.com'
INSTAGRAM_AUTHORIZATION_URL = 'https://instagram.com/oauth/authorize'
INSTAGRAM_ACCESS_TOKEN_URL = 'https://instagram.com/oauth/access_token'
# Profile endpoint queried with the access token in InstagramAuth.user_data().
INSTAGRAM_CHECK_AUTH = 'https://api.instagram.com/v1/users/self'
class InstagramBackend(OAuthBackend):
    """OAuth backend for Instagram: extracts id and profile details
    from the JSON payload returned by the Instagram API."""
    name = 'instagram'

    def get_user_id(self, details, response):
        """Use Instagram's own user id as the unique identifier."""
        return response['user']['id']

    def get_user_details(self, response):
        """Return user details from Instagram account"""
        user = response['user']
        return {
            USERNAME: user['username'],
            'first_name': user.get('fullname', ''),
            'email': user.get('email', '')
        }
class InstagramAuth(BaseOAuth2):
    """Instagram OAuth mechanism"""
    AUTHORIZATION_URL = INSTAGRAM_AUTHORIZATION_URL
    ACCESS_TOKEN_URL = INSTAGRAM_ACCESS_TOKEN_URL
    SERVER_URL = INSTAGRAM_SERVER
    AUTH_BACKEND = InstagramBackend
    SETTINGS_KEY_NAME = 'INSTAGRAM_CLIENT_ID'
    SETTINGS_SECRET_NAME = 'INSTAGRAM_CLIENT_SECRET'

    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        query = urlencode({'access_token': access_token})
        try:
            # Invalid/expired tokens yield a non-JSON body, which raises
            # ValueError in the JSON parser; report that as "no data".
            return simplejson.load(dsa_urlopen(INSTAGRAM_CHECK_AUTH + '?' + query))
        except ValueError:
            return None
# Backend definition: maps the backend's name (as used in settings and
# auth URLs) to its auth class so social_auth can discover it.
BACKENDS = {
    'instagram': InstagramAuth,
}
| bsd-3-clause |
abaditsegay/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_posix.py | 51 | 9802 | "Test posix functions"
from test import test_support
try:
import posix
except ImportError:
raise test_support.TestSkipped, "posix is not available"
import time
import os
import pwd
import shutil
import unittest
import warnings
# Silence the "potential security risk" RuntimeWarnings emitted by
# tmpnam/tempnam, which the tests below exercise deliberately.
warnings.filterwarnings('ignore', '.* potential security risk .*',
                        RuntimeWarning)
class PosixTester(unittest.TestCase):
    """Exercise the posix module's thin OS wrappers.

    Most tests are guarded with hasattr(posix, ...) because the set of
    functions the posix module exposes varies between platforms.
    """

    def setUp(self):
        # create empty file
        fp = open(test_support.TESTFN, 'w+')
        fp.close()

    def tearDown(self):
        os.unlink(test_support.TESTFN)

    def testNoArgFunctions(self):
        # test posix functions which take no arguments and have
        # no side-effects which we need to cleanup (e.g., fork, wait, abort)
        NO_ARG_FUNCTIONS = [ "ctermid", "getcwd", "getcwdu", "uname",
                             "times", "getloadavg", "tmpnam",
                             "getegid", "geteuid", "getgid", "getgroups",
                             "getpid", "getpgrp", "getppid", "getuid",
                           ]

        for name in NO_ARG_FUNCTIONS:
            posix_func = getattr(posix, name, None)
            if posix_func is not None:
                posix_func()
                # calling a no-argument function with an argument must fail
                self.assertRaises(TypeError, posix_func, 1)

    def test_statvfs(self):
        if hasattr(posix, 'statvfs'):
            self.assert_(posix.statvfs(os.curdir))

    def test_fstatvfs(self):
        if hasattr(posix, 'fstatvfs'):
            fp = open(test_support.TESTFN)
            try:
                self.assert_(posix.fstatvfs(fp.fileno()))
            finally:
                fp.close()

    def test_ftruncate(self):
        if hasattr(posix, 'ftruncate'):
            fp = open(test_support.TESTFN, 'w+')
            try:
                # we need to have some data to truncate
                fp.write('test')
                fp.flush()
                posix.ftruncate(fp.fileno(), 0)
            finally:
                fp.close()

    def test_dup(self):
        if hasattr(posix, 'dup'):
            fp = open(test_support.TESTFN)
            try:
                # dup must return a fresh integer file descriptor
                fd = posix.dup(fp.fileno())
                self.assert_(isinstance(fd, int))
                os.close(fd)
            finally:
                fp.close()

    def test_confstr(self):
        if hasattr(posix, 'confstr'):
            # unknown configuration names are rejected
            self.assertRaises(ValueError, posix.confstr, "CS_garbage")
            self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True)

    def test_dup2(self):
        if hasattr(posix, 'dup2'):
            fp1 = open(test_support.TESTFN)
            fp2 = open(test_support.TESTFN)
            try:
                posix.dup2(fp1.fileno(), fp2.fileno())
            finally:
                fp1.close()
                fp2.close()

    def fdopen_helper(self, *args):
        # helper for test_fdopen: wrap a raw fd with the given mode args
        fd = os.open(test_support.TESTFN, os.O_RDONLY)
        fp2 = posix.fdopen(fd, *args)
        fp2.close()

    def test_fdopen(self):
        if hasattr(posix, 'fdopen'):
            self.fdopen_helper()
            self.fdopen_helper('r')
            self.fdopen_helper('r', 100)

    def test_osexlock(self):
        # O_EXLOCK/O_SHLOCK are BSD extensions; a second exclusive open
        # with O_NONBLOCK must fail while the lock is held
        if hasattr(posix, "O_EXLOCK"):
            fd = os.open(test_support.TESTFN,
                         os.O_WRONLY|os.O_EXLOCK|os.O_CREAT)
            self.assertRaises(OSError, os.open, test_support.TESTFN,
                              os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
            os.close(fd)

        if hasattr(posix, "O_SHLOCK"):
            fd = os.open(test_support.TESTFN,
                         os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
            self.assertRaises(OSError, os.open, test_support.TESTFN,
                              os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
            os.close(fd)

    def test_osshlock(self):
        # two shared locks may coexist; an exclusive lock on top must fail
        if hasattr(posix, "O_SHLOCK"):
            fd1 = os.open(test_support.TESTFN,
                         os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
            fd2 = os.open(test_support.TESTFN,
                          os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
            os.close(fd2)
            os.close(fd1)

            if hasattr(posix, "O_EXLOCK"):
                fd = os.open(test_support.TESTFN,
                             os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
                self.assertRaises(OSError, os.open, test_support.TESTFN,
                                  os.O_RDONLY|os.O_EXLOCK|os.O_NONBLOCK)
                os.close(fd)

    def test_fstat(self):
        if hasattr(posix, 'fstat'):
            fp = open(test_support.TESTFN)
            try:
                self.assert_(posix.fstat(fp.fileno()))
            finally:
                fp.close()

    def test_stat(self):
        if hasattr(posix, 'stat'):
            self.assert_(posix.stat(test_support.TESTFN))

    if hasattr(posix, 'chown'):
        def test_chown(self):
            # raise an OSError if the file does not exist
            os.unlink(test_support.TESTFN)
            self.assertRaises(OSError, posix.chown, test_support.TESTFN, -1, -1)

            # re-create the file
            open(test_support.TESTFN, 'w').close()
            if os.getuid() == 0:
                try:
                    # Many linux distros have a nfsnobody user as MAX_UID-2
                    # that makes a good test case for signedness issues.
                    #   http://bugs.python.org/issue1747858
                    # This part of the test only runs when run as root.
                    # Only scary people run their tests as root.
                    ent = pwd.getpwnam('nfsnobody')
                    posix.chown(test_support.TESTFN, ent.pw_uid, ent.pw_gid)
                except KeyError:
                    pass
            else:
                # non-root cannot chown to root, raises OSError
                self.assertRaises(OSError, posix.chown,
                                  test_support.TESTFN, 0, 0)

            # test a successful chown call
            posix.chown(test_support.TESTFN, os.getuid(), os.getgid())

    def test_chdir(self):
        if hasattr(posix, 'chdir'):
            posix.chdir(os.curdir)
            # chdir to a plain file must fail
            self.assertRaises(OSError, posix.chdir, test_support.TESTFN)

    def test_lsdir(self):
        if hasattr(posix, 'lsdir'):
            self.assert_(test_support.TESTFN in posix.lsdir(os.curdir))

    def test_access(self):
        if hasattr(posix, 'access'):
            self.assert_(posix.access(test_support.TESTFN, os.R_OK))

    def test_umask(self):
        if hasattr(posix, 'umask'):
            old_mask = posix.umask(0)
            self.assert_(isinstance(old_mask, int))
            # restore the process umask for subsequent tests
            posix.umask(old_mask)

    def test_strerror(self):
        if hasattr(posix, 'strerror'):
            self.assert_(posix.strerror(0))

    def test_pipe(self):
        if hasattr(posix, 'pipe'):
            reader, writer = posix.pipe()
            os.close(reader)
            os.close(writer)

    def test_tempnam(self):
        if hasattr(posix, 'tempnam'):
            self.assert_(posix.tempnam())
            self.assert_(posix.tempnam(os.curdir))
            self.assert_(posix.tempnam(os.curdir, 'blah'))

    def test_tmpfile(self):
        if hasattr(posix, 'tmpfile'):
            fp = posix.tmpfile()
            fp.close()

    def test_utime(self):
        if hasattr(posix, 'utime'):
            now = time.time()
            posix.utime(test_support.TESTFN, None)
            # the times tuple must be two numbers, not None members
            self.assertRaises(TypeError, posix.utime, test_support.TESTFN, (None, None))
            self.assertRaises(TypeError, posix.utime, test_support.TESTFN, (now, None))
            self.assertRaises(TypeError, posix.utime, test_support.TESTFN, (None, now))
            posix.utime(test_support.TESTFN, (int(now), int(now)))
            posix.utime(test_support.TESTFN, (now, now))

    def test_chflags(self):
        if hasattr(posix, 'chflags'):
            st = os.stat(test_support.TESTFN)
            if hasattr(st, 'st_flags'):
                # setting the current flags again is a harmless no-op
                posix.chflags(test_support.TESTFN, st.st_flags)

    def test_lchflags(self):
        if hasattr(posix, 'lchflags'):
            st = os.stat(test_support.TESTFN)
            if hasattr(st, 'st_flags'):
                posix.lchflags(test_support.TESTFN, st.st_flags)

    def test_getcwd_long_pathnames(self):
        # getcwd() must keep working as the path grows past PATH_MAX-ish
        # lengths; build a deep directory tree and call getcwd at each level
        if hasattr(posix, 'getcwd'):
            dirname = 'getcwd-test-directory-0123456789abcdef-01234567890abcdef'
            curdir = os.getcwd()
            base_path = os.path.abspath(test_support.TESTFN) + '.getcwd'

            try:
                os.mkdir(base_path)
                os.chdir(base_path)
            except:
                # Just returning nothing instead of the TestSkipped exception,
                # because the test results in Error in that case.
                # Is that ok?
                #  raise test_support.TestSkipped, "cannot create directory for testing"
                return

            try:
                def _create_and_do_getcwd(dirname, current_path_length = 0):
                    try:
                        os.mkdir(dirname)
                    except:
                        raise test_support.TestSkipped, "mkdir cannot create directory sufficiently deep for getcwd test"

                    os.chdir(dirname)
                    try:
                        os.getcwd()
                        # recurse until the accumulated path is ~1 KiB deep
                        if current_path_length < 1027:
                            _create_and_do_getcwd(dirname, current_path_length + len(dirname) + 1)
                    finally:
                        os.chdir('..')
                        os.rmdir(dirname)

                _create_and_do_getcwd(dirname)
            finally:
                shutil.rmtree(base_path)
                os.chdir(curdir)
def test_main():
    """Entry point used by regrtest to run this module's test cases."""
    test_support.run_unittest(PosixTester)

if __name__ == '__main__':
    test_main()
| apache-2.0 |
abaditsegay/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/lib/winerror.py | 45 | 101427 | """Error related constants for win32
Generated by h2py from winerror.h
"""
# Few extras added manually...
TRUST_E_PROVIDER_UNKNOWN = -2146762751
TRUST_E_ACTION_UNKNOWN = -2146762750
TRUST_E_SUBJECT_FORM_UNKNOWN = -2146762749
TRUST_E_SUBJECT_NOT_TRUSTED = -2146762748
# up to here...
FACILITY_WINRM = 51
FACILITY_WINDOWSUPDATE = 36
FACILITY_WINDOWS_DEFENDER = 80
FACILITY_WINDOWS_CE = 24
FACILITY_WINDOWS = 8
FACILITY_URT = 19
FACILITY_UMI = 22
FACILITY_TPM_SOFTWARE = 41
FACILITY_TPM_SERVICES = 40
FACILITY_SXS = 23
FACILITY_STORAGE = 3
FACILITY_STATE_MANAGEMENT = 34
FACILITY_SSPI = 9
FACILITY_SCARD = 16
FACILITY_SHELL = 39
FACILITY_SETUPAPI = 15
FACILITY_SECURITY = 9
FACILITY_RPC = 1
FACILITY_PLA = 48
FACILITY_WIN32 = 7
FACILITY_CONTROL = 10
FACILITY_NULL = 0
FACILITY_NDIS = 52
FACILITY_METADIRECTORY = 35
FACILITY_MSMQ = 14
FACILITY_MEDIASERVER = 13
FACILITY_INTERNET = 12
FACILITY_ITF = 4
FACILITY_USERMODE_HYPERVISOR = 53
FACILITY_HTTP = 25
FACILITY_GRAPHICS = 38
FACILITY_FWP = 50
FACILITY_FVE = 49
FACILITY_USERMODE_FILTER_MANAGER = 31
FACILITY_DPLAY = 21
FACILITY_DISPATCH = 2
FACILITY_DIRECTORYSERVICE = 37
FACILITY_CONFIGURATION = 33
FACILITY_COMPLUS = 17
FACILITY_USERMODE_COMMONLOG = 26
FACILITY_CMI = 54
FACILITY_CERT = 11
FACILITY_BACKGROUNDCOPY = 32
FACILITY_ACS = 20
FACILITY_AAF = 18
ERROR_SUCCESS = 0
NO_ERROR = 0
S_OK = 0
S_FALSE = 1
ERROR_INVALID_FUNCTION = 1
ERROR_FILE_NOT_FOUND = 2
ERROR_PATH_NOT_FOUND = 3
ERROR_TOO_MANY_OPEN_FILES = 4
ERROR_ACCESS_DENIED = 5
ERROR_INVALID_HANDLE = 6
ERROR_ARENA_TRASHED = 7
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_INVALID_BLOCK = 9
ERROR_BAD_ENVIRONMENT = 10
ERROR_BAD_FORMAT = 11
ERROR_INVALID_ACCESS = 12
ERROR_INVALID_DATA = 13
ERROR_OUTOFMEMORY = 14
ERROR_INVALID_DRIVE = 15
ERROR_CURRENT_DIRECTORY = 16
ERROR_NOT_SAME_DEVICE = 17
ERROR_NO_MORE_FILES = 18
ERROR_WRITE_PROTECT = 19
ERROR_BAD_UNIT = 20
ERROR_NOT_READY = 21
ERROR_BAD_COMMAND = 22
ERROR_CRC = 23
ERROR_BAD_LENGTH = 24
ERROR_SEEK = 25
ERROR_NOT_DOS_DISK = 26
ERROR_SECTOR_NOT_FOUND = 27
ERROR_OUT_OF_PAPER = 28
ERROR_WRITE_FAULT = 29
ERROR_READ_FAULT = 30
ERROR_GEN_FAILURE = 31
ERROR_SHARING_VIOLATION = 32
ERROR_LOCK_VIOLATION = 33
ERROR_WRONG_DISK = 34
ERROR_SHARING_BUFFER_EXCEEDED = 36
ERROR_HANDLE_EOF = 38
ERROR_HANDLE_DISK_FULL = 39
ERROR_NOT_SUPPORTED = 50
ERROR_REM_NOT_LIST = 51
ERROR_DUP_NAME = 52
ERROR_BAD_NETPATH = 53
ERROR_NETWORK_BUSY = 54
ERROR_DEV_NOT_EXIST = 55
ERROR_TOO_MANY_CMDS = 56
ERROR_ADAP_HDW_ERR = 57
ERROR_BAD_NET_RESP = 58
ERROR_UNEXP_NET_ERR = 59
ERROR_BAD_REM_ADAP = 60
ERROR_PRINTQ_FULL = 61
ERROR_NO_SPOOL_SPACE = 62
ERROR_PRINT_CANCELLED = 63
ERROR_NETNAME_DELETED = 64
ERROR_NETWORK_ACCESS_DENIED = 65
ERROR_BAD_DEV_TYPE = 66
ERROR_BAD_NET_NAME = 67
ERROR_TOO_MANY_NAMES = 68
ERROR_TOO_MANY_SESS = 69
ERROR_SHARING_PAUSED = 70
ERROR_REQ_NOT_ACCEP = 71
ERROR_REDIR_PAUSED = 72
ERROR_FILE_EXISTS = 80
ERROR_CANNOT_MAKE = 82
ERROR_FAIL_I24 = 83
ERROR_OUT_OF_STRUCTURES = 84
ERROR_ALREADY_ASSIGNED = 85
ERROR_INVALID_PASSWORD = 86
ERROR_INVALID_PARAMETER = 87
ERROR_NET_WRITE_FAULT = 88
ERROR_NO_PROC_SLOTS = 89
ERROR_TOO_MANY_SEMAPHORES = 100
ERROR_EXCL_SEM_ALREADY_OWNED = 101
ERROR_SEM_IS_SET = 102
ERROR_TOO_MANY_SEM_REQUESTS = 103
ERROR_INVALID_AT_INTERRUPT_TIME = 104
ERROR_SEM_OWNER_DIED = 105
ERROR_SEM_USER_LIMIT = 106
ERROR_DISK_CHANGE = 107
ERROR_DRIVE_LOCKED = 108
ERROR_BROKEN_PIPE = 109
ERROR_OPEN_FAILED = 110
ERROR_BUFFER_OVERFLOW = 111
ERROR_DISK_FULL = 112
ERROR_NO_MORE_SEARCH_HANDLES = 113
ERROR_INVALID_TARGET_HANDLE = 114
ERROR_INVALID_CATEGORY = 117
ERROR_INVALID_VERIFY_SWITCH = 118
ERROR_BAD_DRIVER_LEVEL = 119
ERROR_CALL_NOT_IMPLEMENTED = 120
ERROR_SEM_TIMEOUT = 121
ERROR_INSUFFICIENT_BUFFER = 122
ERROR_INVALID_NAME = 123
ERROR_INVALID_LEVEL = 124
ERROR_NO_VOLUME_LABEL = 125
ERROR_MOD_NOT_FOUND = 126
ERROR_PROC_NOT_FOUND = 127
ERROR_WAIT_NO_CHILDREN = 128
ERROR_CHILD_NOT_COMPLETE = 129
ERROR_DIRECT_ACCESS_HANDLE = 130
ERROR_NEGATIVE_SEEK = 131
ERROR_SEEK_ON_DEVICE = 132
ERROR_IS_JOIN_TARGET = 133
ERROR_IS_JOINED = 134
ERROR_IS_SUBSTED = 135
ERROR_NOT_JOINED = 136
ERROR_NOT_SUBSTED = 137
ERROR_JOIN_TO_JOIN = 138
ERROR_SUBST_TO_SUBST = 139
ERROR_JOIN_TO_SUBST = 140
ERROR_SUBST_TO_JOIN = 141
ERROR_BUSY_DRIVE = 142
ERROR_SAME_DRIVE = 143
ERROR_DIR_NOT_ROOT = 144
ERROR_DIR_NOT_EMPTY = 145
ERROR_IS_SUBST_PATH = 146
ERROR_IS_JOIN_PATH = 147
ERROR_PATH_BUSY = 148
ERROR_IS_SUBST_TARGET = 149
ERROR_SYSTEM_TRACE = 150
ERROR_INVALID_EVENT_COUNT = 151
ERROR_TOO_MANY_MUXWAITERS = 152
ERROR_INVALID_LIST_FORMAT = 153
ERROR_LABEL_TOO_LONG = 154
ERROR_TOO_MANY_TCBS = 155
ERROR_SIGNAL_REFUSED = 156
ERROR_DISCARDED = 157
ERROR_NOT_LOCKED = 158
ERROR_BAD_THREADID_ADDR = 159
ERROR_BAD_ARGUMENTS = 160
ERROR_BAD_PATHNAME = 161
ERROR_SIGNAL_PENDING = 162
ERROR_MAX_THRDS_REACHED = 164
ERROR_LOCK_FAILED = 167
ERROR_BUSY = 170
ERROR_CANCEL_VIOLATION = 173
ERROR_ATOMIC_LOCKS_NOT_SUPPORTED = 174
ERROR_INVALID_SEGMENT_NUMBER = 180
ERROR_INVALID_ORDINAL = 182
ERROR_ALREADY_EXISTS = 183
ERROR_INVALID_FLAG_NUMBER = 186
ERROR_SEM_NOT_FOUND = 187
ERROR_INVALID_STARTING_CODESEG = 188
ERROR_INVALID_STACKSEG = 189
ERROR_INVALID_MODULETYPE = 190
ERROR_INVALID_EXE_SIGNATURE = 191
ERROR_EXE_MARKED_INVALID = 192
ERROR_BAD_EXE_FORMAT = 193
ERROR_ITERATED_DATA_EXCEEDS_64k = 194
ERROR_INVALID_MINALLOCSIZE = 195
ERROR_DYNLINK_FROM_INVALID_RING = 196
ERROR_IOPL_NOT_ENABLED = 197
ERROR_INVALID_SEGDPL = 198
ERROR_AUTODATASEG_EXCEEDS_64k = 199
ERROR_RING2SEG_MUST_BE_MOVABLE = 200
ERROR_RELOC_CHAIN_XEEDS_SEGLIM = 201
ERROR_INFLOOP_IN_RELOC_CHAIN = 202
ERROR_ENVVAR_NOT_FOUND = 203
ERROR_NO_SIGNAL_SENT = 205
ERROR_FILENAME_EXCED_RANGE = 206
ERROR_RING2_STACK_IN_USE = 207
ERROR_META_EXPANSION_TOO_LONG = 208
ERROR_INVALID_SIGNAL_NUMBER = 209
ERROR_THREAD_1_INACTIVE = 210
ERROR_LOCKED = 212
ERROR_TOO_MANY_MODULES = 214
ERROR_NESTING_NOT_ALLOWED = 215
ERROR_EXE_MACHINE_TYPE_MISMATCH = 216
ERROR_EXE_CANNOT_MODIFY_SIGNED_BINARY = 217
ERROR_EXE_CANNOT_MODIFY_STRONG_SIGNED_BINARY = 218
ERROR_FILE_CHECKED_OUT = 220
ERROR_CHECKOUT_REQUIRED = 221
ERROR_BAD_FILE_TYPE = 222
ERROR_FILE_TOO_LARGE = 223
ERROR_FORMS_AUTH_REQUIRED = 224
ERROR_VIRUS_INFECTED = 225
ERROR_VIRUS_DELETED = 226
ERROR_PIPE_LOCAL = 229
ERROR_BAD_PIPE = 230
ERROR_PIPE_BUSY = 231
ERROR_NO_DATA = 232
ERROR_PIPE_NOT_CONNECTED = 233
ERROR_MORE_DATA = 234
ERROR_VC_DISCONNECTED = 240
ERROR_INVALID_EA_NAME = 254
ERROR_EA_LIST_INCONSISTENT = 255
WAIT_TIMEOUT = 258
ERROR_NO_MORE_ITEMS = 259
ERROR_CANNOT_COPY = 266
ERROR_DIRECTORY = 267
ERROR_EAS_DIDNT_FIT = 275
ERROR_EA_FILE_CORRUPT = 276
ERROR_EA_TABLE_FULL = 277
ERROR_INVALID_EA_HANDLE = 278
ERROR_EAS_NOT_SUPPORTED = 282
ERROR_NOT_OWNER = 288
ERROR_TOO_MANY_POSTS = 298
ERROR_PARTIAL_COPY = 299
ERROR_OPLOCK_NOT_GRANTED = 300
ERROR_INVALID_OPLOCK_PROTOCOL = 301
ERROR_DISK_TOO_FRAGMENTED = 302
ERROR_DELETE_PENDING = 303
ERROR_MR_MID_NOT_FOUND = 317
ERROR_SCOPE_NOT_FOUND = 318
ERROR_FAIL_NOACTION_REBOOT = 350
ERROR_FAIL_SHUTDOWN = 351
ERROR_FAIL_RESTART = 352
ERROR_MAX_SESSIONS_REACHED = 353
ERROR_THREAD_MODE_ALREADY_BACKGROUND = 400
ERROR_THREAD_MODE_NOT_BACKGROUND = 401
ERROR_PROCESS_MODE_ALREADY_BACKGROUND = 402
ERROR_PROCESS_MODE_NOT_BACKGROUND = 403
ERROR_INVALID_ADDRESS = 487
ERROR_USER_PROFILE_LOAD = 500
ERROR_ARITHMETIC_OVERFLOW = 534
ERROR_PIPE_CONNECTED = 535
ERROR_PIPE_LISTENING = 536
ERROR_VERIFIER_STOP = 537
ERROR_ABIOS_ERROR = 538
ERROR_WX86_WARNING = 539
ERROR_WX86_ERROR = 540
ERROR_TIMER_NOT_CANCELED = 541
ERROR_UNWIND = 542
ERROR_BAD_STACK = 543
ERROR_INVALID_UNWIND_TARGET = 544
ERROR_INVALID_PORT_ATTRIBUTES = 545
ERROR_PORT_MESSAGE_TOO_LONG = 546
ERROR_INVALID_QUOTA_LOWER = 547
ERROR_DEVICE_ALREADY_ATTACHED = 548
ERROR_INSTRUCTION_MISALIGNMENT = 549
ERROR_PROFILING_NOT_STARTED = 550
ERROR_PROFILING_NOT_STOPPED = 551
ERROR_COULD_NOT_INTERPRET = 552
ERROR_PROFILING_AT_LIMIT = 553
ERROR_CANT_WAIT = 554
ERROR_CANT_TERMINATE_SELF = 555
ERROR_UNEXPECTED_MM_CREATE_ERR = 556
ERROR_UNEXPECTED_MM_MAP_ERROR = 557
ERROR_UNEXPECTED_MM_EXTEND_ERR = 558
ERROR_BAD_FUNCTION_TABLE = 559
ERROR_NO_GUID_TRANSLATION = 560
ERROR_INVALID_LDT_SIZE = 561
ERROR_INVALID_LDT_OFFSET = 563
ERROR_INVALID_LDT_DESCRIPTOR = 564
ERROR_TOO_MANY_THREADS = 565
ERROR_THREAD_NOT_IN_PROCESS = 566
ERROR_PAGEFILE_QUOTA_EXCEEDED = 567
ERROR_LOGON_SERVER_CONFLICT = 568
ERROR_SYNCHRONIZATION_REQUIRED = 569
ERROR_NET_OPEN_FAILED = 570
ERROR_IO_PRIVILEGE_FAILED = 571
ERROR_CONTROL_C_EXIT = 572
ERROR_MISSING_SYSTEMFILE = 573
ERROR_UNHANDLED_EXCEPTION = 574
ERROR_APP_INIT_FAILURE = 575
ERROR_PAGEFILE_CREATE_FAILED = 576
ERROR_INVALID_IMAGE_HASH = 577
ERROR_NO_PAGEFILE = 578
ERROR_ILLEGAL_FLOAT_CONTEXT = 579
ERROR_NO_EVENT_PAIR = 580
ERROR_DOMAIN_CTRLR_CONFIG_ERROR = 581
ERROR_ILLEGAL_CHARACTER = 582
ERROR_UNDEFINED_CHARACTER = 583
ERROR_FLOPPY_VOLUME = 584
ERROR_BIOS_FAILED_TO_CONNECT_INTERRUPT = 585
ERROR_BACKUP_CONTROLLER = 586
ERROR_MUTANT_LIMIT_EXCEEDED = 587
ERROR_FS_DRIVER_REQUIRED = 588
ERROR_CANNOT_LOAD_REGISTRY_FILE = 589
ERROR_DEBUG_ATTACH_FAILED = 590
ERROR_SYSTEM_PROCESS_TERMINATED = 591
ERROR_DATA_NOT_ACCEPTED = 592
ERROR_VDM_HARD_ERROR = 593
ERROR_DRIVER_CANCEL_TIMEOUT = 594
ERROR_REPLY_MESSAGE_MISMATCH = 595
ERROR_LOST_WRITEBEHIND_DATA = 596
ERROR_CLIENT_SERVER_PARAMETERS_INVALID = 597
ERROR_NOT_TINY_STREAM = 598
ERROR_STACK_OVERFLOW_READ = 599
ERROR_CONVERT_TO_LARGE = 600
ERROR_FOUND_OUT_OF_SCOPE = 601
ERROR_ALLOCATE_BUCKET = 602
ERROR_MARSHALL_OVERFLOW = 603
ERROR_INVALID_VARIANT = 604
ERROR_BAD_COMPRESSION_BUFFER = 605
ERROR_AUDIT_FAILED = 606
ERROR_TIMER_RESOLUTION_NOT_SET = 607
ERROR_INSUFFICIENT_LOGON_INFO = 608
ERROR_BAD_DLL_ENTRYPOINT = 609
ERROR_BAD_SERVICE_ENTRYPOINT = 610
ERROR_IP_ADDRESS_CONFLICT1 = 611
ERROR_IP_ADDRESS_CONFLICT2 = 612
ERROR_REGISTRY_QUOTA_LIMIT = 613
ERROR_NO_CALLBACK_ACTIVE = 614
ERROR_PWD_TOO_SHORT = 615
ERROR_PWD_TOO_RECENT = 616
ERROR_PWD_HISTORY_CONFLICT = 617
ERROR_UNSUPPORTED_COMPRESSION = 618
ERROR_INVALID_HW_PROFILE = 619
ERROR_INVALID_PLUGPLAY_DEVICE_PATH = 620
ERROR_QUOTA_LIST_INCONSISTENT = 621
ERROR_EVALUATION_EXPIRATION = 622
ERROR_ILLEGAL_DLL_RELOCATION = 623
ERROR_DLL_INIT_FAILED_LOGOFF = 624
ERROR_VALIDATE_CONTINUE = 625
ERROR_NO_MORE_MATCHES = 626
ERROR_RANGE_LIST_CONFLICT = 627
ERROR_SERVER_SID_MISMATCH = 628
ERROR_CANT_ENABLE_DENY_ONLY = 629
ERROR_FLOAT_MULTIPLE_FAULTS = 630
ERROR_FLOAT_MULTIPLE_TRAPS = 631
ERROR_NOINTERFACE = 632
ERROR_DRIVER_FAILED_SLEEP = 633
ERROR_CORRUPT_SYSTEM_FILE = 634
ERROR_COMMITMENT_MINIMUM = 635
ERROR_PNP_RESTART_ENUMERATION = 636
ERROR_SYSTEM_IMAGE_BAD_SIGNATURE = 637
ERROR_PNP_REBOOT_REQUIRED = 638
ERROR_INSUFFICIENT_POWER = 639
ERROR_MULTIPLE_FAULT_VIOLATION = 640
ERROR_SYSTEM_SHUTDOWN = 641
ERROR_PORT_NOT_SET = 642
ERROR_DS_VERSION_CHECK_FAILURE = 643
ERROR_RANGE_NOT_FOUND = 644
ERROR_NOT_SAFE_MODE_DRIVER = 646
ERROR_FAILED_DRIVER_ENTRY = 647
ERROR_DEVICE_ENUMERATION_ERROR = 648
ERROR_MOUNT_POINT_NOT_RESOLVED = 649
ERROR_INVALID_DEVICE_OBJECT_PARAMETER = 650
ERROR_MCA_OCCURED = 651
ERROR_DRIVER_DATABASE_ERROR = 652
ERROR_SYSTEM_HIVE_TOO_LARGE = 653
ERROR_DRIVER_FAILED_PRIOR_UNLOAD = 654
ERROR_VOLSNAP_PREPARE_HIBERNATE = 655
ERROR_HIBERNATION_FAILURE = 656
ERROR_FILE_SYSTEM_LIMITATION = 665
ERROR_ASSERTION_FAILURE = 668
ERROR_ACPI_ERROR = 669
ERROR_WOW_ASSERTION = 670
ERROR_PNP_BAD_MPS_TABLE = 671
ERROR_PNP_TRANSLATION_FAILED = 672
ERROR_PNP_IRQ_TRANSLATION_FAILED = 673
ERROR_PNP_INVALID_ID = 674
ERROR_WAKE_SYSTEM_DEBUGGER = 675
ERROR_HANDLES_CLOSED = 676
ERROR_EXTRANEOUS_INFORMATION = 677
ERROR_RXACT_COMMIT_NECESSARY = 678
ERROR_MEDIA_CHECK = 679
ERROR_GUID_SUBSTITUTION_MADE = 680
ERROR_STOPPED_ON_SYMLINK = 681
ERROR_LONGJUMP = 682
ERROR_PLUGPLAY_QUERY_VETOED = 683
ERROR_UNWIND_CONSOLIDATE = 684
ERROR_REGISTRY_HIVE_RECOVERED = 685
ERROR_DLL_MIGHT_BE_INSECURE = 686
ERROR_DLL_MIGHT_BE_INCOMPATIBLE = 687
ERROR_DBG_EXCEPTION_NOT_HANDLED = 688
ERROR_DBG_REPLY_LATER = 689
ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE = 690
ERROR_DBG_TERMINATE_THREAD = 691
ERROR_DBG_TERMINATE_PROCESS = 692
ERROR_DBG_CONTROL_C = 693
ERROR_DBG_PRINTEXCEPTION_C = 694
ERROR_DBG_RIPEXCEPTION = 695
ERROR_DBG_CONTROL_BREAK = 696
ERROR_DBG_COMMAND_EXCEPTION = 697
ERROR_OBJECT_NAME_EXISTS = 698
ERROR_THREAD_WAS_SUSPENDED = 699
ERROR_IMAGE_NOT_AT_BASE = 700
ERROR_RXACT_STATE_CREATED = 701
ERROR_SEGMENT_NOTIFICATION = 702
ERROR_BAD_CURRENT_DIRECTORY = 703
ERROR_FT_READ_RECOVERY_FROM_BACKUP = 704
ERROR_FT_WRITE_RECOVERY = 705
ERROR_IMAGE_MACHINE_TYPE_MISMATCH = 706
ERROR_RECEIVE_PARTIAL = 707
ERROR_RECEIVE_EXPEDITED = 708
ERROR_RECEIVE_PARTIAL_EXPEDITED = 709
ERROR_EVENT_DONE = 710
ERROR_EVENT_PENDING = 711
ERROR_CHECKING_FILE_SYSTEM = 712
ERROR_FATAL_APP_EXIT = 713
ERROR_PREDEFINED_HANDLE = 714
ERROR_WAS_UNLOCKED = 715
ERROR_SERVICE_NOTIFICATION = 716
ERROR_WAS_LOCKED = 717
ERROR_LOG_HARD_ERROR = 718
ERROR_ALREADY_WIN32 = 719
ERROR_IMAGE_MACHINE_TYPE_MISMATCH_EXE = 720
ERROR_NO_YIELD_PERFORMED = 721
ERROR_TIMER_RESUME_IGNORED = 722
ERROR_ARBITRATION_UNHANDLED = 723
ERROR_CARDBUS_NOT_SUPPORTED = 724
ERROR_MP_PROCESSOR_MISMATCH = 725
ERROR_HIBERNATED = 726
ERROR_RESUME_HIBERNATION = 727
ERROR_FIRMWARE_UPDATED = 728
ERROR_DRIVERS_LEAKING_LOCKED_PAGES = 729
ERROR_WAKE_SYSTEM = 730
ERROR_WAIT_1 = 731
ERROR_WAIT_2 = 732
ERROR_WAIT_3 = 733
ERROR_WAIT_63 = 734
ERROR_ABANDONED_WAIT_0 = 735
ERROR_ABANDONED_WAIT_63 = 736
ERROR_USER_APC = 737
ERROR_KERNEL_APC = 738
ERROR_ALERTED = 739
ERROR_ELEVATION_REQUIRED = 740
ERROR_REPARSE = 741
ERROR_OPLOCK_BREAK_IN_PROGRESS = 742
ERROR_VOLUME_MOUNTED = 743
ERROR_RXACT_COMMITTED = 744
ERROR_NOTIFY_CLEANUP = 745
ERROR_PRIMARY_TRANSPORT_CONNECT_FAILED = 746
ERROR_PAGE_FAULT_TRANSITION = 747
ERROR_PAGE_FAULT_DEMAND_ZERO = 748
ERROR_PAGE_FAULT_COPY_ON_WRITE = 749
ERROR_PAGE_FAULT_GUARD_PAGE = 750
ERROR_PAGE_FAULT_PAGING_FILE = 751
ERROR_CACHE_PAGE_LOCKED = 752
ERROR_CRASH_DUMP = 753
ERROR_BUFFER_ALL_ZEROS = 754
ERROR_REPARSE_OBJECT = 755
ERROR_RESOURCE_REQUIREMENTS_CHANGED = 756
ERROR_TRANSLATION_COMPLETE = 757
ERROR_NOTHING_TO_TERMINATE = 758
ERROR_PROCESS_NOT_IN_JOB = 759
ERROR_PROCESS_IN_JOB = 760
ERROR_VOLSNAP_HIBERNATE_READY = 761
ERROR_FSFILTER_OP_COMPLETED_SUCCESSFULLY = 762
ERROR_INTERRUPT_VECTOR_ALREADY_CONNECTED = 763
ERROR_INTERRUPT_STILL_CONNECTED = 764
ERROR_WAIT_FOR_OPLOCK = 765
ERROR_DBG_EXCEPTION_HANDLED = 766
ERROR_DBG_CONTINUE = 767
ERROR_CALLBACK_POP_STACK = 768
ERROR_COMPRESSION_DISABLED = 769
ERROR_CANTFETCHBACKWARDS = 770
ERROR_CANTSCROLLBACKWARDS = 771
ERROR_ROWSNOTRELEASED = 772
ERROR_BAD_ACCESSOR_FLAGS = 773
ERROR_ERRORS_ENCOUNTERED = 774
ERROR_NOT_CAPABLE = 775
ERROR_REQUEST_OUT_OF_SEQUENCE = 776
ERROR_VERSION_PARSE_ERROR = 777
ERROR_BADSTARTPOSITION = 778
ERROR_MEMORY_HARDWARE = 779
ERROR_DISK_REPAIR_DISABLED = 780
ERROR_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE = 781
ERROR_SYSTEM_POWERSTATE_TRANSITION = 782
ERROR_SYSTEM_POWERSTATE_COMPLEX_TRANSITION = 783
ERROR_MCA_EXCEPTION = 784
ERROR_ACCESS_AUDIT_BY_POLICY = 785
ERROR_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY = 786
ERROR_ABANDON_HIBERFILE = 787
ERROR_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED = 788
ERROR_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR = 789
ERROR_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR = 790
ERROR_BAD_MCFG_TABLE = 791
ERROR_EA_ACCESS_DENIED = 994
ERROR_OPERATION_ABORTED = 995
ERROR_IO_INCOMPLETE = 996
ERROR_IO_PENDING = 997
ERROR_NOACCESS = 998
ERROR_SWAPERROR = 999
ERROR_STACK_OVERFLOW = 1001
ERROR_INVALID_MESSAGE = 1002
ERROR_CAN_NOT_COMPLETE = 1003
ERROR_INVALID_FLAGS = 1004
ERROR_UNRECOGNIZED_VOLUME = 1005
ERROR_FILE_INVALID = 1006
ERROR_FULLSCREEN_MODE = 1007
ERROR_NO_TOKEN = 1008
ERROR_BADDB = 1009
ERROR_BADKEY = 1010
ERROR_CANTOPEN = 1011
ERROR_CANTREAD = 1012
ERROR_CANTWRITE = 1013
ERROR_REGISTRY_RECOVERED = 1014
ERROR_REGISTRY_CORRUPT = 1015
ERROR_REGISTRY_IO_FAILED = 1016
ERROR_NOT_REGISTRY_FILE = 1017
ERROR_KEY_DELETED = 1018
ERROR_NO_LOG_SPACE = 1019
ERROR_KEY_HAS_CHILDREN = 1020
ERROR_CHILD_MUST_BE_VOLATILE = 1021
ERROR_NOTIFY_ENUM_DIR = 1022
ERROR_DEPENDENT_SERVICES_RUNNING = 1051
ERROR_INVALID_SERVICE_CONTROL = 1052
ERROR_SERVICE_REQUEST_TIMEOUT = 1053
ERROR_SERVICE_NO_THREAD = 1054
ERROR_SERVICE_DATABASE_LOCKED = 1055
ERROR_SERVICE_ALREADY_RUNNING = 1056
ERROR_INVALID_SERVICE_ACCOUNT = 1057
ERROR_SERVICE_DISABLED = 1058
ERROR_CIRCULAR_DEPENDENCY = 1059
ERROR_SERVICE_DOES_NOT_EXIST = 1060
ERROR_SERVICE_CANNOT_ACCEPT_CTRL = 1061
ERROR_SERVICE_NOT_ACTIVE = 1062
ERROR_FAILED_SERVICE_CONTROLLER_CONNECT = 1063
ERROR_EXCEPTION_IN_SERVICE = 1064
ERROR_DATABASE_DOES_NOT_EXIST = 1065
ERROR_SERVICE_SPECIFIC_ERROR = 1066
ERROR_PROCESS_ABORTED = 1067
ERROR_SERVICE_DEPENDENCY_FAIL = 1068
ERROR_SERVICE_LOGON_FAILED = 1069
ERROR_SERVICE_START_HANG = 1070
ERROR_INVALID_SERVICE_LOCK = 1071
ERROR_SERVICE_MARKED_FOR_DELETE = 1072
ERROR_SERVICE_EXISTS = 1073
ERROR_ALREADY_RUNNING_LKG = 1074
ERROR_SERVICE_DEPENDENCY_DELETED = 1075
ERROR_BOOT_ALREADY_ACCEPTED = 1076
ERROR_SERVICE_NEVER_STARTED = 1077
ERROR_DUPLICATE_SERVICE_NAME = 1078
ERROR_DIFFERENT_SERVICE_ACCOUNT = 1079
ERROR_CANNOT_DETECT_DRIVER_FAILURE = 1080
ERROR_CANNOT_DETECT_PROCESS_ABORT = 1081
ERROR_NO_RECOVERY_PROGRAM = 1082
ERROR_SERVICE_NOT_IN_EXE = 1083
ERROR_NOT_SAFEBOOT_SERVICE = 1084
ERROR_END_OF_MEDIA = 1100
ERROR_FILEMARK_DETECTED = 1101
ERROR_BEGINNING_OF_MEDIA = 1102
ERROR_SETMARK_DETECTED = 1103
ERROR_NO_DATA_DETECTED = 1104
ERROR_PARTITION_FAILURE = 1105
ERROR_INVALID_BLOCK_LENGTH = 1106
ERROR_DEVICE_NOT_PARTITIONED = 1107
ERROR_UNABLE_TO_LOCK_MEDIA = 1108
ERROR_UNABLE_TO_UNLOAD_MEDIA = 1109
ERROR_MEDIA_CHANGED = 1110
ERROR_BUS_RESET = 1111
ERROR_NO_MEDIA_IN_DRIVE = 1112
ERROR_NO_UNICODE_TRANSLATION = 1113
ERROR_DLL_INIT_FAILED = 1114
ERROR_SHUTDOWN_IN_PROGRESS = 1115
ERROR_NO_SHUTDOWN_IN_PROGRESS = 1116
ERROR_IO_DEVICE = 1117
ERROR_SERIAL_NO_DEVICE = 1118
ERROR_IRQ_BUSY = 1119
ERROR_MORE_WRITES = 1120
ERROR_COUNTER_TIMEOUT = 1121
ERROR_FLOPPY_ID_MARK_NOT_FOUND = 1122
ERROR_FLOPPY_WRONG_CYLINDER = 1123
ERROR_FLOPPY_UNKNOWN_ERROR = 1124
ERROR_FLOPPY_BAD_REGISTERS = 1125
ERROR_DISK_RECALIBRATE_FAILED = 1126
ERROR_DISK_OPERATION_FAILED = 1127
ERROR_DISK_RESET_FAILED = 1128
ERROR_EOM_OVERFLOW = 1129
ERROR_NOT_ENOUGH_SERVER_MEMORY = 1130
ERROR_POSSIBLE_DEADLOCK = 1131
ERROR_MAPPED_ALIGNMENT = 1132
ERROR_SET_POWER_STATE_VETOED = 1140
ERROR_SET_POWER_STATE_FAILED = 1141
ERROR_TOO_MANY_LINKS = 1142
ERROR_OLD_WIN_VERSION = 1150
ERROR_APP_WRONG_OS = 1151
ERROR_SINGLE_INSTANCE_APP = 1152
ERROR_RMODE_APP = 1153
ERROR_INVALID_DLL = 1154
ERROR_NO_ASSOCIATION = 1155
ERROR_DDE_FAIL = 1156
ERROR_DLL_NOT_FOUND = 1157
ERROR_NO_MORE_USER_HANDLES = 1158
ERROR_MESSAGE_SYNC_ONLY = 1159
ERROR_SOURCE_ELEMENT_EMPTY = 1160
ERROR_DESTINATION_ELEMENT_FULL = 1161
ERROR_ILLEGAL_ELEMENT_ADDRESS = 1162
ERROR_MAGAZINE_NOT_PRESENT = 1163
ERROR_DEVICE_REINITIALIZATION_NEEDED = 1164
ERROR_DEVICE_REQUIRES_CLEANING = 1165
ERROR_DEVICE_DOOR_OPEN = 1166
ERROR_DEVICE_NOT_CONNECTED = 1167
ERROR_NOT_FOUND = 1168
ERROR_NO_MATCH = 1169
ERROR_SET_NOT_FOUND = 1170
ERROR_POINT_NOT_FOUND = 1171
ERROR_NO_TRACKING_SERVICE = 1172
ERROR_NO_VOLUME_ID = 1173
ERROR_CONNECTED_OTHER_PASSWORD = 2108
ERROR_BAD_USERNAME = 2202
ERROR_NOT_CONNECTED = 2250
ERROR_OPEN_FILES = 2401
ERROR_ACTIVE_CONNECTIONS = 2402
ERROR_DEVICE_IN_USE = 2404
ERROR_BAD_DEVICE = 1200
ERROR_CONNECTION_UNAVAIL = 1201
ERROR_DEVICE_ALREADY_REMEMBERED = 1202
ERROR_NO_NET_OR_BAD_PATH = 1203
ERROR_BAD_PROVIDER = 1204
ERROR_CANNOT_OPEN_PROFILE = 1205
ERROR_BAD_PROFILE = 1206
ERROR_NOT_CONTAINER = 1207
ERROR_EXTENDED_ERROR = 1208
ERROR_INVALID_GROUPNAME = 1209
ERROR_INVALID_COMPUTERNAME = 1210
ERROR_INVALID_EVENTNAME = 1211
ERROR_INVALID_DOMAINNAME = 1212
ERROR_INVALID_SERVICENAME = 1213
ERROR_INVALID_NETNAME = 1214
ERROR_INVALID_SHARENAME = 1215
ERROR_INVALID_PASSWORDNAME = 1216
ERROR_INVALID_MESSAGENAME = 1217
ERROR_INVALID_MESSAGEDEST = 1218
ERROR_SESSION_CREDENTIAL_CONFLICT = 1219
ERROR_REMOTE_SESSION_LIMIT_EXCEEDED = 1220
ERROR_DUP_DOMAINNAME = 1221
ERROR_NO_NETWORK = 1222
ERROR_CANCELLED = 1223
ERROR_USER_MAPPED_FILE = 1224
ERROR_CONNECTION_REFUSED = 1225
ERROR_GRACEFUL_DISCONNECT = 1226
ERROR_ADDRESS_ALREADY_ASSOCIATED = 1227
ERROR_ADDRESS_NOT_ASSOCIATED = 1228
ERROR_CONNECTION_INVALID = 1229
ERROR_CONNECTION_ACTIVE = 1230
ERROR_NETWORK_UNREACHABLE = 1231
ERROR_HOST_UNREACHABLE = 1232
ERROR_PROTOCOL_UNREACHABLE = 1233
ERROR_PORT_UNREACHABLE = 1234
ERROR_REQUEST_ABORTED = 1235
ERROR_CONNECTION_ABORTED = 1236
ERROR_RETRY = 1237
ERROR_CONNECTION_COUNT_LIMIT = 1238
ERROR_LOGIN_TIME_RESTRICTION = 1239
ERROR_LOGIN_WKSTA_RESTRICTION = 1240
ERROR_INCORRECT_ADDRESS = 1241
ERROR_ALREADY_REGISTERED = 1242
ERROR_SERVICE_NOT_FOUND = 1243
ERROR_NOT_AUTHENTICATED = 1244
ERROR_NOT_LOGGED_ON = 1245
ERROR_CONTINUE = 1246
ERROR_ALREADY_INITIALIZED = 1247
ERROR_NO_MORE_DEVICES = 1248
ERROR_NO_SUCH_SITE = 1249
ERROR_DOMAIN_CONTROLLER_EXISTS = 1250
ERROR_DS_NOT_INSTALLED = 1251
ERROR_NOT_ALL_ASSIGNED = 1300
ERROR_SOME_NOT_MAPPED = 1301
ERROR_NO_QUOTAS_FOR_ACCOUNT = 1302
ERROR_LOCAL_USER_SESSION_KEY = 1303
ERROR_NULL_LM_PASSWORD = 1304
ERROR_UNKNOWN_REVISION = 1305
ERROR_REVISION_MISMATCH = 1306
ERROR_INVALID_OWNER = 1307
ERROR_INVALID_PRIMARY_GROUP = 1308
# Security, account and logon error codes (1309-1395), mirroring winerror.h.
# Auto-generated constant table: names and values must track the Windows SDK
# header exactly -- do not hand-edit values.
ERROR_NO_IMPERSONATION_TOKEN = 1309
ERROR_CANT_DISABLE_MANDATORY = 1310
ERROR_NO_LOGON_SERVERS = 1311
ERROR_NO_SUCH_LOGON_SESSION = 1312
ERROR_NO_SUCH_PRIVILEGE = 1313
ERROR_PRIVILEGE_NOT_HELD = 1314
ERROR_INVALID_ACCOUNT_NAME = 1315
ERROR_USER_EXISTS = 1316
ERROR_NO_SUCH_USER = 1317
ERROR_GROUP_EXISTS = 1318
ERROR_NO_SUCH_GROUP = 1319
ERROR_MEMBER_IN_GROUP = 1320
ERROR_MEMBER_NOT_IN_GROUP = 1321
ERROR_LAST_ADMIN = 1322
ERROR_WRONG_PASSWORD = 1323
ERROR_ILL_FORMED_PASSWORD = 1324
ERROR_PASSWORD_RESTRICTION = 1325
ERROR_LOGON_FAILURE = 1326
ERROR_ACCOUNT_RESTRICTION = 1327
ERROR_INVALID_LOGON_HOURS = 1328
ERROR_INVALID_WORKSTATION = 1329
ERROR_PASSWORD_EXPIRED = 1330
ERROR_ACCOUNT_DISABLED = 1331
ERROR_NONE_MAPPED = 1332
ERROR_TOO_MANY_LUIDS_REQUESTED = 1333
ERROR_LUIDS_EXHAUSTED = 1334
ERROR_INVALID_SUB_AUTHORITY = 1335
ERROR_INVALID_ACL = 1336
ERROR_INVALID_SID = 1337
ERROR_INVALID_SECURITY_DESCR = 1338
# NOTE(review): 1339 is intentionally absent here; winerror.h skips it too.
ERROR_BAD_INHERITANCE_ACL = 1340
ERROR_SERVER_DISABLED = 1341
ERROR_SERVER_NOT_DISABLED = 1342
ERROR_INVALID_ID_AUTHORITY = 1343
ERROR_ALLOTTED_SPACE_EXCEEDED = 1344
ERROR_INVALID_GROUP_ATTRIBUTES = 1345
ERROR_BAD_IMPERSONATION_LEVEL = 1346
ERROR_CANT_OPEN_ANONYMOUS = 1347
ERROR_BAD_VALIDATION_CLASS = 1348
ERROR_BAD_TOKEN_TYPE = 1349
ERROR_NO_SECURITY_ON_OBJECT = 1350
ERROR_CANT_ACCESS_DOMAIN_INFO = 1351
ERROR_INVALID_SERVER_STATE = 1352
ERROR_INVALID_DOMAIN_STATE = 1353
ERROR_INVALID_DOMAIN_ROLE = 1354
ERROR_NO_SUCH_DOMAIN = 1355
ERROR_DOMAIN_EXISTS = 1356
ERROR_DOMAIN_LIMIT_EXCEEDED = 1357
ERROR_INTERNAL_DB_CORRUPTION = 1358
ERROR_INTERNAL_ERROR = 1359
ERROR_GENERIC_NOT_MAPPED = 1360
ERROR_BAD_DESCRIPTOR_FORMAT = 1361
ERROR_NOT_LOGON_PROCESS = 1362
ERROR_LOGON_SESSION_EXISTS = 1363
ERROR_NO_SUCH_PACKAGE = 1364
ERROR_BAD_LOGON_SESSION_STATE = 1365
ERROR_LOGON_SESSION_COLLISION = 1366
ERROR_INVALID_LOGON_TYPE = 1367
ERROR_CANNOT_IMPERSONATE = 1368
ERROR_RXACT_INVALID_STATE = 1369
ERROR_RXACT_COMMIT_FAILURE = 1370
ERROR_SPECIAL_ACCOUNT = 1371
ERROR_SPECIAL_GROUP = 1372
ERROR_SPECIAL_USER = 1373
ERROR_MEMBERS_PRIMARY_GROUP = 1374
ERROR_TOKEN_ALREADY_IN_USE = 1375
ERROR_NO_SUCH_ALIAS = 1376
ERROR_MEMBER_NOT_IN_ALIAS = 1377
ERROR_MEMBER_IN_ALIAS = 1378
ERROR_ALIAS_EXISTS = 1379
ERROR_LOGON_NOT_GRANTED = 1380
ERROR_TOO_MANY_SECRETS = 1381
ERROR_SECRET_TOO_LONG = 1382
ERROR_INTERNAL_DB_ERROR = 1383
ERROR_TOO_MANY_CONTEXT_IDS = 1384
ERROR_LOGON_TYPE_NOT_GRANTED = 1385
ERROR_NT_CROSS_ENCRYPTION_REQUIRED = 1386
ERROR_NO_SUCH_MEMBER = 1387
ERROR_INVALID_MEMBER = 1388
ERROR_TOO_MANY_SIDS = 1389
ERROR_LM_CROSS_ENCRYPTION_REQUIRED = 1390
ERROR_NO_INHERITANCE = 1391
ERROR_FILE_CORRUPT = 1392
ERROR_DISK_CORRUPT = 1393
ERROR_NO_USER_SESSION_KEY = 1394
ERROR_LICENSE_QUOTA_EXCEEDED = 1395
# Window-manager / USER subsystem error codes (1400-1469) and
# event-log service error codes (1500-1503), mirroring winerror.h.
ERROR_INVALID_WINDOW_HANDLE = 1400
ERROR_INVALID_MENU_HANDLE = 1401
ERROR_INVALID_CURSOR_HANDLE = 1402
ERROR_INVALID_ACCEL_HANDLE = 1403
ERROR_INVALID_HOOK_HANDLE = 1404
ERROR_INVALID_DWP_HANDLE = 1405
ERROR_TLW_WITH_WSCHILD = 1406
ERROR_CANNOT_FIND_WND_CLASS = 1407
ERROR_WINDOW_OF_OTHER_THREAD = 1408
ERROR_HOTKEY_ALREADY_REGISTERED = 1409
ERROR_CLASS_ALREADY_EXISTS = 1410
ERROR_CLASS_DOES_NOT_EXIST = 1411
ERROR_CLASS_HAS_WINDOWS = 1412
ERROR_INVALID_INDEX = 1413
ERROR_INVALID_ICON_HANDLE = 1414
ERROR_PRIVATE_DIALOG_INDEX = 1415
ERROR_LISTBOX_ID_NOT_FOUND = 1416
ERROR_NO_WILDCARD_CHARACTERS = 1417
ERROR_CLIPBOARD_NOT_OPEN = 1418
ERROR_HOTKEY_NOT_REGISTERED = 1419
ERROR_WINDOW_NOT_DIALOG = 1420
ERROR_CONTROL_ID_NOT_FOUND = 1421
ERROR_INVALID_COMBOBOX_MESSAGE = 1422
ERROR_WINDOW_NOT_COMBOBOX = 1423
ERROR_INVALID_EDIT_HEIGHT = 1424
ERROR_DC_NOT_FOUND = 1425
ERROR_INVALID_HOOK_FILTER = 1426
ERROR_INVALID_FILTER_PROC = 1427
ERROR_HOOK_NEEDS_HMOD = 1428
ERROR_GLOBAL_ONLY_HOOK = 1429
ERROR_JOURNAL_HOOK_SET = 1430
ERROR_HOOK_NOT_INSTALLED = 1431
ERROR_INVALID_LB_MESSAGE = 1432
ERROR_SETCOUNT_ON_BAD_LB = 1433
ERROR_LB_WITHOUT_TABSTOPS = 1434
ERROR_DESTROY_OBJECT_OF_OTHER_THREAD = 1435
ERROR_CHILD_WINDOW_MENU = 1436
ERROR_NO_SYSTEM_MENU = 1437
ERROR_INVALID_MSGBOX_STYLE = 1438
ERROR_INVALID_SPI_VALUE = 1439
ERROR_SCREEN_ALREADY_LOCKED = 1440
ERROR_HWNDS_HAVE_DIFF_PARENT = 1441
ERROR_NOT_CHILD_WINDOW = 1442
ERROR_INVALID_GW_COMMAND = 1443
ERROR_INVALID_THREAD_ID = 1444
ERROR_NON_MDICHILD_WINDOW = 1445
ERROR_POPUP_ALREADY_ACTIVE = 1446
ERROR_NO_SCROLLBARS = 1447
ERROR_INVALID_SCROLLBAR_RANGE = 1448
ERROR_INVALID_SHOWWIN_COMMAND = 1449
ERROR_NO_SYSTEM_RESOURCES = 1450
ERROR_NONPAGED_SYSTEM_RESOURCES = 1451
ERROR_PAGED_SYSTEM_RESOURCES = 1452
ERROR_WORKING_SET_QUOTA = 1453
ERROR_PAGEFILE_QUOTA = 1454
ERROR_COMMITMENT_LIMIT = 1455
ERROR_MENU_ITEM_NOT_FOUND = 1456
ERROR_INVALID_KEYBOARD_HANDLE = 1457
ERROR_HOOK_TYPE_NOT_ALLOWED = 1458
ERROR_REQUIRES_INTERACTIVE_WINDOWSTATION = 1459
ERROR_TIMEOUT = 1460
ERROR_INVALID_MONITOR_HANDLE = 1461
ERROR_INCORRECT_SIZE = 1462
ERROR_SYMLINK_CLASS_DISABLED = 1463
ERROR_SYMLINK_NOT_SUPPORTED = 1464
ERROR_XML_PARSE_ERROR = 1465
ERROR_XMLDSIG_ERROR = 1466
ERROR_RESTART_APPLICATION = 1467
ERROR_WRONG_COMPARTMENT = 1468
ERROR_AUTHIP_FAILURE = 1469
ERROR_EVENTLOG_FILE_CORRUPT = 1500
ERROR_EVENTLOG_CANT_START = 1501
ERROR_LOG_FILE_FULL = 1502
ERROR_EVENTLOG_FILE_CHANGED = 1503
# NOTE(review): the next three assignments exactly duplicate the ones just
# above (same names, same values). Harmless at runtime, presumably an
# artifact of the header-to-module generator -- confirm before removing.
ERROR_EVENTLOG_FILE_CORRUPT = 1500
ERROR_EVENTLOG_CANT_START = 1501
ERROR_LOG_FILE_FULL = 1502
# Windows Installer (MSI) error codes (1601-1652) followed by RPC runtime
# status codes (RPC_S_* / RPC_X_* / EPT_S_*, 1700-1783) and trust/netlogon
# errors up to 1799, mirroring winerror.h.
ERROR_INSTALL_SERVICE = 1601
ERROR_INSTALL_USEREXIT = 1602
ERROR_INSTALL_FAILURE = 1603
ERROR_INSTALL_SUSPEND = 1604
ERROR_UNKNOWN_PRODUCT = 1605
ERROR_UNKNOWN_FEATURE = 1606
ERROR_UNKNOWN_COMPONENT = 1607
ERROR_UNKNOWN_PROPERTY = 1608
ERROR_INVALID_HANDLE_STATE = 1609
ERROR_BAD_CONFIGURATION = 1610
ERROR_INDEX_ABSENT = 1611
ERROR_INSTALL_SOURCE_ABSENT = 1612
ERROR_BAD_DATABASE_VERSION = 1613
ERROR_PRODUCT_UNINSTALLED = 1614
ERROR_BAD_QUERY_SYNTAX = 1615
ERROR_INVALID_FIELD = 1616
ERROR_DEVICE_REMOVED = 1617
ERROR_INSTALL_ALREADY_RUNNING = 1618
ERROR_INSTALL_PACKAGE_OPEN_FAILED = 1619
ERROR_INSTALL_PACKAGE_INVALID = 1620
ERROR_INSTALL_UI_FAILURE = 1621
ERROR_INSTALL_LOG_FAILURE = 1622
ERROR_INSTALL_LANGUAGE_UNSUPPORTED = 1623
ERROR_INSTALL_TRANSFORM_FAILURE = 1624
ERROR_INSTALL_PACKAGE_REJECTED = 1625
ERROR_FUNCTION_NOT_CALLED = 1626
ERROR_FUNCTION_FAILED = 1627
ERROR_INVALID_TABLE = 1628
ERROR_DATATYPE_MISMATCH = 1629
ERROR_UNSUPPORTED_TYPE = 1630
ERROR_CREATE_FAILED = 1631
ERROR_INSTALL_TEMP_UNWRITABLE = 1632
ERROR_INSTALL_PLATFORM_UNSUPPORTED = 1633
ERROR_INSTALL_NOTUSED = 1634
ERROR_PATCH_PACKAGE_OPEN_FAILED = 1635
ERROR_PATCH_PACKAGE_INVALID = 1636
ERROR_PATCH_PACKAGE_UNSUPPORTED = 1637
ERROR_PRODUCT_VERSION = 1638
ERROR_INVALID_COMMAND_LINE = 1639
ERROR_INSTALL_REMOTE_DISALLOWED = 1640
ERROR_SUCCESS_REBOOT_INITIATED = 1641
ERROR_PATCH_TARGET_NOT_FOUND = 1642
ERROR_PATCH_PACKAGE_REJECTED = 1643
ERROR_INSTALL_TRANSFORM_REJECTED = 1644
ERROR_INSTALL_REMOTE_PROHIBITED = 1645
ERROR_PATCH_REMOVAL_UNSUPPORTED = 1646
ERROR_UNKNOWN_PATCH = 1647
ERROR_PATCH_NO_SEQUENCE = 1648
ERROR_PATCH_REMOVAL_DISALLOWED = 1649
ERROR_INVALID_PATCH_XML = 1650
ERROR_PATCH_MANAGED_ADVERTISED_PRODUCT = 1651
ERROR_INSTALL_SERVICE_SAFEBOOT = 1652
# RPC runtime status codes.
RPC_S_INVALID_STRING_BINDING = 1700
RPC_S_WRONG_KIND_OF_BINDING = 1701
RPC_S_INVALID_BINDING = 1702
RPC_S_PROTSEQ_NOT_SUPPORTED = 1703
RPC_S_INVALID_RPC_PROTSEQ = 1704
RPC_S_INVALID_STRING_UUID = 1705
RPC_S_INVALID_ENDPOINT_FORMAT = 1706
RPC_S_INVALID_NET_ADDR = 1707
RPC_S_NO_ENDPOINT_FOUND = 1708
RPC_S_INVALID_TIMEOUT = 1709
RPC_S_OBJECT_NOT_FOUND = 1710
RPC_S_ALREADY_REGISTERED = 1711
RPC_S_TYPE_ALREADY_REGISTERED = 1712
RPC_S_ALREADY_LISTENING = 1713
RPC_S_NO_PROTSEQS_REGISTERED = 1714
RPC_S_NOT_LISTENING = 1715
RPC_S_UNKNOWN_MGR_TYPE = 1716
RPC_S_UNKNOWN_IF = 1717
RPC_S_NO_BINDINGS = 1718
RPC_S_NO_PROTSEQS = 1719
RPC_S_CANT_CREATE_ENDPOINT = 1720
RPC_S_OUT_OF_RESOURCES = 1721
RPC_S_SERVER_UNAVAILABLE = 1722
RPC_S_SERVER_TOO_BUSY = 1723
RPC_S_INVALID_NETWORK_OPTIONS = 1724
RPC_S_NO_CALL_ACTIVE = 1725
RPC_S_CALL_FAILED = 1726
RPC_S_CALL_FAILED_DNE = 1727
RPC_S_PROTOCOL_ERROR = 1728
RPC_S_PROXY_ACCESS_DENIED = 1729
RPC_S_UNSUPPORTED_TRANS_SYN = 1730
RPC_S_UNSUPPORTED_TYPE = 1732
RPC_S_INVALID_TAG = 1733
RPC_S_INVALID_BOUND = 1734
RPC_S_NO_ENTRY_NAME = 1735
RPC_S_INVALID_NAME_SYNTAX = 1736
RPC_S_UNSUPPORTED_NAME_SYNTAX = 1737
RPC_S_UUID_NO_ADDRESS = 1739
RPC_S_DUPLICATE_ENDPOINT = 1740
RPC_S_UNKNOWN_AUTHN_TYPE = 1741
RPC_S_MAX_CALLS_TOO_SMALL = 1742
RPC_S_STRING_TOO_LONG = 1743
RPC_S_PROTSEQ_NOT_FOUND = 1744
RPC_S_PROCNUM_OUT_OF_RANGE = 1745
RPC_S_BINDING_HAS_NO_AUTH = 1746
RPC_S_UNKNOWN_AUTHN_SERVICE = 1747
RPC_S_UNKNOWN_AUTHN_LEVEL = 1748
RPC_S_INVALID_AUTH_IDENTITY = 1749
RPC_S_UNKNOWN_AUTHZ_SERVICE = 1750
EPT_S_INVALID_ENTRY = 1751
EPT_S_CANT_PERFORM_OP = 1752
EPT_S_NOT_REGISTERED = 1753
RPC_S_NOTHING_TO_EXPORT = 1754
RPC_S_INCOMPLETE_NAME = 1755
RPC_S_INVALID_VERS_OPTION = 1756
RPC_S_NO_MORE_MEMBERS = 1757
RPC_S_NOT_ALL_OBJS_UNEXPORTED = 1758
RPC_S_INTERFACE_NOT_FOUND = 1759
RPC_S_ENTRY_ALREADY_EXISTS = 1760
RPC_S_ENTRY_NOT_FOUND = 1761
RPC_S_NAME_SERVICE_UNAVAILABLE = 1762
RPC_S_INVALID_NAF_ID = 1763
RPC_S_CANNOT_SUPPORT = 1764
RPC_S_NO_CONTEXT_AVAILABLE = 1765
RPC_S_INTERNAL_ERROR = 1766
RPC_S_ZERO_DIVIDE = 1767
RPC_S_ADDRESS_ERROR = 1768
RPC_S_FP_DIV_ZERO = 1769
RPC_S_FP_UNDERFLOW = 1770
RPC_S_FP_OVERFLOW = 1771
RPC_X_NO_MORE_ENTRIES = 1772
RPC_X_SS_CHAR_TRANS_OPEN_FAIL = 1773
RPC_X_SS_CHAR_TRANS_SHORT_FILE = 1774
RPC_X_SS_IN_NULL_CONTEXT = 1775
RPC_X_SS_CONTEXT_DAMAGED = 1777
RPC_X_SS_HANDLES_MISMATCH = 1778
RPC_X_SS_CANNOT_GET_CALL_HANDLE = 1779
RPC_X_NULL_REF_POINTER = 1780
RPC_X_ENUM_VALUE_OUT_OF_RANGE = 1781
RPC_X_BYTE_COUNT_TOO_SMALL = 1782
RPC_X_BAD_STUB_DATA = 1783
ERROR_INVALID_USER_BUFFER = 1784
ERROR_UNRECOGNIZED_MEDIA = 1785
ERROR_NO_TRUST_LSA_SECRET = 1786
ERROR_NO_TRUST_SAM_ACCOUNT = 1787
ERROR_TRUSTED_DOMAIN_FAILURE = 1788
ERROR_TRUSTED_RELATIONSHIP_FAILURE = 1789
ERROR_TRUST_FAILURE = 1790
RPC_S_CALL_IN_PROGRESS = 1791
ERROR_NETLOGON_NOT_STARTED = 1792
ERROR_ACCOUNT_EXPIRED = 1793
ERROR_REDIRECTOR_HAS_OPEN_HANDLES = 1794
ERROR_PRINTER_DRIVER_ALREADY_INSTALLED = 1795
ERROR_UNKNOWN_PORT = 1796
ERROR_UNKNOWN_PRINTER_DRIVER = 1797
ERROR_UNKNOWN_PRINTPROCESSOR = 1798
ERROR_INVALID_SEPARATOR_FILE = 1799
# Printing / trust / OLE-OXID / RPC-pipe errors (1800-1937), graphics and
# color-management (ICM) errors (2000-2023), networking errors (2108-2404),
# and print-spooler errors (3000-3950), mirroring winerror.h.
ERROR_INVALID_PRIORITY = 1800
ERROR_INVALID_PRINTER_NAME = 1801
ERROR_PRINTER_ALREADY_EXISTS = 1802
ERROR_INVALID_PRINTER_COMMAND = 1803
ERROR_INVALID_DATATYPE = 1804
ERROR_INVALID_ENVIRONMENT = 1805
RPC_S_NO_MORE_BINDINGS = 1806
ERROR_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT = 1807
ERROR_NOLOGON_WORKSTATION_TRUST_ACCOUNT = 1808
ERROR_NOLOGON_SERVER_TRUST_ACCOUNT = 1809
ERROR_DOMAIN_TRUST_INCONSISTENT = 1810
ERROR_SERVER_HAS_OPEN_HANDLES = 1811
ERROR_RESOURCE_DATA_NOT_FOUND = 1812
ERROR_RESOURCE_TYPE_NOT_FOUND = 1813
ERROR_RESOURCE_NAME_NOT_FOUND = 1814
ERROR_RESOURCE_LANG_NOT_FOUND = 1815
ERROR_NOT_ENOUGH_QUOTA = 1816
RPC_S_NO_INTERFACES = 1817
RPC_S_CALL_CANCELLED = 1818
RPC_S_BINDING_INCOMPLETE = 1819
RPC_S_COMM_FAILURE = 1820
RPC_S_UNSUPPORTED_AUTHN_LEVEL = 1821
RPC_S_NO_PRINC_NAME = 1822
RPC_S_NOT_RPC_ERROR = 1823
RPC_S_UUID_LOCAL_ONLY = 1824
RPC_S_SEC_PKG_ERROR = 1825
RPC_S_NOT_CANCELLED = 1826
RPC_X_INVALID_ES_ACTION = 1827
RPC_X_WRONG_ES_VERSION = 1828
RPC_X_WRONG_STUB_VERSION = 1829
RPC_X_INVALID_PIPE_OBJECT = 1830
RPC_X_WRONG_PIPE_ORDER = 1831
RPC_X_WRONG_PIPE_VERSION = 1832
RPC_S_GROUP_MEMBER_NOT_FOUND = 1898
EPT_S_CANT_CREATE = 1899
RPC_S_INVALID_OBJECT = 1900
ERROR_INVALID_TIME = 1901
ERROR_INVALID_FORM_NAME = 1902
ERROR_INVALID_FORM_SIZE = 1903
ERROR_ALREADY_WAITING = 1904
ERROR_PRINTER_DELETED = 1905
ERROR_INVALID_PRINTER_STATE = 1906
ERROR_PASSWORD_MUST_CHANGE = 1907
ERROR_DOMAIN_CONTROLLER_NOT_FOUND = 1908
ERROR_ACCOUNT_LOCKED_OUT = 1909
OR_INVALID_OXID = 1910
OR_INVALID_OID = 1911
OR_INVALID_SET = 1912
RPC_S_SEND_INCOMPLETE = 1913
RPC_S_INVALID_ASYNC_HANDLE = 1914
RPC_S_INVALID_ASYNC_CALL = 1915
RPC_X_PIPE_CLOSED = 1916
RPC_X_PIPE_DISCIPLINE_ERROR = 1917
RPC_X_PIPE_EMPTY = 1918
ERROR_NO_SITENAME = 1919
ERROR_CANT_ACCESS_FILE = 1920
ERROR_CANT_RESOLVE_FILENAME = 1921
RPC_S_ENTRY_TYPE_MISMATCH = 1922
RPC_S_NOT_ALL_OBJS_EXPORTED = 1923
RPC_S_INTERFACE_NOT_EXPORTED = 1924
RPC_S_PROFILE_NOT_ADDED = 1925
RPC_S_PRF_ELT_NOT_ADDED = 1926
RPC_S_PRF_ELT_NOT_REMOVED = 1927
RPC_S_GRP_ELT_NOT_ADDED = 1928
RPC_S_GRP_ELT_NOT_REMOVED = 1929
ERROR_KM_DRIVER_BLOCKED = 1930
ERROR_CONTEXT_EXPIRED = 1931
ERROR_PER_USER_TRUST_QUOTA_EXCEEDED = 1932
ERROR_ALL_USER_TRUST_QUOTA_EXCEEDED = 1933
ERROR_USER_DELETE_TRUST_QUOTA_EXCEEDED = 1934
ERROR_AUTHENTICATION_FIREWALL_FAILED = 1935
ERROR_REMOTE_PRINT_CONNECTIONS_BLOCKED = 1936
ERROR_NTLM_BLOCKED = 1937
ERROR_INVALID_PIXEL_FORMAT = 2000
ERROR_BAD_DRIVER = 2001
ERROR_INVALID_WINDOW_STYLE = 2002
ERROR_METAFILE_NOT_SUPPORTED = 2003
ERROR_TRANSFORM_NOT_SUPPORTED = 2004
ERROR_CLIPPING_NOT_SUPPORTED = 2005
ERROR_INVALID_CMM = 2010
ERROR_INVALID_PROFILE = 2011
ERROR_TAG_NOT_FOUND = 2012
ERROR_TAG_NOT_PRESENT = 2013
ERROR_DUPLICATE_TAG = 2014
ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE = 2015
ERROR_PROFILE_NOT_FOUND = 2016
ERROR_INVALID_COLORSPACE = 2017
ERROR_ICM_NOT_ENABLED = 2018
ERROR_DELETING_ICM_XFORM = 2019
ERROR_INVALID_TRANSFORM = 2020
ERROR_COLORSPACE_MISMATCH = 2021
ERROR_INVALID_COLORINDEX = 2022
ERROR_PROFILE_DOES_NOT_MATCH_DEVICE = 2023
ERROR_CONNECTED_OTHER_PASSWORD = 2108
ERROR_CONNECTED_OTHER_PASSWORD_DEFAULT = 2109
ERROR_BAD_USERNAME = 2202
ERROR_NOT_CONNECTED = 2250
# NOTE(review): the eleven ICM assignments below (ERROR_INVALID_CMM through
# ERROR_INVALID_TRANSFORM) rebind names already assigned values 2010-2020
# above; at import time these later 2300-2310 values win. The SDK's
# winerror.h uses the 23xx values -- presumably a generator artifact, but
# confirm against the generating header before relying on either range.
ERROR_INVALID_CMM = 2300
ERROR_INVALID_PROFILE = 2301
ERROR_TAG_NOT_FOUND = 2302
ERROR_TAG_NOT_PRESENT = 2303
ERROR_DUPLICATE_TAG = 2304
ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE = 2305
ERROR_PROFILE_NOT_FOUND = 2306
ERROR_INVALID_COLORSPACE = 2307
ERROR_ICM_NOT_ENABLED = 2308
ERROR_DELETING_ICM_XFORM = 2309
ERROR_INVALID_TRANSFORM = 2310
ERROR_OPEN_FILES = 2401
ERROR_ACTIVE_CONNECTIONS = 2402
ERROR_DEVICE_IN_USE = 2404
# Print spooler error codes.
ERROR_UNKNOWN_PRINT_MONITOR = 3000
ERROR_PRINTER_DRIVER_IN_USE = 3001
ERROR_SPOOL_FILE_NOT_FOUND = 3002
ERROR_SPL_NO_STARTDOC = 3003
ERROR_SPL_NO_ADDJOB = 3004
ERROR_PRINT_PROCESSOR_ALREADY_INSTALLED = 3005
ERROR_PRINT_MONITOR_ALREADY_INSTALLED = 3006
ERROR_INVALID_PRINT_MONITOR = 3007
ERROR_PRINT_MONITOR_IN_USE = 3008
ERROR_PRINTER_HAS_JOBS_QUEUED = 3009
ERROR_SUCCESS_REBOOT_REQUIRED = 3010
ERROR_SUCCESS_RESTART_REQUIRED = 3011
ERROR_PRINTER_NOT_FOUND = 3012
ERROR_PRINTER_DRIVER_WARNED = 3013
ERROR_PRINTER_DRIVER_BLOCKED = 3014
ERROR_PRINTER_DRIVER_PACKAGE_IN_USE = 3015
ERROR_CORE_DRIVER_PACKAGE_NOT_FOUND = 3016
ERROR_FAIL_REBOOT_REQUIRED = 3017
ERROR_FAIL_REBOOT_INITIATED = 3018
ERROR_PRINTER_DRIVER_DOWNLOAD_NEEDED = 3019
ERROR_PRINT_JOB_RESTART_REQUIRED = 3020
ERROR_IO_REISSUE_AS_CACHED = 3950
# WINS (4000-4006), DHCP (4100), WMI (4200-4214), removable storage / media
# services (4300-4352), and reparse-point / SIS errors (4390-4500),
# mirroring winerror.h.
ERROR_WINS_INTERNAL = 4000
ERROR_CAN_NOT_DEL_LOCAL_WINS = 4001
ERROR_STATIC_INIT = 4002
ERROR_INC_BACKUP = 4003
ERROR_FULL_BACKUP = 4004
ERROR_REC_NON_EXISTENT = 4005
ERROR_RPL_NOT_ALLOWED = 4006
ERROR_DHCP_ADDRESS_CONFLICT = 4100
ERROR_WMI_GUID_NOT_FOUND = 4200
ERROR_WMI_INSTANCE_NOT_FOUND = 4201
ERROR_WMI_ITEMID_NOT_FOUND = 4202
ERROR_WMI_TRY_AGAIN = 4203
ERROR_WMI_DP_NOT_FOUND = 4204
ERROR_WMI_UNRESOLVED_INSTANCE_REF = 4205
ERROR_WMI_ALREADY_ENABLED = 4206
ERROR_WMI_GUID_DISCONNECTED = 4207
ERROR_WMI_SERVER_UNAVAILABLE = 4208
ERROR_WMI_DP_FAILED = 4209
ERROR_WMI_INVALID_MOF = 4210
ERROR_WMI_INVALID_REGINFO = 4211
ERROR_WMI_ALREADY_DISABLED = 4212
ERROR_WMI_READ_ONLY = 4213
ERROR_WMI_SET_FAILURE = 4214
ERROR_INVALID_MEDIA = 4300
ERROR_INVALID_LIBRARY = 4301
ERROR_INVALID_MEDIA_POOL = 4302
ERROR_DRIVE_MEDIA_MISMATCH = 4303
ERROR_MEDIA_OFFLINE = 4304
ERROR_LIBRARY_OFFLINE = 4305
ERROR_EMPTY = 4306
ERROR_NOT_EMPTY = 4307
ERROR_MEDIA_UNAVAILABLE = 4308
ERROR_RESOURCE_DISABLED = 4309
ERROR_INVALID_CLEANER = 4310
ERROR_UNABLE_TO_CLEAN = 4311
ERROR_OBJECT_NOT_FOUND = 4312
ERROR_DATABASE_FAILURE = 4313
ERROR_DATABASE_FULL = 4314
ERROR_MEDIA_INCOMPATIBLE = 4315
ERROR_RESOURCE_NOT_PRESENT = 4316
ERROR_INVALID_OPERATION = 4317
ERROR_MEDIA_NOT_AVAILABLE = 4318
ERROR_DEVICE_NOT_AVAILABLE = 4319
ERROR_REQUEST_REFUSED = 4320
ERROR_INVALID_DRIVE_OBJECT = 4321
ERROR_LIBRARY_FULL = 4322
ERROR_MEDIUM_NOT_ACCESSIBLE = 4323
ERROR_UNABLE_TO_LOAD_MEDIUM = 4324
ERROR_UNABLE_TO_INVENTORY_DRIVE = 4325
ERROR_UNABLE_TO_INVENTORY_SLOT = 4326
ERROR_UNABLE_TO_INVENTORY_TRANSPORT = 4327
ERROR_TRANSPORT_FULL = 4328
ERROR_CONTROLLING_IEPORT = 4329
ERROR_UNABLE_TO_EJECT_MOUNTED_MEDIA = 4330
ERROR_CLEANER_SLOT_SET = 4331
ERROR_CLEANER_SLOT_NOT_SET = 4332
ERROR_CLEANER_CARTRIDGE_SPENT = 4333
ERROR_UNEXPECTED_OMID = 4334
ERROR_CANT_DELETE_LAST_ITEM = 4335
ERROR_MESSAGE_EXCEEDS_MAX_SIZE = 4336
ERROR_VOLUME_CONTAINS_SYS_FILES = 4337
ERROR_INDIGENOUS_TYPE = 4338
ERROR_NO_SUPPORTING_DRIVES = 4339
ERROR_CLEANER_CARTRIDGE_INSTALLED = 4340
ERROR_IEPORT_FULL = 4341
ERROR_FILE_OFFLINE = 4350
ERROR_REMOTE_STORAGE_NOT_ACTIVE = 4351
ERROR_REMOTE_STORAGE_MEDIA_ERROR = 4352
ERROR_NOT_A_REPARSE_POINT = 4390
ERROR_REPARSE_ATTRIBUTE_CONFLICT = 4391
ERROR_INVALID_REPARSE_DATA = 4392
ERROR_REPARSE_TAG_INVALID = 4393
ERROR_REPARSE_TAG_MISMATCH = 4394
ERROR_VOLUME_NOT_SIS_ENABLED = 4500
# Cluster service error codes (5001-5933) followed by Encrypting File
# System (EFS) error codes (6000-6118), mirroring winerror.h.
ERROR_DEPENDENT_RESOURCE_EXISTS = 5001
ERROR_DEPENDENCY_NOT_FOUND = 5002
ERROR_DEPENDENCY_ALREADY_EXISTS = 5003
ERROR_RESOURCE_NOT_ONLINE = 5004
ERROR_HOST_NODE_NOT_AVAILABLE = 5005
ERROR_RESOURCE_NOT_AVAILABLE = 5006
ERROR_RESOURCE_NOT_FOUND = 5007
ERROR_SHUTDOWN_CLUSTER = 5008
ERROR_CANT_EVICT_ACTIVE_NODE = 5009
ERROR_OBJECT_ALREADY_EXISTS = 5010
ERROR_OBJECT_IN_LIST = 5011
ERROR_GROUP_NOT_AVAILABLE = 5012
ERROR_GROUP_NOT_FOUND = 5013
ERROR_GROUP_NOT_ONLINE = 5014
ERROR_HOST_NODE_NOT_RESOURCE_OWNER = 5015
ERROR_HOST_NODE_NOT_GROUP_OWNER = 5016
ERROR_RESMON_CREATE_FAILED = 5017
ERROR_RESMON_ONLINE_FAILED = 5018
ERROR_RESOURCE_ONLINE = 5019
ERROR_QUORUM_RESOURCE = 5020
ERROR_NOT_QUORUM_CAPABLE = 5021
ERROR_CLUSTER_SHUTTING_DOWN = 5022
ERROR_INVALID_STATE = 5023
ERROR_RESOURCE_PROPERTIES_STORED = 5024
ERROR_NOT_QUORUM_CLASS = 5025
ERROR_CORE_RESOURCE = 5026
ERROR_QUORUM_RESOURCE_ONLINE_FAILED = 5027
ERROR_QUORUMLOG_OPEN_FAILED = 5028
ERROR_CLUSTERLOG_CORRUPT = 5029
ERROR_CLUSTERLOG_RECORD_EXCEEDS_MAXSIZE = 5030
ERROR_CLUSTERLOG_EXCEEDS_MAXSIZE = 5031
ERROR_CLUSTERLOG_CHKPOINT_NOT_FOUND = 5032
ERROR_CLUSTERLOG_NOT_ENOUGH_SPACE = 5033
ERROR_QUORUM_OWNER_ALIVE = 5034
ERROR_NETWORK_NOT_AVAILABLE = 5035
ERROR_NODE_NOT_AVAILABLE = 5036
ERROR_ALL_NODES_NOT_AVAILABLE = 5037
ERROR_RESOURCE_FAILED = 5038
ERROR_CLUSTER_INVALID_NODE = 5039
ERROR_CLUSTER_NODE_EXISTS = 5040
ERROR_CLUSTER_JOIN_IN_PROGRESS = 5041
ERROR_CLUSTER_NODE_NOT_FOUND = 5042
ERROR_CLUSTER_LOCAL_NODE_NOT_FOUND = 5043
ERROR_CLUSTER_NETWORK_EXISTS = 5044
ERROR_CLUSTER_NETWORK_NOT_FOUND = 5045
ERROR_CLUSTER_NETINTERFACE_EXISTS = 5046
ERROR_CLUSTER_NETINTERFACE_NOT_FOUND = 5047
ERROR_CLUSTER_INVALID_REQUEST = 5048
ERROR_CLUSTER_INVALID_NETWORK_PROVIDER = 5049
ERROR_CLUSTER_NODE_DOWN = 5050
ERROR_CLUSTER_NODE_UNREACHABLE = 5051
ERROR_CLUSTER_NODE_NOT_MEMBER = 5052
ERROR_CLUSTER_JOIN_NOT_IN_PROGRESS = 5053
ERROR_CLUSTER_INVALID_NETWORK = 5054
ERROR_CLUSTER_NODE_UP = 5056
ERROR_CLUSTER_IPADDR_IN_USE = 5057
ERROR_CLUSTER_NODE_NOT_PAUSED = 5058
ERROR_CLUSTER_NO_SECURITY_CONTEXT = 5059
ERROR_CLUSTER_NETWORK_NOT_INTERNAL = 5060
ERROR_CLUSTER_NODE_ALREADY_UP = 5061
ERROR_CLUSTER_NODE_ALREADY_DOWN = 5062
ERROR_CLUSTER_NETWORK_ALREADY_ONLINE = 5063
ERROR_CLUSTER_NETWORK_ALREADY_OFFLINE = 5064
ERROR_CLUSTER_NODE_ALREADY_MEMBER = 5065
ERROR_CLUSTER_LAST_INTERNAL_NETWORK = 5066
ERROR_CLUSTER_NETWORK_HAS_DEPENDENTS = 5067
ERROR_INVALID_OPERATION_ON_QUORUM = 5068
ERROR_DEPENDENCY_NOT_ALLOWED = 5069
ERROR_CLUSTER_NODE_PAUSED = 5070
ERROR_NODE_CANT_HOST_RESOURCE = 5071
ERROR_CLUSTER_NODE_NOT_READY = 5072
ERROR_CLUSTER_NODE_SHUTTING_DOWN = 5073
ERROR_CLUSTER_JOIN_ABORTED = 5074
ERROR_CLUSTER_INCOMPATIBLE_VERSIONS = 5075
ERROR_CLUSTER_MAXNUM_OF_RESOURCES_EXCEEDED = 5076
ERROR_CLUSTER_SYSTEM_CONFIG_CHANGED = 5077
ERROR_CLUSTER_RESOURCE_TYPE_NOT_FOUND = 5078
ERROR_CLUSTER_RESTYPE_NOT_SUPPORTED = 5079
ERROR_CLUSTER_RESNAME_NOT_FOUND = 5080
ERROR_CLUSTER_NO_RPC_PACKAGES_REGISTERED = 5081
ERROR_CLUSTER_OWNER_NOT_IN_PREFLIST = 5082
ERROR_CLUSTER_DATABASE_SEQMISMATCH = 5083
ERROR_RESMON_INVALID_STATE = 5084
ERROR_CLUSTER_GUM_NOT_LOCKER = 5085
ERROR_QUORUM_DISK_NOT_FOUND = 5086
ERROR_DATABASE_BACKUP_CORRUPT = 5087
ERROR_CLUSTER_NODE_ALREADY_HAS_DFS_ROOT = 5088
ERROR_RESOURCE_PROPERTY_UNCHANGEABLE = 5089
ERROR_CLUSTER_MEMBERSHIP_INVALID_STATE = 5890
ERROR_CLUSTER_QUORUMLOG_NOT_FOUND = 5891
ERROR_CLUSTER_MEMBERSHIP_HALT = 5892
ERROR_CLUSTER_INSTANCE_ID_MISMATCH = 5893
ERROR_CLUSTER_NETWORK_NOT_FOUND_FOR_IP = 5894
ERROR_CLUSTER_PROPERTY_DATA_TYPE_MISMATCH = 5895
ERROR_CLUSTER_EVICT_WITHOUT_CLEANUP = 5896
ERROR_CLUSTER_PARAMETER_MISMATCH = 5897
ERROR_NODE_CANNOT_BE_CLUSTERED = 5898
ERROR_CLUSTER_WRONG_OS_VERSION = 5899
ERROR_CLUSTER_CANT_CREATE_DUP_CLUSTER_NAME = 5900
ERROR_CLUSCFG_ALREADY_COMMITTED = 5901
ERROR_CLUSCFG_ROLLBACK_FAILED = 5902
ERROR_CLUSCFG_SYSTEM_DISK_DRIVE_LETTER_CONFLICT = 5903
ERROR_CLUSTER_OLD_VERSION = 5904
ERROR_CLUSTER_MISMATCHED_COMPUTER_ACCT_NAME = 5905
ERROR_CLUSTER_NO_NET_ADAPTERS = 5906
ERROR_CLUSTER_POISONED = 5907
ERROR_CLUSTER_GROUP_MOVING = 5908
ERROR_CLUSTER_RESOURCE_TYPE_BUSY = 5909
ERROR_RESOURCE_CALL_TIMED_OUT = 5910
ERROR_INVALID_CLUSTER_IPV6_ADDRESS = 5911
ERROR_CLUSTER_INTERNAL_INVALID_FUNCTION = 5912
ERROR_CLUSTER_PARAMETER_OUT_OF_BOUNDS = 5913
ERROR_CLUSTER_PARTIAL_SEND = 5914
ERROR_CLUSTER_REGISTRY_INVALID_FUNCTION = 5915
ERROR_CLUSTER_INVALID_STRING_TERMINATION = 5916
ERROR_CLUSTER_INVALID_STRING_FORMAT = 5917
ERROR_CLUSTER_DATABASE_TRANSACTION_IN_PROGRESS = 5918
ERROR_CLUSTER_DATABASE_TRANSACTION_NOT_IN_PROGRESS = 5919
ERROR_CLUSTER_NULL_DATA = 5920
ERROR_CLUSTER_PARTIAL_READ = 5921
ERROR_CLUSTER_PARTIAL_WRITE = 5922
ERROR_CLUSTER_CANT_DESERIALIZE_DATA = 5923
ERROR_DEPENDENT_RESOURCE_PROPERTY_CONFLICT = 5924
ERROR_CLUSTER_NO_QUORUM = 5925
ERROR_CLUSTER_INVALID_IPV6_NETWORK = 5926
ERROR_CLUSTER_INVALID_IPV6_TUNNEL_NETWORK = 5927
ERROR_QUORUM_NOT_ALLOWED_IN_THIS_GROUP = 5928
ERROR_DEPENDENCY_TREE_TOO_COMPLEX = 5929
ERROR_EXCEPTION_IN_RESOURCE_CALL = 5930
ERROR_CLUSTER_RHS_FAILED_INITIALIZATION = 5931
ERROR_CLUSTER_NOT_INSTALLED = 5932
ERROR_CLUSTER_RESOURCES_MUST_BE_ONLINE_ON_THE_SAME_NODE = 5933
# Encrypting File System (EFS) error codes.
ERROR_ENCRYPTION_FAILED = 6000
ERROR_DECRYPTION_FAILED = 6001
ERROR_FILE_ENCRYPTED = 6002
ERROR_NO_RECOVERY_POLICY = 6003
ERROR_NO_EFS = 6004
ERROR_WRONG_EFS = 6005
ERROR_NO_USER_KEYS = 6006
ERROR_FILE_NOT_ENCRYPTED = 6007
ERROR_NOT_EXPORT_FORMAT = 6008
ERROR_FILE_READ_ONLY = 6009
ERROR_DIR_EFS_DISALLOWED = 6010
ERROR_EFS_SERVER_NOT_TRUSTED = 6011
ERROR_BAD_RECOVERY_POLICY = 6012
ERROR_EFS_ALG_BLOB_TOO_BIG = 6013
ERROR_VOLUME_NOT_SUPPORT_EFS = 6014
ERROR_EFS_DISABLED = 6015
ERROR_EFS_VERSION_NOT_SUPPORT = 6016
ERROR_CS_ENCRYPTION_INVALID_SERVER_RESPONSE = 6017
ERROR_CS_ENCRYPTION_UNSUPPORTED_SERVER = 6018
ERROR_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE = 6019
ERROR_CS_ENCRYPTION_NEW_ENCRYPTED_FILE = 6020
ERROR_CS_ENCRYPTION_FILE_NOT_CSE = 6021
ERROR_NO_BROWSER_SERVERS_FOUND = 6118
# Common Log File System (CLFS) error codes (6600-6648) followed by Kernel
# Transaction Manager / transactional NTFS error codes (6700-6853),
# mirroring winerror.h.
ERROR_LOG_SECTOR_INVALID = 6600
ERROR_LOG_SECTOR_PARITY_INVALID = 6601
ERROR_LOG_SECTOR_REMAPPED = 6602
ERROR_LOG_BLOCK_INCOMPLETE = 6603
ERROR_LOG_INVALID_RANGE = 6604
ERROR_LOG_BLOCKS_EXHAUSTED = 6605
ERROR_LOG_READ_CONTEXT_INVALID = 6606
ERROR_LOG_RESTART_INVALID = 6607
ERROR_LOG_BLOCK_VERSION = 6608
ERROR_LOG_BLOCK_INVALID = 6609
ERROR_LOG_READ_MODE_INVALID = 6610
ERROR_LOG_NO_RESTART = 6611
ERROR_LOG_METADATA_CORRUPT = 6612
ERROR_LOG_METADATA_INVALID = 6613
ERROR_LOG_METADATA_INCONSISTENT = 6614
ERROR_LOG_RESERVATION_INVALID = 6615
ERROR_LOG_CANT_DELETE = 6616
ERROR_LOG_CONTAINER_LIMIT_EXCEEDED = 6617
ERROR_LOG_START_OF_LOG = 6618
ERROR_LOG_POLICY_ALREADY_INSTALLED = 6619
ERROR_LOG_POLICY_NOT_INSTALLED = 6620
ERROR_LOG_POLICY_INVALID = 6621
ERROR_LOG_POLICY_CONFLICT = 6622
ERROR_LOG_PINNED_ARCHIVE_TAIL = 6623
ERROR_LOG_RECORD_NONEXISTENT = 6624
ERROR_LOG_RECORDS_RESERVED_INVALID = 6625
ERROR_LOG_SPACE_RESERVED_INVALID = 6626
ERROR_LOG_TAIL_INVALID = 6627
ERROR_LOG_FULL = 6628
ERROR_COULD_NOT_RESIZE_LOG = 6629
ERROR_LOG_MULTIPLEXED = 6630
ERROR_LOG_DEDICATED = 6631
ERROR_LOG_ARCHIVE_NOT_IN_PROGRESS = 6632
ERROR_LOG_ARCHIVE_IN_PROGRESS = 6633
ERROR_LOG_EPHEMERAL = 6634
ERROR_LOG_NOT_ENOUGH_CONTAINERS = 6635
ERROR_LOG_CLIENT_ALREADY_REGISTERED = 6636
ERROR_LOG_CLIENT_NOT_REGISTERED = 6637
ERROR_LOG_FULL_HANDLER_IN_PROGRESS = 6638
ERROR_LOG_CONTAINER_READ_FAILED = 6639
ERROR_LOG_CONTAINER_WRITE_FAILED = 6640
ERROR_LOG_CONTAINER_OPEN_FAILED = 6641
ERROR_LOG_CONTAINER_STATE_INVALID = 6642
ERROR_LOG_STATE_INVALID = 6643
ERROR_LOG_PINNED = 6644
ERROR_LOG_METADATA_FLUSH_FAILED = 6645
ERROR_LOG_INCONSISTENT_SECURITY = 6646
ERROR_LOG_APPENDED_FLUSH_FAILED = 6647
ERROR_LOG_PINNED_RESERVATION = 6648
# Kernel Transaction Manager / transactional file system error codes.
ERROR_INVALID_TRANSACTION = 6700
ERROR_TRANSACTION_NOT_ACTIVE = 6701
ERROR_TRANSACTION_REQUEST_NOT_VALID = 6702
ERROR_TRANSACTION_NOT_REQUESTED = 6703
ERROR_TRANSACTION_ALREADY_ABORTED = 6704
ERROR_TRANSACTION_ALREADY_COMMITTED = 6705
ERROR_TM_INITIALIZATION_FAILED = 6706
ERROR_RESOURCEMANAGER_READ_ONLY = 6707
ERROR_TRANSACTION_NOT_JOINED = 6708
ERROR_TRANSACTION_SUPERIOR_EXISTS = 6709
ERROR_CRM_PROTOCOL_ALREADY_EXISTS = 6710
ERROR_TRANSACTION_PROPAGATION_FAILED = 6711
ERROR_CRM_PROTOCOL_NOT_FOUND = 6712
ERROR_TRANSACTION_INVALID_MARSHALL_BUFFER = 6713
ERROR_CURRENT_TRANSACTION_NOT_VALID = 6714
ERROR_TRANSACTION_NOT_FOUND = 6715
ERROR_RESOURCEMANAGER_NOT_FOUND = 6716
ERROR_ENLISTMENT_NOT_FOUND = 6717
ERROR_TRANSACTIONMANAGER_NOT_FOUND = 6718
ERROR_TRANSACTIONMANAGER_NOT_ONLINE = 6719
ERROR_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION = 6720
ERROR_TRANSACTION_NOT_ROOT = 6721
ERROR_TRANSACTION_OBJECT_EXPIRED = 6722
ERROR_TRANSACTION_RESPONSE_NOT_ENLISTED = 6723
ERROR_TRANSACTION_RECORD_TOO_LONG = 6724
ERROR_IMPLICIT_TRANSACTION_NOT_SUPPORTED = 6725
ERROR_TRANSACTION_INTEGRITY_VIOLATED = 6726
ERROR_TRANSACTIONAL_CONFLICT = 6800
ERROR_RM_NOT_ACTIVE = 6801
ERROR_RM_METADATA_CORRUPT = 6802
ERROR_DIRECTORY_NOT_RM = 6803
ERROR_TRANSACTIONS_UNSUPPORTED_REMOTE = 6805
ERROR_LOG_RESIZE_INVALID_SIZE = 6806
ERROR_OBJECT_NO_LONGER_EXISTS = 6807
ERROR_STREAM_MINIVERSION_NOT_FOUND = 6808
ERROR_STREAM_MINIVERSION_NOT_VALID = 6809
ERROR_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION = 6810
ERROR_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT = 6811
ERROR_CANT_CREATE_MORE_STREAM_MINIVERSIONS = 6812
ERROR_REMOTE_FILE_VERSION_MISMATCH = 6814
ERROR_HANDLE_NO_LONGER_VALID = 6815
ERROR_NO_TXF_METADATA = 6816
ERROR_LOG_CORRUPTION_DETECTED = 6817
ERROR_CANT_RECOVER_WITH_HANDLE_OPEN = 6818
ERROR_RM_DISCONNECTED = 6819
ERROR_ENLISTMENT_NOT_SUPERIOR = 6820
ERROR_RECOVERY_NOT_NEEDED = 6821
ERROR_RM_ALREADY_STARTED = 6822
ERROR_FILE_IDENTITY_NOT_PERSISTENT = 6823
ERROR_CANT_BREAK_TRANSACTIONAL_DEPENDENCY = 6824
ERROR_CANT_CROSS_RM_BOUNDARY = 6825
ERROR_TXF_DIR_NOT_EMPTY = 6826
ERROR_INDOUBT_TRANSACTIONS_EXIST = 6827
ERROR_TM_VOLATILE = 6828
ERROR_ROLLBACK_TIMER_EXPIRED = 6829
ERROR_TXF_ATTRIBUTE_CORRUPT = 6830
ERROR_EFS_NOT_ALLOWED_IN_TRANSACTION = 6831
ERROR_TRANSACTIONAL_OPEN_NOT_ALLOWED = 6832
ERROR_LOG_GROWTH_FAILED = 6833
ERROR_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE = 6834
ERROR_TXF_METADATA_ALREADY_PRESENT = 6835
ERROR_TRANSACTION_SCOPE_CALLBACKS_NOT_SET = 6836
ERROR_TRANSACTION_REQUIRED_PROMOTION = 6837
ERROR_CANNOT_EXECUTE_FILE_IN_TRANSACTION = 6838
ERROR_TRANSACTIONS_NOT_FROZEN = 6839
ERROR_TRANSACTION_FREEZE_IN_PROGRESS = 6840
ERROR_NOT_SNAPSHOT_VOLUME = 6841
ERROR_NO_SAVEPOINT_WITH_OPEN_FILES = 6842
ERROR_DATA_LOST_REPAIR = 6843
ERROR_SPARSE_NOT_ALLOWED_IN_TRANSACTION = 6844
ERROR_TM_IDENTITY_MISMATCH = 6845
ERROR_FLOATED_SECTION = 6846
ERROR_CANNOT_ACCEPT_TRANSACTED_WORK = 6847
ERROR_CANNOT_ABORT_TRANSACTIONS = 6848
ERROR_BAD_CLUSTERS = 6849
ERROR_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION = 6850
ERROR_VOLUME_DIRTY = 6851
ERROR_NO_LINK_TRACKING_IN_TRANSACTION = 6852
ERROR_OPERATION_NOT_SUPPORTED_IN_TRANSACTION = 6853
# Terminal Services error codes (ERROR_CTX_*, 7001-7069), File Replication
# Service codes (FRS_ERR_*, 8001-8017), and Directory Service codes
# (ERROR_DS_*, 8200-8414), mirroring winerror.h.
ERROR_CTX_WINSTATION_NAME_INVALID = 7001
ERROR_CTX_INVALID_PD = 7002
ERROR_CTX_PD_NOT_FOUND = 7003
ERROR_CTX_WD_NOT_FOUND = 7004
ERROR_CTX_CANNOT_MAKE_EVENTLOG_ENTRY = 7005
ERROR_CTX_SERVICE_NAME_COLLISION = 7006
ERROR_CTX_CLOSE_PENDING = 7007
ERROR_CTX_NO_OUTBUF = 7008
ERROR_CTX_MODEM_INF_NOT_FOUND = 7009
ERROR_CTX_INVALID_MODEMNAME = 7010
ERROR_CTX_MODEM_RESPONSE_ERROR = 7011
ERROR_CTX_MODEM_RESPONSE_TIMEOUT = 7012
ERROR_CTX_MODEM_RESPONSE_NO_CARRIER = 7013
ERROR_CTX_MODEM_RESPONSE_NO_DIALTONE = 7014
ERROR_CTX_MODEM_RESPONSE_BUSY = 7015
ERROR_CTX_MODEM_RESPONSE_VOICE = 7016
ERROR_CTX_TD_ERROR = 7017
ERROR_CTX_WINSTATION_NOT_FOUND = 7022
ERROR_CTX_WINSTATION_ALREADY_EXISTS = 7023
ERROR_CTX_WINSTATION_BUSY = 7024
ERROR_CTX_BAD_VIDEO_MODE = 7025
ERROR_CTX_GRAPHICS_INVALID = 7035
ERROR_CTX_LOGON_DISABLED = 7037
ERROR_CTX_NOT_CONSOLE = 7038
ERROR_CTX_CLIENT_QUERY_TIMEOUT = 7040
ERROR_CTX_CONSOLE_DISCONNECT = 7041
ERROR_CTX_CONSOLE_CONNECT = 7042
ERROR_CTX_SHADOW_DENIED = 7044
ERROR_CTX_WINSTATION_ACCESS_DENIED = 7045
ERROR_CTX_INVALID_WD = 7049
ERROR_CTX_SHADOW_INVALID = 7050
ERROR_CTX_SHADOW_DISABLED = 7051
ERROR_CTX_CLIENT_LICENSE_IN_USE = 7052
ERROR_CTX_CLIENT_LICENSE_NOT_SET = 7053
ERROR_CTX_LICENSE_NOT_AVAILABLE = 7054
ERROR_CTX_LICENSE_CLIENT_INVALID = 7055
ERROR_CTX_LICENSE_EXPIRED = 7056
ERROR_CTX_SHADOW_NOT_RUNNING = 7057
ERROR_CTX_SHADOW_ENDED_BY_MODE_CHANGE = 7058
ERROR_ACTIVATION_COUNT_EXCEEDED = 7059
ERROR_CTX_WINSTATIONS_DISABLED = 7060
ERROR_CTX_ENCRYPTION_LEVEL_REQUIRED = 7061
ERROR_CTX_SESSION_IN_USE = 7062
ERROR_CTX_NO_FORCE_LOGOFF = 7063
ERROR_CTX_ACCOUNT_RESTRICTION = 7064
ERROR_RDP_PROTOCOL_ERROR = 7065
ERROR_CTX_CDM_CONNECT = 7066
ERROR_CTX_CDM_DISCONNECT = 7067
ERROR_CTX_SECURITY_LAYER_ERROR = 7068
ERROR_TS_INCOMPATIBLE_SESSIONS = 7069
# File Replication Service (FRS) error codes.
FRS_ERR_INVALID_API_SEQUENCE = 8001
FRS_ERR_STARTING_SERVICE = 8002
FRS_ERR_STOPPING_SERVICE = 8003
FRS_ERR_INTERNAL_API = 8004
FRS_ERR_INTERNAL = 8005
FRS_ERR_SERVICE_COMM = 8006
FRS_ERR_INSUFFICIENT_PRIV = 8007
FRS_ERR_AUTHENTICATION = 8008
FRS_ERR_PARENT_INSUFFICIENT_PRIV = 8009
FRS_ERR_PARENT_AUTHENTICATION = 8010
FRS_ERR_CHILD_TO_PARENT_COMM = 8011
FRS_ERR_PARENT_TO_CHILD_COMM = 8012
FRS_ERR_SYSVOL_POPULATE = 8013
FRS_ERR_SYSVOL_POPULATE_TIMEOUT = 8014
FRS_ERR_SYSVOL_IS_BUSY = 8015
FRS_ERR_SYSVOL_DEMOTE = 8016
FRS_ERR_INVALID_SERVICE_PARAMETER = 8017
# Directory Service (Active Directory) error codes. NO_ERROR is defined
# earlier in this module (outside this section).
DS_S_SUCCESS = NO_ERROR
ERROR_DS_NOT_INSTALLED = 8200
ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY = 8201
ERROR_DS_NO_ATTRIBUTE_OR_VALUE = 8202
ERROR_DS_INVALID_ATTRIBUTE_SYNTAX = 8203
ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED = 8204
ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS = 8205
ERROR_DS_BUSY = 8206
ERROR_DS_UNAVAILABLE = 8207
ERROR_DS_NO_RIDS_ALLOCATED = 8208
ERROR_DS_NO_MORE_RIDS = 8209
ERROR_DS_INCORRECT_ROLE_OWNER = 8210
ERROR_DS_RIDMGR_INIT_ERROR = 8211
ERROR_DS_OBJ_CLASS_VIOLATION = 8212
ERROR_DS_CANT_ON_NON_LEAF = 8213
ERROR_DS_CANT_ON_RDN = 8214
ERROR_DS_CANT_MOD_OBJ_CLASS = 8215
ERROR_DS_CROSS_DOM_MOVE_ERROR = 8216
ERROR_DS_GC_NOT_AVAILABLE = 8217
ERROR_SHARED_POLICY = 8218
ERROR_POLICY_OBJECT_NOT_FOUND = 8219
ERROR_POLICY_ONLY_IN_DS = 8220
ERROR_PROMOTION_ACTIVE = 8221
ERROR_NO_PROMOTION_ACTIVE = 8222
ERROR_DS_OPERATIONS_ERROR = 8224
ERROR_DS_PROTOCOL_ERROR = 8225
ERROR_DS_TIMELIMIT_EXCEEDED = 8226
ERROR_DS_SIZELIMIT_EXCEEDED = 8227
ERROR_DS_ADMIN_LIMIT_EXCEEDED = 8228
ERROR_DS_COMPARE_FALSE = 8229
ERROR_DS_COMPARE_TRUE = 8230
ERROR_DS_AUTH_METHOD_NOT_SUPPORTED = 8231
ERROR_DS_STRONG_AUTH_REQUIRED = 8232
ERROR_DS_INAPPROPRIATE_AUTH = 8233
ERROR_DS_AUTH_UNKNOWN = 8234
ERROR_DS_REFERRAL = 8235
ERROR_DS_UNAVAILABLE_CRIT_EXTENSION = 8236
ERROR_DS_CONFIDENTIALITY_REQUIRED = 8237
ERROR_DS_INAPPROPRIATE_MATCHING = 8238
ERROR_DS_CONSTRAINT_VIOLATION = 8239
ERROR_DS_NO_SUCH_OBJECT = 8240
ERROR_DS_ALIAS_PROBLEM = 8241
ERROR_DS_INVALID_DN_SYNTAX = 8242
ERROR_DS_IS_LEAF = 8243
ERROR_DS_ALIAS_DEREF_PROBLEM = 8244
ERROR_DS_UNWILLING_TO_PERFORM = 8245
ERROR_DS_LOOP_DETECT = 8246
ERROR_DS_NAMING_VIOLATION = 8247
ERROR_DS_OBJECT_RESULTS_TOO_LARGE = 8248
ERROR_DS_AFFECTS_MULTIPLE_DSAS = 8249
ERROR_DS_SERVER_DOWN = 8250
ERROR_DS_LOCAL_ERROR = 8251
ERROR_DS_ENCODING_ERROR = 8252
ERROR_DS_DECODING_ERROR = 8253
ERROR_DS_FILTER_UNKNOWN = 8254
ERROR_DS_PARAM_ERROR = 8255
ERROR_DS_NOT_SUPPORTED = 8256
ERROR_DS_NO_RESULTS_RETURNED = 8257
ERROR_DS_CONTROL_NOT_FOUND = 8258
ERROR_DS_CLIENT_LOOP = 8259
ERROR_DS_REFERRAL_LIMIT_EXCEEDED = 8260
ERROR_DS_SORT_CONTROL_MISSING = 8261
ERROR_DS_OFFSET_RANGE_ERROR = 8262
ERROR_DS_ROOT_MUST_BE_NC = 8301
ERROR_DS_ADD_REPLICA_INHIBITED = 8302
ERROR_DS_ATT_NOT_DEF_IN_SCHEMA = 8303
ERROR_DS_MAX_OBJ_SIZE_EXCEEDED = 8304
ERROR_DS_OBJ_STRING_NAME_EXISTS = 8305
ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA = 8306
ERROR_DS_RDN_DOESNT_MATCH_SCHEMA = 8307
ERROR_DS_NO_REQUESTED_ATTS_FOUND = 8308
ERROR_DS_USER_BUFFER_TO_SMALL = 8309
ERROR_DS_ATT_IS_NOT_ON_OBJ = 8310
ERROR_DS_ILLEGAL_MOD_OPERATION = 8311
ERROR_DS_OBJ_TOO_LARGE = 8312
ERROR_DS_BAD_INSTANCE_TYPE = 8313
ERROR_DS_MASTERDSA_REQUIRED = 8314
ERROR_DS_OBJECT_CLASS_REQUIRED = 8315
ERROR_DS_MISSING_REQUIRED_ATT = 8316
ERROR_DS_ATT_NOT_DEF_FOR_CLASS = 8317
ERROR_DS_ATT_ALREADY_EXISTS = 8318
ERROR_DS_CANT_ADD_ATT_VALUES = 8320
ERROR_DS_SINGLE_VALUE_CONSTRAINT = 8321
ERROR_DS_RANGE_CONSTRAINT = 8322
ERROR_DS_ATT_VAL_ALREADY_EXISTS = 8323
ERROR_DS_CANT_REM_MISSING_ATT = 8324
ERROR_DS_CANT_REM_MISSING_ATT_VAL = 8325
ERROR_DS_ROOT_CANT_BE_SUBREF = 8326
ERROR_DS_NO_CHAINING = 8327
ERROR_DS_NO_CHAINED_EVAL = 8328
ERROR_DS_NO_PARENT_OBJECT = 8329
ERROR_DS_PARENT_IS_AN_ALIAS = 8330
ERROR_DS_CANT_MIX_MASTER_AND_REPS = 8331
ERROR_DS_CHILDREN_EXIST = 8332
ERROR_DS_OBJ_NOT_FOUND = 8333
ERROR_DS_ALIASED_OBJ_MISSING = 8334
ERROR_DS_BAD_NAME_SYNTAX = 8335
ERROR_DS_ALIAS_POINTS_TO_ALIAS = 8336
ERROR_DS_CANT_DEREF_ALIAS = 8337
ERROR_DS_OUT_OF_SCOPE = 8338
ERROR_DS_OBJECT_BEING_REMOVED = 8339
ERROR_DS_CANT_DELETE_DSA_OBJ = 8340
ERROR_DS_GENERIC_ERROR = 8341
ERROR_DS_DSA_MUST_BE_INT_MASTER = 8342
ERROR_DS_CLASS_NOT_DSA = 8343
ERROR_DS_INSUFF_ACCESS_RIGHTS = 8344
ERROR_DS_ILLEGAL_SUPERIOR = 8345
ERROR_DS_ATTRIBUTE_OWNED_BY_SAM = 8346
ERROR_DS_NAME_TOO_MANY_PARTS = 8347
ERROR_DS_NAME_TOO_LONG = 8348
ERROR_DS_NAME_VALUE_TOO_LONG = 8349
ERROR_DS_NAME_UNPARSEABLE = 8350
ERROR_DS_NAME_TYPE_UNKNOWN = 8351
ERROR_DS_NOT_AN_OBJECT = 8352
ERROR_DS_SEC_DESC_TOO_SHORT = 8353
ERROR_DS_SEC_DESC_INVALID = 8354
ERROR_DS_NO_DELETED_NAME = 8355
ERROR_DS_SUBREF_MUST_HAVE_PARENT = 8356
ERROR_DS_NCNAME_MUST_BE_NC = 8357
ERROR_DS_CANT_ADD_SYSTEM_ONLY = 8358
ERROR_DS_CLASS_MUST_BE_CONCRETE = 8359
ERROR_DS_INVALID_DMD = 8360
ERROR_DS_OBJ_GUID_EXISTS = 8361
ERROR_DS_NOT_ON_BACKLINK = 8362
ERROR_DS_NO_CROSSREF_FOR_NC = 8363
ERROR_DS_SHUTTING_DOWN = 8364
ERROR_DS_UNKNOWN_OPERATION = 8365
ERROR_DS_INVALID_ROLE_OWNER = 8366
ERROR_DS_COULDNT_CONTACT_FSMO = 8367
ERROR_DS_CROSS_NC_DN_RENAME = 8368
ERROR_DS_CANT_MOD_SYSTEM_ONLY = 8369
ERROR_DS_REPLICATOR_ONLY = 8370
ERROR_DS_OBJ_CLASS_NOT_DEFINED = 8371
ERROR_DS_OBJ_CLASS_NOT_SUBCLASS = 8372
ERROR_DS_NAME_REFERENCE_INVALID = 8373
ERROR_DS_CROSS_REF_EXISTS = 8374
ERROR_DS_CANT_DEL_MASTER_CROSSREF = 8375
ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD = 8376
ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX = 8377
ERROR_DS_DUP_RDN = 8378
ERROR_DS_DUP_OID = 8379
ERROR_DS_DUP_MAPI_ID = 8380
ERROR_DS_DUP_SCHEMA_ID_GUID = 8381
ERROR_DS_DUP_LDAP_DISPLAY_NAME = 8382
ERROR_DS_SEMANTIC_ATT_TEST = 8383
ERROR_DS_SYNTAX_MISMATCH = 8384
ERROR_DS_EXISTS_IN_MUST_HAVE = 8385
ERROR_DS_EXISTS_IN_MAY_HAVE = 8386
ERROR_DS_NONEXISTENT_MAY_HAVE = 8387
ERROR_DS_NONEXISTENT_MUST_HAVE = 8388
ERROR_DS_AUX_CLS_TEST_FAIL = 8389
ERROR_DS_NONEXISTENT_POSS_SUP = 8390
ERROR_DS_SUB_CLS_TEST_FAIL = 8391
ERROR_DS_BAD_RDN_ATT_ID_SYNTAX = 8392
ERROR_DS_EXISTS_IN_AUX_CLS = 8393
ERROR_DS_EXISTS_IN_SUB_CLS = 8394
ERROR_DS_EXISTS_IN_POSS_SUP = 8395
ERROR_DS_RECALCSCHEMA_FAILED = 8396
ERROR_DS_TREE_DELETE_NOT_FINISHED = 8397
ERROR_DS_CANT_DELETE = 8398
ERROR_DS_ATT_SCHEMA_REQ_ID = 8399
ERROR_DS_BAD_ATT_SCHEMA_SYNTAX = 8400
ERROR_DS_CANT_CACHE_ATT = 8401
ERROR_DS_CANT_CACHE_CLASS = 8402
ERROR_DS_CANT_REMOVE_ATT_CACHE = 8403
ERROR_DS_CANT_REMOVE_CLASS_CACHE = 8404
ERROR_DS_CANT_RETRIEVE_DN = 8405
ERROR_DS_MISSING_SUPREF = 8406
ERROR_DS_CANT_RETRIEVE_INSTANCE = 8407
ERROR_DS_CODE_INCONSISTENCY = 8408
ERROR_DS_DATABASE_ERROR = 8409
ERROR_DS_GOVERNSID_MISSING = 8410
ERROR_DS_MISSING_EXPECTED_ATT = 8411
ERROR_DS_NCNAME_MISSING_CR_REF = 8412
ERROR_DS_SECURITY_CHECKING_ERROR = 8413
ERROR_DS_SCHEMA_NOT_LOADED = 8414
ERROR_DS_SCHEMA_ALLOC_FAILED = 8415
ERROR_DS_ATT_SCHEMA_REQ_SYNTAX = 8416
ERROR_DS_GCVERIFY_ERROR = 8417
ERROR_DS_DRA_SCHEMA_MISMATCH = 8418
ERROR_DS_CANT_FIND_DSA_OBJ = 8419
ERROR_DS_CANT_FIND_EXPECTED_NC = 8420
ERROR_DS_CANT_FIND_NC_IN_CACHE = 8421
ERROR_DS_CANT_RETRIEVE_CHILD = 8422
ERROR_DS_SECURITY_ILLEGAL_MODIFY = 8423
ERROR_DS_CANT_REPLACE_HIDDEN_REC = 8424
ERROR_DS_BAD_HIERARCHY_FILE = 8425
ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED = 8426
ERROR_DS_CONFIG_PARAM_MISSING = 8427
ERROR_DS_COUNTING_AB_INDICES_FAILED = 8428
ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED = 8429
ERROR_DS_INTERNAL_FAILURE = 8430
ERROR_DS_UNKNOWN_ERROR = 8431
ERROR_DS_ROOT_REQUIRES_CLASS_TOP = 8432
ERROR_DS_REFUSING_FSMO_ROLES = 8433
ERROR_DS_MISSING_FSMO_SETTINGS = 8434
ERROR_DS_UNABLE_TO_SURRENDER_ROLES = 8435
ERROR_DS_DRA_GENERIC = 8436
ERROR_DS_DRA_INVALID_PARAMETER = 8437
ERROR_DS_DRA_BUSY = 8438
ERROR_DS_DRA_BAD_DN = 8439
ERROR_DS_DRA_BAD_NC = 8440
ERROR_DS_DRA_DN_EXISTS = 8441
ERROR_DS_DRA_INTERNAL_ERROR = 8442
ERROR_DS_DRA_INCONSISTENT_DIT = 8443
ERROR_DS_DRA_CONNECTION_FAILED = 8444
ERROR_DS_DRA_BAD_INSTANCE_TYPE = 8445
ERROR_DS_DRA_OUT_OF_MEM = 8446
ERROR_DS_DRA_MAIL_PROBLEM = 8447
ERROR_DS_DRA_REF_ALREADY_EXISTS = 8448
ERROR_DS_DRA_REF_NOT_FOUND = 8449
ERROR_DS_DRA_OBJ_IS_REP_SOURCE = 8450
ERROR_DS_DRA_DB_ERROR = 8451
ERROR_DS_DRA_NO_REPLICA = 8452
ERROR_DS_DRA_ACCESS_DENIED = 8453
ERROR_DS_DRA_NOT_SUPPORTED = 8454
ERROR_DS_DRA_RPC_CANCELLED = 8455
ERROR_DS_DRA_SOURCE_DISABLED = 8456
ERROR_DS_DRA_SINK_DISABLED = 8457
ERROR_DS_DRA_NAME_COLLISION = 8458
ERROR_DS_DRA_SOURCE_REINSTALLED = 8459
ERROR_DS_DRA_MISSING_PARENT = 8460
ERROR_DS_DRA_PREEMPTED = 8461
ERROR_DS_DRA_ABANDON_SYNC = 8462
ERROR_DS_DRA_SHUTDOWN = 8463
ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET = 8464
ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA = 8465
ERROR_DS_DRA_EXTN_CONNECTION_FAILED = 8466
ERROR_DS_INSTALL_SCHEMA_MISMATCH = 8467
ERROR_DS_DUP_LINK_ID = 8468
ERROR_DS_NAME_ERROR_RESOLVING = 8469
ERROR_DS_NAME_ERROR_NOT_FOUND = 8470
ERROR_DS_NAME_ERROR_NOT_UNIQUE = 8471
ERROR_DS_NAME_ERROR_NO_MAPPING = 8472
ERROR_DS_NAME_ERROR_DOMAIN_ONLY = 8473
ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 8474
ERROR_DS_CONSTRUCTED_ATT_MOD = 8475
ERROR_DS_WRONG_OM_OBJ_CLASS = 8476
ERROR_DS_DRA_REPL_PENDING = 8477
ERROR_DS_DS_REQUIRED = 8478
ERROR_DS_INVALID_LDAP_DISPLAY_NAME = 8479
ERROR_DS_NON_BASE_SEARCH = 8480
ERROR_DS_CANT_RETRIEVE_ATTS = 8481
ERROR_DS_BACKLINK_WITHOUT_LINK = 8482
ERROR_DS_EPOCH_MISMATCH = 8483
ERROR_DS_SRC_NAME_MISMATCH = 8484
ERROR_DS_SRC_AND_DST_NC_IDENTICAL = 8485
ERROR_DS_DST_NC_MISMATCH = 8486
ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC = 8487
ERROR_DS_SRC_GUID_MISMATCH = 8488
ERROR_DS_CANT_MOVE_DELETED_OBJECT = 8489
ERROR_DS_PDC_OPERATION_IN_PROGRESS = 8490
ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD = 8491
ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION = 8492
ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS = 8493
ERROR_DS_NC_MUST_HAVE_NC_PARENT = 8494
ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE = 8495
ERROR_DS_DST_DOMAIN_NOT_NATIVE = 8496
ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER = 8497
ERROR_DS_CANT_MOVE_ACCOUNT_GROUP = 8498
ERROR_DS_CANT_MOVE_RESOURCE_GROUP = 8499
ERROR_DS_INVALID_SEARCH_FLAG = 8500
ERROR_DS_NO_TREE_DELETE_ABOVE_NC = 8501
ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE = 8502
ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE = 8503
ERROR_DS_SAM_INIT_FAILURE = 8504
ERROR_DS_SENSITIVE_GROUP_VIOLATION = 8505
ERROR_DS_CANT_MOD_PRIMARYGROUPID = 8506
ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD = 8507
ERROR_DS_NONSAFE_SCHEMA_CHANGE = 8508
ERROR_DS_SCHEMA_UPDATE_DISALLOWED = 8509
ERROR_DS_CANT_CREATE_UNDER_SCHEMA = 8510
ERROR_DS_INSTALL_NO_SRC_SCH_VERSION = 8511
ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE = 8512
ERROR_DS_INVALID_GROUP_TYPE = 8513
ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN = 8514
ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN = 8515
ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER = 8516
ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER = 8517
ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER = 8518
ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER = 8519
ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER = 8520
ERROR_DS_HAVE_PRIMARY_MEMBERS = 8521
ERROR_DS_STRING_SD_CONVERSION_FAILED = 8522
ERROR_DS_NAMING_MASTER_GC = 8523
ERROR_DS_DNS_LOOKUP_FAILURE = 8524
ERROR_DS_COULDNT_UPDATE_SPNS = 8525
ERROR_DS_CANT_RETRIEVE_SD = 8526
ERROR_DS_KEY_NOT_UNIQUE = 8527
ERROR_DS_WRONG_LINKED_ATT_SYNTAX = 8528
ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD = 8529
ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY = 8530
ERROR_DS_CANT_START = 8531
ERROR_DS_INIT_FAILURE = 8532
ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION = 8533
ERROR_DS_SOURCE_DOMAIN_IN_FOREST = 8534
ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST = 8535
ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED = 8536
ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN = 8537
ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER = 8538
ERROR_DS_SRC_SID_EXISTS_IN_FOREST = 8539
ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH = 8540
ERROR_SAM_INIT_FAILURE = 8541
ERROR_DS_DRA_SCHEMA_INFO_SHIP = 8542
ERROR_DS_DRA_SCHEMA_CONFLICT = 8543
ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT = 8544
ERROR_DS_DRA_OBJ_NC_MISMATCH = 8545
ERROR_DS_NC_STILL_HAS_DSAS = 8546
ERROR_DS_GC_REQUIRED = 8547
ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY = 8548
ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS = 8549
ERROR_DS_CANT_ADD_TO_GC = 8550
ERROR_DS_NO_CHECKPOINT_WITH_PDC = 8551
ERROR_DS_SOURCE_AUDITING_NOT_ENABLED = 8552
ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC = 8553
ERROR_DS_INVALID_NAME_FOR_SPN = 8554
ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS = 8555
ERROR_DS_UNICODEPWD_NOT_IN_QUOTES = 8556
ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED = 8557
ERROR_DS_MUST_BE_RUN_ON_DST_DC = 8558
ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER = 8559
ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ = 8560
ERROR_DS_INIT_FAILURE_CONSOLE = 8561
ERROR_DS_SAM_INIT_FAILURE_CONSOLE = 8562
ERROR_DS_FOREST_VERSION_TOO_HIGH = 8563
ERROR_DS_DOMAIN_VERSION_TOO_HIGH = 8564
ERROR_DS_FOREST_VERSION_TOO_LOW = 8565
ERROR_DS_DOMAIN_VERSION_TOO_LOW = 8566
ERROR_DS_INCOMPATIBLE_VERSION = 8567
ERROR_DS_LOW_DSA_VERSION = 8568
ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN = 8569
ERROR_DS_NOT_SUPPORTED_SORT_ORDER = 8570
ERROR_DS_NAME_NOT_UNIQUE = 8571
ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4 = 8572
ERROR_DS_OUT_OF_VERSION_STORE = 8573
ERROR_DS_INCOMPATIBLE_CONTROLS_USED = 8574
ERROR_DS_NO_REF_DOMAIN = 8575
ERROR_DS_RESERVED_LINK_ID = 8576
ERROR_DS_LINK_ID_NOT_AVAILABLE = 8577
ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER = 8578
ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE = 8579
ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC = 8580
ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG = 8581
ERROR_DS_MODIFYDN_WRONG_GRANDPARENT = 8582
ERROR_DS_NAME_ERROR_TRUST_REFERRAL = 8583
ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER = 8584
ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD = 8585
ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2 = 8586
ERROR_DS_THREAD_LIMIT_EXCEEDED = 8587
ERROR_DS_NOT_CLOSEST = 8588
ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF = 8589
ERROR_DS_SINGLE_USER_MODE_FAILED = 8590
ERROR_DS_NTDSCRIPT_SYNTAX_ERROR = 8591
ERROR_DS_NTDSCRIPT_PROCESS_ERROR = 8592
ERROR_DS_DIFFERENT_REPL_EPOCHS = 8593
ERROR_DS_DRS_EXTENSIONS_CHANGED = 8594
ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR = 8595
ERROR_DS_NO_MSDS_INTID = 8596
ERROR_DS_DUP_MSDS_INTID = 8597
ERROR_DS_EXISTS_IN_RDNATTID = 8598
ERROR_DS_AUTHORIZATION_FAILED = 8599
ERROR_DS_INVALID_SCRIPT = 8600
ERROR_DS_REMOTE_CROSSREF_OP_FAILED = 8601
ERROR_DS_CROSS_REF_BUSY = 8602
ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN = 8603
ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC = 8604
ERROR_DS_DUPLICATE_ID_FOUND = 8605
ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT = 8606
ERROR_DS_GROUP_CONVERSION_ERROR = 8607
ERROR_DS_CANT_MOVE_APP_BASIC_GROUP = 8608
ERROR_DS_CANT_MOVE_APP_QUERY_GROUP = 8609
ERROR_DS_ROLE_NOT_VERIFIED = 8610
ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL = 8611
ERROR_DS_DOMAIN_RENAME_IN_PROGRESS = 8612
ERROR_DS_EXISTING_AD_CHILD_NC = 8613
ERROR_DS_REPL_LIFETIME_EXCEEDED = 8614
ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER = 8615
ERROR_DS_LDAP_SEND_QUEUE_FULL = 8616
ERROR_DS_DRA_OUT_SCHEDULE_WINDOW = 8617
ERROR_DS_POLICY_NOT_KNOWN = 8618
ERROR_NO_SITE_SETTINGS_OBJECT = 8619
ERROR_NO_SECRETS = 8620
ERROR_NO_WRITABLE_DC_FOUND = 8621
ERROR_DS_NO_SERVER_OBJECT = 8622
ERROR_DS_NO_NTDSA_OBJECT = 8623
ERROR_DS_NON_ASQ_SEARCH = 8624
ERROR_DS_AUDIT_FAILURE = 8625
ERROR_DS_INVALID_SEARCH_FLAG_SUBTREE = 8626
ERROR_DS_INVALID_SEARCH_FLAG_TUPLE = 8627
ERROR_DS_HIERARCHY_TABLE_TOO_DEEP = 8628
# HRESULT severity values carried in bit 31: 0 = success, 1 = failure.
SEVERITY_SUCCESS = 0
SEVERITY_ERROR = 1
def HRESULT_FROM_WIN32(scode): return -2147024896 | (scode & 65535)
def SUCCEEDED(Status): return ((Status) >= 0)
def FAILED(Status): return (Status<0)
def HRESULT_CODE(hr): return ((hr) & 65535)
def SCODE_CODE(sc): return ((sc) & 65535)
def HRESULT_FACILITY(hr): return (((hr) >> 16) & 8191)
def SCODE_FACILITY(sc): return (((sc) >> 16) & 8191)
def HRESULT_SEVERITY(hr): return (((hr) >> 31) & 1)
def SCODE_SEVERITY(sc): return (((sc) >> 31) & 1)
# Bit 28 flag marking an HRESULT that carries a wrapped NTSTATUS value.
FACILITY_NT_BIT = 268435456

def HRESULT_FROM_NT(x):
    """Wrap the NTSTATUS value *x* in an HRESULT by setting FACILITY_NT_BIT."""
    return FACILITY_NT_BIT | x
def GetScode(hr): return hr
def ResultFromScode(sc): return sc
NOERROR = 0
E_UNEXPECTED = -2147418113
E_NOTIMPL = -2147467263
E_OUTOFMEMORY = -2147024882
E_INVALIDARG = -2147024809
E_NOINTERFACE = -2147467262
E_POINTER = -2147467261
E_HANDLE = -2147024890
E_ABORT = -2147467260
E_FAIL = -2147467259
E_ACCESSDENIED = -2147024891
win16_E_NOTIMPL = -2147483647
win16_E_OUTOFMEMORY = -2147483646
win16_E_INVALIDARG = -2147483645
win16_E_NOINTERFACE = -2147483644
win16_E_POINTER = -2147483643
win16_E_HANDLE = -2147483642
win16_E_ABORT = -2147483641
win16_E_FAIL = -2147483640
win16_E_ACCESSDENIED = -2147483639
E_PENDING = -2147483638
CO_E_INIT_TLS = -2147467258
CO_E_INIT_SHARED_ALLOCATOR = -2147467257
CO_E_INIT_MEMORY_ALLOCATOR = -2147467256
CO_E_INIT_CLASS_CACHE = -2147467255
CO_E_INIT_RPC_CHANNEL = -2147467254
CO_E_INIT_TLS_SET_CHANNEL_CONTROL = -2147467253
CO_E_INIT_TLS_CHANNEL_CONTROL = -2147467252
CO_E_INIT_UNACCEPTED_USER_ALLOCATOR = -2147467251
CO_E_INIT_SCM_MUTEX_EXISTS = -2147467250
CO_E_INIT_SCM_FILE_MAPPING_EXISTS = -2147467249
CO_E_INIT_SCM_MAP_VIEW_OF_FILE = -2147467248
CO_E_INIT_SCM_EXEC_FAILURE = -2147467247
CO_E_INIT_ONLY_SINGLE_THREADED = -2147467246
CO_E_CANT_REMOTE = -2147467245
CO_E_BAD_SERVER_NAME = -2147467244
CO_E_WRONG_SERVER_IDENTITY = -2147467243
CO_E_OLE1DDE_DISABLED = -2147467242
CO_E_RUNAS_SYNTAX = -2147467241
CO_E_CREATEPROCESS_FAILURE = -2147467240
CO_E_RUNAS_CREATEPROCESS_FAILURE = -2147467239
CO_E_RUNAS_LOGON_FAILURE = -2147467238
CO_E_LAUNCH_PERMSSION_DENIED = -2147467237
CO_E_START_SERVICE_FAILURE = -2147467236
CO_E_REMOTE_COMMUNICATION_FAILURE = -2147467235
CO_E_SERVER_START_TIMEOUT = -2147467234
CO_E_CLSREG_INCONSISTENT = -2147467233
CO_E_IIDREG_INCONSISTENT = -2147467232
CO_E_NOT_SUPPORTED = -2147467231
CO_E_RELOAD_DLL = -2147467230
CO_E_MSI_ERROR = -2147467229
OLE_E_FIRST = -2147221504
OLE_E_LAST = -2147221249
OLE_S_FIRST = 262144
OLE_S_LAST = 262399
OLE_E_OLEVERB = -2147221504
OLE_E_ADVF = -2147221503
OLE_E_ENUM_NOMORE = -2147221502
OLE_E_ADVISENOTSUPPORTED = -2147221501
OLE_E_NOCONNECTION = -2147221500
OLE_E_NOTRUNNING = -2147221499
OLE_E_NOCACHE = -2147221498
OLE_E_BLANK = -2147221497
OLE_E_CLASSDIFF = -2147221496
OLE_E_CANT_GETMONIKER = -2147221495
OLE_E_CANT_BINDTOSOURCE = -2147221494
OLE_E_STATIC = -2147221493
OLE_E_PROMPTSAVECANCELLED = -2147221492
OLE_E_INVALIDRECT = -2147221491
OLE_E_WRONGCOMPOBJ = -2147221490
OLE_E_INVALIDHWND = -2147221489
OLE_E_NOT_INPLACEACTIVE = -2147221488
OLE_E_CANTCONVERT = -2147221487
OLE_E_NOSTORAGE = -2147221486
DV_E_FORMATETC = -2147221404
DV_E_DVTARGETDEVICE = -2147221403
DV_E_STGMEDIUM = -2147221402
DV_E_STATDATA = -2147221401
DV_E_LINDEX = -2147221400
DV_E_TYMED = -2147221399
DV_E_CLIPFORMAT = -2147221398
DV_E_DVASPECT = -2147221397
DV_E_DVTARGETDEVICE_SIZE = -2147221396
DV_E_NOIVIEWOBJECT = -2147221395
DRAGDROP_E_FIRST = -2147221248
DRAGDROP_E_LAST = -2147221233
DRAGDROP_S_FIRST = 262400
DRAGDROP_S_LAST = 262415
DRAGDROP_E_NOTREGISTERED = -2147221248
DRAGDROP_E_ALREADYREGISTERED = -2147221247
DRAGDROP_E_INVALIDHWND = -2147221246
CLASSFACTORY_E_FIRST = -2147221232
CLASSFACTORY_E_LAST = -2147221217
CLASSFACTORY_S_FIRST = 262416
CLASSFACTORY_S_LAST = 262431
CLASS_E_NOAGGREGATION = -2147221232
CLASS_E_CLASSNOTAVAILABLE = -2147221231
CLASS_E_NOTLICENSED = -2147221230
MARSHAL_E_FIRST = -2147221216
MARSHAL_E_LAST = -2147221201
MARSHAL_S_FIRST = 262432
MARSHAL_S_LAST = 262447
DATA_E_FIRST = -2147221200
DATA_E_LAST = -2147221185
DATA_S_FIRST = 262448
DATA_S_LAST = 262463
VIEW_E_FIRST = -2147221184
VIEW_E_LAST = -2147221169
VIEW_S_FIRST = 262464
VIEW_S_LAST = 262479
VIEW_E_DRAW = -2147221184
REGDB_E_FIRST = -2147221168
REGDB_E_LAST = -2147221153
REGDB_S_FIRST = 262480
REGDB_S_LAST = 262495
REGDB_E_READREGDB = -2147221168
REGDB_E_WRITEREGDB = -2147221167
REGDB_E_KEYMISSING = -2147221166
REGDB_E_INVALIDVALUE = -2147221165
REGDB_E_CLASSNOTREG = -2147221164
REGDB_E_IIDNOTREG = -2147221163
CAT_E_FIRST = -2147221152
CAT_E_LAST = -2147221151
CAT_E_CATIDNOEXIST = -2147221152
CAT_E_NODESCRIPTION = -2147221151
CS_E_FIRST = -2147221148
CS_E_LAST = -2147221144
CS_E_PACKAGE_NOTFOUND = -2147221148
CS_E_NOT_DELETABLE = -2147221147
CS_E_CLASS_NOTFOUND = -2147221146
CS_E_INVALID_VERSION = -2147221145
CS_E_NO_CLASSSTORE = -2147221144
CACHE_E_FIRST = -2147221136
CACHE_E_LAST = -2147221121
CACHE_S_FIRST = 262512
CACHE_S_LAST = 262527
CACHE_E_NOCACHE_UPDATED = -2147221136
OLEOBJ_E_FIRST = -2147221120
OLEOBJ_E_LAST = -2147221105
OLEOBJ_S_FIRST = 262528
OLEOBJ_S_LAST = 262543
OLEOBJ_E_NOVERBS = -2147221120
OLEOBJ_E_INVALIDVERB = -2147221119
CLIENTSITE_E_FIRST = -2147221104
CLIENTSITE_E_LAST = -2147221089
CLIENTSITE_S_FIRST = 262544
CLIENTSITE_S_LAST = 262559
INPLACE_E_NOTUNDOABLE = -2147221088
INPLACE_E_NOTOOLSPACE = -2147221087
INPLACE_E_FIRST = -2147221088
INPLACE_E_LAST = -2147221073
INPLACE_S_FIRST = 262560
INPLACE_S_LAST = 262575
ENUM_E_FIRST = -2147221072
ENUM_E_LAST = -2147221057
ENUM_S_FIRST = 262576
ENUM_S_LAST = 262591
CONVERT10_E_FIRST = -2147221056
CONVERT10_E_LAST = -2147221041
CONVERT10_S_FIRST = 262592
CONVERT10_S_LAST = 262607
CONVERT10_E_OLESTREAM_GET = -2147221056
CONVERT10_E_OLESTREAM_PUT = -2147221055
CONVERT10_E_OLESTREAM_FMT = -2147221054
CONVERT10_E_OLESTREAM_BITMAP_TO_DIB = -2147221053
CONVERT10_E_STG_FMT = -2147221052
CONVERT10_E_STG_NO_STD_STREAM = -2147221051
CONVERT10_E_STG_DIB_TO_BITMAP = -2147221050
CLIPBRD_E_FIRST = -2147221040
CLIPBRD_E_LAST = -2147221025
CLIPBRD_S_FIRST = 262608
CLIPBRD_S_LAST = 262623
CLIPBRD_E_CANT_OPEN = -2147221040
CLIPBRD_E_CANT_EMPTY = -2147221039
CLIPBRD_E_CANT_SET = -2147221038
CLIPBRD_E_BAD_DATA = -2147221037
CLIPBRD_E_CANT_CLOSE = -2147221036
MK_E_FIRST = -2147221024
MK_E_LAST = -2147221009
MK_S_FIRST = 262624
MK_S_LAST = 262639
MK_E_CONNECTMANUALLY = -2147221024
MK_E_EXCEEDEDDEADLINE = -2147221023
MK_E_NEEDGENERIC = -2147221022
MK_E_UNAVAILABLE = -2147221021
MK_E_SYNTAX = -2147221020
MK_E_NOOBJECT = -2147221019
MK_E_INVALIDEXTENSION = -2147221018
MK_E_INTERMEDIATEINTERFACENOTSUPPORTED = -2147221017
MK_E_NOTBINDABLE = -2147221016
MK_E_NOTBOUND = -2147221015
MK_E_CANTOPENFILE = -2147221014
MK_E_MUSTBOTHERUSER = -2147221013
MK_E_NOINVERSE = -2147221012
MK_E_NOSTORAGE = -2147221011
MK_E_NOPREFIX = -2147221010
MK_E_ENUMERATION_FAILED = -2147221009
CO_E_FIRST = -2147221008
CO_E_LAST = -2147220993
CO_S_FIRST = 262640
CO_S_LAST = 262655
CO_E_NOTINITIALIZED = -2147221008
CO_E_ALREADYINITIALIZED = -2147221007
CO_E_CANTDETERMINECLASS = -2147221006
CO_E_CLASSSTRING = -2147221005
CO_E_IIDSTRING = -2147221004
CO_E_APPNOTFOUND = -2147221003
CO_E_APPSINGLEUSE = -2147221002
CO_E_ERRORINAPP = -2147221001
CO_E_DLLNOTFOUND = -2147221000
CO_E_ERRORINDLL = -2147220999
CO_E_WRONGOSFORAPP = -2147220998
CO_E_OBJNOTREG = -2147220997
CO_E_OBJISREG = -2147220996
CO_E_OBJNOTCONNECTED = -2147220995
CO_E_APPDIDNTREG = -2147220994
CO_E_RELEASED = -2147220993
CO_E_FAILEDTOIMPERSONATE = -2147220992
CO_E_FAILEDTOGETSECCTX = -2147220991
CO_E_FAILEDTOOPENTHREADTOKEN = -2147220990
CO_E_FAILEDTOGETTOKENINFO = -2147220989
CO_E_TRUSTEEDOESNTMATCHCLIENT = -2147220988
CO_E_FAILEDTOQUERYCLIENTBLANKET = -2147220987
CO_E_FAILEDTOSETDACL = -2147220986
CO_E_ACCESSCHECKFAILED = -2147220985
CO_E_NETACCESSAPIFAILED = -2147220984
CO_E_WRONGTRUSTEENAMESYNTAX = -2147220983
CO_E_INVALIDSID = -2147220982
CO_E_CONVERSIONFAILED = -2147220981
CO_E_NOMATCHINGSIDFOUND = -2147220980
CO_E_LOOKUPACCSIDFAILED = -2147220979
CO_E_NOMATCHINGNAMEFOUND = -2147220978
CO_E_LOOKUPACCNAMEFAILED = -2147220977
CO_E_SETSERLHNDLFAILED = -2147220976
CO_E_FAILEDTOGETWINDIR = -2147220975
CO_E_PATHTOOLONG = -2147220974
CO_E_FAILEDTOGENUUID = -2147220973
CO_E_FAILEDTOCREATEFILE = -2147220972
CO_E_FAILEDTOCLOSEHANDLE = -2147220971
CO_E_EXCEEDSYSACLLIMIT = -2147220970
CO_E_ACESINWRONGORDER = -2147220969
CO_E_INCOMPATIBLESTREAMVERSION = -2147220968
CO_E_FAILEDTOOPENPROCESSTOKEN = -2147220967
CO_E_DECODEFAILED = -2147220966
CO_E_ACNOTINITIALIZED = -2147220965
OLE_S_USEREG = 262144
OLE_S_STATIC = 262145
OLE_S_MAC_CLIPFORMAT = 262146
DRAGDROP_S_DROP = 262400
DRAGDROP_S_CANCEL = 262401
DRAGDROP_S_USEDEFAULTCURSORS = 262402
DATA_S_SAMEFORMATETC = 262448
VIEW_S_ALREADY_FROZEN = 262464
CACHE_S_FORMATETC_NOTSUPPORTED = 262512
CACHE_S_SAMECACHE = 262513
CACHE_S_SOMECACHES_NOTUPDATED = 262514
OLEOBJ_S_INVALIDVERB = 262528
OLEOBJ_S_CANNOT_DOVERB_NOW = 262529
OLEOBJ_S_INVALIDHWND = 262530
INPLACE_S_TRUNCATED = 262560
CONVERT10_S_NO_PRESENTATION = 262592
MK_S_REDUCED_TO_SELF = 262626
MK_S_ME = 262628
MK_S_HIM = 262629
MK_S_US = 262630
MK_S_MONIKERALREADYREGISTERED = 262631
CO_E_CLASS_CREATE_FAILED = -2146959359
CO_E_SCM_ERROR = -2146959358
CO_E_SCM_RPC_FAILURE = -2146959357
CO_E_BAD_PATH = -2146959356
CO_E_SERVER_EXEC_FAILURE = -2146959355
CO_E_OBJSRV_RPC_FAILURE = -2146959354
MK_E_NO_NORMALIZED = -2146959353
CO_E_SERVER_STOPPING = -2146959352
MEM_E_INVALID_ROOT = -2146959351
MEM_E_INVALID_LINK = -2146959344
MEM_E_INVALID_SIZE = -2146959343
CO_S_NOTALLINTERFACES = 524306
DISP_E_UNKNOWNINTERFACE = -2147352575
DISP_E_MEMBERNOTFOUND = -2147352573
DISP_E_PARAMNOTFOUND = -2147352572
DISP_E_TYPEMISMATCH = -2147352571
DISP_E_UNKNOWNNAME = -2147352570
DISP_E_NONAMEDARGS = -2147352569
DISP_E_BADVARTYPE = -2147352568
DISP_E_EXCEPTION = -2147352567
DISP_E_OVERFLOW = -2147352566
DISP_E_BADINDEX = -2147352565
DISP_E_UNKNOWNLCID = -2147352564
DISP_E_ARRAYISLOCKED = -2147352563
DISP_E_BADPARAMCOUNT = -2147352562
DISP_E_PARAMNOTOPTIONAL = -2147352561
DISP_E_BADCALLEE = -2147352560
DISP_E_NOTACOLLECTION = -2147352559
DISP_E_DIVBYZERO = -2147352558
TYPE_E_BUFFERTOOSMALL = -2147319786
TYPE_E_FIELDNOTFOUND = -2147319785
TYPE_E_INVDATAREAD = -2147319784
TYPE_E_UNSUPFORMAT = -2147319783
TYPE_E_REGISTRYACCESS = -2147319780
TYPE_E_LIBNOTREGISTERED = -2147319779
TYPE_E_UNDEFINEDTYPE = -2147319769
TYPE_E_QUALIFIEDNAMEDISALLOWED = -2147319768
TYPE_E_INVALIDSTATE = -2147319767
TYPE_E_WRONGTYPEKIND = -2147319766
TYPE_E_ELEMENTNOTFOUND = -2147319765
TYPE_E_AMBIGUOUSNAME = -2147319764
TYPE_E_NAMECONFLICT = -2147319763
TYPE_E_UNKNOWNLCID = -2147319762
TYPE_E_DLLFUNCTIONNOTFOUND = -2147319761
TYPE_E_BADMODULEKIND = -2147317571
TYPE_E_SIZETOOBIG = -2147317563
TYPE_E_DUPLICATEID = -2147317562
TYPE_E_INVALIDID = -2147317553
TYPE_E_TYPEMISMATCH = -2147316576
TYPE_E_OUTOFBOUNDS = -2147316575
TYPE_E_IOERROR = -2147316574
TYPE_E_CANTCREATETMPFILE = -2147316573
TYPE_E_CANTLOADLIBRARY = -2147312566
TYPE_E_INCONSISTENTPROPFUNCS = -2147312509
TYPE_E_CIRCULARTYPE = -2147312508
STG_E_INVALIDFUNCTION = -2147287039
STG_E_FILENOTFOUND = -2147287038
STG_E_PATHNOTFOUND = -2147287037
STG_E_TOOMANYOPENFILES = -2147287036
STG_E_ACCESSDENIED = -2147287035
STG_E_INVALIDHANDLE = -2147287034
STG_E_INSUFFICIENTMEMORY = -2147287032
STG_E_INVALIDPOINTER = -2147287031
STG_E_NOMOREFILES = -2147287022
STG_E_DISKISWRITEPROTECTED = -2147287021
STG_E_SEEKERROR = -2147287015
STG_E_WRITEFAULT = -2147287011
STG_E_READFAULT = -2147287010
STG_E_SHAREVIOLATION = -2147287008
STG_E_LOCKVIOLATION = -2147287007
STG_E_FILEALREADYEXISTS = -2147286960
STG_E_INVALIDPARAMETER = -2147286953
STG_E_MEDIUMFULL = -2147286928
STG_E_PROPSETMISMATCHED = -2147286800
STG_E_ABNORMALAPIEXIT = -2147286790
STG_E_INVALIDHEADER = -2147286789
STG_E_INVALIDNAME = -2147286788
STG_E_UNKNOWN = -2147286787
STG_E_UNIMPLEMENTEDFUNCTION = -2147286786
STG_E_INVALIDFLAG = -2147286785
STG_E_INUSE = -2147286784
STG_E_NOTCURRENT = -2147286783
STG_E_REVERTED = -2147286782
STG_E_CANTSAVE = -2147286781
STG_E_OLDFORMAT = -2147286780
STG_E_OLDDLL = -2147286779
STG_E_SHAREREQUIRED = -2147286778
STG_E_NOTFILEBASEDSTORAGE = -2147286777
STG_E_EXTANTMARSHALLINGS = -2147286776
STG_E_DOCFILECORRUPT = -2147286775
STG_E_BADBASEADDRESS = -2147286768
STG_E_INCOMPLETE = -2147286527
STG_E_TERMINATED = -2147286526
STG_S_CONVERTED = 197120
STG_S_BLOCK = 197121
STG_S_RETRYNOW = 197122
STG_S_MONITORING = 197123
STG_S_MULTIPLEOPENS = 197124
STG_S_CONSOLIDATIONFAILED = 197125
STG_S_CANNOTCONSOLIDATE = 197126
RPC_E_CALL_REJECTED = -2147418111
RPC_E_CALL_CANCELED = -2147418110
RPC_E_CANTPOST_INSENDCALL = -2147418109
RPC_E_CANTCALLOUT_INASYNCCALL = -2147418108
RPC_E_CANTCALLOUT_INEXTERNALCALL = -2147418107
RPC_E_CONNECTION_TERMINATED = -2147418106
RPC_E_SERVER_DIED = -2147418105
RPC_E_CLIENT_DIED = -2147418104
RPC_E_INVALID_DATAPACKET = -2147418103
RPC_E_CANTTRANSMIT_CALL = -2147418102
RPC_E_CLIENT_CANTMARSHAL_DATA = -2147418101
RPC_E_CLIENT_CANTUNMARSHAL_DATA = -2147418100
RPC_E_SERVER_CANTMARSHAL_DATA = -2147418099
RPC_E_SERVER_CANTUNMARSHAL_DATA = -2147418098
RPC_E_INVALID_DATA = -2147418097
RPC_E_INVALID_PARAMETER = -2147418096
RPC_E_CANTCALLOUT_AGAIN = -2147418095
RPC_E_SERVER_DIED_DNE = -2147418094
RPC_E_SYS_CALL_FAILED = -2147417856
RPC_E_OUT_OF_RESOURCES = -2147417855
RPC_E_ATTEMPTED_MULTITHREAD = -2147417854
RPC_E_NOT_REGISTERED = -2147417853
RPC_E_FAULT = -2147417852
RPC_E_SERVERFAULT = -2147417851
RPC_E_CHANGED_MODE = -2147417850
RPC_E_INVALIDMETHOD = -2147417849
RPC_E_DISCONNECTED = -2147417848
RPC_E_RETRY = -2147417847
RPC_E_SERVERCALL_RETRYLATER = -2147417846
RPC_E_SERVERCALL_REJECTED = -2147417845
RPC_E_INVALID_CALLDATA = -2147417844
RPC_E_CANTCALLOUT_ININPUTSYNCCALL = -2147417843
RPC_E_WRONG_THREAD = -2147417842
RPC_E_THREAD_NOT_INIT = -2147417841
RPC_E_VERSION_MISMATCH = -2147417840
RPC_E_INVALID_HEADER = -2147417839
RPC_E_INVALID_EXTENSION = -2147417838
RPC_E_INVALID_IPID = -2147417837
RPC_E_INVALID_OBJECT = -2147417836
RPC_S_CALLPENDING = -2147417835
RPC_S_WAITONTIMER = -2147417834
RPC_E_CALL_COMPLETE = -2147417833
RPC_E_UNSECURE_CALL = -2147417832
RPC_E_TOO_LATE = -2147417831
RPC_E_NO_GOOD_SECURITY_PACKAGES = -2147417830
RPC_E_ACCESS_DENIED = -2147417829
RPC_E_REMOTE_DISABLED = -2147417828
RPC_E_INVALID_OBJREF = -2147417827
RPC_E_NO_CONTEXT = -2147417826
RPC_E_TIMEOUT = -2147417825
RPC_E_NO_SYNC = -2147417824
RPC_E_UNEXPECTED = -2147352577
NTE_BAD_UID = -2146893823
NTE_BAD_HASH = -2146893822
NTE_BAD_KEY = -2146893821
NTE_BAD_LEN = -2146893820
NTE_BAD_DATA = -2146893819
NTE_BAD_SIGNATURE = -2146893818
NTE_BAD_VER = -2146893817
NTE_BAD_ALGID = -2146893816
NTE_BAD_FLAGS = -2146893815
NTE_BAD_TYPE = -2146893814
NTE_BAD_KEY_STATE = -2146893813
NTE_BAD_HASH_STATE = -2146893812
NTE_NO_KEY = -2146893811
NTE_NO_MEMORY = -2146893810
NTE_EXISTS = -2146893809
NTE_PERM = -2146893808
NTE_NOT_FOUND = -2146893807
NTE_DOUBLE_ENCRYPT = -2146893806
NTE_BAD_PROVIDER = -2146893805
NTE_BAD_PROV_TYPE = -2146893804
NTE_BAD_PUBLIC_KEY = -2146893803
NTE_BAD_KEYSET = -2146893802
NTE_PROV_TYPE_NOT_DEF = -2146893801
NTE_PROV_TYPE_ENTRY_BAD = -2146893800
NTE_KEYSET_NOT_DEF = -2146893799
NTE_KEYSET_ENTRY_BAD = -2146893798
NTE_PROV_TYPE_NO_MATCH = -2146893797
NTE_SIGNATURE_FILE_BAD = -2146893796
NTE_PROVIDER_DLL_FAIL = -2146893795
NTE_PROV_DLL_NOT_FOUND = -2146893794
NTE_BAD_KEYSET_PARAM = -2146893793
NTE_FAIL = -2146893792
NTE_SYS_ERR = -2146893791
CRYPT_E_MSG_ERROR = -2146889727
CRYPT_E_UNKNOWN_ALGO = -2146889726
CRYPT_E_OID_FORMAT = -2146889725
CRYPT_E_INVALID_MSG_TYPE = -2146889724
CRYPT_E_UNEXPECTED_ENCODING = -2146889723
CRYPT_E_AUTH_ATTR_MISSING = -2146889722
CRYPT_E_HASH_VALUE = -2146889721
CRYPT_E_INVALID_INDEX = -2146889720
CRYPT_E_ALREADY_DECRYPTED = -2146889719
CRYPT_E_NOT_DECRYPTED = -2146889718
CRYPT_E_RECIPIENT_NOT_FOUND = -2146889717
CRYPT_E_CONTROL_TYPE = -2146889716
CRYPT_E_ISSUER_SERIALNUMBER = -2146889715
CRYPT_E_SIGNER_NOT_FOUND = -2146889714
CRYPT_E_ATTRIBUTES_MISSING = -2146889713
CRYPT_E_STREAM_MSG_NOT_READY = -2146889712
CRYPT_E_STREAM_INSUFFICIENT_DATA = -2146889711
CRYPT_E_BAD_LEN = -2146885631
CRYPT_E_BAD_ENCODE = -2146885630
CRYPT_E_FILE_ERROR = -2146885629
CRYPT_E_NOT_FOUND = -2146885628
CRYPT_E_EXISTS = -2146885627
CRYPT_E_NO_PROVIDER = -2146885626
CRYPT_E_SELF_SIGNED = -2146885625
CRYPT_E_DELETED_PREV = -2146885624
CRYPT_E_NO_MATCH = -2146885623
CRYPT_E_UNEXPECTED_MSG_TYPE = -2146885622
CRYPT_E_NO_KEY_PROPERTY = -2146885621
CRYPT_E_NO_DECRYPT_CERT = -2146885620
CRYPT_E_BAD_MSG = -2146885619
CRYPT_E_NO_SIGNER = -2146885618
CRYPT_E_PENDING_CLOSE = -2146885617
CRYPT_E_REVOKED = -2146885616
CRYPT_E_NO_REVOCATION_DLL = -2146885615
CRYPT_E_NO_REVOCATION_CHECK = -2146885614
CRYPT_E_REVOCATION_OFFLINE = -2146885613
CRYPT_E_NOT_IN_REVOCATION_DATABASE = -2146885612
CRYPT_E_INVALID_NUMERIC_STRING = -2146885600
CRYPT_E_INVALID_PRINTABLE_STRING = -2146885599
CRYPT_E_INVALID_IA5_STRING = -2146885598
CRYPT_E_INVALID_X500_STRING = -2146885597
CRYPT_E_NOT_CHAR_STRING = -2146885596
CRYPT_E_FILERESIZED = -2146885595
CRYPT_E_SECURITY_SETTINGS = -2146885594
CRYPT_E_NO_VERIFY_USAGE_DLL = -2146885593
CRYPT_E_NO_VERIFY_USAGE_CHECK = -2146885592
CRYPT_E_VERIFY_USAGE_OFFLINE = -2146885591
CRYPT_E_NOT_IN_CTL = -2146885590
CRYPT_E_NO_TRUSTED_SIGNER = -2146885589
CRYPT_E_OSS_ERROR = -2146881536
CERTSRV_E_BAD_REQUESTSUBJECT = -2146877439
CERTSRV_E_NO_REQUEST = -2146877438
CERTSRV_E_BAD_REQUESTSTATUS = -2146877437
CERTSRV_E_PROPERTY_EMPTY = -2146877436
CERTDB_E_JET_ERROR = -2146873344
TRUST_E_SYSTEM_ERROR = -2146869247
TRUST_E_NO_SIGNER_CERT = -2146869246
TRUST_E_COUNTER_SIGNER = -2146869245
TRUST_E_CERT_SIGNATURE = -2146869244
TRUST_E_TIME_STAMP = -2146869243
TRUST_E_BAD_DIGEST = -2146869232
TRUST_E_BASIC_CONSTRAINTS = -2146869223
TRUST_E_FINANCIAL_CRITERIA = -2146869218
NTE_OP_OK = 0
TRUST_E_PROVIDER_UNKNOWN = -2146762751
TRUST_E_ACTION_UNKNOWN = -2146762750
TRUST_E_SUBJECT_FORM_UNKNOWN = -2146762749
TRUST_E_SUBJECT_NOT_TRUSTED = -2146762748
DIGSIG_E_ENCODE = -2146762747
DIGSIG_E_DECODE = -2146762746
DIGSIG_E_EXTENSIBILITY = -2146762745
DIGSIG_E_CRYPTO = -2146762744
PERSIST_E_SIZEDEFINITE = -2146762743
PERSIST_E_SIZEINDEFINITE = -2146762742
PERSIST_E_NOTSELFSIZING = -2146762741
TRUST_E_NOSIGNATURE = -2146762496
CERT_E_EXPIRED = -2146762495
CERT_E_VALIDITYPERIODNESTING = -2146762494
CERT_E_ROLE = -2146762493
CERT_E_PATHLENCONST = -2146762492
CERT_E_CRITICAL = -2146762491
CERT_E_PURPOSE = -2146762490
CERT_E_ISSUERCHAINING = -2146762489
CERT_E_MALFORMED = -2146762488
CERT_E_UNTRUSTEDROOT = -2146762487
CERT_E_CHAINING = -2146762486
TRUST_E_FAIL = -2146762485
CERT_E_REVOKED = -2146762484
CERT_E_UNTRUSTEDTESTROOT = -2146762483
CERT_E_REVOCATION_FAILURE = -2146762482
CERT_E_CN_NO_MATCH = -2146762481
CERT_E_WRONG_USAGE = -2146762480
SPAPI_E_EXPECTED_SECTION_NAME = -2146500608
SPAPI_E_BAD_SECTION_NAME_LINE = -2146500607
SPAPI_E_SECTION_NAME_TOO_LONG = -2146500606
SPAPI_E_GENERAL_SYNTAX = -2146500605
SPAPI_E_WRONG_INF_STYLE = -2146500352
SPAPI_E_SECTION_NOT_FOUND = -2146500351
SPAPI_E_LINE_NOT_FOUND = -2146500350
SPAPI_E_NO_ASSOCIATED_CLASS = -2146500096
SPAPI_E_CLASS_MISMATCH = -2146500095
SPAPI_E_DUPLICATE_FOUND = -2146500094
SPAPI_E_NO_DRIVER_SELECTED = -2146500093
SPAPI_E_KEY_DOES_NOT_EXIST = -2146500092
SPAPI_E_INVALID_DEVINST_NAME = -2146500091
SPAPI_E_INVALID_CLASS = -2146500090
SPAPI_E_DEVINST_ALREADY_EXISTS = -2146500089
SPAPI_E_DEVINFO_NOT_REGISTERED = -2146500088
SPAPI_E_INVALID_REG_PROPERTY = -2146500087
SPAPI_E_NO_INF = -2146500086
SPAPI_E_NO_SUCH_DEVINST = -2146500085
SPAPI_E_CANT_LOAD_CLASS_ICON = -2146500084
SPAPI_E_INVALID_CLASS_INSTALLER = -2146500083
SPAPI_E_DI_DO_DEFAULT = -2146500082
SPAPI_E_DI_NOFILECOPY = -2146500081
SPAPI_E_INVALID_HWPROFILE = -2146500080
SPAPI_E_NO_DEVICE_SELECTED = -2146500079
SPAPI_E_DEVINFO_LIST_LOCKED = -2146500078
SPAPI_E_DEVINFO_DATA_LOCKED = -2146500077
SPAPI_E_DI_BAD_PATH = -2146500076
SPAPI_E_NO_CLASSINSTALL_PARAMS = -2146500075
SPAPI_E_FILEQUEUE_LOCKED = -2146500074
SPAPI_E_BAD_SERVICE_INSTALLSECT = -2146500073
SPAPI_E_NO_CLASS_DRIVER_LIST = -2146500072
SPAPI_E_NO_ASSOCIATED_SERVICE = -2146500071
SPAPI_E_NO_DEFAULT_DEVICE_INTERFACE = -2146500070
SPAPI_E_DEVICE_INTERFACE_ACTIVE = -2146500069
SPAPI_E_DEVICE_INTERFACE_REMOVED = -2146500068
SPAPI_E_BAD_INTERFACE_INSTALLSECT = -2146500067
SPAPI_E_NO_SUCH_INTERFACE_CLASS = -2146500066
SPAPI_E_INVALID_REFERENCE_STRING = -2146500065
SPAPI_E_INVALID_MACHINENAME = -2146500064
SPAPI_E_REMOTE_COMM_FAILURE = -2146500063
SPAPI_E_MACHINE_UNAVAILABLE = -2146500062
SPAPI_E_NO_CONFIGMGR_SERVICES = -2146500061
SPAPI_E_INVALID_PROPPAGE_PROVIDER = -2146500060
SPAPI_E_NO_SUCH_DEVICE_INTERFACE = -2146500059
SPAPI_E_DI_POSTPROCESSING_REQUIRED = -2146500058
SPAPI_E_INVALID_COINSTALLER = -2146500057
SPAPI_E_NO_COMPAT_DRIVERS = -2146500056
SPAPI_E_NO_DEVICE_ICON = -2146500055
SPAPI_E_INVALID_INF_LOGCONFIG = -2146500054
SPAPI_E_DI_DONT_INSTALL = -2146500053
SPAPI_E_INVALID_FILTER_DRIVER = -2146500052
SPAPI_E_ERROR_NOT_INSTALLED = -2146496512
# Directory service (DS) error codes.
# NOTE(review): this section repeats ERROR_DS_* constants defined earlier in
# the file with identical values (generated file); the duplication is harmless.
ERROR_DS_NOT_INSTALLED = 8200
ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY = 8201
ERROR_DS_NO_ATTRIBUTE_OR_VALUE = 8202
ERROR_DS_INVALID_ATTRIBUTE_SYNTAX = 8203
ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED = 8204
ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS = 8205
ERROR_DS_BUSY = 8206
ERROR_DS_UNAVAILABLE = 8207
ERROR_DS_NO_RIDS_ALLOCATED = 8208
ERROR_DS_NO_MORE_RIDS = 8209
ERROR_DS_INCORRECT_ROLE_OWNER = 8210
ERROR_DS_RIDMGR_INIT_ERROR = 8211
ERROR_DS_OBJ_CLASS_VIOLATION = 8212
ERROR_DS_CANT_ON_NON_LEAF = 8213
ERROR_DS_CANT_ON_RDN = 8214
ERROR_DS_CANT_MOD_OBJ_CLASS = 8215
ERROR_DS_CROSS_DOM_MOVE_ERROR = 8216
ERROR_DS_GC_NOT_AVAILABLE = 8217
ERROR_SHARED_POLICY = 8218
ERROR_POLICY_OBJECT_NOT_FOUND = 8219
ERROR_POLICY_ONLY_IN_DS = 8220
ERROR_PROMOTION_ACTIVE = 8221
ERROR_NO_PROMOTION_ACTIVE = 8222
ERROR_DS_OPERATIONS_ERROR = 8224
ERROR_DS_PROTOCOL_ERROR = 8225
ERROR_DS_TIMELIMIT_EXCEEDED = 8226
ERROR_DS_SIZELIMIT_EXCEEDED = 8227
ERROR_DS_ADMIN_LIMIT_EXCEEDED = 8228
ERROR_DS_COMPARE_FALSE = 8229
ERROR_DS_COMPARE_TRUE = 8230
ERROR_DS_AUTH_METHOD_NOT_SUPPORTED = 8231
ERROR_DS_STRONG_AUTH_REQUIRED = 8232
ERROR_DS_INAPPROPRIATE_AUTH = 8233
ERROR_DS_AUTH_UNKNOWN = 8234
ERROR_DS_REFERRAL = 8235
ERROR_DS_UNAVAILABLE_CRIT_EXTENSION = 8236
ERROR_DS_CONFIDENTIALITY_REQUIRED = 8237
ERROR_DS_INAPPROPRIATE_MATCHING = 8238
ERROR_DS_CONSTRAINT_VIOLATION = 8239
ERROR_DS_NO_SUCH_OBJECT = 8240
ERROR_DS_ALIAS_PROBLEM = 8241
ERROR_DS_INVALID_DN_SYNTAX = 8242
ERROR_DS_IS_LEAF = 8243
ERROR_DS_ALIAS_DEREF_PROBLEM = 8244
ERROR_DS_UNWILLING_TO_PERFORM = 8245
ERROR_DS_LOOP_DETECT = 8246
ERROR_DS_NAMING_VIOLATION = 8247
ERROR_DS_OBJECT_RESULTS_TOO_LARGE = 8248
ERROR_DS_AFFECTS_MULTIPLE_DSAS = 8249
ERROR_DS_SERVER_DOWN = 8250
ERROR_DS_LOCAL_ERROR = 8251
ERROR_DS_ENCODING_ERROR = 8252
ERROR_DS_DECODING_ERROR = 8253
ERROR_DS_FILTER_UNKNOWN = 8254
ERROR_DS_PARAM_ERROR = 8255
ERROR_DS_NOT_SUPPORTED = 8256
ERROR_DS_NO_RESULTS_RETURNED = 8257
ERROR_DS_CONTROL_NOT_FOUND = 8258
ERROR_DS_CLIENT_LOOP = 8259
ERROR_DS_REFERRAL_LIMIT_EXCEEDED = 8260
ERROR_DS_SORT_CONTROL_MISSING = 8261
ERROR_DS_OFFSET_RANGE_ERROR = 8262
ERROR_DS_ROOT_MUST_BE_NC = 8301
ERROR_DS_ADD_REPLICA_INHIBITED = 8302
ERROR_DS_ATT_NOT_DEF_IN_SCHEMA = 8303
ERROR_DS_MAX_OBJ_SIZE_EXCEEDED = 8304
ERROR_DS_OBJ_STRING_NAME_EXISTS = 8305
ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA = 8306
ERROR_DS_RDN_DOESNT_MATCH_SCHEMA = 8307
ERROR_DS_NO_REQUESTED_ATTS_FOUND = 8308
ERROR_DS_USER_BUFFER_TO_SMALL = 8309
ERROR_DS_ATT_IS_NOT_ON_OBJ = 8310
ERROR_DS_ILLEGAL_MOD_OPERATION = 8311
ERROR_DS_OBJ_TOO_LARGE = 8312
ERROR_DS_BAD_INSTANCE_TYPE = 8313
ERROR_DS_MASTERDSA_REQUIRED = 8314
ERROR_DS_OBJECT_CLASS_REQUIRED = 8315
ERROR_DS_MISSING_REQUIRED_ATT = 8316
ERROR_DS_ATT_NOT_DEF_FOR_CLASS = 8317
ERROR_DS_ATT_ALREADY_EXISTS = 8318
ERROR_DS_CANT_ADD_ATT_VALUES = 8320
ERROR_DS_SINGLE_VALUE_CONSTRAINT = 8321
ERROR_DS_RANGE_CONSTRAINT = 8322
ERROR_DS_ATT_VAL_ALREADY_EXISTS = 8323
ERROR_DS_CANT_REM_MISSING_ATT = 8324
ERROR_DS_CANT_REM_MISSING_ATT_VAL = 8325
ERROR_DS_ROOT_CANT_BE_SUBREF = 8326
ERROR_DS_NO_CHAINING = 8327
ERROR_DS_NO_CHAINED_EVAL = 8328
ERROR_DS_NO_PARENT_OBJECT = 8329
ERROR_DS_PARENT_IS_AN_ALIAS = 8330
ERROR_DS_CANT_MIX_MASTER_AND_REPS = 8331
ERROR_DS_CHILDREN_EXIST = 8332
ERROR_DS_OBJ_NOT_FOUND = 8333
ERROR_DS_ALIASED_OBJ_MISSING = 8334
ERROR_DS_BAD_NAME_SYNTAX = 8335
ERROR_DS_ALIAS_POINTS_TO_ALIAS = 8336
ERROR_DS_CANT_DEREF_ALIAS = 8337
ERROR_DS_OUT_OF_SCOPE = 8338
ERROR_DS_OBJECT_BEING_REMOVED = 8339
ERROR_DS_CANT_DELETE_DSA_OBJ = 8340
ERROR_DS_GENERIC_ERROR = 8341
ERROR_DS_DSA_MUST_BE_INT_MASTER = 8342
ERROR_DS_CLASS_NOT_DSA = 8343
ERROR_DS_INSUFF_ACCESS_RIGHTS = 8344
ERROR_DS_ILLEGAL_SUPERIOR = 8345
ERROR_DS_ATTRIBUTE_OWNED_BY_SAM = 8346
ERROR_DS_NAME_TOO_MANY_PARTS = 8347
ERROR_DS_NAME_TOO_LONG = 8348
ERROR_DS_NAME_VALUE_TOO_LONG = 8349
ERROR_DS_NAME_UNPARSEABLE = 8350
ERROR_DS_NAME_TYPE_UNKNOWN = 8351
ERROR_DS_NOT_AN_OBJECT = 8352
ERROR_DS_SEC_DESC_TOO_SHORT = 8353
ERROR_DS_SEC_DESC_INVALID = 8354
ERROR_DS_NO_DELETED_NAME = 8355
ERROR_DS_SUBREF_MUST_HAVE_PARENT = 8356
ERROR_DS_NCNAME_MUST_BE_NC = 8357
ERROR_DS_CANT_ADD_SYSTEM_ONLY = 8358
ERROR_DS_CLASS_MUST_BE_CONCRETE = 8359
ERROR_DS_INVALID_DMD = 8360
ERROR_DS_OBJ_GUID_EXISTS = 8361
ERROR_DS_NOT_ON_BACKLINK = 8362
ERROR_DS_NO_CROSSREF_FOR_NC = 8363
ERROR_DS_SHUTTING_DOWN = 8364
ERROR_DS_UNKNOWN_OPERATION = 8365
ERROR_DS_INVALID_ROLE_OWNER = 8366
ERROR_DS_COULDNT_CONTACT_FSMO = 8367
ERROR_DS_CROSS_NC_DN_RENAME = 8368
ERROR_DS_CANT_MOD_SYSTEM_ONLY = 8369
ERROR_DS_REPLICATOR_ONLY = 8370
ERROR_DS_OBJ_CLASS_NOT_DEFINED = 8371
ERROR_DS_OBJ_CLASS_NOT_SUBCLASS = 8372
ERROR_DS_NAME_REFERENCE_INVALID = 8373
ERROR_DS_CROSS_REF_EXISTS = 8374
ERROR_DS_CANT_DEL_MASTER_CROSSREF = 8375
ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD = 8376
ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX = 8377
ERROR_DS_DUP_RDN = 8378
ERROR_DS_DUP_OID = 8379
ERROR_DS_DUP_MAPI_ID = 8380
ERROR_DS_DUP_SCHEMA_ID_GUID = 8381
ERROR_DS_DUP_LDAP_DISPLAY_NAME = 8382
ERROR_DS_SEMANTIC_ATT_TEST = 8383
ERROR_DS_SYNTAX_MISMATCH = 8384
ERROR_DS_EXISTS_IN_MUST_HAVE = 8385
ERROR_DS_EXISTS_IN_MAY_HAVE = 8386
ERROR_DS_NONEXISTENT_MAY_HAVE = 8387
ERROR_DS_NONEXISTENT_MUST_HAVE = 8388
ERROR_DS_AUX_CLS_TEST_FAIL = 8389
ERROR_DS_NONEXISTENT_POSS_SUP = 8390
ERROR_DS_SUB_CLS_TEST_FAIL = 8391
ERROR_DS_BAD_RDN_ATT_ID_SYNTAX = 8392
ERROR_DS_EXISTS_IN_AUX_CLS = 8393
ERROR_DS_EXISTS_IN_SUB_CLS = 8394
ERROR_DS_EXISTS_IN_POSS_SUP = 8395
ERROR_DS_RECALCSCHEMA_FAILED = 8396
ERROR_DS_TREE_DELETE_NOT_FINISHED = 8397
ERROR_DS_CANT_DELETE = 8398
ERROR_DS_ATT_SCHEMA_REQ_ID = 8399
ERROR_DS_BAD_ATT_SCHEMA_SYNTAX = 8400
ERROR_DS_CANT_CACHE_ATT = 8401
ERROR_DS_CANT_CACHE_CLASS = 8402
ERROR_DS_CANT_REMOVE_ATT_CACHE = 8403
ERROR_DS_CANT_REMOVE_CLASS_CACHE = 8404
ERROR_DS_CANT_RETRIEVE_DN = 8405
ERROR_DS_MISSING_SUPREF = 8406
ERROR_DS_CANT_RETRIEVE_INSTANCE = 8407
ERROR_DS_CODE_INCONSISTENCY = 8408
ERROR_DS_DATABASE_ERROR = 8409
ERROR_DS_GOVERNSID_MISSING = 8410
ERROR_DS_MISSING_EXPECTED_ATT = 8411
ERROR_DS_NCNAME_MISSING_CR_REF = 8412
ERROR_DS_SECURITY_CHECKING_ERROR = 8413
ERROR_DS_SCHEMA_NOT_LOADED = 8414
ERROR_DS_SCHEMA_ALLOC_FAILED = 8415
ERROR_DS_ATT_SCHEMA_REQ_SYNTAX = 8416
ERROR_DS_GCVERIFY_ERROR = 8417
ERROR_DS_DRA_SCHEMA_MISMATCH = 8418
ERROR_DS_CANT_FIND_DSA_OBJ = 8419
ERROR_DS_CANT_FIND_EXPECTED_NC = 8420
ERROR_DS_CANT_FIND_NC_IN_CACHE = 8421
ERROR_DS_CANT_RETRIEVE_CHILD = 8422
ERROR_DS_SECURITY_ILLEGAL_MODIFY = 8423
ERROR_DS_CANT_REPLACE_HIDDEN_REC = 8424
ERROR_DS_BAD_HIERARCHY_FILE = 8425
ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED = 8426
ERROR_DS_CONFIG_PARAM_MISSING = 8427
ERROR_DS_COUNTING_AB_INDICES_FAILED = 8428
ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED = 8429
ERROR_DS_INTERNAL_FAILURE = 8430
ERROR_DS_UNKNOWN_ERROR = 8431
ERROR_DS_ROOT_REQUIRES_CLASS_TOP = 8432
ERROR_DS_REFUSING_FSMO_ROLES = 8433
ERROR_DS_MISSING_FSMO_SETTINGS = 8434
ERROR_DS_UNABLE_TO_SURRENDER_ROLES = 8435
ERROR_DS_DRA_GENERIC = 8436
ERROR_DS_DRA_INVALID_PARAMETER = 8437
ERROR_DS_DRA_BUSY = 8438
ERROR_DS_DRA_BAD_DN = 8439
ERROR_DS_DRA_BAD_NC = 8440
ERROR_DS_DRA_DN_EXISTS = 8441
ERROR_DS_DRA_INTERNAL_ERROR = 8442
ERROR_DS_DRA_INCONSISTENT_DIT = 8443
ERROR_DS_DRA_CONNECTION_FAILED = 8444
ERROR_DS_DRA_BAD_INSTANCE_TYPE = 8445
ERROR_DS_DRA_OUT_OF_MEM = 8446
ERROR_DS_DRA_MAIL_PROBLEM = 8447
ERROR_DS_DRA_REF_ALREADY_EXISTS = 8448
ERROR_DS_DRA_REF_NOT_FOUND = 8449
ERROR_DS_DRA_OBJ_IS_REP_SOURCE = 8450
ERROR_DS_DRA_DB_ERROR = 8451
ERROR_DS_DRA_NO_REPLICA = 8452
ERROR_DS_DRA_ACCESS_DENIED = 8453
ERROR_DS_DRA_NOT_SUPPORTED = 8454
ERROR_DS_DRA_RPC_CANCELLED = 8455
ERROR_DS_DRA_SOURCE_DISABLED = 8456
ERROR_DS_DRA_SINK_DISABLED = 8457
ERROR_DS_DRA_NAME_COLLISION = 8458
ERROR_DS_DRA_SOURCE_REINSTALLED = 8459
ERROR_DS_DRA_MISSING_PARENT = 8460
ERROR_DS_DRA_PREEMPTED = 8461
ERROR_DS_DRA_ABANDON_SYNC = 8462
ERROR_DS_DRA_SHUTDOWN = 8463
ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET = 8464
ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA = 8465
ERROR_DS_DRA_EXTN_CONNECTION_FAILED = 8466
ERROR_DS_INSTALL_SCHEMA_MISMATCH = 8467
ERROR_DS_DUP_LINK_ID = 8468
ERROR_DS_NAME_ERROR_RESOLVING = 8469
ERROR_DS_NAME_ERROR_NOT_FOUND = 8470
ERROR_DS_NAME_ERROR_NOT_UNIQUE = 8471
ERROR_DS_NAME_ERROR_NO_MAPPING = 8472
ERROR_DS_NAME_ERROR_DOMAIN_ONLY = 8473
ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 8474
ERROR_DS_CONSTRUCTED_ATT_MOD = 8475
ERROR_DS_WRONG_OM_OBJ_CLASS = 8476
ERROR_DS_DRA_REPL_PENDING = 8477
ERROR_DS_DS_REQUIRED = 8478
ERROR_DS_INVALID_LDAP_DISPLAY_NAME = 8479
ERROR_DS_NON_BASE_SEARCH = 8480
ERROR_DS_CANT_RETRIEVE_ATTS = 8481
ERROR_DS_BACKLINK_WITHOUT_LINK = 8482
ERROR_DS_EPOCH_MISMATCH = 8483
ERROR_DS_SRC_NAME_MISMATCH = 8484
ERROR_DS_SRC_AND_DST_NC_IDENTICAL = 8485
ERROR_DS_DST_NC_MISMATCH = 8486
ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC = 8487
ERROR_DS_SRC_GUID_MISMATCH = 8488
ERROR_DS_CANT_MOVE_DELETED_OBJECT = 8489
ERROR_DS_PDC_OPERATION_IN_PROGRESS = 8490
ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD = 8491
ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION = 8492
ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS = 8493
ERROR_DS_NC_MUST_HAVE_NC_PARENT = 8494
ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE = 8495
ERROR_DS_DST_DOMAIN_NOT_NATIVE = 8496
ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER = 8497
ERROR_DS_CANT_MOVE_ACCOUNT_GROUP = 8498
ERROR_DS_CANT_MOVE_RESOURCE_GROUP = 8499
ERROR_DS_INVALID_SEARCH_FLAG = 8500
ERROR_DS_NO_TREE_DELETE_ABOVE_NC = 8501
ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE = 8502
ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE = 8503
ERROR_DS_SAM_INIT_FAILURE = 8504
ERROR_DS_SENSITIVE_GROUP_VIOLATION = 8505
ERROR_DS_CANT_MOD_PRIMARYGROUPID = 8506
ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD = 8507
ERROR_DS_NONSAFE_SCHEMA_CHANGE = 8508
ERROR_DS_SCHEMA_UPDATE_DISALLOWED = 8509
ERROR_DS_CANT_CREATE_UNDER_SCHEMA = 8510
ERROR_DS_INSTALL_NO_SRC_SCH_VERSION = 8511
ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE = 8512
ERROR_DS_INVALID_GROUP_TYPE = 8513
ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN = 8514
ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN = 8515
ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER = 8516
ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER = 8517
ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER = 8518
ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER = 8519
ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER = 8520
ERROR_DS_HAVE_PRIMARY_MEMBERS = 8521
ERROR_DS_STRING_SD_CONVERSION_FAILED = 8522
ERROR_DS_NAMING_MASTER_GC = 8523
ERROR_DS_DNS_LOOKUP_FAILURE = 8524
ERROR_DS_COULDNT_UPDATE_SPNS = 8525
ERROR_DS_CANT_RETRIEVE_SD = 8526
ERROR_DS_KEY_NOT_UNIQUE = 8527
ERROR_DS_WRONG_LINKED_ATT_SYNTAX = 8528
ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD = 8529
ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY = 8530
ERROR_DS_CANT_START = 8531
ERROR_DS_INIT_FAILURE = 8532
ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION = 8533
ERROR_DS_SOURCE_DOMAIN_IN_FOREST = 8534
ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST = 8535
ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED = 8536
ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN = 8537
ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER = 8538
ERROR_DS_SRC_SID_EXISTS_IN_FOREST = 8539
ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH = 8540
ERROR_SAM_INIT_FAILURE = 8541
ERROR_DS_DRA_SCHEMA_INFO_SHIP = 8542
ERROR_DS_DRA_SCHEMA_CONFLICT = 8543
ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT = 8544
ERROR_DS_DRA_OBJ_NC_MISMATCH = 8545
ERROR_DS_NC_STILL_HAS_DSAS = 8546
ERROR_DS_GC_REQUIRED = 8547
ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY = 8548
ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS = 8549
ERROR_DS_CANT_ADD_TO_GC = 8550
ERROR_DS_NO_CHECKPOINT_WITH_PDC = 8551
ERROR_DS_SOURCE_AUDITING_NOT_ENABLED = 8552
ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC = 8553
ERROR_DS_INVALID_NAME_FOR_SPN = 8554
ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS = 8555
ERROR_DS_UNICODEPWD_NOT_IN_QUOTES = 8556
ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED = 8557
ERROR_DS_MUST_BE_RUN_ON_DST_DC = 8558
ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER = 8559
ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ = 8560
ERROR_DS_INIT_FAILURE_CONSOLE = 8561
ERROR_DS_SAM_INIT_FAILURE_CONSOLE = 8562
ERROR_DS_FOREST_VERSION_TOO_HIGH = 8563
ERROR_DS_DOMAIN_VERSION_TOO_HIGH = 8564
ERROR_DS_FOREST_VERSION_TOO_LOW = 8565
ERROR_DS_DOMAIN_VERSION_TOO_LOW = 8566
ERROR_DS_INCOMPATIBLE_VERSION = 8567
ERROR_DS_LOW_DSA_VERSION = 8568
ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN = 8569
ERROR_DS_NOT_SUPPORTED_SORT_ORDER = 8570
ERROR_DS_NAME_NOT_UNIQUE = 8571
ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4 = 8572
ERROR_DS_OUT_OF_VERSION_STORE = 8573
ERROR_DS_INCOMPATIBLE_CONTROLS_USED = 8574
ERROR_DS_NO_REF_DOMAIN = 8575
ERROR_DS_RESERVED_LINK_ID = 8576
ERROR_DS_LINK_ID_NOT_AVAILABLE = 8577
ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER = 8578
ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE = 8579
ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC = 8580
ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG = 8581
ERROR_DS_MODIFYDN_WRONG_GRANDPARENT = 8582
ERROR_DS_NAME_ERROR_TRUST_REFERRAL = 8583
ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER = 8584
ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD = 8585
ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2 = 8586
ERROR_DS_THREAD_LIMIT_EXCEEDED = 8587
ERROR_DS_NOT_CLOSEST = 8588
ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF = 8589
ERROR_DS_SINGLE_USER_MODE_FAILED = 8590
ERROR_DS_NTDSCRIPT_SYNTAX_ERROR = 8591
ERROR_DS_NTDSCRIPT_PROCESS_ERROR = 8592
ERROR_DS_DIFFERENT_REPL_EPOCHS = 8593
ERROR_DS_DRS_EXTENSIONS_CHANGED = 8594
ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR = 8595
ERROR_DS_NO_MSDS_INTID = 8596
ERROR_DS_DUP_MSDS_INTID = 8597
ERROR_DS_EXISTS_IN_RDNATTID = 8598
ERROR_DS_AUTHORIZATION_FAILED = 8599
ERROR_DS_INVALID_SCRIPT = 8600
ERROR_DS_REMOTE_CROSSREF_OP_FAILED = 8601
ERROR_DS_CROSS_REF_BUSY = 8602
ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN = 8603
ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC = 8604
ERROR_DS_DUPLICATE_ID_FOUND = 8605
ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT = 8606
ERROR_DS_GROUP_CONVERSION_ERROR = 8607
ERROR_DS_CANT_MOVE_APP_BASIC_GROUP = 8608
ERROR_DS_CANT_MOVE_APP_QUERY_GROUP = 8609
ERROR_DS_ROLE_NOT_VERIFIED = 8610
ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL = 8611
ERROR_DS_DOMAIN_RENAME_IN_PROGRESS = 8612
ERROR_DS_EXISTING_AD_CHILD_NC = 8613
ERROR_DS_REPL_LIFETIME_EXCEEDED = 8614
ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER = 8615
ERROR_DS_LDAP_SEND_QUEUE_FULL = 8616
ERROR_DS_DRA_OUT_SCHEDULE_WINDOW = 8617
# Common dialog box error codes from cderr.h
CDERR_DIALOGFAILURE = 65535
CDERR_GENERALCODES = 0
CDERR_STRUCTSIZE = 1
CDERR_INITIALIZATION = 2
CDERR_NOTEMPLATE = 3
CDERR_NOHINSTANCE = 4
CDERR_LOADSTRFAILURE = 5
CDERR_FINDRESFAILURE = 6
CDERR_LOADRESFAILURE = 7
CDERR_LOCKRESFAILURE = 8
CDERR_MEMALLOCFAILURE = 9
CDERR_MEMLOCKFAILURE = 10
CDERR_NOHOOK = 11
CDERR_REGISTERMSGFAIL = 12
PDERR_PRINTERCODES = 4096
PDERR_SETUPFAILURE = 4097
PDERR_PARSEFAILURE = 4098
PDERR_RETDEFFAILURE = 4099
PDERR_LOADDRVFAILURE = 4100
PDERR_GETDEVMODEFAIL = 4101
PDERR_INITFAILURE = 4102
PDERR_NODEVICES = 4103
PDERR_NODEFAULTPRN = 4104
PDERR_DNDMMISMATCH = 4105
PDERR_CREATEICFAILURE = 4106
PDERR_PRINTERNOTFOUND = 4107
PDERR_DEFAULTDIFFERENT = 4108
CFERR_CHOOSEFONTCODES = 8192
CFERR_NOFONTS = 8193
CFERR_MAXLESSTHANMIN = 8194
FNERR_FILENAMECODES = 12288
FNERR_SUBCLASSFAILURE = 12289
FNERR_INVALIDFILENAME = 12290
FNERR_BUFFERTOOSMALL = 12291
FRERR_FINDREPLACECODES = 16384
FRERR_BUFFERLENGTHZERO = 16385
CCERR_CHOOSECOLORCODES = 20480
| apache-2.0 |
bgxavier/nova | nova/tests/unit/objects/test_flavor.py | 36 | 10238 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import db
from nova import exception
from nova.objects import flavor as flavor_obj
from nova.tests.unit.objects import test_objects
# Canonical DB-style flavor record used as the mocked DB return value in the
# tests below; keys mirror the columns of the instance_types table.
fake_flavor = {
    'created_at': None,
    'updated_at': None,
    'deleted_at': None,
    'deleted': 0,
    'id': 1,
    'name': 'm1.foo',
    'memory_mb': 1024,
    'vcpus': 4,
    'root_gb': 20,
    'ephemeral_gb': 0,
    'flavorid': 'm1.foo',
    'swap': 0,
    'rxtx_factor': 1.0,
    'vcpu_weight': 1,
    'disabled': False,
    'is_public': True,
    'extra_specs': {'foo': 'bar'},
    }
class _TestFlavor(object):
    """Shared Flavor object test cases.

    Mixed into TestFlavor/TestFlavorRemote below so the same assertions run
    against both the _LocalTest and _RemoteTest harnesses.  All DB access is
    mocked; each test checks the object layer's calls into nova.db.
    """

    @staticmethod
    def _compare(test, db, obj):
        # Assert every field present in the db dict matches the object.
        for field, value in db.items():
            test.assertEqual(db[field], obj[field])

    def test_get_by_id(self):
        with mock.patch.object(db, 'flavor_get') as get:
            get.return_value = fake_flavor
            flavor = flavor_obj.Flavor.get_by_id(self.context, 1)
            self._compare(self, fake_flavor, flavor)

    def test_get_by_name(self):
        with mock.patch.object(db, 'flavor_get_by_name') as get_by_name:
            get_by_name.return_value = fake_flavor
            flavor = flavor_obj.Flavor.get_by_name(self.context, 'm1.foo')
            self._compare(self, fake_flavor, flavor)

    def test_get_by_flavor_id(self):
        with mock.patch.object(db, 'flavor_get_by_flavor_id') as get_by_id:
            get_by_id.return_value = fake_flavor
            flavor = flavor_obj.Flavor.get_by_flavor_id(self.context,
                                                        'm1.foo')
            self._compare(self, fake_flavor, flavor)

    def test_add_access(self):
        # add_access must use the (elevated) context the object was bound to.
        elevated = self.context.elevated()
        flavor = flavor_obj.Flavor(context=elevated, flavorid='123')
        with mock.patch.object(db, 'flavor_access_add') as add:
            flavor.add_access('456')
            add.assert_called_once_with(elevated, '123', '456')

    def test_add_access_with_dirty_projects(self):
        # Unsaved changes to 'projects' make add_access an invalid action.
        flavor = flavor_obj.Flavor(context=self.context, projects=['1'])
        self.assertRaises(exception.ObjectActionError,
                          flavor.add_access, '2')

    def test_remove_access(self):
        elevated = self.context.elevated()
        flavor = flavor_obj.Flavor(context=elevated, flavorid='123')
        with mock.patch.object(db, 'flavor_access_remove') as remove:
            flavor.remove_access('456')
            remove.assert_called_once_with(elevated, '123', '456')

    def test_create(self):
        flavor = flavor_obj.Flavor(context=self.context)
        flavor.name = 'm1.foo'
        flavor.extra_specs = fake_flavor['extra_specs']
        with mock.patch.object(db, 'flavor_create') as create:
            create.return_value = fake_flavor
            flavor.create()
        self.assertEqual(self.context, flavor._context)
        # NOTE(danms): Orphan this to avoid lazy-loads
        flavor._context = None
        self._compare(self, fake_flavor, flavor)

    def test_create_with_projects(self):
        context = self.context.elevated()
        flavor = flavor_obj.Flavor(context=context)
        flavor.name = 'm1.foo'
        flavor.extra_specs = fake_flavor['extra_specs']
        flavor.projects = ['project-1', 'project-2']
        db_flavor = dict(fake_flavor, projects=list(flavor.projects))
        with mock.patch.multiple(db, flavor_create=mock.DEFAULT,
                                 flavor_access_get_by_flavor_id=mock.DEFAULT
                                 ) as methods:
            methods['flavor_create'].return_value = db_flavor
            methods['flavor_access_get_by_flavor_id'].return_value = [
                {'project_id': 'project-1'},
                {'project_id': 'project-2'}]
            flavor.create()
        # The projects list must be passed through to the DB layer.
        methods['flavor_create'].assert_called_once_with(
            context,
            {'name': 'm1.foo',
             'extra_specs': fake_flavor['extra_specs']},
            projects=['project-1', 'project-2'])
        self.assertEqual(context, flavor._context)
        # NOTE(danms): Orphan this to avoid lazy-loads
        flavor._context = None
        self._compare(self, fake_flavor, flavor)
        self.assertEqual(['project-1', 'project-2'], flavor.projects)

    def test_create_with_id(self):
        # An object that already has an id cannot be created again.
        flavor = flavor_obj.Flavor(id=123)
        self.assertRaises(exception.ObjectActionError, flavor.create,
                          self.context)

    @mock.patch('nova.db.flavor_access_add')
    @mock.patch('nova.db.flavor_access_remove')
    @mock.patch('nova.db.flavor_extra_specs_delete')
    @mock.patch('nova.db.flavor_extra_specs_update_or_create')
    def test_save(self, mock_update, mock_delete, mock_remove, mock_add):
        """save() must translate extra_specs/projects deltas into DB calls."""
        ctxt = self.context.elevated()
        extra_specs = {'key1': 'value1', 'key2': 'value2'}
        projects = ['project-1', 'project-2']
        flavor = flavor_obj.Flavor(context=ctxt, flavorid='foo',
                                   extra_specs=extra_specs, projects=projects)
        flavor.obj_reset_changes()
        # Test deleting an extra_specs key and project
        del flavor.extra_specs['key1']
        del flavor.projects[-1]
        self.assertEqual(set(['extra_specs', 'projects']),
                         flavor.obj_what_changed())
        flavor.save()
        self.assertEqual({'key2': 'value2'}, flavor.extra_specs)
        mock_delete.assert_called_once_with(ctxt, 'foo', 'key1')
        self.assertEqual(['project-1'], flavor.projects)
        mock_remove.assert_called_once_with(ctxt, 'foo', 'project-2')
        # Test updating an extra_specs key value
        flavor.extra_specs['key2'] = 'foobar'
        self.assertEqual(set(['extra_specs']), flavor.obj_what_changed())
        flavor.save()
        self.assertEqual({'key2': 'foobar'}, flavor.extra_specs)
        mock_update.assert_called_with(ctxt, 'foo', {'key2': 'foobar'})
        # Test adding an extra_specs and project
        flavor.extra_specs['key3'] = 'value3'
        flavor.projects.append('project-3')
        self.assertEqual(set(['extra_specs', 'projects']),
                         flavor.obj_what_changed())
        flavor.save()
        self.assertEqual({'key2': 'foobar', 'key3': 'value3'},
                         flavor.extra_specs)
        mock_update.assert_called_with(ctxt, 'foo', {'key2': 'foobar',
                                                     'key3': 'value3'})
        self.assertEqual(['project-1', 'project-3'], flavor.projects)
        mock_add.assert_called_once_with(ctxt, 'foo', 'project-3')

    @mock.patch('nova.db.flavor_create')
    @mock.patch('nova.db.flavor_extra_specs_delete')
    @mock.patch('nova.db.flavor_extra_specs_update_or_create')
    def test_save_deleted_extra_specs(self, mock_update, mock_delete,
                                      mock_create):
        # Emptying extra_specs after create must delete, not update.
        mock_create.return_value = dict(fake_flavor,
                                        extra_specs={'key1': 'value1'})
        ctxt = self.context.elevated()
        flavor = flavor_obj.Flavor(context=ctxt)
        flavor.flavorid = 'test'
        flavor.extra_specs = {'key1': 'value1'}
        flavor.create()
        flavor.extra_specs = {}
        flavor.save()
        mock_delete.assert_called_once_with(ctxt, flavor.flavorid,
                                            'key1')
        self.assertFalse(mock_update.called)

    def test_save_invalid_fields(self):
        # Changing anything but extra_specs/projects is not saveable.
        flavor = flavor_obj.Flavor(id=123)
        self.assertRaises(exception.ObjectActionError, flavor.save)

    def test_destroy(self):
        flavor = flavor_obj.Flavor(context=self.context, id=123, name='foo')
        with mock.patch.object(db, 'flavor_destroy') as destroy:
            flavor.destroy()
            destroy.assert_called_once_with(self.context, flavor.name)

    def test_load_projects(self):
        # Accessing .projects lazily loads from the DB without marking the
        # field dirty.
        flavor = flavor_obj.Flavor(context=self.context, flavorid='foo')
        with mock.patch.object(db, 'flavor_access_get_by_flavor_id') as get:
            get.return_value = [{'project_id': 'project-1'}]
            projects = flavor.projects
        self.assertEqual(['project-1'], projects)
        self.assertNotIn('projects', flavor.obj_what_changed())

    def test_load_anything_else(self):
        # Only 'projects' is lazy-loadable; other unset fields raise.
        flavor = flavor_obj.Flavor()
        self.assertRaises(exception.ObjectActionError,
                          getattr, flavor, 'name')
class TestFlavor(test_objects._LocalTest, _TestFlavor):
    # Runs the shared _TestFlavor cases with the _LocalTest harness.
    pass
class TestFlavorRemote(test_objects._RemoteTest, _TestFlavor):
    # Runs the shared _TestFlavor cases with the _RemoteTest harness.
    pass
class _TestFlavorList(object):
    """Shared FlavorList test cases (mixed into the harness classes below)."""

    def test_get_all(self):
        # get_all must forward all query options to db.flavor_get_all and
        # wrap each returned row in a Flavor object.
        with mock.patch.object(db, 'flavor_get_all') as get_all:
            get_all.return_value = [fake_flavor]
            filters = {'min_memory_mb': 4096}
            flavors = flavor_obj.FlavorList.get_all(self.context,
                                                    inactive=False,
                                                    filters=filters,
                                                    sort_key='id',
                                                    sort_dir='asc')
            self.assertEqual(1, len(flavors))
            _TestFlavor._compare(self, fake_flavor, flavors[0])
            get_all.assert_called_once_with(self.context, inactive=False,
                                            filters=filters, sort_key='id',
                                            sort_dir='asc', limit=None,
                                            marker=None)
class TestFlavorList(test_objects._LocalTest, _TestFlavorList):
    # Runs the shared _TestFlavorList cases with the _LocalTest harness.
    pass
class TestFlavorListRemote(test_objects._RemoteTest, _TestFlavorList):
    # Runs the shared _TestFlavorList cases with the _RemoteTest harness.
    pass
| apache-2.0 |
shakamunyi/neutron | neutron/db/api.py | 7 | 1845 | # Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from oslo_config import cfg
from oslo_db.sqlalchemy import session
from sqlalchemy import exc
_FACADE = None
MAX_RETRIES = 10
def _create_facade_lazily():
    """Return the process-wide oslo.db EngineFacade, creating it on first use."""
    global _FACADE
    if _FACADE is not None:
        return _FACADE
    # First call: build the facade from the global config.  sqlite_fk turns
    # on foreign-key enforcement when the backend is SQLite.
    _FACADE = session.EngineFacade.from_config(cfg.CONF, sqlite_fk=True)
    return _FACADE
def get_engine():
    """Helper method to grab engine."""
    return _create_facade_lazily().get_engine()
def dispose():
    """Dispose of the connection pool held by the engine facade, if any."""
    if _FACADE is None:
        # No enginefacade was ever created, so there is nothing to clean up.
        return
    get_engine().pool.dispose()
def get_session(autocommit=True, expire_on_commit=False):
    """Helper method to grab session."""
    return _create_facade_lazily().get_session(
        autocommit=autocommit, expire_on_commit=expire_on_commit)
@contextlib.contextmanager
def autonested_transaction(sess):
    """Open a transaction on *sess*, nesting automatically when needed.

    Tries to begin a nested transaction (SAVEPOINT) first; if the session
    state makes that invalid, SQLAlchemy raises InvalidRequestError and a
    plain subtransaction is begun instead.  The resulting transaction
    context is entered and yielded, so it commits or rolls back when the
    caller's ``with`` block exits.
    """
    try:
        session_context = sess.begin_nested()
    except exc.InvalidRequestError:
        # Nested begin was not possible; fall back to a subtransaction.
        session_context = sess.begin(subtransactions=True)
    # NOTE: the context is entered *outside* any try/finally.  The previous
    # code entered it in a ``finally`` block, so an unexpected exception
    # from begin_nested()/begin() left session_context unbound and the
    # finally clause raised UnboundLocalError, masking the real error.
    with session_context as tx:
        yield tx
| apache-2.0 |
csrg-utfsm/acscb | LGPL/CommonSoftware/nctest/ws/test/pyStructureEventTest.py | 8 | 1355 | #!/usr/bin/env python
# Functional test script (Python 2): publishes a series of datacapEx
# structure events on the 'pyTest-NC' notification channel, then tears the
# channel down.  Success is simply reaching the final print without raising.
from Acspy.Nc.CommonNC import CommonNC
from Acspy.Nc.Supplier import Supplier
import datacapEx
from datacapEx import ExecBlockProcessedEvent, DataCapturerId, ExecBlockStartedEvent, ScanStartedEvent
import asdmEX
# Supplier bound to the test notification channel.
s = Supplier('pyTest-NC')
# A bare string payload first.
name = 'DATACAP1'
s.publishEvent(name)
# Entity references shared by the structured events below.
sessionId = asdmEX.IDLEntityRef('SessionId','X1','SID','1.0')
sb = asdmEX.IDLEntityRef('SB1','X1','SB1','1.0')
dcId = DataCapturerId (name, 'arrayId', sessionId, sb)
execBlockId = asdmEX.IDLEntityRef('ExecBlockId','X1','SB1','1.0')
d = ExecBlockProcessedEvent( dcId, 'statu', execBlockId, 0)
s.publishEvent(d)
# A scan-started event with a fresh pair of entity refs.
execId = asdmEX.IDLEntityRef('4','3','2', '1')
execBlockId = asdmEX.IDLEntityRef('1','2','3','4')
sse = ScanStartedEvent(execId, "something", 4, [datacapEx.LAST, datacapEx.LAST],0)
s.publishEvent(sse)
# Matching started/ended pair for one exec block.
execId = "23"
execBlockEntityRef = asdmEX.IDLEntityRef(execId,"X00000000","0","0")
sbId = asdmEX.IDLEntityRef(execId,"X00000000","0","0")
arrayId = "1"
time = 100
startExecBlock = datacapEx.ExecBlockStartedEvent(execBlockEntityRef,sbId,sessionId,arrayId,time)
s.publishEvent(startExecBlock)
endExecBlock = datacapEx.ExecBlockEndedEvent(execBlockEntityRef,sbId,sessionId,arrayId,datacapEx.SUCCESS,time+10)
s.publishEvent(endExecBlock)
print "All structures successfully sent!!"
s.destroyNotificationChannel()
| mit |
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/openstack/tests/unit/compute/v2/test_limits.py | 3 | 5732 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from openstack.compute.v2 import limits
# Sample absolute-limits payload as returned by the compute API; used both to
# construct AbsoluteLimits objects and as the expected values in assertions.
ABSOLUTE_LIMITS = {
    "maxImageMeta": 128,
    "maxPersonality": 5,
    "maxPersonalitySize": 10240,
    "maxSecurityGroupRules": 20,
    "maxSecurityGroups": 10,
    "maxServerMeta": 128,
    "maxTotalCores": 20,
    "maxTotalFloatingIps": 10,
    "maxTotalInstances": 10,
    "maxTotalKeypairs": 100,
    "maxTotalRAMSize": 51200,
    "maxServerGroups": 10,
    "maxServerGroupMembers": 10,
    "totalFloatingIpsUsed": 1,
    "totalSecurityGroupsUsed": 2,
    "totalRAMUsed": 4,
    "totalInstancesUsed": 5,
    "totalServerGroupsUsed": 6,
    "totalCoresUsed": 7
}
# Sample rate-limit entry: one uri/regex with its per-verb limit list.
RATE_LIMIT = {
    "limit": [
        {
            "next-available": "2012-11-27T17:22:18Z",
            "remaining": 120,
            "unit": "MINUTE",
            "value": 120,
            "verb": "POST"
        },
    ],
    "regex": ".*",
    "uri": "*"
}
# Full GET /limits response body combining the two samples above.
LIMITS_BODY = {
    "limits": {
        "absolute": ABSOLUTE_LIMITS,
        "rate": [RATE_LIMIT]
    }
}
class TestAbsoluteLimits(testtools.TestCase):
    """Tests for the AbsoluteLimits resource class."""

    def test_basic(self):
        # AbsoluteLimits is a nested/read-only resource: no keys, no path,
        # and no CRUD operations enabled.
        sot = limits.AbsoluteLimits()
        self.assertIsNone(sot.resource_key)
        self.assertIsNone(sot.resources_key)
        self.assertEqual("", sot.base_path)
        self.assertIsNone(sot.service)
        self.assertFalse(sot.allow_create)
        self.assertFalse(sot.allow_retrieve)
        self.assertFalse(sot.allow_update)
        self.assertFalse(sot.allow_delete)
        self.assertFalse(sot.allow_list)

    def test_make_it(self):
        # Every JSON key in the payload must map onto its renamed attribute.
        sot = limits.AbsoluteLimits(ABSOLUTE_LIMITS)
        self.assertEqual(ABSOLUTE_LIMITS["maxImageMeta"], sot.image_meta)
        self.assertEqual(ABSOLUTE_LIMITS["maxPersonality"], sot.personality)
        self.assertEqual(ABSOLUTE_LIMITS["maxPersonalitySize"],
                         sot.personality_size)
        self.assertEqual(ABSOLUTE_LIMITS["maxSecurityGroupRules"],
                         sot.security_group_rules)
        self.assertEqual(ABSOLUTE_LIMITS["maxSecurityGroups"],
                         sot.security_groups)
        self.assertEqual(ABSOLUTE_LIMITS["maxServerMeta"], sot.server_meta)
        self.assertEqual(ABSOLUTE_LIMITS["maxTotalCores"], sot.total_cores)
        self.assertEqual(ABSOLUTE_LIMITS["maxTotalFloatingIps"],
                         sot.floating_ips)
        self.assertEqual(ABSOLUTE_LIMITS["maxTotalInstances"],
                         sot.instances)
        self.assertEqual(ABSOLUTE_LIMITS["maxTotalKeypairs"],
                         sot.keypairs)
        self.assertEqual(ABSOLUTE_LIMITS["maxTotalRAMSize"],
                         sot.total_ram)
        self.assertEqual(ABSOLUTE_LIMITS["maxServerGroups"], sot.server_groups)
        self.assertEqual(ABSOLUTE_LIMITS["maxServerGroupMembers"],
                         sot.server_group_members)
        self.assertEqual(ABSOLUTE_LIMITS["totalFloatingIpsUsed"],
                         sot.floating_ips_used)
        self.assertEqual(ABSOLUTE_LIMITS["totalSecurityGroupsUsed"],
                         sot.security_groups_used)
        self.assertEqual(ABSOLUTE_LIMITS["totalRAMUsed"], sot.total_ram_used)
        self.assertEqual(ABSOLUTE_LIMITS["totalInstancesUsed"],
                         sot.instances_used)
        self.assertEqual(ABSOLUTE_LIMITS["totalServerGroupsUsed"],
                         sot.server_groups_used)
        self.assertEqual(ABSOLUTE_LIMITS["totalCoresUsed"],
                         sot.total_cores_used)
class TestRateLimits(testtools.TestCase):
    """Tests for the RateLimits resource class."""

    def test_basic(self):
        # RateLimits is a nested/read-only resource: no keys, no path,
        # and no CRUD operations enabled.
        sot = limits.RateLimits()
        self.assertIsNone(sot.resource_key)
        self.assertIsNone(sot.resources_key)
        self.assertEqual("", sot.base_path)
        self.assertIsNone(sot.service)
        self.assertFalse(sot.allow_create)
        self.assertFalse(sot.allow_retrieve)
        self.assertFalse(sot.allow_update)
        self.assertFalse(sot.allow_delete)
        self.assertFalse(sot.allow_list)

    def test_make_it(self):
        # Payload keys map onto attributes ('limit' is renamed to 'limits').
        sot = limits.RateLimits(RATE_LIMIT)
        self.assertEqual(RATE_LIMIT["regex"], sot.regex)
        self.assertEqual(RATE_LIMIT["uri"], sot.uri)
        self.assertEqual(RATE_LIMIT["limit"], sot.limits)
class TestLimits(testtools.TestCase):
    """Tests for the top-level Limits resource class."""

    def test_basic(self):
        # Limits is retrieve-only against the compute /limits endpoint.
        sot = limits.Limits()
        self.assertEqual("limits", sot.resource_key)
        self.assertEqual("/limits", sot.base_path)
        self.assertEqual("compute", sot.service.service_type)
        self.assertTrue(sot.allow_retrieve)
        self.assertFalse(sot.allow_create)
        self.assertFalse(sot.allow_update)
        self.assertFalse(sot.allow_delete)
        self.assertFalse(sot.allow_list)

    @mock.patch("openstack.resource.Resource.get_data_by_id")
    def test_get(self, mock_get):
        # Only return values under the limits key since that's our
        # resource_key, which would be filtered out in get_data_by_id.
        mock_get.return_value = LIMITS_BODY["limits"]
        sot = limits.Limits().get("fake session")
        self.assertEqual(sot.absolute, limits.AbsoluteLimits(ABSOLUTE_LIMITS))
        self.assertEqual(sot.rate, [limits.RateLimits(RATE_LIMIT)])
| mit |
bufferx/stormed-amqp | stormed/serialization.py | 4 | 6242 | import time
import datetime
from struct import Struct
from itertools import izip
from stormed import method
from stormed.message import ContentHeader, Message
def parse_fields(fields, data):
    """Decode a sequence of AMQP field values from *data*.

    *fields* is a list of AMQP type names (e.g. 'octet', 'short', 'bit');
    each non-bit type is dispatched to the module-level ``parse_<type>``
    helper via globals().  Consecutive 'bit' fields are unpacked from one
    shared octet with BitParser.  Returns the decoded values in order.
    """
    vals = []
    offset = 0
    bit_parser = None
    for f in fields:
        if f == 'bit':
            # Consecutive bits share one octet: lazily start a BitParser
            # on the first bit of a run.
            if bit_parser is None:
                bit_parser = BitParser(data[offset])
            vals.append(bit_parser.get_bit())
        else:
            # A non-bit field terminates any pending bit run; consume the
            # octet the bits were packed into before decoding this field.
            if bit_parser is not None:
                bit_parser = None
                offset += 1
            parser = globals()['parse_%s' % f]
            val, offset = parser(data, offset)
            vals.append(val)
    # Sanity check: all input consumed.  A trailing bit run still owes one
    # unconsumed octet, hence the int(...) correction.
    assert offset + int(bit_parser is not None) == len(data), \
        '%d %d' % (offset, len(data))
    return vals
def dump(o):
    """Encode object *o*'s AMQP fields (driven by o._fields) to a string.

    Fields whose value is None are treated as absent and skipped.
    Consecutive 'bit' fields are packed into a single octet via BitDumper;
    every other type is dispatched to the module-level ``dump_<type>``
    helper via globals().
    """
    dumped_vals = []
    bit_dumper = None
    for name, typ in o._fields:
        val = getattr(o, name)
        if val is None:
            # Absent optional field: nothing emitted.
            # NOTE(review): a 'bit' field set to None is skipped too, which
            # would shift later bit positions -- presumably bit fields are
            # always True/False in practice; confirm with callers.
            continue
        if typ == 'bit':
            if bit_dumper is None:
                bit_dumper = BitDumper()
            bit_dumper.add_bit(val)
        else:
            # A non-bit field flushes any pending bit run as one octet.
            if bit_dumper is not None:
                dumped_vals.append(bit_dumper.get_octet())
                bit_dumper = None
            dumper = globals()['dump_%s' % typ]
            v = dumper(val)
            dumped_vals.append(v)
    if bit_dumper is not None:
        # Flush a trailing run of bits.
        dumped_vals.append(bit_dumper.get_octet())
    return ''.join(dumped_vals)
#TODO MOVE TO frame.py
method_header = Struct('!HH')
def parse_method(data):
    """Decode an AMQP method-frame payload into a method instance.

    The first four bytes are the class-id/method-id pair, which selects
    the generated method class; that class's _fields then drive the
    decoding of the remaining bytes.
    """
    class_id, method_id = method_header.unpack(data[:4])
    mod = method.id2class[class_id]
    inst = getattr(mod, 'id2method')[method_id]()
    names = [ name for name, typ in inst._fields ]
    types = [ typ for name, typ in inst._fields ]
    vals = parse_fields(types, data[4:])
    # Attach each decoded value to its field name on the new instance.
    for name, val in izip(names, vals):
        setattr(inst, name, val)
    return inst
#TODO MOVE TO frame.py
def dump_method(m):
    """Serialize a method instance: 4-byte class/method header + dumped fields."""
    header = method_header.pack(m._class_id, m._method_id)
    return '%s%s' % (header, dump(m))
content_header = Struct('!HHQH')  # class_id, weight, body size, property flags


def parse_content_header(data):
    """Decode a content-header frame into a ContentHeader.

    The property flags form a bitmask (bits 15..1, MSB first) selecting
    which entries of Message._fields are actually present in the
    trailing property payload.
    """
    hlen = content_header.size
    class_id, _, msg_size, prop_flags = content_header.unpack(data[:hlen])
    assert class_id == 60  # basic class
    fields = []
    for offset, fspec in zip(range(15, 0, -1), Message._fields):
        if prop_flags & (1 << offset):
            fields.append(fspec)
    names = [name for name, typ in fields]
    types = [typ for name, typ in fields]
    prop_vals = parse_fields(types, data[hlen:])
    properties = dict((k, v) for k, v in zip(names, prop_vals))
    return ContentHeader(msg_size, properties)
#TODO MOVE TO frame.py
def dump_content_header(msg):
    """Serialize a message's content-header frame (class 60 / basic).

    Builds the property-flags bitmask from the message fields that are
    set (not None), then appends the dumped property values via dump().
    """
    assert len(msg._fields) <= 15, "prop_flags > 15 not supported"
    prop_flags = 0
    for offset, (fname, ftype) in zip(range(15, 0, -1), msg._fields):
        if getattr(msg, fname) is not None:
            prop_flags |= 1 << offset
    chp = content_header.pack(60,  # basic class
                              0,
                              len(msg.body),
                              prop_flags)
    return '%s%s' % (chp, dump(msg))
# --- low level parsing/dumping ---
class BitParser(object):
    """Unpacks a run of AMQP bit fields from one packed octet, LSB first."""

    def __init__(self, octet):
        self.bit_offset = 0
        self.octet = ord(octet)

    def get_bit(self):
        """Return the next flag as a bool."""
        assert self.bit_offset <= 7, "unpacking more that 8 bits is unsupported"
        is_set = (self.octet >> self.bit_offset) & 1
        self.bit_offset += 1
        return bool(is_set)
class BitDumper(object):
    """Packs a run of AMQP bit fields into a single octet, LSB first."""

    def __init__(self):
        self.bit_offset = 0
        self.octet = 0

    def add_bit(self, bit):
        """Record the next flag at the current bit position."""
        assert self.bit_offset <= 7, "packing more that 8 bits is unsupported"
        if bit:
            self.octet |= 1 << self.bit_offset
        self.bit_offset += 1

    def get_octet(self):
        """Return the accumulated bits as a one-character string."""
        return chr(self.octet)
def parse_octet(data, offset):
    """Read one unsigned byte at `offset`; return (value, next offset)."""
    value = ord(data[offset])
    return value, offset + 1


def dump_octet(i):
    """Encode an integer 0-255 as a single character."""
    return chr(i)
# Fixed-width unsigned integers, network byte order.
short = Struct('!H')  # 16-bit


def parse_short(data, offset):
    """Read a 16-bit unsigned int; return (value, next offset)."""
    val = short.unpack_from(data, offset)[0]
    return val, offset + 2


def dump_short(i):
    """Pack a 16-bit unsigned int."""
    return short.pack(i)


_long = Struct('!L')  # 32-bit; leading underscore avoids shadowing Py2's long


def parse_long(data, offset):
    """Read a 32-bit unsigned int; return (value, next offset)."""
    val = _long.unpack_from(data, offset)[0]
    return val, offset + 4


def dump_long(i):
    """Pack a 32-bit unsigned int."""
    return _long.pack(i)


_longlong = Struct('!Q')  # 64-bit


def parse_longlong(data, offset):
    """Read a 64-bit unsigned int; return (value, next offset)."""
    val = _longlong.unpack_from(data, offset)[0]
    return val, offset + 8


def dump_longlong(i):
    """Pack a 64-bit unsigned int."""
    return _longlong.pack(i)
longstr_header = Struct('!L')  # 32-bit length prefix for long strings


def parse_longstr(data, offset):
    """Read a long string (4-byte length prefix); return (value, next offset)."""
    l = longstr_header.unpack_from(data, offset)[0]
    val = data[offset+4: offset+4+l]
    return val, offset+4+l


def dump_longstr(s):
    """Write a long string: 4-byte length prefix + UTF-8 encoded payload."""
    encoded_s = s.encode('utf8')
    return '%s%s' % (longstr_header.pack(len(encoded_s)), encoded_s)


def parse_shortstr(data, offset):
    """Read a short string (1-byte length prefix); return (value, next offset)."""
    l = ord(data[offset])
    val = data[offset+1: offset+1+l]
    return val, offset+1+l


def dump_shortstr(s):
    """Write a short string: 1-byte length prefix + UTF-8 encoded payload.

    NOTE(review): no check that the encoded length fits in one byte
    (max 255) -- confirm callers never pass longer values.
    """
    encoded_s = s.encode('utf8')
    return '%s%s' % (chr(len(encoded_s)), encoded_s)
def parse_boolean(data, offset):
    """Read one octet and coerce it to bool; return (value, next offset)."""
    octet, offset = parse_octet(data, offset)
    return bool(octet), offset
def dump_boolean(b):
    """Encode a truth value as a single 0x01/0x00 character."""
    return chr(1) if b else chr(0)
def dump_timestamp(dt):
    """Encode a datetime as POSIX seconds in an AMQP long-long.

    NOTE(review): time.mktime returns a float which is handed straight to
    the '!Q' packer -- confirm sub-second truncation is acceptable here.
    """
    secs = time.mktime(dt.timetuple())
    return dump_longlong(secs)


def parse_timestamp(data, offset):
    """Decode an AMQP long-long of POSIX seconds into a local datetime."""
    secs, offset = parse_longlong(data, offset)
    dt = datetime.datetime.fromtimestamp(secs)
    return dt, offset
def parse_table(data, offset):
    """Decode an AMQP field table; return (dict, offset past the table).

    The table arrives as one longstr whose payload is a sequence of
    entries: shortstr key, one-character type tag, tag-dependent value.
    Tags are dispatched through field_type_dict (tables may nest via 'F').
    """
    s, new_offset = parse_longstr(data, offset)
    d = {}
    s_len = len(s)
    offset = 0  # offsets below are relative to the extracted table payload
    while offset < s_len:
        key, offset = parse_shortstr(s, offset)
        typ = s[offset]
        assert typ in field_type_dict, typ
        val, offset = field_type_dict[typ](s, offset + 1)
        d[key] = val
    return d, new_offset
# Field-table type tag -> parser.  Only a subset of the table types is
# supported; parse_table asserts on anything else.
field_type_dict = {
    'F': parse_table,
    's': parse_shortstr,
    'S': parse_longstr,
    't': parse_boolean,
    'T': parse_timestamp,
}


def table2str(d):
    """Serialize dict entries as shortstr key + 'S' tag + longstr value.

    NOTE(review): every value is written with the 'S' (longstr) tag, so
    only string values round-trip -- confirm callers never pass others.
    """
    return ''.join(['%sS%s' % (dump_shortstr(k), dump_longstr(v))
                    for k, v in d.items()])


def dump_table(d):
    """Serialize a dict as an AMQP field table (longstr-wrapped entries)."""
    entries = table2str(d)
    return dump_longstr(entries)
| mit |
javilinux/stampython | stampy/plugin/welcome.py | 2 | 2377 | #!/usr/bin/env python
# encoding: utf-8
#
# Description: Plugin for processing welcome to chats
# Author: Pablo Iranzo Gomez (Pablo.Iranzo@gmail.com)
import logging
import stampy.plugin.config
import stampy.stampy
from stampy.i18n import _
from stampy.i18n import _L
def init():
    """
    Initializes module
    :return: list of triggers for this plugin
    """
    return ["*"]
def run(message):  # do not edit this line
    """
    Executes plugin
    :param message: message to run against
    :return: None
    """
    # Send greetings when somebody joins the chat.  Malformed updates
    # (missing keys, non-dict payloads) are deliberately ignored, but use
    # a bounded `except Exception` instead of a bare except so signals
    # like KeyboardInterrupt/SystemExit are not swallowed.
    try:
        if 'new_chat_participant' in message['message']:
            welcomeuser(message=message)
    except Exception:
        pass
    return
def help(message):  # do not edit this line
    """
    Returns help for plugin
    :param message: message to process
    :return: help text (empty string unless sender is owner/admin)
    """
    # Help for this plugin is only shown to chat admins/owners.
    commandtext = ""
    if stampy.stampy.is_owner_or_admin(message):
        commandtext = _("As admin or owner define 'welcome' to the greeting text sent to new chat members. You can use $username to put long name in the text\n\n")
    return commandtext
def welcomeuser(message):
    """
    Greets new users in chat
    :param message: Message to process for newcomer events
    :return: None
    """
    logger = logging.getLogger(__name__)
    msgdetail = stampy.stampy.getmsgdetail(message=message)
    chat_id = msgdetail['chat_id']
    welcome = stampy.plugin.config.gconfig(key='welcome', default=False,
                                           gid=chat_id)
    # No greeting configured for this chat: nothing to do.  This also fixes
    # calling .replace() on the boolean default (False), which previously
    # raised AttributeError that the caller silently swallowed.
    if not welcome:
        return
    try:
        newparticipant = message['message']['new_chat_participant']
    except (KeyError, TypeError):
        newparticipant = {}
    if not isinstance(newparticipant, dict):
        newparticipant = {}
    newusername = newparticipant.get('username', '')
    newfirstname = newparticipant.get('first_name', '')
    newlastname = newparticipant.get('last_name', '')
    # Markdown-formatted mention: "First Last ([`@user`](https://t.me/user))"
    name = "%s %s ([`@%s`](https://t.me/%s))" % (newfirstname, newlastname,
                                                 newusername, newusername)
    greeting = welcome.replace("$username", name)
    logger.debug(msg=_L('New user in chat, sending greetings: %s') % greeting)
    if greeting:
        stampy.stampy.sendmessage(chat_id=msgdetail["chat_id"], text=greeting,
                                  reply_to_message_id=msgdetail["message_id"],
                                  parse_mode="Markdown")
    return
| gpl-3.0 |
johtani/elasticsearch | dev-tools/get-bwc-version.py | 29 | 2725 | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''
import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile
def parse_config():
    """Parse the command line; returns the argparse namespace."""
    ap = argparse.ArgumentParser(description=__doc__)
    ap.add_argument('--path', metavar='DIR', default='./backwards',
                    help='Where to extract elasticsearch')
    ap.add_argument('--force', action='store_true', default=False,
                    help='Delete and redownload if the version already exists')
    ap.add_argument('version', metavar='X.Y.Z',
                    help='Version of elasticsearch to grab')
    return ap.parse_args()
def main():
    """Download the requested elasticsearch release and unpack it under c.path."""
    c = parse_config()
    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)
    is_windows = platform.system() == 'Windows'
    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            # Already downloaded and --force not given: nothing to do.
            print('Version %s exists at %s' % (c.version, version_dir))
            return
    # before 1.4.0, the zip file contains windows scripts, and tar.gz contained *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir
    url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)
    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with ES tgz?
        subprocess.check_call('tar -xzf %s' % filename, shell=True)
    # Remove the archive; only the extracted directory is kept.
    print('Cleaning up %s' % filename)
    os.remove(filename)
if __name__ == '__main__':
    # Allow clean Ctrl-C termination without a traceback.
    try:
        main()
    except KeyboardInterrupt:
        print('Ctrl-C caught, exiting')
| apache-2.0 |
WilJoey/tn_ckan | ckan/new_tests/logic/auth/test_init.py | 1 | 5834 | import nose
import ckan.model as core_model
import ckan.logic as logic
import ckan.new_tests.helpers as helpers
import ckan.logic.auth as logic_auth
class TestGetObjectErrors(object):
    """Error behaviour of the logic_auth.get_*_object helper functions."""

    def _get_function(self, obj_type):
        # Map an object-type name to the helper under test.
        _get_object_functions = {
            'package': logic_auth.get_package_object,
            'resource': logic_auth.get_resource_object,
            'related': logic_auth.get_related_object,
            'user': logic_auth.get_user_object,
            'group': logic_auth.get_group_object,
        }
        return _get_object_functions[obj_type]

    def _get_object_in_context(self, obj_type):
        """A helper should return the object already cached in the context."""
        # The user helper caches under 'user_obj'; the others cache under
        # their own type name.
        if obj_type == 'user':
            context = {'user_obj': 'a_fake_object'}
        else:
            context = {obj_type: 'a_fake_object'}

        obj = self._get_function(obj_type)(context)
        assert obj == 'a_fake_object'

    def _get_object_id_not_found(self, obj_type):
        """An unknown id should raise NotFound."""
        nose.tools.assert_raises(logic.NotFound,
                                 self._get_function(obj_type),
                                 {'model': core_model},
                                 {'id': 'not_here'})

    def _get_object_id_none(self, obj_type):
        """A data_dict without an id should raise ValidationError."""
        nose.tools.assert_raises(logic.ValidationError,
                                 self._get_function(obj_type),
                                 {'model': core_model}, {})

    def test_get_package_object_in_context(self):
        self._get_object_in_context('package')

    def test_get_resource_object_in_context(self):
        self._get_object_in_context('resource')

    def test_get_related_object_in_context(self):
        self._get_object_in_context('related')

    def test_get_user_object_in_context(self):
        self._get_object_in_context('user')

    def test_get_group_object_in_context(self):
        self._get_object_in_context('group')

    def test_get_package_object_id_not_found(self):
        self._get_object_id_not_found('package')

    def test_get_resource_object_id_not_found(self):
        self._get_object_id_not_found('resource')

    def test_get_related_object_id_not_found(self):
        self._get_object_id_not_found('related')

    def test_get_user_object_id_not_found(self):
        self._get_object_id_not_found('user')

    def test_get_group_object_id_not_found(self):
        self._get_object_id_not_found('group')

    def test_get_package_object_id_none(self):
        self._get_object_id_none('package')

    def test_get_resource_object_id_none(self):
        self._get_object_id_none('resource')

    def test_get_related_object_id_none(self):
        self._get_object_id_none('related')

    def test_get_user_object_id_none(self):
        self._get_object_id_none('user')

    def test_get_group_object_id_none(self):
        self._get_object_id_none('group')
class TestGetObject(object):
    """Happy path: each helper loads its object by id and caches it in
    the context under the expected key."""

    @classmethod
    def setup_class(cls):
        helpers.reset_db()

    def setup(self):
        import ckan.model as model
        # Reset the db before each test method.
        model.repo.rebuild_db()

    def test_get_package_object_with_id(self):
        user_name = helpers.call_action('get_site_user')['name']
        dataset = helpers.call_action('package_create',
                                      context={'user': user_name},
                                      name='test_dataset')

        context = {'model': core_model}
        obj = logic_auth.get_package_object(context, {'id': dataset['id']})

        assert obj.id == dataset['id']
        # The loaded object must also be cached on the context.
        assert context['package'] == obj

    def test_get_resource_object_with_id(self):
        user_name = helpers.call_action('get_site_user')['name']
        dataset = helpers.call_action('package_create',
                                      context={'user': user_name},
                                      name='test_dataset')
        resource = helpers.call_action('resource_create',
                                       context={'user': user_name},
                                       package_id=dataset['id'],
                                       url='http://foo')

        context = {'model': core_model}
        obj = logic_auth.get_resource_object(context, {'id': resource['id']})

        assert obj.id == resource['id']
        assert context['resource'] == obj

    def test_get_related_object_with_id(self):
        user_name = helpers.call_action('get_site_user')['name']
        related = helpers.call_action('related_create',
                                      context={'user': user_name},
                                      title='test related', type='app')

        context = {'model': core_model}
        obj = logic_auth.get_related_object(context, {'id': related['id']})

        assert obj.id == related['id']
        assert context['related'] == obj

    def test_get_user_object_with_id(self):
        user_name = helpers.call_action('get_site_user')['name']
        user = helpers.call_action('user_create',
                                   context={'user': user_name},
                                   name='test_user',
                                   email='a@a.com',
                                   password='pass')

        context = {'model': core_model}
        obj = logic_auth.get_user_object(context, {'id': user['id']})

        assert obj.id == user['id']
        # Note: the user helper caches under 'user_obj', not 'user'.
        assert context['user_obj'] == obj

    def test_get_group_object_with_id(self):
        user_name = helpers.call_action('get_site_user')['name']
        group = helpers.call_action('group_create',
                                    context={'user': user_name},
                                    name='test_group')

        context = {'model': core_model}
        obj = logic_auth.get_group_object(context, {'id': group['id']})

        assert obj.id == group['id']
        assert context['group'] == obj
| mit |
EduardoMolina/SU2 | SU2_PY/SU2/util/mp_eval.py | 5 | 3376 | import os
import multiprocessing as mp
import numpy as np
import sys
if sys.version_info[0] > 2:
    # In Py3, range corresponds to Py2 xrange; alias it so the xrange
    # calls below work on both interpreter generations.
    xrange = range
class mp_eval(object):
    """Evaluate a function over many inputs with a pool of worker processes.

    Workers are spawned once in __init__ and reused for every call;
    results are returned in input order regardless of completion order.
    """

    def __init__(self, function, num_procs=None):
        # Mimic the wrapped function's name on this callable wrapper.
        self.__name__ = function.__name__

        tasks = mp.JoinableQueue()
        results = mp.Queue()

        # TaskMaster wraps the raw function so each worker calls it uniformly.
        function = TaskMaster(function)

        if num_procs is None:
            num_procs = mp.cpu_count()
        procs = [QueueMaster(tasks, results, function)
                 for i in xrange(num_procs)]

        self.tasks = tasks
        self.results = results
        self.function = function
        self.procs = procs

        return

    def __call__(self, inputs):
        """Run the function on every row of `inputs` (ndarray or list).

        Each input row is unpacked as the function's positional arguments
        by the worker (see QueueMaster.run).  Returns a list of results
        in input order.
        """
        tasks = self.tasks
        results = self.results

        if isinstance(inputs, np.ndarray):
            n_inputs = inputs.shape[0]
        elif isinstance(inputs, list):
            n_inputs = len(inputs)
        else:
            raise Exception('unsupported input')

        # Enqueue one job per input, tagged with its index.
        for i_input, this_input in enumerate(inputs):
            this_job = {'index': i_input,
                        'input': this_input,
                        'result': None}
            tasks.put(this_job)
        # end

        # Block until every queued job has been marked done by a worker.
        tasks.join()

        # Pull results; jobs may finish out of order, so slot each one
        # back in by its index tag.
        result_list = [[]] * n_inputs
        for i in xrange(n_inputs):
            result = results.get()
            i_result = result['index']
            result_list[i_result] = result['result']

        return result_list

    def __del__(self):
        # Poison-pill each worker (None job = shutdown), then wait until
        # all pills have been consumed.
        for proc in self.procs:
            self.tasks.put(None)
        self.tasks.join()
        return
class QueueMaster(mp.Process):
    """Daemon worker process: pulls jobs from a shared JoinableQueue,
    applies the task callable, and pushes completed jobs onto the
    result queue.  Started immediately on construction."""

    def __init__(self, task_queue, result_queue, task_class=None):
        mp.Process.__init__(self)
        self.task_queue = task_queue
        self.result_queue = result_queue
        self.task_class = task_class
        # Daemonize so workers die with the parent instead of blocking exit.
        self.daemon = True
        self.start()

    def run(self):
        proc_name = self.name  # unused; presumably kept for debugging
        parentPID = os.getppid()
        while True:
            # Bail out if our parent process is gone.
            if os.getppid() != parentPID:
                break  # parent died

            this_job = self.task_queue.get()

            # A None job is the shutdown signal sent by mp_eval.__del__.
            if this_job is None:
                self.task_queue.task_done()
                break  # kill signal

            # The job's input is unpacked as positional arguments.
            this_input = this_job['input']
            this_task = self.task_class
            this_data = this_task(*this_input)

            this_job['result'] = this_data
            self.result_queue.put(this_job)
            self.task_queue.task_done()

        #: while alive
        return
class TaskMaster(object):
    """Thin wrapper giving a plain function a uniform callable interface
    for the worker processes.

    QueueMaster instances call the wrapper directly; the call is simply
    forwarded to the wrapped function.  (Dead commented-out pickling
    code was removed.)
    """

    def __init__(self, func):
        self.func = func

    def __call__(self, *arg, **kwarg):
        # makes object callable; forward everything to the wrapped function
        result = self.func(*arg, **kwarg)
        return result

    def __str__(self):
        return '%s' % self.func
| lgpl-2.1 |
nii-cloud/dodai-compute | nova/api/openstack/contrib/rescue.py | 2 | 2465 | # Copyright 2011 Openstack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The rescue mode extension."""
import webob
from webob import exc
from nova import compute
from nova import log as logging
from nova.api.openstack import extensions as exts
from nova.api.openstack import faults
LOG = logging.getLogger("nova.api.contrib.rescue")
def wrap_errors(fn):
    """Decorator: run `fn` and convert any exception into a 500 fault.

    On success the wrapped function's return value is passed through.
    (Previously the result was discarded -- `fn(*args)` was called
    without `return` -- so every wrapped action handed None back to the
    API layer instead of its webob.Response.)
    """
    def wrapped(*args):
        try:
            return fn(*args)
        except Exception:
            return faults.Fault(exc.HTTPInternalServerError())
    return wrapped
class Rescue(exts.ExtensionDescriptor):
    """The Rescue controller for the OpenStack API."""

    def __init__(self):
        super(Rescue, self).__init__()
        self.compute_api = compute.API()

    @wrap_errors
    def _rescue(self, input_dict, req, instance_id):
        """Rescue an instance."""
        context = req.environ["nova.context"]
        self.compute_api.rescue(context, instance_id)
        # 202 Accepted: the rescue proceeds asynchronously.
        return webob.Response(status_int=202)

    @wrap_errors
    def _unrescue(self, input_dict, req, instance_id):
        """Unrescue an instance."""
        context = req.environ["nova.context"]
        self.compute_api.unrescue(context, instance_id)
        return webob.Response(status_int=202)

    # --- ExtensionDescriptor metadata accessors ---

    def get_name(self):
        return "Rescue"

    def get_alias(self):
        return "os-rescue"

    def get_description(self):
        return "Instance rescue mode"

    def get_namespace(self):
        return "http://docs.openstack.org/ext/rescue/api/v1.1"

    def get_updated(self):
        return "2011-08-18T00:00:00+00:00"

    def get_actions(self):
        """Return the actions the extension adds, as required by contract."""
        actions = [
            exts.ActionExtension("servers", "rescue", self._rescue),
            exts.ActionExtension("servers", "unrescue", self._unrescue),
        ]
        return actions
| apache-2.0 |
newerthcom/savagerebirth | libs/python-2.72/Lib/encodings/mac_cyrillic.py | 593 | 13710 | """ Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless one-shot encode/decode via the module-level charmap tables.

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)


class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is context-free, so incremental == one-shot.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]


class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]


class StreamWriter(Codec, codecs.StreamWriter):
    pass


class StreamReader(Codec, codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the `encodings` package registry expects."""
    return codecs.CodecInfo(
        name='mac-cyrillic',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\u0410' # 0x80 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0x81 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0x82 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0x83 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0x84 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0x85 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0x86 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0x87 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0x88 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0x89 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0x8A -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0x8B -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0x8C -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0x8D -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0x8E -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0x8F -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0x90 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0x91 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0x92 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0x93 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0x94 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0x95 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0x96 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0x97 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0x98 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0x99 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0x9A -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0x9B -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0x9C -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0x9D -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0x9E -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0x9F -> CYRILLIC CAPITAL LETTER YA
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\u0490' # 0xA2 -> CYRILLIC CAPITAL LETTER GHE WITH UPTURN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\u0406' # 0xA7 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\u0402' # 0xAB -> CYRILLIC CAPITAL LETTER DJE
u'\u0452' # 0xAC -> CYRILLIC SMALL LETTER DJE
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u0403' # 0xAE -> CYRILLIC CAPITAL LETTER GJE
u'\u0453' # 0xAF -> CYRILLIC SMALL LETTER GJE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u0456' # 0xB4 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u0491' # 0xB6 -> CYRILLIC SMALL LETTER GHE WITH UPTURN
u'\u0408' # 0xB7 -> CYRILLIC CAPITAL LETTER JE
u'\u0404' # 0xB8 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0454' # 0xB9 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0407' # 0xBA -> CYRILLIC CAPITAL LETTER YI
u'\u0457' # 0xBB -> CYRILLIC SMALL LETTER YI
u'\u0409' # 0xBC -> CYRILLIC CAPITAL LETTER LJE
u'\u0459' # 0xBD -> CYRILLIC SMALL LETTER LJE
u'\u040a' # 0xBE -> CYRILLIC CAPITAL LETTER NJE
u'\u045a' # 0xBF -> CYRILLIC SMALL LETTER NJE
u'\u0458' # 0xC0 -> CYRILLIC SMALL LETTER JE
u'\u0405' # 0xC1 -> CYRILLIC CAPITAL LETTER DZE
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\u040b' # 0xCB -> CYRILLIC CAPITAL LETTER TSHE
u'\u045b' # 0xCC -> CYRILLIC SMALL LETTER TSHE
u'\u040c' # 0xCD -> CYRILLIC CAPITAL LETTER KJE
u'\u045c' # 0xCE -> CYRILLIC SMALL LETTER KJE
u'\u0455' # 0xCF -> CYRILLIC SMALL LETTER DZE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u201e' # 0xD7 -> DOUBLE LOW-9 QUOTATION MARK
u'\u040e' # 0xD8 -> CYRILLIC CAPITAL LETTER SHORT U
u'\u045e' # 0xD9 -> CYRILLIC SMALL LETTER SHORT U
u'\u040f' # 0xDA -> CYRILLIC CAPITAL LETTER DZHE
u'\u045f' # 0xDB -> CYRILLIC SMALL LETTER DZHE
u'\u2116' # 0xDC -> NUMERO SIGN
u'\u0401' # 0xDD -> CYRILLIC CAPITAL LETTER IO
u'\u0451' # 0xDE -> CYRILLIC SMALL LETTER IO
u'\u044f' # 0xDF -> CYRILLIC SMALL LETTER YA
u'\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xED -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xEE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xFD -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU
u'\u20ac' # 0xFF -> EURO SIGN
)
### Encoding table
# Inverse mapping (unicode -> byte) built from decoding_table.
encoding_table=codecs.charmap_build(decoding_table)
| gpl-2.0 |
qtproject/pyside-pyside | tests/QtWidgets/bug_785.py | 1 | 2144 | #############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
import sys
import unittest
from PySide2.QtCore import QItemSelection
from PySide2.QtGui import QStandardItemModel, QStandardItem
from PySide2.QtWidgets import QApplication
class Bug324(unittest.TestCase):
    """Regression test: QItemSelection must support + and += (bug 785)."""

    def testOperators(self):
        model = QStandardItemModel()
        for i in range(100):
            model.appendRow(QStandardItem("Item: %d"%i))

        first = model.index(0, 0)
        second = model.index(10, 0)
        third = model.index(20, 0)
        fourth = model.index(30, 0)

        # Two disjoint selections built in different ways.
        sel = QItemSelection(first, second)
        sel2 = QItemSelection()
        sel2.select(third, fourth)

        sel3 = sel + sel2 #check operator +
        self.assertEqual(len(sel3), 2)

        sel4 = sel
        sel4 += sel2 #check operator +=
        self.assertEqual(len(sel4), 2)
        self.assertEqual(sel4, sel3)
if __name__ == "__main__":
    # Run the test case directly when invoked as a script.
    unittest.main()
| lgpl-2.1 |
def WebIDLTest(parser, harness):
    """
    Verify that the parser rejects special methods (getter/deleter/setter/
    creator) whose signatures do not match the WebIDL requirements.

    Each entry below is (interface name, the single malformed special-method
    declaration).  Every one must make ``parser.parse``/``parser.finish``
    raise; the original 21 copy/pasted try/except blocks are folded into one
    data table driving a single loop.
    """
    cases = [
        ("SpecialMethodSignatureMismatch1",
         "getter long long foo(long index);"),
        ("SpecialMethodSignatureMismatch2",
         "getter void foo(unsigned long index);"),
        ("SpecialMethodSignatureMismatch3",
         "getter boolean foo(unsigned long index, boolean extraArg);"),
        ("SpecialMethodSignatureMismatch4",
         "getter boolean foo(unsigned long... index);"),
        ("SpecialMethodSignatureMismatch5",
         "getter boolean foo(optional unsigned long index);"),
        ("SpecialMethodSignatureMismatch6",
         "getter boolean foo();"),
        ("SpecialMethodSignatureMismatch7",
         "deleter long long foo(long index);"),
        ("SpecialMethodSignatureMismatch9",
         "deleter boolean foo(unsigned long index, boolean extraArg);"),
        ("SpecialMethodSignatureMismatch10",
         "deleter boolean foo(unsigned long... index);"),
        ("SpecialMethodSignatureMismatch11",
         "deleter boolean foo(optional unsigned long index);"),
        ("SpecialMethodSignatureMismatch12",
         "deleter boolean foo();"),
        ("SpecialMethodSignatureMismatch13",
         "setter long long foo(long index, long long value);"),
        ("SpecialMethodSignatureMismatch15",
         "setter boolean foo(unsigned long index, boolean value, long long extraArg);"),
        ("SpecialMethodSignatureMismatch16",
         "setter boolean foo(unsigned long index, boolean... value);"),
        ("SpecialMethodSignatureMismatch17",
         "setter boolean foo(unsigned long index, optional boolean value);"),
        ("SpecialMethodSignatureMismatch18",
         "setter boolean foo();"),
        ("SpecialMethodSignatureMismatch20",
         "creator long long foo(long index, long long value);"),
        ("SpecialMethodSignatureMismatch22",
         "creator boolean foo(unsigned long index, boolean value, long long extraArg);"),
        ("SpecialMethodSignatureMismatch23",
         "creator boolean foo(unsigned long index, boolean... value);"),
        ("SpecialMethodSignatureMismatch24",
         "creator boolean foo(unsigned long index, optional boolean value);"),
        ("SpecialMethodSignatureMismatch25",
         "creator boolean foo();"),
    ]
    for name, member in cases:
        threw = False
        try:
            parser.parse("""
                interface %s {
                  %s
                };
                """ % (name, member))
            results = parser.finish()
        except:
            # The parser signals the mismatch by raising; any exception counts
            # as the expected rejection (same bare except as the original).
            threw = True
        harness.ok(threw, "Should have thrown.")
| mpl-2.0 |
mvala/AliPhysics | PWGMM/MC/aligenqa/aligenqa/compare.py | 41 | 10130 | import re
from rootpy import ROOT
from rootpy.io import root_open
from aligenqa import roofie, utils
def _reduce_single_canvases(results_dirs, gen_names, get_canvas_func, legend_regex):
    """
    Merge one canvas per results directory into a single comparison figure.

    The first directory is drawn with open diamonds, the second with crosses;
    only plottables whose legend title matches ``legend_regex`` are kept.
    Legend color entries are created once (from the first file), and two black
    markers name the generators.  Axis and legend titles are copied from the
    last imported single-canvas figure.
    """
    markers = ['opendiamond', 'cross']
    comparison = roofie.Figure()
    for idx, (result_dir, marker) in enumerate(zip(results_dirs, markers)):
        single = roofie.Figure()
        single.import_plottables_from_canvas(get_canvas_func(result_dir))
        palette = roofie.get_color_generator('root', ncolors=3)
        for plottable in single._plottables:
            if not legend_regex.match(plottable['legend_title']):
                continue
            color = next(palette)
            comparison.add_plottable(plottable['p'], markerstyle=marker, color=color)
            if idx == 0:
                # set the legend only for the first file
                comparison.add_plottable(None, markerstyle='circle',
                                         legend_title=plottable['legend_title'],
                                         color=color)
    # name the lines: one black marker style per generator
    comparison.add_plottable(None, markerstyle='opendiamond',
                             legend_title=gen_names[0], color='black')
    comparison.add_plottable(None, markerstyle='cross',
                             legend_title=gen_names[1], color='black')
    # configure the plot from the last single-canvas figure
    comparison.xtitle = single.xtitle
    comparison.ytitle = single.ytitle
    comparison.legend.title = single.legend.title
    return comparison
def _styled_figure(results_dirs, gen_names, get_canvas_func, legend_regex,
                   logy=False, legend_pos=None, xmax=None, ymin=None, ymax=None):
    """
    Build one comparison figure via ``_reduce_single_canvases`` and apply the
    optional cosmetic settings before it is handed to the beamer section.
    """
    fig = _reduce_single_canvases(results_dirs, gen_names, get_canvas_func,
                                  legend_regex)
    if logy:
        fig.plot.logy = True
    if legend_pos is not None:
        fig.legend.position = legend_pos
    if xmax is not None:
        fig.plot.xmax = xmax
    # "is not None" (not truthiness): ymin=0 is a valid, used setting
    if ymin is not None:
        fig.plot.ymin = ymin
    if ymax is not None:
        fig.plot.ymax = ymax
    return fig


def compare(args):
    """
    Compare the 'highlight plots' of two estimators for two given
    triggers. Requires the plots to have been previously prepared by
    running `prepare_plots`.
    """
    ROOT.gROOT.SetBatch(True)
    # Generator names: explicit CLI override, else derived from the file name.
    gen_names = [
        args.generator_name1 or utils.get_generator_name_from_filename(args.input_file1),
        args.generator_name2 or utils.get_generator_name_from_filename(args.input_file2),
    ]
    with root_open(args.input_file1, "read") as f1:
        with root_open(args.input_file2, "read") as f2:
            results_dirs = [f.MultEstimators.__getattr__("results_post" + trigger)
                            for f, trigger in zip([f1, f2],
                                                  [args.trigger1, args.trigger2])]
            latexdoc = roofie.Beamerdoc(
                author="PWG-MM",
                title=r"Comparison of {0} {1} with {2} {3}".format(
                    gen_names[0], args.trigger1, gen_names[1], args.trigger2),
                subtitle="Generator-level QA")
            sec = latexdoc.add_section(r'Comparison of "highlight" plots')
            # Reduce the plotted multiplicity bins to low mult, high mult and
            # MB; those plots are found by regex-matching the legend entries.
            bin_edge_regex = re.compile(r'.*100'   # contains "100"; ie 100% bin edge
                                        r'|^0.0'   # OR starts with "0.0" (highest mult bin edge)
                                        r'|MB')    # OR contains "MB"
            # (canvas getter, figure options), in presentation order
            figure_specs = [
                (lambda d: d.EtaLt15.dNdeta_summary, {}),
                (lambda d: d.V0M.dNdeta_summary, {}),
                (lambda d: d.EtaLt15.dNdeta_MB_ratio_summary, {}),
                (lambda d: d.V0M.dNdeta_MB_ratio_summary, {}),
                # NOTE(review): the EtaLt15 ratio figure appeared twice in the
                # original; kept for identical output -- confirm intent.
                (lambda d: d.EtaLt15.dNdeta_MB_ratio_summary, {}),
                (lambda d: d.EtaLt15.PNchEtaLt05_binned_in_NchEst,
                 dict(logy=True, legend_pos='tr', xmax=150)),
                (lambda d: d.V0M.PNchEtaLt05_binned_in_NchEst,
                 dict(logy=True, legend_pos='tr', xmax=150)),
                (lambda d: d.EtaLt15.pid_ratios.proton_over_pich__vs__pt, {}),
                (lambda d: d.EtaLt15.pid_ratios.Xi_over_pich__vs__pt, dict(ymin=0)),
                (lambda d: d.V0M.pid_ratios.proton_over_pich__vs__pt, {}),
                (lambda d: d.V0M.pid_ratios.Xi_over_pich__vs__pt, dict(ymin=0)),
                # dN/dp_T either for charged particles or individual ones
                (lambda d: d.V0M.dNchdpT, dict(logy=True, legend_pos='tr')),
                (lambda d: d.V0M.dNpdpT, dict(logy=True, legend_pos='tr')),
                (lambda d: d.V0M.dNpidpT, dict(logy=True, legend_pos='tr')),
                (lambda d: d.V0M.dNKdpT, dict(logy=True, legend_pos='tr')),
            ]
            for getter, opts in figure_specs:
                sec.add_figure(_styled_figure(results_dirs, gen_names, getter,
                                              bin_edge_regex, **opts))
            # only compare a subset of estimators in the following plots
            estimator_regex = re.compile(r'^\|#eta\|#leq1.5'
                                         r'|V0M')  # OR V0M
            sec.add_figure(_styled_figure(results_dirs, gen_names,
                                          lambda d: d.PNch_summary,
                                          estimator_regex,
                                          logy=True, legend_pos='tr'))
            sec.add_figure(_styled_figure(results_dirs, gen_names,
                                          lambda d: (d.pid_ratios_vs_refmult
                                                     .__getattr__("-2212_2212_div_-211_211")),
                                          estimator_regex, ymin=0.04, ymax=0.13))
            sec.add_figure(_styled_figure(results_dirs, gen_names,
                                          lambda d: (d.pid_ratios_vs_refmult
                                                     .__getattr__("3312_div_-211_211")),
                                          estimator_regex, ymin=0.0005, ymax=0.003))
            # only compare proton, charged kaons and pions
            species_regex = re.compile(r'^#pi'
                                       r'|K\^{#pm}'  # OR charged kaons
                                       r'|^p$')      # OR proton
            sec.add_figure(_styled_figure(results_dirs, gen_names,
                                          lambda d: d.EtaLt15.mean_pt, species_regex))
            sec.add_figure(_styled_figure(results_dirs, gen_names,
                                          lambda d: d.V0M.mean_pt, species_regex))
            latexdoc.finalize_document(output_file_name="comparison.tex")
| bsd-3-clause |
amontefusco/gnuradio-amontefusco | gnuradio-examples/python/usrp/usrp_wfm_rcv_pll.py | 9 | 14148 | #!/usr/bin/env python
#
# Copyright 2005,2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gru, eng_notation, optfir
from gnuradio import audio
from gnuradio import usrp
from gnuradio import blks2
from gnuradio.eng_option import eng_option
from gnuradio.wxgui import slider, powermate
from gnuradio.wxgui import stdgui2, fftsink2, form, scopesink2
from optparse import OptionParser
from usrpm import usrp_dbid
import sys
import math
import wx
def pick_subdevice(u):
    """
    The user didn't specify a subdevice on the command line.
    Try for one of these, in order: TV_RX, BASIC_RX, whatever is on side A.
    @return a subdev_spec
    """
    # Daughterboard IDs in order of preference.
    candidates = (usrp_dbid.TV_RX,
                  usrp_dbid.TV_RX_REV_2,
                  usrp_dbid.TV_RX_REV_3,
                  usrp_dbid.TV_RX_MIMO,
                  usrp_dbid.TV_RX_REV_2_MIMO,
                  usrp_dbid.TV_RX_REV_3_MIMO,
                  usrp_dbid.BASIC_RX)
    return usrp.pick_subdev(u, candidates)
class wfm_rx_block (stdgui2.std_top_block):
    """WX GUI flow graph receiving broadcast WFM stereo (PLL demod) from a USRP.

    Signal path: USRP -> channel filter -> blks2.wfm_rcv_pll (stereo demod)
    -> per-channel 3/2 resamplers -> volume multipliers -> stereo audio sink.
    The GUI provides FFT displays plus frequency/volume/gain/squelch controls,
    optionally driven by a Powermate/Contour knob.
    """

    def __init__(self,frame,panel,vbox,argv):
        """Parse command-line options, build the flow graph and the GUI."""
        stdgui2.std_top_block.__init__ (self,frame,panel,vbox,argv)
        parser=OptionParser(option_class=eng_option)
        parser.add_option("-R", "--rx-subdev-spec", type="subdev", default=None,
                          help="select USRP Rx side A or B (default=A)")
        parser.add_option("-f", "--freq", type="eng_float", default=100.1e6,
                          help="set frequency to FREQ", metavar="FREQ")
        parser.add_option("-g", "--gain", type="eng_float", default=65,
                          help="set gain in dB (default is midpoint)")
        parser.add_option("-s", "--squelch", type="eng_float", default=0,
                          help="set squelch level (default is 0)")
        parser.add_option("-V", "--volume", type="eng_float", default=None,
                          help="set volume (default is midpoint)")
        parser.add_option("-O", "--audio-output", type="string", default="",
                          help="pcm device name. E.g., hw:0,0 or surround51 or /dev/dsp")
        (options, args) = parser.parse_args()
        if len(args) != 0:
            parser.print_help()
            sys.exit(1)
        self.frame = frame
        self.panel = panel
        # vol: audio volume in dB; state: which quantity the knob adjusts
        self.vol = 0
        self.state = "FREQ"
        self.freq = 0
        # build graph
        self.u = usrp.source_c() # usrp is data source
        adc_rate = self.u.adc_rate() # 64 MS/s
        usrp_decim = 200
        self.u.set_decim_rate(usrp_decim)
        usrp_rate = adc_rate / usrp_decim # 320 kS/s
        chanfilt_decim = 1
        demod_rate = usrp_rate / chanfilt_decim
        audio_decimation = 10
        audio_rate = 3*demod_rate / audio_decimation/2 # 48 kHz
        if options.rx_subdev_spec is None:
            options.rx_subdev_spec = pick_subdevice(self.u)
        self.u.set_mux(usrp.determine_rx_mux_value(self.u, options.rx_subdev_spec))
        self.subdev = usrp.selected_subdev(self.u, options.rx_subdev_spec)
        # FM broadcast channel selection filter
        chan_filt_coeffs = gr.firdes.low_pass_2 (1, # gain
                                                 usrp_rate, # sampling rate
                                                 90e3, # passband cutoff
                                                 30e3, # transition bandwidth
                                                 70, # stopband attenuation
                                                 gr.firdes.WIN_BLACKMAN)
        print len(chan_filt_coeffs)
        chan_filt = gr.fir_filter_ccf (chanfilt_decim, chan_filt_coeffs)
        # resample each demodulated channel from 32 kS/s to 48 kS/s
        self.rchan_sample = blks2.rational_resampler_fff(3,2)
        self.lchan_sample = blks2.rational_resampler_fff(3,2)
        #self.guts = blks2.wfm_rcv (demod_rate, audio_decimation)
        self.guts = blks2.wfm_rcv_pll (demod_rate, audio_decimation)
        # FIXME rework {add,multiply}_const_* to handle multiple streams
        self.volume_control_l = gr.multiply_const_ff(self.vol)
        self.volume_control_r = gr.multiply_const_ff(self.vol)
        # sound card as final sink
        audio_sink = audio.sink (int (audio_rate),
                                 options.audio_output,
                                 False) # ok_to_block
        # now wire it all together
        self.connect (self.u, chan_filt, self.guts)
        self.connect((self.guts, 0), self.lchan_sample,self.volume_control_l,(audio_sink,0))
        self.connect((self.guts, 1), self.rchan_sample,self.volume_control_r,(audio_sink,1))
        # stereo-squelch is optional in the demodulator implementation
        try:
            self.guts.stereo_carrier_pll_recovery.squelch_enable(True)
        except:
            print "FYI: This implementation of the stereo_carrier_pll_recovery has no squelch implementation yet"
        self._build_gui(vbox, usrp_rate, demod_rate, audio_rate)
        if options.gain is None:
            # if no gain was specified, use the mid-point in dB
            g = self.subdev.gain_range()
            options.gain = float(g[0]+g[1])/2
        if options.volume is None:
            g = self.volume_range()
            options.volume = float(g[0]+g[1])/2
        if abs(options.freq) < 1e6:
            # interpret small numbers as MHz for convenience
            options.freq *= 1e6
        # set initial values
        self.set_gain(options.gain)
        self.set_vol(options.volume)
        try:
            self.guts.stereo_carrier_pll_recovery.set_lock_threshold(options.squelch)
        except:
            print "FYI: This implementation of the stereo_carrier_pll_recovery has no squelch implementation yet"
        if not(self.set_freq(options.freq)):
            self._set_status_msg("Failed to set initial frequency")

    def _set_status_msg(self, msg, which=0):
        """Write msg into field `which` of the frame's status bar."""
        self.frame.GetStatusBar().SetStatusText(msg, which)

    def _build_gui(self, vbox, usrp_rate, demod_rate, audio_rate):
        """Assemble the wx widgets: FFT/scope displays (the `if 0:` blocks are
        compile-time-disabled debug views), the control form at the bottom,
        and the optional Powermate knob bindings."""
        def _form_set_freq(kv):
            return self.set_freq(kv['freq'])
        if 1:
            self.src_fft = fftsink2.fft_sink_c(self.panel, title="Data from USRP",
                                               fft_size=512, sample_rate=usrp_rate,
                                               ref_scale=32768.0, ref_level=0, y_divs=12)
            self.connect (self.u, self.src_fft)
            vbox.Add (self.src_fft.win, 4, wx.EXPAND)
        if 1:
            post_fm_demod_fft = fftsink2.fft_sink_f(self.panel, title="Post FM Demod",
                                                    fft_size=512, sample_rate=demod_rate,
                                                    y_per_div=10, ref_level=0)
            self.connect (self.guts.fm_demod, post_fm_demod_fft)
            vbox.Add (post_fm_demod_fft.win, 4, wx.EXPAND)
        if 0:
            post_stereo_carrier_generator_fft = fftsink2.fft_sink_c (self.panel, title="Post Stereo_carrier_generator",
                                                                     fft_size=512, sample_rate=audio_rate,
                                                                     y_per_div=10, ref_level=0)
            self.connect (self.guts.stereo_carrier_generator, post_stereo_carrier_generator_fft)
            vbox.Add (post_stereo_carrier_generator_fft.win, 4, wx.EXPAND)
        if 0:
            post_deemphasis_left = fftsink2.fft_sink_f (self.panel, title="Post_Deemphasis_Left",
                                                        fft_size=512, sample_rate=audio_rate,
                                                        y_per_div=10, ref_level=0)
            self.connect (self.guts.deemph_Left, post_deemphasis_left)
            vbox.Add (post_deemphasis_left.win, 4, wx.EXPAND)
        if 0:
            post_deemphasis_right = fftsink2.fft_sink_f(self.panel, title="Post_Deemphasis_Right",
                                                        fft_size=512, sample_rate=audio_rate,
                                                        y_per_div=10, ref_level=-20)
            # NOTE(review): taps deemph_Left despite the "Right" title -- confirm intent
            self.connect (self.guts.deemph_Left, post_deemphasis_right)
            vbox.Add (post_deemphasis_right.win, 4, wx.EXPAND)
        if 0:
            LmR_fft = fftsink2.fft_sink_f(self.panel, title="LmR",
                                          fft_size=512, sample_rate=audio_rate,
                                          y_per_div=10, ref_level=-20)
            self.connect (self.guts.LmR_real,LmR_fft)
            vbox.Add (LmR_fft.win, 4, wx.EXPAND)
        if 0:
            self.scope = scopesink2.scope_sink_f(self.panel, sample_rate=demod_rate)
            self.connect (self.guts.fm_demod,self.scope)
            vbox.Add (self.scope.win,4,wx.EXPAND)
        # control area form at bottom
        self.myform = myform = form.form()
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        hbox.Add((5,0), 0)
        myform['freq'] = form.float_field(
            parent=self.panel, sizer=hbox, label="Freq", weight=1,
            callback=myform.check_input_and_call(_form_set_freq, self._set_status_msg))
        hbox.Add((5,0), 0)
        myform['freq_slider'] = \
            form.quantized_slider_field(parent=self.panel, sizer=hbox, weight=3,
                                        range=(87.9e6, 108.1e6, 0.1e6),
                                        callback=self.set_freq)
        hbox.Add((5,0), 0)
        vbox.Add(hbox, 0, wx.EXPAND)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        hbox.Add((5,0), 0)
        myform['volume'] = \
            form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Volume",
                                        weight=3, range=self.volume_range(),
                                        callback=self.set_vol)
        hbox.Add((5,0), 1)
        myform['gain'] = \
            form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Gain",
                                        weight=3, range=self.subdev.gain_range(),
                                        callback=self.set_gain)
        hbox.Add((5,0), 0)
        myform['sqlch_thrsh'] = \
            form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Stereo Squelch Threshold",
                                        weight=3, range=(0.0,1.0,0.01),
                                        callback=self.set_squelch)
        hbox.Add((5,0), 0)
        vbox.Add(hbox, 0, wx.EXPAND)
        # optional USB knob controller; absence is not an error
        try:
            self.knob = powermate.powermate(self.frame)
            self.rot = 0
            powermate.EVT_POWERMATE_ROTATE (self.frame, self.on_rotate)
            powermate.EVT_POWERMATE_BUTTON (self.frame, self.on_button)
        except:
            print "FYI: No Powermate or Contour Knob found"

    def on_rotate (self, event):
        """Knob rotation handler: every 3 detents, step the frequency by
        0.1 MHz or the volume by one slider step, depending on self.state."""
        self.rot += event.delta
        if (self.state == "FREQ"):
            if self.rot >= 3:
                self.set_freq(self.freq + .1e6)
                self.rot -= 3
            elif self.rot <=-3:
                self.set_freq(self.freq - .1e6)
                self.rot += 3
        else:
            step = self.volume_range()[2]
            if self.rot >= 3:
                self.set_vol(self.vol + step)
                self.rot -= 3
            elif self.rot <=-3:
                self.set_vol(self.vol - step)
                self.rot += 3

    def on_button (self, event):
        """Knob button press toggles the knob mode between FREQ and VOL."""
        if event.value == 0: # button up
            return
        self.rot = 0
        if self.state == "FREQ":
            self.state = "VOL"
        else:
            self.state = "FREQ"
        self.update_status_bar ()

    def set_vol (self, vol):
        """Clamp vol (dB) to volume_range() and apply it as a linear gain
        (10**(dB/10)) to both audio channels."""
        g = self.volume_range()
        self.vol = max(g[0], min(g[1], vol))
        self.volume_control_l.set_k(10**(self.vol/10))
        self.volume_control_r.set_k(10**(self.vol/10))
        self.myform['volume'].set_value(self.vol)
        self.update_status_bar ()

    def set_squelch(self,squelch_threshold):
        """Forward the stereo squelch threshold to the demodulator, if supported."""
        try:
            self.guts.stereo_carrier_pll_recovery.set_lock_threshold(squelch_threshold);
        except:
            print "FYI: This implementation of the stereo_carrier_pll_recovery has no squelch implementation yet"

    def set_freq(self, target_freq):
        """
        Set the center frequency we're interested in.
        @param target_freq: frequency in Hz
        @rtype: bool
        Tuning is a two step process. First we ask the front-end to
        tune as close to the desired frequency as it can. Then we use
        the result of that operation and our target_frequency to
        determine the value for the digital down converter.
        """
        r = usrp.tune(self.u, 0, self.subdev, target_freq)
        if r:
            self.freq = target_freq
            self.myform['freq'].set_value(target_freq) # update displayed value
            self.myform['freq_slider'].set_value(target_freq) # update displayed value
            self.update_status_bar()
            self._set_status_msg("OK", 0)
            return True
        self._set_status_msg("Failed", 0)
        return False

    def set_gain(self, gain):
        """Apply the RF gain (dB) to the daughterboard and the GUI slider."""
        self.myform['gain'].set_value(gain) # update displayed value
        self.subdev.set_gain(gain)

    def update_status_bar (self):
        """Refresh the status-bar text and re-center the source FFT display."""
        msg = "Volume:%r Setting:%s" % (self.vol, self.state)
        self._set_status_msg(msg, 1)
        self.src_fft.set_baseband_freq(self.freq)

    def volume_range(self):
        """Return (min dB, max dB, step dB) for the volume control."""
        return (-20.0, 0.0, 0.5)
if __name__ == '__main__':
    # Launch the WX GUI application hosting the receiver flow graph.
    app = stdgui2.stdapp (wfm_rx_block, "USRP WFM RX")
    app.MainLoop ()
| gpl-3.0 |
takeshineshiro/django | django/middleware/gzip.py | 478 | 1831 | import re
from django.utils.cache import patch_vary_headers
from django.utils.text import compress_sequence, compress_string
re_accepts_gzip = re.compile(r'\bgzip\b')
class GZipMiddleware(object):
    """
    Compresses response bodies with gzip for clients that advertise support
    via the Accept-Encoding header.  Adds 'Accept-Encoding' to the Vary
    header so caches store compressed and uncompressed variants separately.
    """
    def process_response(self, request, response):
        # Tiny non-streaming responses are not worth the compression overhead.
        if not response.streaming and len(response.content) < 200:
            return response
        # Something upstream already applied an encoding; leave it alone.
        if response.has_header('Content-Encoding'):
            return response
        patch_vary_headers(response, ('Accept-Encoding',))
        accept_encoding = request.META.get('HTTP_ACCEPT_ENCODING', '')
        if not re_accepts_gzip.search(accept_encoding):
            return response
        if response.streaming:
            # The compressed size is unknown until the stream is consumed,
            # so Content-Length can no longer be advertised.
            response.streaming_content = compress_sequence(response.streaming_content)
            del response['Content-Length']
        else:
            gzipped = compress_string(response.content)
            # Only keep the gzipped body if it is actually shorter.
            if len(gzipped) >= len(response.content):
                return response
            response.content = gzipped
            response['Content-Length'] = str(len(response.content))
        if response.has_header('ETag'):
            response['ETag'] = re.sub('"$', ';gzip"', response['ETag'])
        response['Content-Encoding'] = 'gzip'
        return response
| bsd-3-clause |
seanchen/taiga-back | taiga/projects/history/serializers.py | 5 | 1379 | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base.api import serializers
from taiga.base.fields import JsonField, I18NJsonField
from . import models
# History-entry field names whose stored values carry translated (i18n) variants.
HISTORY_ENTRY_I18N_FIELDS=("points", "status", "severity", "priority", "type")
class HistoryEntrySerializer(serializers.ModelSerializer):
    """Serializer for HistoryEntry model instances.

    JSON-typed columns are exposed through JsonField; the value snapshots that
    contain translatable names use I18NJsonField restricted to
    HISTORY_ENTRY_I18N_FIELDS.
    """
    diff = JsonField()
    snapshot = JsonField()
    values = I18NJsonField(i18n_fields=HISTORY_ENTRY_I18N_FIELDS)
    values_diff = I18NJsonField(i18n_fields=HISTORY_ENTRY_I18N_FIELDS)
    # user and delete_comment_user are stored as JSON blobs, not FK relations
    user = JsonField()
    delete_comment_user = JsonField()
    class Meta:
        model = models.HistoryEntry
| agpl-3.0 |
proteus-cpi/FixedPoint.py | fixedpoint/test/test_fixedpoint.py | 1 | 27232 | ###!/usr/bin/env python
"""
unit tests for FixedPoint
This module is intended to be run as a regression test during Python
installation
"""
import sys
# Added the module path to sys.path
sys.path.extend([ '../../'])
#print sys.path
from fixedpoint import *
__copyright__ = "Copyright (C) Python Software Foundation"
__author__ = "Downright Software Collective"
__version__ = 0, 1, 0
import unittest
from fixedpoint import FixedPoint, bankersRounding, addHalfAndChop, DEFAULT_PRECISION
# declare a derived class from FixedPoint for testing
class SonOfFixedPoint(FixedPoint):
    """A FixedPoint subclass used to verify subclass-preserving behavior."""
    def __init__(self, value=0, precision=DEFAULT_PRECISION):
        FixedPoint.__init__(self, value, precision)
    def __repr__(self):
        # repr() of the (string value, precision) tuple, prefixed with the
        # subclass name -- same output as the original backtick expression.
        return "SonOfFixedPoint" + repr((str(self), self.p))
class FixedPointTest(unittest.TestCase):
"""Unit tests for FixedPointy"""
def testCreateDefault(self):
"""Simply create a default object."""
n = FixedPoint();
self.assertEquals(n.get_precision(), DEFAULT_PRECISION)
self.assertEquals(long(n), 0)
n = SonOfFixedPoint();
self.assertEquals(n.get_precision(), DEFAULT_PRECISION)
self.assertEquals(long(n), 0)
def testCreateFromNone(self):
"""try to slip None in"""
self.failUnlessRaises(TypeError, FixedPoint, None);
def testCreateFromString(self):
"""Create a FixedPoint from a string"""
# try an empty string
self.failUnlessRaises(ValueError, FixedPoint, "");
# try a fixed point zero
n = FixedPoint("0");
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 0L)
# try a floating point zero
n = FixedPoint("0.0");
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 0L)
# try a floating point number with a positive exponent
n = FixedPoint("42.3e5");
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423000000L)
# try a floating point number with a negative exponent
n = FixedPoint("42.3e-1");
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423L)
# try truncating the precision
n = FixedPoint("42.123");
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 4212)
def testCreateFromIntOrLong(self):
"""Create a FixedPoint from an int or a long"""
# try a negative
n = FixedPoint(-333);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, -33300)
# try a zero
n = FixedPoint(0);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 0L)
# try a positive
n = FixedPoint(333);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 33300L)
def testCreateFromFixedPoint(self):
"""Create a FixedPoint from another FixedPoint"""
# try a negative
n = FixedPoint(-333);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, -33300L)
# try a negative
x = FixedPoint(n);
self.assertEquals(x.precision, DEFAULT_PRECISION)
self.assertEquals(x.n, -33300L)
x = SonOfFixedPoint(n);
self.assertEquals(x.precision, DEFAULT_PRECISION)
self.assertEquals(x.n, -33300L)
def testCreateFromFloat(self):
"""Create a FixedPoint from a floating point number"""
# try a floating point zero
n = FixedPoint(0.0);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 0L)
# try a floating point number with a positive exponent
n = FixedPoint(42.3e5);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423000000L)
# try a floating point number with a negative exponent
n = FixedPoint(42.3e-1);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423L)
# try truncating the precision
n = FixedPoint(42.123);
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 4212L)
def testCreateFromObject(self):
"""
Try to create a FixedPoint from something that can't be
coerced to a number.
"""
self.failUnlessRaises(TypeError, FixedPoint, object);
self.failUnlessRaises(TypeError, SonOfFixedPoint, object);
def testSetAndGetPrecision(self):
"""Change and retrieve the precision of an existin object"""
# try a floating point number with a negative exponent
n = FixedPoint(42.3e-1);
self.assertEquals(n.get_precision(), DEFAULT_PRECISION)
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423L)
n = SonOfFixedPoint(42.3e-1);
self.assertEquals(n.get_precision(), DEFAULT_PRECISION)
self.assertEquals(n.precision, DEFAULT_PRECISION)
self.assertEquals(n.n, 423L)
# try something that's not a number
self.failUnlessRaises(TypeError, n.set_precision, object);
self.failUnlessRaises(TypeError, n.precision, object);
# try a negative number
self.failUnlessRaises(ValueError, n.set_precision, -3);
# try a precision greater than we started with
newprecision = DEFAULT_PRECISION + 1
n.set_precision(newprecision)
self.assertEquals(n.get_precision(), newprecision)
self.assertEquals(n.n, 4230L)
precision = n.precision + 1
n.precision += 1
self.assertEquals(n.precision, precision)
# try a precision less than we started with
newprecision = DEFAULT_PRECISION - 1
n.set_precision(newprecision)
self.assertEquals(n.get_precision(), newprecision)
self.assertEquals(n.n, 42)
def test__str__(self):
"""test conversion to string"""
# try the default
n = FixedPoint()
self.assertEquals(str(n), "0.00")
n = SonOfFixedPoint()
self.assertEquals(str(n), "0.00")
# try a floating point number with a negative exponent
n = FixedPoint(42.3e-1);
self.assertEquals(str(n), "4.23")
n = SonOfFixedPoint(42.3e-1);
self.assertEquals(str(n), "4.23")
# try a negative floating point number
n = FixedPoint(-4.23);
self.assertEquals(str(n), "-4.23")
# try an int
n = FixedPoint(1, 0);
self.assertEquals(str(n), "1.")
def test__repr__(self):
"""test representation"""
REPR_FORMAT = "FixedPoint('%s', %d)"
# try the default
n = FixedPoint()
self.assertEquals(repr(n), REPR_FORMAT % (str(n), n.get_precision()))
# try a floating point number with a negative exponent
n = FixedPoint(42.3e-1);
self.assertEquals(repr(n), REPR_FORMAT % (str(n), n.get_precision()))
# try a negative floating point number
n = FixedPoint(-4.23);
self.assertEquals(repr(n), REPR_FORMAT % (str(n), n.get_precision()))
# try an int
n = FixedPoint(1, 0);
self.assertEquals(repr(n), REPR_FORMAT % (str(n), n.get_precision()))
SON_OF_FORMAT = "SonOfFixedPoint('%s', %d)"
# try the default
n = SonOfFixedPoint()
self.assertEquals(repr(n), SON_OF_FORMAT % (str(n), n.get_precision()))
# try a floating point number with a negative exponent
n = SonOfFixedPoint(42.3e-1);
self.assertEquals(repr(n), SON_OF_FORMAT % (str(n), n.get_precision()))
# try a negative floating point number
n = SonOfFixedPoint(-4.23);
self.assertEquals(repr(n), SON_OF_FORMAT % (str(n), n.get_precision()))
# try an int
n = SonOfFixedPoint(1, 0);
self.assertEquals(repr(n), SON_OF_FORMAT % (str(n), n.get_precision()))
def test__copy__(self):
"""test shallow copy"""
import copy
# try a negative floating point number
n = FixedPoint(-4.23);
self.assertEquals(n, copy.copy(n))
self.failIf(n is copy.copy(n))
# try a negative floating point number
n = SonOfFixedPoint(-4.23);
self.assertEquals(n, copy.copy(n))
self.failIf(n is copy.copy(n))
def test__deepcopy__(self):
"""test deep copy"""
import copy
# try a negative floating point number
n = FixedPoint(-4.23);
self.assertEquals(n, copy.deepcopy(n))
self.failIf(n is copy.deepcopy(n))
# try a negative floating point number
n = SonOfFixedPoint(-4.23);
self.assertEquals(n, copy.deepcopy(n))
self.failIf(n is copy.deepcopy(n))
def test__cmp__(self):
"""test compare"""
# test two defaults
a = FixedPoint()
b = FixedPoint()
self.failIf(a < b)
self.failUnless(a == b)
self.failIf(a > b)
# test equal precision
a = FixedPoint(1.11)
b = FixedPoint(1.12)
self.failUnless(a < b)
self.failIf(a == b)
self.failIf(a > b)
# test unequal precision
a = FixedPoint(1.125, 3)
b = FixedPoint(1.12)
self.failIf(a < b)
self.failIf(a == b)
self.failUnless(a > b)
# test equal precision, with subclass
a = FixedPoint(1.11)
b = SonOfFixedPoint(1.12)
self.failUnless(a < b)
self.failIf(a == b)
self.failIf(a > b)
def test__hash__(self):
"""test the hash function"""
# test that we don't choke
# 2002-09-19 dfort -- we could test a lot more here
hash(FixedPoint())
def test__nonzero__(self):
"""test the truth value"""
# test the default
self.failIf(FixedPoint())
# test one that should be true
self.failUnless(FixedPoint(1.0e-15, 15))
def test__neg__(self):
"""test negative"""
# test the default
self.failIf(-FixedPoint())
# test one that should be true
self.failUnless(-FixedPoint(-1.0e-15, 15))
def test__abs__(self):
"""test absolute value"""
# test the default
d = FixedPoint()
self.assertEquals(abs(d), d)
# test a negative
n = FixedPoint(-1.0e-15, 15)
self.assertEquals(abs(n), -n)
def test__add__(self):
"""test addition"""
#test with a float
a = FixedPoint(3.33)
b = 3.3333
c = a + b
self.assertEquals(type(c), type(a))
self.assertEquals(c.n, 666)
# test two operands with the same precision
a = FixedPoint(3.33)
b = FixedPoint(6.66)
c = a + b
self.assertEquals(type(c), type(a))
self.assertEquals(c.n, 999L)
# test two operands with differing precision
a = FixedPoint(3.33)
b = FixedPoint(6.66, 3)
c = a + b
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 9990L)
a = FixedPoint(3.33)
b = FixedPoint(6.666, 3)
c = a + b
self.assertEquals(type(c), type(a))
self.assertEquals(c.n, 9996L)
# test negatives
a = FixedPoint(3.33)
b = FixedPoint(-6.66, 3)
c = a + b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -3330L)
a = FixedPoint(-3.33)
b = FixedPoint(-6.666, 3)
c = a + b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -9996L)
# test subclass
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.666, 3)
c = a + b
self.assert_(isinstance(c, FixedPoint))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 9996L)
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.666, 3)
c = b + a
self.assertEquals(type(c), type(b))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 9996)
def test__radd__(self):
"""test addition as the right argument"""
# test with a float
a = FixedPoint(3.33)
b = 3.3333
c = b + a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 666L)
# test subclass
a = SonOfFixedPoint(3.33)
b = 3.3333
c = b + a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 666L)
def test__sub__(self):
"""test subtraction"""
# test with a float
a = FixedPoint(3.33)
b = 3.3333
c = a - b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c, 0L)
a = SonOfFixedPoint(3.33)
b = 3.3333
c = a - b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c, 0L)
# test two operands with the same precision
a = FixedPoint(3.33)
b = FixedPoint(6.66)
c = b - a
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 333L)
# test two operands with differing precision
a = FixedPoint(3.33)
b = FixedPoint(6.66, 3)
c = b - a
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 3330L)
a = FixedPoint(3.33)
b = FixedPoint(6.666, 3)
c = b - a
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 3336L)
# test negatives
a = FixedPoint(3.33)
b = FixedPoint(-6.66, 3)
c = b - a
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -9990L)
a = FixedPoint(-3.33)
b = FixedPoint(-6.666, 3)
c = b - a
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -3336L)
# test subclass
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = a - b
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -3330L)
#self.assertEquals(type(c), type(b))
self.assert_(isinstance(c, FixedPoint))
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = b - a
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 3330L)
self.assertEquals(type(c), type(b))
def test__rsub__(self):
"""test subtraction as the right hand argument"""
# test with a float
a = FixedPoint(3.33)
b = 1.11
c = b - a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, -222)
a = SonOfFixedPoint(3.33)
b = 1.11
c = b - a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, -222)
def test__mul__(self):
"""test multiplication"""
#test with a float
a = FixedPoint(2)
b = 3.3333
c = a * b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 666L)
# test two operands with the same precision
a = FixedPoint(3.33)
b = FixedPoint(6.66)
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 2218L)
# test two operands with differing precision
a = FixedPoint(3.33)
b = FixedPoint(6.66, 3)
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 22178L)
# test negatives
a = FixedPoint(3.33)
b = FixedPoint(-6.66, 3)
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -22178L)
a = FixedPoint(-3.33)
b = FixedPoint(-6.666, 3)
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 22198L)
# test subclass
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = a * b
#self.assertEquals(type(c), type(b))
self.assert_(isinstance(c, FixedPoint))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 22178L)
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = b * a
self.assertEquals(type(c), type(b))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 22178L)
a = FixedPoint(3.33)
b = 3
c = a * b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 999)
def test__rmul__(self):
"""test multiplication"""
a = FixedPoint(3.33)
b = 3
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 999L)
a = SonOfFixedPoint(3.33)
b = 3
c = b * a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 999L)
def test__div__(self):
"""test division"""
#test with a float
a = FixedPoint(6.66)
b = 3.3333
c = a / b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 200L)
a = SonOfFixedPoint(6.66)
b = 3.3333
c = a / b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 200L)
# test two operands with the same precision
a = FixedPoint(3.33)
b = FixedPoint(6.66)
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 200L)
# test two operands with differing precision
a = FixedPoint(1)
b = FixedPoint(3, 3)
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 3000L)
# test negatives
a = FixedPoint(3.33)
b = FixedPoint(-6.66, 3)
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, -2000L)
a = FixedPoint(-3.33)
b = FixedPoint(-6.66)
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 200L)
# test subclass
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = a / b
#self.assertEquals(type(c), type(b))
self.assert_(isinstance(c, FixedPoint))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 500)
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.66, 3)
c = b / a
self.assertEquals(type(c), type(b))
self.assertEquals(c.precision, 3)
self.assertEquals(c.n, 2000L)
a = FixedPoint(3.33)
b = 3
c = a / b
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 111L)
def test__rdiv__(self):
"""test right division"""
a = FixedPoint(3)
b = 1
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, DEFAULT_PRECISION)
self.assertEquals(c.n, 33L)
a = SonOfFixedPoint(3.33, 6)
b = 1
c = b / a
self.assertEquals(type(c), type(a))
self.assertEquals(c.precision, 6)
self.assertEquals(c.n, 300300)
def test__divmod__(self):
"""test integer division with modulo"""
a = FixedPoint(3.33)
q, m = divmod(a, 2)
self.assertEquals(type(q), type(1L))
self.assertEquals(type(m), type(a))
self.assertEquals(q, 1)
self.assertEquals(m, FixedPoint(1.33))
a = SonOfFixedPoint(3.33)
q, m = divmod(a, 2)
self.assertEquals(type(q), type(1L))
self.assertEquals(type(m), type(a))
self.assertEquals(q, 1L)
self.assertEquals(m, FixedPoint(1.33))
a = FixedPoint(3.33)
b = FixedPoint(1.11)
q, m = divmod(a, b)
self.assertEquals(type(q), type(1L))
self.assertEquals(type(m), type(a))
self.assertEquals(q, 3L)
self.assertEquals(m, FixedPoint(0))
# 2002-10-19 dougfort -- this produces infinite recursion
## a = FixedPoint(3.33)
## b = SonOfFixedPoint(1.11)
## q, m = divmod(a, b)
## self.assertEquals(type(q), type(1L))
## self.assertEquals(type(m), type(a))
## self.assertEquals(q, 3L)
## self.assertEquals(m, FixedPoint(0))
def test__rdivmod__(self):
"""test right integer division with modulo"""
a = FixedPoint(3.33)
q, m = divmod(4, a)
self.assertEquals(q, 1L)
self.assertEquals(m, FixedPoint(0.67))
def test__mod__(self):
"""test modulo"""
a = FixedPoint(3.33)
b = 2
c = a % b
self.assertEquals(c, FixedPoint(1.33))
a = FixedPoint(3.33)
b = FixedPoint(1.111)
c = a % b
self.assertEquals(c, FixedPoint(0))
def test__rmod__(self):
"""test right modulo"""
a = FixedPoint(3.33)
b = 4
c = b % a
self.assertEquals(c, FixedPoint(0.67))
a = FixedPoint(3.33)
b = SonOfFixedPoint(6.666)
c = b % a
self.assertEquals(c, SonOfFixedPoint(0.01))
def test__float__(self):
"""test casting to float"""
self.assertEquals(float(4), float(FixedPoint(4)))
self.assertEquals(3.1416, float(FixedPoint(3.14159, 4)))
def test__long__(self):
"""test casting to long"""
self.assertEquals(4, long(FixedPoint(4)))
self.assertEquals(3, long(FixedPoint(3.14159, 4)))
def test__int__(self):
"""test casting to int"""
self.assertEquals(4, int(FixedPoint(4)))
self.assertEquals(3, int(FixedPoint(3.14159, 4)))
def testFrac(self):
"""test return of the fractional portion"""
self.assertEquals(
FixedPoint(), FixedPoint(4).frac())
self.assertEquals(
FixedPoint(0.1416, 4),
FixedPoint(3.14159, 4).frac())
def testBankersRounding(self):
"""test that bankers rounding works as expected"""
prevrounding = FixedPoint.round
FixedPoint.round = bankersRounding
# we expect to round 1 up because it's odd
self.assertEquals(
FixedPoint(1.5,0), FixedPoint(2.0,0))
# we expect to leave 2 alone because it's even
self.assertEquals(
FixedPoint(2.5,0), FixedPoint(2.0,0))
FixedPoint.round = prevrounding
def testAddHalfAndChop(self):
"""test that 'add half and chop' rounding works as expected"""
prevrounding = FixedPoint.round
FixedPoint.round = addHalfAndChop
# we expect to round 1 up
self.assertEquals(
FixedPoint(1.5,0), FixedPoint(2.0,0))
# we expect to round 2 up as well
self.assertEquals(
FixedPoint(2.5,0), FixedPoint(3.0,0))
FixedPoint.round = prevrounding
def testOriginal(self):
"""Tim's oringinal tests in __main__ of fixedpoint.py"""
fp = FixedPoint
o = fp("0.1")
self.assert_(str(o) == "0.10")
t = fp("-20e-2", 5)
self.assert_(str(t) == "-0.20000")
self.assert_(t < o)
self.assert_(o > t)
self.assert_(min(o, t) == min(t, o) == t)
self.assert_(max(o, t) == max(t, o) == o)
self.assert_(o != t)
self.assert_(--t == t)
self.assert_(abs(t) > abs(o))
self.assert_(abs(o) < abs(t))
self.assert_(o == o and t == t)
self.assert_(t.copy() == t)
self.assert_(o == -t/2 == -.5 * t)
self.assert_(abs(t) == o + o)
self.assert_(abs(o) == o)
self.assert_(o/t == -0.5)
self.assert_(-(t/o) == (-t)/o == t/-o == 2)
self.assert_(1 + o == o + 1 == fp(" +00.000011e+5 "))
self.assert_(1/o == 10)
self.assert_(o + t == t + o == -o)
self.assert_(2.0 * t == t * 2 == "2" * t == o/o * 2L * t)
self.assert_(1 - t == -(t - 1) == fp(6L)/5)
self.assert_(t*t == 4*o*o == o*4*o == o*o*4)
self.assert_(fp(2) - "1" == 1)
self.assert_(float(-1/t) == 5.0)
for p in range(20):
self.assert_(42 + fp("1e-20", p) - 42 == 0)
self.assert_(1/(42 + fp("1e-20", 20) - 42) == fp("100.0E18"))
o = fp(".9995", 4)
self.assert_(1 - o == fp("5e-4", 10))
o.set_precision(3)
self.assert_(o == 1)
o = fp(".9985", 4)
o.set_precision(3)
self.assert_(o == fp(".998", 10))
self.assert_(o == o.frac())
o.set_precision(100)
self.assert_(o == fp(".998", 10))
o.set_precision(2)
self.assert_(o == 1)
x = fp(1.99)
self.assert_(long(x) == -long(-x) == 1L)
self.assert_(int(x) == -int(-x) == 1)
self.assert_(x == long(x) + x.frac())
self.assert_(-x == long(-x) + (-x).frac())
self.assert_(fp(7) % 4 == 7 % fp(4) == 3)
self.assert_(fp(-7) % 4 == -7 % fp(4) == 1)
self.assert_(fp(-7) % -4 == -7 % fp(-4) == -3)
self.assert_(fp(7.0) % "-4.0" == 7 % fp(-4) == -1)
self.assert_(fp("5.5") % fp("1.1") == fp("5.5e100") % fp("1.1e100") == 0)
self.assert_(divmod(fp("1e100"), 3) == (long(fp("1e100")/3), 1))
def _make_suite():
"""
Factory to create a test suite
This is separated out for use by both installation regression
testing, and in a standalone unit test.
"""
return unittest.TestSuite((
unittest.makeSuite(FixedPointTest, "test"),
))
def test_main():
"""
Installation Regression Test
The name test_main is required
"""
import test_support
test_support.run_suite(_make_suite())
if __name__ == "__main__":
"""
Run as a stand-alone unit test.
"""
runner = unittest.TextTestRunner()
runner.run(_make_suite())
| gpl-2.0 |
fpeyre/shinken | test/test_dummy.py | 18 | 2022 | #!/usr/bin/env python
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class TestConfig(ShinkenTest):
def setUp(self):
self.setup_with_file('etc/shinken_1r_1h_1s.cfg')
def test_dummy(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
print "Get the hosts and services"
now = time.time()
host = self.sched.hosts.find_by_name("test_host_0")
host.checks_in_progress = []
host.act_depend_of = [] # ignore the router
router = self.sched.hosts.find_by_name("test_router_0")
router.checks_in_progress = []
router.act_depend_of = [] # ignore the router
svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
svc.checks_in_progress = []
svc.act_depend_of = [] # no hostchecks on critical checkresults
self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('UP', host.state)
self.assertEqual('HARD', host.state_type)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
Nespa32/sm_project | wordcloud_gen/wordcloud_package/wordcloud/wordcloud.py | 1 | 14076 | # Author: Andreas Christian Mueller <amueller@ais.uni-bonn.de>
# (c) 2012
# Modified by: Paul Nechifor <paul@nechifor.net>
#
# License: MIT
from random import Random
import os
import re
import sys
import numpy as np
from operator import itemgetter
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
from .query_integral_image import query_integral_image
item1 = itemgetter(1)
FONT_PATH = os.environ.get("FONT_PATH", "/usr/share/fonts/truetype/droid/DroidSansMono.ttf")
STOPWORDS = set([x.strip() for x in open(os.path.join(os.path.dirname(__file__),
'stopwords')).read().split('\n')])
def random_color_func(word, font_size, position, orientation, random_state=None):
"""Random hue color generation.
Default coloring method. This just picks a random hue with value 80% and
lumination 50%.
Parameters
----------
word, font_size, position, orientation : ignored.
random_state : random.Random object or None, (default=None)
If a random object is given, this is used for generating random numbers.
"""
if random_state is None:
random_state = Random()
return "hsl(%d, 80%%, 50%%)" % random_state.randint(0, 255)
class WordCloud(object):
"""Word cloud object for generating and drawing.
Parameters
----------
font_path : string
Font path to the font that will be used (OTF or TTF).
Defaults to DroidSansMono path on a Linux machine. If you are on
another OS or don't have this font, you need to adjust this path.
width : int (default=400)
Width of the canvas.
height : int (default=200)
Height of the canvas.
ranks_only : boolean (default=False)
Only use the rank of the words, not the actual counts.
prefer_horizontal : float (default=0.90)
The ratio of times to try horizontal fitting as opposed to vertical.
mask : nd-array or None (default=None)
If not None, gives a binary mask on where to draw words. All zero
entries will be considered "free" to draw on, while all non-zero
entries will be deemed occupied. If mask is not None, width and height will be
ignored and the shape of mask will be used instead.
max_words : number (default=200)
The maximum number of words.
stopwords : set of strings
The words that will be eliminated.
background_color : color value (default="black")
Background color for the word cloud image.
max_font_size : int or None (default=None)
Maximum font size for the largest word. If None, height of the image is
used.
Attributes
----------
words_ : list of tuples (string, float)
Word tokens with associated frequency.
layout_ : list of tuples (string, int, (int, int), int, color))
Encodes the fitted word cloud. Encodes for each word the string, font
size, position, orientation and color.
"""
def __init__(self, font_path=None, width=400, height=200, margin=5,
ranks_only=False, prefer_horizontal=0.9, mask=None, scale=1,
color_func=random_color_func, max_words=200, stopwords=None,
random_state=None, background_color='black', max_font_size=None):
if stopwords is None:
stopwords = STOPWORDS
if font_path is None:
font_path = FONT_PATH
self.font_path = font_path
self.width = width
self.height = height
self.margin = margin
self.ranks_only = ranks_only
self.prefer_horizontal = prefer_horizontal
self.mask = mask
self.scale = scale
self.color_func = color_func
self.max_words = max_words
self.stopwords = stopwords
if isinstance(random_state, int):
random_state = Random(random_state)
self.random_state = random_state
self.background_color = background_color
if max_font_size is None:
max_font_size = height
self.max_font_size = max_font_size
def fit_words(self, words):
"""Generate the positions for words.
Parameters
----------
words : array of tuples
A tuple contains the word and its frequency.
Returns
-------
layout_ : list of tuples (string, int, (int, int), int, color))
Encodes the fitted word cloud. Encodes for each word the string, font
size, position, orientation and color.
Notes
-----
Larger canvases with make the code significantly slower. If you need a large
word cloud, run this function with a lower canvas size, and draw it with a
larger scale.
In the current form it actually just uses the rank of the counts, i.e. the
relative differences don't matter. Play with setting the font_size in the
main loop for different styles.
"""
if self.random_state is not None:
random_state = self.random_state
else:
random_state = Random()
if self.mask is not None:
width = self.mask.shape[1]
height = self.mask.shape[0]
# the order of the cumsum's is important for speed ?!
integral = np.cumsum(np.cumsum(self.mask, axis=1), axis=0).astype(np.uint32)
else:
height, width = self.height, self.width
integral = np.zeros((height, width), dtype=np.uint32)
# create image
img_grey = Image.new("L", (width, height))
draw = ImageDraw.Draw(img_grey)
img_array = np.asarray(img_grey)
font_sizes, positions, orientations, colors = [], [], [], []
font_size = self.max_font_size
# start drawing grey image
for word, count in words:
# alternative way to set the font size
if not self.ranks_only:
font_size = min(font_size, int(100 * np.log(count + 100)))
while True:
# try to find a position
font = ImageFont.truetype(self.font_path, font_size)
# transpose font optionally
if random_state.random() < self.prefer_horizontal:
orientation = None
else:
orientation = Image.ROTATE_90
transposed_font = ImageFont.TransposedFont(font,
orientation=orientation)
draw.setfont(transposed_font)
# get size of resulting text
box_size = draw.textsize(word)
# find possible places using integral image:
result = query_integral_image(integral, box_size[1] + self.margin,
box_size[0] + self.margin, random_state)
if result is not None or font_size == 0:
break
# if we didn't find a place, make font smaller
font_size -= 1
if font_size == 0:
# we were unable to draw any more
break
x, y = np.array(result) + self.margin // 2
# actually draw the text
draw.text((y, x), word, fill="white")
positions.append((x, y))
orientations.append(orientation)
font_sizes.append(font_size)
colors.append(self.color_func(word, font_size, (x, y), orientation,
random_state=random_state))
# recompute integral image
if self.mask is None:
img_array = np.asarray(img_grey)
else:
img_array = np.asarray(img_grey) + self.mask
# recompute bottom right
# the order of the cumsum's is important for speed ?!
partial_integral = np.cumsum(np.cumsum(img_array[x:, y:], axis=1),
axis=0)
# paste recomputed part into old image
# if x or y is zero it is a bit annoying
if x > 0:
if y > 0:
partial_integral += (integral[x - 1, y:]
- integral[x - 1, y - 1])
else:
partial_integral += integral[x - 1, y:]
if y > 0:
partial_integral += integral[x:, y - 1][:, np.newaxis]
integral[x:, y:] = partial_integral
self.layout_ = list(zip(words, font_sizes, positions, orientations, colors))
return self.layout_
def process_text(self, text):
"""Splits a long text into words, eliminates the stopwords.
Parameters
----------
text : string
The text to be processed.
Returns
-------
words : list of tuples (string, float)
Word tokens with associated frequency.
Notes
-----
There are better ways to do word tokenization, but I don't want to
include all those things.
"""
d = {}
flags = re.UNICODE if sys.version < '3' and \
type(text) is unicode else 0
for word in re.findall(r"\w[\w']*", text, flags=flags):
if word.isdigit():
continue
word_lower = word.lower()
if word_lower in self.stopwords:
continue
# Look in lowercase dict.
if word_lower in d:
d2 = d[word_lower]
else:
d2 = {}
d[word_lower] = d2
# Look in any case dict.
d2[word] = d2.get(word, 0) + 1
d3 = {}
for d2 in d.values():
# Get the most popular case.
first = max(d2.items(), key=item1)[0]
d3[first] = sum(d2.values())
# merge plurals into the singular count (simple cases only)
for key in list(d3.keys()):
if key.endswith('s'):
key_singular = key[:-1]
if key_singular in d3:
val_plural = d3[key]
val_singular = d3[key_singular]
d3[key_singular] = val_singular + val_plural
del d3[key]
words = sorted(d3.items(), key=item1, reverse=True)
words = words[:self.max_words]
if len(d3.values()) > 0: # check in case there are no words
maximum = float(max(d3.values()))
else:
maximum = 1.0 # avoid by-zero division
for i, (word, count) in enumerate(words):
words[i] = word, count / maximum
self.words_ = words
return words
def generate(self, text):
"""Generate wordcloud from text.
Calls process_text and fit_words.
Returns
-------
self
"""
self.process_text(text)
self.fit_words(self.words_)
return self
def _check_generated(self):
"""Check if layout_ was computed, otherwise raise error."""
if not hasattr(self, "layout_"):
raise ValueError("WordCloud has not been calculated, call generate first.")
def to_image(self):
self._check_generated()
if self.mask is not None:
width = self.mask.shape[1]
height = self.mask.shape[0]
else:
height, width = self.height, self.width
img = Image.new("RGB", (width * self.scale, height * self.scale), self.background_color)
draw = ImageDraw.Draw(img)
for (word, count), font_size, position, orientation, color in self.layout_:
font = ImageFont.truetype(self.font_path, font_size * self.scale)
transposed_font = ImageFont.TransposedFont(font,
orientation=orientation)
draw.setfont(transposed_font)
pos = (position[1] * self.scale, position[0] * self.scale)
draw.text(pos, word, fill=color)
return img
def recolor(self, random_state=None, color_func=None):
"""Recolor existing layout.
Applying a new coloring is much faster than generating the whole wordcloud.
Parameters
----------
random_state : RandomState, int, or None, default=None
If not None, a fixed random state is used. If an int is given, this
is used as seed for a random.Random state.
color_func : function or None, default=None
Function to generate new color from word count, font size, position
and orientation. If None, self.color_func is used.
Returns
-------
self
"""
if isinstance(random_state, int):
random_state = Random(random_state)
self._check_generated()
if color_func is None:
color_func = self.color_func
self.layout_ = [(word, font_size, position, orientation,
color_func(word, font_size, position, orientation, random_state))
for word, font_size, position, orientation, _ in self.layout_]
return self
def to_file(self, filename):
"""Export to image file.
Parameters
----------
filename : string
Location to write to.
Returns
-------
self
"""
img = self.to_image()
img.save(filename)
return self
def to_array(self):
"""Convert to numpy array.
Returns
-------
image : nd-array size (width, height, 3)
Word cloud image as numpy matrix.
"""
return np.array(self.to_image())
def __array__(self):
"""Convert to numpy array.
Returns
-------
image : nd-array size (width, height, 3)
Word cloud image as numpy matrix.
"""
return self.to_array()
def to_html(self):
raise NotImplementedError("FIXME!!!")
| mit |
eayunstack/rally | setup.py | 334 | 1028 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=1.8'],
pbr=True)
| apache-2.0 |
anchor/vaultaire-tools | telemetry/watch_outstanding.py | 1 | 1176 | #!/usr/bin/env python2.6
import sys
'''watch a telemetry log and show how many databursts are outstanding over time
TTT 1393979427248973000 ffffffff messages_in = 68751
TTT 1393979427249019000 ffffffff acks_received_from_upstream = 68722
'''
from datetime import datetime
import time
import select
def outstanding_databursts_from_stream(stream):
'''from an iterable of telemetry messages, yield outstanding databursts
'''
messages_in = 0
for l in stream:
if l[:4] != 'TTT ':
continue
fields = l.strip().split()
if len(fields) < 6:
continue
if fields[3] == 'messages_in':
messages_in = int(fields[5])
elif fields[3] == 'acks_received_from_upstream':
acked = int(fields[5])
timestamp = datetime.fromtimestamp(int(fields[1])/10**9)
yield timestamp,messages_in-acked
def tail_file(f):
while True:
l = f.readline()
if l == '':
time.sleep(1)
yield l
if __name__ == '__main__':
if len(sys.argv) > 1:
f = open(sys.argv[1],"r",1)
else:
f = open("/dev/stdin","r",1)
tailf = tail_file(f)
for timestamp,outstanding in outstanding_databursts_from_stream(tailf):
print timestamp,'outstanding:',outstanding
sys.stdout.flush()
| bsd-3-clause |
kumaralokgithub/grpc | tools/run_tests/sanity/check_version.py | 14 | 2810 | #!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import yaml
import os
import re
import subprocess
errors = 0
os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
# hack import paths to pick up extra code
sys.path.insert(0, os.path.abspath('tools/buildgen/plugins'))
from expand_version import Version
try:
branch_name = subprocess.check_output(
'git rev-parse --abbrev-ref HEAD',
shell=True)
except:
print('WARNING: not a git repository')
branch_name = None
if branch_name is not None:
m = re.match(r'^release-([0-9]+)_([0-9]+)$', branch_name)
if m:
print('RELEASE branch')
# version number should align with the branched version
check_version = lambda version: (
version.major == int(m.group(1)) and
version.minor == int(m.group(2)))
warning = 'Version key "%%s" value "%%s" should have a major version %s and minor version %s' % (m.group(1), m.group(2))
elif re.match(r'^debian/.*$', branch_name):
# no additional version checks for debian branches
check_version = lambda version: True
else:
# all other branches should have a -dev tag
check_version = lambda version: version.tag == 'dev'
warning = 'Version key "%s" value "%s" should have a -dev tag'
else:
check_version = lambda version: True
with open('build.yaml', 'r') as f:
build_yaml = yaml.load(f.read())
settings = build_yaml['settings']
top_version = Version(settings['version'])
if not check_version(top_version):
errors += 1
print(warning % ('version', top_version))
for tag, value in settings.iteritems():
if re.match(r'^[a-z]+_version$', tag):
value = Version(value)
if tag != 'core_version':
if value.major != top_version.major:
errors += 1
print('major version mismatch on %s: %d vs %d' % (tag, value.major,
top_version.major))
if value.minor != top_version.minor:
errors += 1
print('minor version mismatch on %s: %d vs %d' % (tag, value.minor,
top_version.minor))
if not check_version(value):
errors += 1
print(warning % (tag, value))
sys.exit(errors)
| apache-2.0 |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/DataHandlers/Mcl_Cmd_GetAdmin_DataHandler.py | 1 | 2279 | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: Mcl_Cmd_GetAdmin_DataHandler.py
def DataHandlerMain(namespace, InputFilename, OutputFilename):
    """Render the marshalled result of a 'getadmin' task as tool output.

    Decompiled source (uncompyle6, Python 2.7 bytecode): treat the error
    dispatch below with suspicion.  Reads the task result from
    InputFilename, writes the rendered output to OutputFilename and
    returns True when handling completed.
    """
    import mcl.imports
    import mcl.data.Input
    import mcl.data.Output
    import mcl.status
    import mcl.target
    import mcl.object.Message
    # Pulls additional names (ERR_GETADMIN_FAILED, ERR_JUMPUP_FAILED,
    # errorStrings, ...) from the given namespace into this module's
    # globals; they are used unqualified below.
    mcl.imports.ImportNamesWithNamespace(namespace, 'mca.process.cmd.getadmin', globals())
    input = mcl.data.Input.GetInput(InputFilename)
    output = mcl.data.Output.StartOutput(OutputFilename, input)
    output.Start('GetAdmin', 'getadmin', [])
    msg = mcl.object.Message.DemarshalMessage(input.GetData())
    if input.GetStatus() != mcl.status.MCL_SUCCESS:
        # Failure path: decode the (module error, OS error) pair and record
        # it with the matching error-string table.
        errorMsg = msg.FindMessage(mcl.object.Message.MSG_KEY_RESULT_ERROR)
        moduleError = errorMsg.FindU32(mcl.object.Message.MSG_KEY_RESULT_ERROR_MODULE)
        osError = errorMsg.FindU32(mcl.object.Message.MSG_KEY_RESULT_ERROR_OS)
        if moduleError == ERR_GETADMIN_FAILED:
            import mcl.elevation.errors
            output.RecordModuleError(moduleError, 0, errorStrings)
            output.RecordModuleError(osError, 0, mcl.elevation.errors.errorStrings)
        elif input.GetStatus() == ERR_JUMPUP_FAILED:
            # NOTE(review): comparing input.GetStatus() against a module
            # error code looks like a decompilation artifact; presumably the
            # original tested moduleError == ERR_JUMPUP_FAILED — confirm
            # against the bytecode before relying on this branch.
            output.RecordModuleError(moduleError, 0, errorStrings)
            import mcl.privilege.errors
            output.RecordModuleError(osError, 0, mcl.privilege.errors.errorStrings)
        else:
            output.RecordModuleError(moduleError, osError, errorStrings)
        output.EndWithStatus(input.GetStatus())
        return True
    if msg.GetCount() == 0:
        # Success but empty result: nothing to render.
        output.EndWithStatus(mcl.target.CALL_SUCCEEDED)
        return True
    # Non-empty result: emit a 'CredentialsSet' XML record and let the
    # output continue in the background.
    from mcl.object.XmlOutput import XmlOutput
    xml = XmlOutput()
    xml.Start('CredentialsSet')
    output.RecordXml(xml)
    output.GoToBackground()
    output.End()
    return True
if __name__ == '__main__':
    # Command-line entry point (Python 2): expects exactly three arguments.
    import sys
    try:
        namespace, InputFilename, OutputFilename = sys.argv[1:]
    except:
        # The bare except catches the ValueError raised by the unpacking
        # when the argument count is wrong.
        print '%s <namespace> <input filename> <output filename>' % sys.argv[0]
        sys.exit(1)
    if DataHandlerMain(namespace, InputFilename, OutputFilename) != True:
        sys.exit(-1)
kosgroup/odoo | addons/website_payment/controllers/main.py | 9 | 4462 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http, _
from odoo.http import request
class WebsitePayment(http.Controller):
    """Public website controllers for saved payment methods and standalone
    payments (payments made outside of a shop checkout)."""

    @http.route(['/my/payment_method'], type='http', auth="user", website=True)
    def payment_method(self, **kwargs):
        """Render the portal page listing the user's saved payment tokens
        and the registration form of every published acquirer."""
        acquirers = list(request.env['payment.acquirer'].search([('website_published', '=', True), ('registration_view_template_id', '!=', False)]))
        partner = request.env.user.partner_id
        # Tokens of the partner itself plus those of its commercial entity
        # (tokens shared at the company level).
        payment_tokens = partner.payment_token_ids
        payment_tokens |= partner.commercial_partner_id.sudo().payment_token_ids
        values = {
            'pms': payment_tokens,
            'acquirers': acquirers
        }
        return_url = request.params.get('redirect', '/my/payment_method')
        for acquirer in acquirers:
            acquirer.form = acquirer.sudo()._registration_render(request.env.user.partner_id.id, {'error': {}, 'error_message': [], 'return_url': return_url, 'json': False, 'bootstrap_formatting': True})
        return request.render("website_payment.pay_methods", values)

    @http.route(['/website_payment/delete/'], methods=['POST'], type='http', auth="user", website=True)
    def delete(self, delete_pm_id=None):
        """Delete the posted payment token, then return to the payment
        methods page."""
        if delete_pm_id:
            pay_meth = request.env['payment.token'].browse(int(delete_pm_id))
            pay_meth.unlink()
        return request.redirect('/my/payment_method')

    @http.route(['/website_payment/pay'], type='http', auth='public', website=True)
    def pay(self, reference='', amount=False, currency_id=None, acquirer_id=None, **kw):
        """Render a standalone payment page for the given amount.

        Falls back on the company currency and on the default (then first
        published) acquirer when none is specified.
        """
        env = request.env
        user = env.user.sudo()

        # Default to the company currency when no currency is given.
        currency_id = currency_id and int(currency_id) or user.company_id.currency_id.id
        currency = env['res.currency'].browse(currency_id)

        # Try the configured default acquirer, then fall back on the first
        # published acquirer of the company.
        acquirer_id = acquirer_id and int(acquirer_id) or \
            env['ir.values'].get_default('payment.transaction', 'acquirer_id', company_id=user.company_id.id) or \
            env['payment.acquirer'].search([('website_published', '=', True), ('company_id', '=', user.company_id.id)])[0].id
        acquirer = env['payment.acquirer'].with_context(submit_class='btn btn-primary pull-right',
                                                        submit_txt=_('Pay Now')).browse(acquirer_id)
        # Auto-increment the reference with a number suffix if it is already
        # used by another transaction.
        reference = request.env['payment.transaction'].get_next_reference(reference)

        # The website public partner marks an anonymous payer.
        partner_id = user.partner_id.id if user.partner_id.id != request.website.partner_id.id else False

        payment_form = acquirer.sudo().render(reference, float(amount), currency.id, values={'return_url': '/website_payment/confirm', 'partner_id': partner_id})
        values = {
            'reference': reference,
            'acquirer': acquirer,
            'currency': currency,
            'amount': float(amount),
            'payment_form': payment_form,
        }
        return request.render('website_payment.pay', values)

    @http.route(['/website_payment/transaction'], type='json', auth="public", website=True)
    def transaction(self, reference, amount, currency_id, acquirer_id):
        """Create the payment.transaction record and remember its id in the
        session for the /website_payment/confirm landing page."""
        partner_id = request.env.user.partner_id.id if request.env.user.partner_id != request.website.partner_id else False
        values = {
            'acquirer_id': int(acquirer_id),
            'reference': reference,
            'amount': float(amount),
            'currency_id': int(currency_id),
            'partner_id': partner_id,
        }
        tx = request.env['payment.transaction'].sudo().create(values)
        request.session['website_payment_tx_id'] = tx.id
        return tx.id

    @http.route(['/website_payment/confirm'], type='http', auth='public', website=True)
    def confirm(self, **kw):
        """Landing page after payment: show the status of the transaction
        stored in the session, or redirect home if there is none."""
        tx_id = request.session.pop('website_payment_tx_id', False)
        if not tx_id:
            return request.redirect('/my/home')
        tx = request.env['payment.transaction'].browse(tx_id)
        # 'done' is the only fully-successful state at this point.
        status = 'success' if tx.state == 'done' else 'danger'
        # Typo fix: the failure message used to read "OOps!".
        message = ('Your payment was successful! It may take some time to be validated on our end.'
                   if tx.state == 'done'
                   else 'Oops! There was a problem with your payment.')
        return request.render('website_payment.confirm', {'tx': tx, 'status': status, 'message': message})
| gpl-3.0 |
ammarkhann/FinalSeniorCode | lib/python2.7/site-packages/google/auth/transport/__init__.py | 27 | 3442 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport - HTTP client library support.
:mod:`google.auth` is designed to work with various HTTP client libraries such
as urllib3 and requests. In order to work across these libraries with different
interfaces some abstraction is needed.
This module provides two interfaces that are implemented by transport adapters
to support HTTP libraries. :class:`Request` defines the interface expected by
:mod:`google.auth` to make requests. :class:`Response` defines the interface
for the return value of :class:`Request`.
"""
import abc
import six
from six.moves import http_client
# Module-level defaults for refresh-and-retry behaviour; semantics are
# described by the attached docstrings.
DEFAULT_REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,)
"""Sequence[int]: Which HTTP status code indicate that credentials should be
refreshed and a request should be retried.
"""

DEFAULT_MAX_REFRESH_ATTEMPTS = 2
"""int: How many times to refresh the credentials and retry a request."""
@six.add_metaclass(abc.ABCMeta)
class Response(object):
    """Abstract container for the result of an HTTP request.

    Transport adapters subclass this and map the three properties onto
    their library-specific response object.
    """

    @abc.abstractproperty
    def status(self):
        """int: The HTTP status code of the response."""
        raise NotImplementedError('status must be implemented.')

    @abc.abstractproperty
    def headers(self):
        """Mapping[str, str]: The headers returned by the server."""
        raise NotImplementedError('headers must be implemented.')

    @abc.abstractproperty
    def data(self):
        """bytes: The raw response body."""
        raise NotImplementedError('data must be implemented.')
@six.add_metaclass(abc.ABCMeta)
class Request(object):
    """Abstract callable that performs a single HTTP request.

    Transport adapters implement this interface on top of their HTTP
    library's own request/response API.

    .. automethod:: __call__
    """

    @abc.abstractmethod
    def __call__(self, url, method='GET', body=None, headers=None,
                 timeout=None, **kwargs):
        """Perform an HTTP request and return its response.

        Args:
            url (str): The URI to request.
            method (str): The HTTP method to use, 'GET' by default.
            body (bytes): The request payload, if any.
            headers (Mapping[str, str]): Headers to send with the request.
            timeout (Optional[int]): Seconds to wait for the server to
                answer; when omitted or None, the transport-specific
                default timeout applies.
            kwargs: Extra transport-specific arguments.

        Returns:
            Response: The HTTP response.

        Raises:
            google.auth.exceptions.TransportError: If any exception occurred.
        """
        # pylint: disable=redundant-returns-doc, missing-raises-doc
        # (pylint doesn't play well with abstract docstrings.)
        raise NotImplementedError('__call__ must be implemented.')
| mit |
carlodef/s2p | utils/s2p_to_potree.py | 2 | 6815 | #!/usr/bin/env python
# Copyright (C) 2017, Gabriele Facciolo <gfacciol@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import argparse
from codecs import open
import re
import subprocess
from bs4 import BeautifulSoup
import s2p
from s2p import common
def tmpfile(ext='', tmpdir='tmp'):
    """Create an empty temporary file inside *tmpdir*.

    The directory is created first if it does not exist.  The file is named
    's2p_<random><ext>' and is NOT removed automatically: the caller is
    responsible for deleting it (see plys_to_potree).

    Args:
        ext: desired file extension. The dot has to be included.
        tmpdir: directory (environment variables are expanded) in which the
            file is created.

    Returns:
        absolute path to the created file
    """
    import tempfile
    try:
        os.mkdir(tmpdir)
    except OSError:
        # Directory already exists (or cannot be created, in which case
        # mkstemp below fails with a meaningful error).
        pass
    fd, out = tempfile.mkstemp(suffix=ext, prefix='s2p_',
                               dir=os.path.expandvars(tmpdir))
    os.close(fd)  # mkstemp returns an open fd; close it to avoid a leak
    return out
def plys_to_potree(input_plys, output, bin_dir='.', cloud_name="cloud"):
    """
    Compute a multi-scale representation of a large point cloud.

    The output can be viewed with a web browser. This is useful for huge
    point clouds. The conversion is delegated to the PotreeConverter binary
    expected under *bin_dir* and is run through common.run (a failing
    command therefore surfaces as an exception from common.run).

    Args:
        input_plys: list of paths to ply files
        output: path to the output folder (created if needed)
        bin_dir: directory containing the PotreeConverter checkout
        cloud_name: name given to the generated cloud page (-p option)
    """
    PotreeConverter = os.path.join(bin_dir, 'PotreeConverter/build/PotreeConverter/PotreeConverter')
    outdir = os.path.dirname(output)

    # List ply files in text file (PotreeConverter --list-of-files input)
    listfile = tmpfile('.txt', outdir)
    with open(listfile, 'w') as f:
        for p in input_plys:
            f.write("%s\n" % p)

    # Run PotreeConverter
    common.run("mkdir -p %s" % output)
    resourcedir = os.path.join(bin_dir, 'PotreeConverter/PotreeConverter/resources/page_template')
    common.run("LC_ALL=C %s --list-of-files %s -o %s -p %s --edl-enabled --material RGB --overwrite --page-template %s" % (PotreeConverter, listfile, output, cloud_name, resourcedir))

    # Cleanup the temporary list file
    os.remove(listfile)
def read_tiles(tile_files):
    """Read a tile-list file and return the tile directories holding a DSM.

    Each line of *tile_files* is a path, relative to the directory of
    *tile_files*, pointing to a file inside a tile directory.  A tile is
    kept only if '<tile_dir>/dsm.tif' exists; discarded tiles are logged
    to 'invalid_tiles.txt' next to *tile_files*.

    Args:
        tile_files: path to a text file listing one tile entry per line.

    Returns:
        list of tile directory paths that contain a dsm.tif.
    """
    tiles = []
    tile_file_dir = os.path.dirname(tile_files)
    # The previous implementation referenced the undefined globals 'outdir'
    # and 'key' here (NameError) and never wrote to the error log.
    err_log = os.path.join(tile_file_dir, 'invalid_tiles.txt')
    with open(tile_files, 'r') as f:
        entries = [line.strip() for line in f]
    with open(err_log, 'w') as ferr:
        for entry in entries:
            tile_dir = os.path.dirname(os.path.join(tile_file_dir, entry))
            if os.path.exists(os.path.join(tile_dir, 'dsm.tif')):
                tiles.append(tile_dir)
            else:
                ferr.write('%s: no dsm\n' % tile_dir)
    return tiles
def test_for_potree(basedir):
PotreeConverter = os.path.join(basedir, 'PotreeConverter/build/PotreeConverter/PotreeConverter')
print('looking for:\n %s' % PotreeConverter)
if not os.path.exists(PotreeConverter):
print('not found\n')
raise subprocess.CalledProcessError(1, "PotreeConverter")
def produce_potree(s2p_outdirs_list, potreeoutdir):
    """
    Produce a single multiscale point cloud for the whole processed region.

    One potree cloud ("cloud_<i>") is converted per s2p output directory;
    the per-cloud HTML pages are then merged into a single "main.html".

    Args:
        s2p_outdirs_list: list of s2p output directories
        potreeoutdir: directory in which the "cloud.potree" folder is written
    """
    basedir = os.path.dirname(os.path.abspath(__file__))
    test_for_potree(os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'))

    def plyvertex(fname):
        # Number of vertices declared in the header of a ply file
        # (returns None if no 'element vertex' line is found).
        with open(fname, 'r', 'utf-8') as f:
            for x in f:
                if x.split()[0] == 'element' and x.split()[1] == 'vertex':
                    return int(x.split()[2])

    js_scripts = []
    regex = re.compile("Potree\.loadPointCloud\(.*\);", re.DOTALL)
    cloudoutdir = os.path.join(potreeoutdir, "cloud.potree")

    # Produce a "cloud_?.html" file for all given s2p outdirs
    for i, s2p_outdir in enumerate(s2p_outdirs_list):
        tiles = s2p.read_tiles(os.path.join(s2p_outdir, 'tiles.txt'))
        print(str(len(tiles))+' tiles found')

        # collect all plys (skipping missing and empty clouds)
        plys = []
        for t in tiles:
            clo = os.path.join(os.path.abspath(os.path.dirname(t)), 'cloud.ply')
            if os.path.isfile(clo):
                if plyvertex(clo) > 0:
                    plys.append(clo)

        # produce the potree point cloud
        cloud_name = "cloud_{}".format(i)
        plys_to_potree(
            plys,
            cloudoutdir,
            os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'),
            cloud_name,
        )

        # Gather the js script inside the HTML file that is relevant
        # to the point cloud
        cloud_html = os.path.join(cloudoutdir, "{}.html".format(cloud_name))
        with open(cloud_html) as f:
            soup = BeautifulSoup(f, features="lxml")
        script = soup.find_all("script")[-1]
        js_script = re.search(regex, script.text).group(0)
        js_scripts.append(js_script)
        os.remove(cloud_html)

    # The "main.html" file will contain a concatenation of all the js
    # scripts that were gathered in the loop above.
    # Use the last HTML file as a basis for the "main.html", and replace
    # its js script by all the js scripts
    main_html = os.path.join(cloudoutdir, "main.html")
    script.string = re.sub(regex, "\n".join(js_scripts), script.text)
    with open(main_html, "w") as f:
        f.write(soup.prettify())
if __name__ == '__main__':
    # Command-line entry point: convert one or more s2p outputs to potree.
    parser = argparse.ArgumentParser(description=('S2P: potree generation tool'))
    parser.add_argument('s2pout', nargs='+',
                        help=('path(s) to the s2p output directory(ies)'))
    parser.add_argument('--outdir', metavar='potree_outdir', default='.',
                        help=('path to output directory'))
    args = parser.parse_args()
    try:
        produce_potree(args.s2pout, args.outdir)
    except subprocess.CalledProcessError:
        # Raised by test_for_potree when the PotreeConverter binary is
        # missing: print build instructions instead of a traceback.
        basedir = os.path.dirname(os.path.abspath(__file__))
        print('You must download and compile PotreeConverter. Run the following commands:')
        print(' > cd %s'%basedir)
        print(' > git clone https://github.com/gfacciol/PotreeConverter_PLY_toolchain --recurse-submodules')
        print(' > cd PotreeConverter_PLY_toolchain')
        print(' > CC=gcc CXX=g++ make')
geektoni/shogun | examples/undocumented/python/distance_canberraword.py | 2 | 1165 | #!/usr/bin/env python
from tools.load import LoadMatrix
import shogun as sg
# Load the example DNA data shipped with the shogun examples.
lm=LoadMatrix()
traindna = lm.load_dna('../data/fm_train_dna.dat')
testdna = lm.load_dna('../data/fm_test_dna.dat')
# Parameter sets used by the example runner:
# (train strings, test strings, order, gap, reverse)
parameter_list = [[traindna,testdna,3,0,False],[traindna,testdna,3,0,False]]
def distance_canberraword (fm_train_dna=traindna,fm_test_dna=testdna,order=3,gap=0,reverse=False):
    """Compute CanberraWordDistance matrices between sorted word (k-mer)
    features built from DNA strings.

    Returns (distance, train/train distance matrix, train/test distance
    matrix).
    """
    def to_word_features(dna_strings):
        # Raw DNA characters turned into word features of the given order.
        chars = sg.create_string_features(dna_strings, sg.DNA)
        return sg.create_string_features(chars, order-1, order, gap, reverse)

    feats_train = to_word_features(fm_train_dna)
    feats_test = to_word_features(fm_test_dna)

    # The sorting preprocessor is fitted on the training features and
    # applied to both feature sets.
    sorter = sg.create_transformer("SortWordString")
    sorter.fit(feats_train)
    feats_train = sorter.transform(feats_train)
    feats_test = sorter.transform(feats_test)

    distance = sg.create_distance("CanberraWordDistance")
    distance.init(feats_train, feats_train)
    dm_train = distance.get_distance_matrix()
    distance.init(feats_train, feats_test)
    dm_test = distance.get_distance_matrix()
    return distance, dm_train, dm_test
if __name__=='__main__':
    print('CanberraWordDistance')
    # Run the example with the first parameter set.
    distance_canberraword(*parameter_list[0])
| bsd-3-clause |
amlyj/pythonStudy | 2.7/data_analysis/study_numpy/numpy_ndarray.py | 1 | 5118 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-7-22 上午12:41
# @Author : tom.lee
# @docs : http://old.sebug.net/paper/books/scipydoc/numpy_intro.html
# @File : study_numpy.py
# @Software: PyCharm
"""
numpy
Numpy是Python的一个科学计算的库,提供了矩阵运算的功能,其一般与Scipy,matplotlib一起使用.
NumPy提供了两种基本的对象:
ndarray(N-dimensional array object)ndarray(数组)是存储单一数据类型的多维数组;
ufunc(universal function object)而 ufunc则是能够对数组进行处理的函数。
"""
import numpy as np
def split_line():
    """Print a separator line of 36 ('*' * 6**2) asterisks."""
    print '*' * 6 ** 2
def np_version():
    """
    Print the installed numpy version.
    :return:
    """
    print np.version.version
def np_list():
    """
    numpy arrays:
    store elements of a single data type only;
    created with "numpy.array()",
    with "dtype = numpy.<type>" to explicitly choose the element type.
    :return:
    """
    # Creation
    l = np.array([1, 2, 3], dtype=np.int8)
    a = np.array([1, 2, 3, 4])
    b = np.array((5, 6, 7, 8))
    c = np.array([[1, 2, 3, 4], [4, 5, 6, 7], [7, 8, 9, 10]])
    print 'l:', l
    print 'a:', a
    print 'b:', b
    print 'c:', c
    split_line()
    # Element type
    print l.dtype, c.dtype
    split_line()
    # Shape: array a's shape has a single element, so it is one-dimensional.
    # Array c's shape has two elements, so it is two-dimensional, with axis 0
    # of length 3 and axis 1 of length 4.
    print l.shape, c.shape
    split_line()
    # Changing the length of each axis: only the axis sizes change; the
    # positions of the elements in memory do not.
    c.shape = 4, 3
    print c
    split_line()
    # When one axis is -1 its length is computed automatically from the
    # total number of elements, so this changes c's shape to (2, 6).
    c.shape = 2, -1
    print c
    split_line()
    # reshape creates a new array with a different shape while the original
    # array's shape is unchanged.
    # Note that a and d actually share the same data storage in memory.
    d = a.reshape((2, 2))
    print 'a:', a
    print 'd:', d
    split_line()
def np_list_create():
    """Demonstrate the array-creation helpers arange, linspace and logspace."""
    # Create a 1-D array, xrange-style: [start, end) with the given step —
    # start included, end excluded; element count = (end - start) / step.
    np_lst = np.arange(0, 10, 1)
    print np_lst
    print '大小:%d' % np_lst.shape
    split_line()
    # Arithmetic progression:
    # linspace(start, end, size) covers [start, end] with both endpoints
    # included, producing `size` elements;
    # the endpoint keyword controls whether the end value is included.
    print np.linspace(0, 1, 12)
    split_line()
    # Geometric progression:
    # logspace(start exponent, end exponent, count, base — 10 by default)
    print np.logspace(0, 2, 20)
    split_line()
def np_list_by_byte():
    """
    frombuffer, fromstring, fromfile and similar functions create arrays
    from byte sequences; always pass the dtype argument explicitly.
    A Python (2.x) string is really a byte sequence, one byte per character,
    so creating an 8-bit integer array from a string s yields exactly the
    ASCII codes of its characters.
    :return:
    """
    s = 'abcdefg'
    print np.frombuffer(s, dtype=np.int8)
    split_line()
    print np.fromstring(s, dtype=np.int8)
    split_line()
    # Creating a 16-bit integer array from a string treats each pair of
    # adjacent bytes as one integer: bytes 98 and 97 taken as one 16-bit
    # value give 98*256 + 97 = 25185.
    # This shows that memory holds the data little endian (low byte first).
    # The string length must therefore be even.
    print np.fromstring('abcdefgh', dtype=np.int16)
    split_line()
def np_list_by_func():
    """
    Create arrays from a function.
    :return:
    """
    # fromfunction takes a function and an iterable (tuple or list) giving
    # the size of each axis:
    # (10,) means a 1-D array of 10 elements and the function receives one
    # argument;
    # (5, 6) means a 2-D array of 5 by 6 elements and the function receives
    # two arguments.
    print np.fromfunction(lambda x: x + 1, (10,))
    print np.fromfunction(lambda x, y: (x + 1) * (y + 1), (5, 6))
    split_line()
def np_list_opt():
    """
    Basic numpy array operations are mostly the same as on Python lists.
    :return:
    """
    l = np.arange(10, 1, -1)
    print l
    print '做小值:', l.min()
    print '最大值:', l.max()
    print '下标0的元素:', l[0]
    split_line()
    # Advanced (fancy) indexing below does not share memory with the
    # original array, unlike the basic indexing used above.
    print l[np.array([1, 5, 3])]  # index the elements with an integer array
    print l[[1, 5, 3]]  # index the elements with a list
    split_line()
    # Direct filtering on the array
    print l[l > 3]  # directly select the values greater than 3
    print l > 3  # element-wise comparison returning a boolean array
    split_line()
if __name__ == '__main__':
    # Uncomment individual demos to run them.
    # np_version()
    # np_list()
    np_list_create()
    # np_list_by_byte()
    # np_list_by_func()
    # np_list_opt()
    print np.fromfunction(lambda x: x, (10,))
| mit |
oblique-labs/pyVM | rpython/rlib/parsing/ebnfparse.py | 2 | 58450 | import py
from rpython.rlib.parsing.parsing import PackratParser, Rule
from rpython.rlib.parsing.tree import Nonterminal, RPythonVisitor
from rpython.rlib.parsing.codebuilder import Codebuilder
from rpython.rlib.parsing.regexparse import parse_regex
from rpython.rlib.parsing.regex import StringExpression
from rpython.rlib.parsing.deterministic import DFA
from rpython.rlib.parsing.lexer import Lexer, DummyLexer
from rpython.rlib.objectmodel import we_are_translated
def make_ebnf_parser():
    """Build the lexer, packrat parser and tree transformer for EBNF files.

    The tokens of the EBNF language itself are defined as regular
    expressions below; the grammar of the EBNF language is then
    bootstrapped by feeding an EBNF description of EBNF to parse_ebnf.

    Returns a (parser, lexer, transformer) triple.
    """
    # Tokens of the EBNF language itself.
    NONTERMINALNAME = parse_regex("([a-z]|_)[a-z0-9_]*")
    SYMBOLNAME = parse_regex("_*[A-Z]([A-Z]|_)*")
    LONGQUOTED = parse_regex(r'"[^\"]*(\\\"?[^\"]+)*(\\\")?"')
    QUOTEDQUOTE = parse_regex("""'"'""")
    COMMENT = parse_regex("#[^\\n]*\\n")
    names1 = ['SYMBOLNAME', 'NONTERMINALNAME', 'QUOTE', 'QUOTE', 'IGNORE',
              'IGNORE', 'IGNORE', 'IGNORE']
    regexs1 = [SYMBOLNAME, NONTERMINALNAME, LONGQUOTED, QUOTEDQUOTE, COMMENT,
               StringExpression('\n'), StringExpression(' '),
               StringExpression('\t')]
    # The grammar of EBNF, written in EBNF (bootstrapping).
    rs, rules, transformer = parse_ebnf(r"""
    file: list EOF;
    list: element+;
    element: <regex> | <production>;
    regex: SYMBOLNAME ":" QUOTE ";";
    production: NONTERMINALNAME ":" body? ";";
    body: (expansion ["|"])* expansion;
    expansion: decorated+;
    decorated: enclosed "*" |
               enclosed "+" |
               enclosed "?" |
               <enclosed>;
    enclosed: "[" expansion "]" |
              ">" expansion "<" |
              "<" primary ">" |
              "(" <expansion> ")" |
              <primary>;
    primary: NONTERMINALNAME | SYMBOLNAME | QUOTE;
    """)
    names2, regexs2 = zip(*rs)
    lexer = Lexer(regexs1 + list(regexs2), names1 + list(names2),
                  ignore=['IGNORE'])
    parser = PackratParser(rules, "file")
    return parser, lexer, transformer
def parse_ebnf(s):
    """Parse an EBNF grammar string.

    Returns (list of (token name, regex) pairs, list of Rule objects,
    generated ToAST transformer class).

    NOTE(review): relies on the module-level `lexer` and `parser` globals;
    presumably they are bound from a make_ebnf_parser() call elsewhere in
    the module — confirm before moving this function.
    """
    visitor = ParserBuilder()
    tokens = lexer.tokenize(s, True)
    #print tokens
    s = parser.parse(tokens)
    # Simplify the raw parse tree with the generated EBNF-to-AST visitor.
    s = s.visit(EBNFToAST())
    assert len(s) == 1
    s = s[0]
    s.visit(visitor)
    rules, changes = visitor.get_rules_and_changes()
    maker = TransformerMaker(rules, changes)
    ToAstVisitor = maker.make_transformer()
    return zip(visitor.names, visitor.regexs), rules, ToAstVisitor
def check_for_missing_names(names, regexs, rules):
    """Verify that every symbol used in the rules' expansions is defined.

    A symbol is known if it is a token name, the implicit "EOF" token, or
    the nonterminal of one of the rules.  Raises ValueError for the first
    unknown symbol encountered.
    """
    known = set(names)
    known.add("EOF")
    known.update(rule.nonterminal for rule in rules)
    for rule in rules:
        for expansion in rule.expansions:
            for symbol in expansion:
                if symbol not in known:
                    raise ValueError("symbol '%s' not known" % (symbol, ))
def make_parse_function(regexs, rules, eof=False):
    """Build a parse function for a tokenized grammar.

    Args:
        regexs: list of (token name, regex) pairs; tokens named "IGNORE"
            are skipped by the lexer.
        rules: list of Rule objects; the first rule is the start symbol.
        eof: passed through to the lexer's tokenize call.

    Returns:
        parse(s) -> parse tree of the input string s.
    """
    from rpython.rlib.parsing.lexer import Lexer
    names, regexs = zip(*regexs)
    if "IGNORE" in names:
        ignore = ["IGNORE"]
    else:
        ignore = []
    check_for_missing_names(names, regexs, rules)
    lexer = Lexer(list(regexs), list(names), ignore=ignore)
    parser = PackratParser(rules, rules[0].nonterminal)
    def parse(s):
        tokens = lexer.tokenize(s, eof=eof)
        s = parser.parse(tokens)
        # When run untranslated under py.test --view, display the tree.
        if not we_are_translated():
            try:
                if py.test.config.option.view:
                    s.view()
            except AttributeError:
                pass
        return s
    return parse
class ParserBuilder(object):
    """Tree visitor that turns a parsed EBNF file into token regexes and
    grammar Rule objects.

    For every expansion it also records a "change" string with one
    character per symbol, telling the generated ToAST transformer how to
    treat the matched child:
      ' '  keep the node as is
      '['  drop the node
      '>'  splice the node's children into the parent
      '<'  replace the parent by this child
    """

    def __init__(self):
        self.regexs = []            # token regexes, parallel to self.names
        self.names = []             # token names, parallel to self.regexs
        self.rules = []             # grammar Rule objects built so far
        self.changes = []           # per-rule list of change strings
        self.maybe_rules = {}       # helper rules generated for '?' and '*'
        self.num_plus_symbols = 0   # counter used to name '+' helper rules
        self.first_rule = None      # nonterminal of the first user rule
        self.literals = {}          # quoted literal -> generated token name

    def visit_file(self, node):
        return node.children[0].visit(self)

    def visit_list(self, node):
        for child in node.children:
            child.visit(self)

    def visit_regex(self, node):
        # regex: SYMBOLNAME ":" QUOTE ";"  -- register a token definition
        regextext = node.children[2].additional_info[1:-1].replace('\\"', '"')
        regex = parse_regex(regextext)
        if regex is None:
            raise ValueError(
                "%s is not a valid regular expression" % regextext)
        self.regexs.append(regex)
        self.names.append(node.children[0].additional_info)

    def visit_production(self, node):
        # production: NONTERMINALNAME ":" body? ";"
        name = node.children[0].additional_info
        if len(node.children) == 3:
            # Empty body: the nonterminal expands to nothing.
            self.changes.append([])
            self.rules.append(Rule(name, [[]]))
            return
        expansions = node.children[2].visit(self)
        changes = []
        rule_expansions = []
        for expansion in expansions:
            expansion, change = zip(*expansion)
            rule_expansions.append(list(expansion))
            changes.append("".join(change))
        if self.first_rule is None:
            self.first_rule = name
        self.changes.append(changes)
        self.rules.append(Rule(name, rule_expansions))

    def visit_body(self, node):
        # Alternatives separated by "|".
        expansions = []
        for child in node.children:
            expansion = child.visit(self)
            expansions.append(expansion)
        return expansions

    def visit_expansion(self, node):
        expansions = []
        for child in node.children:
            expansion = child.visit(self)
            expansions += expansion
        return expansions

    def visit_enclosed(self, node):
        # "[...]", ">...<" or "<...>": override the change character of the
        # enclosed symbols with the bracket character.
        result = []
        newchange = node.children[0].additional_info
        for name, change in node.children[1].visit(self):
            assert change == " " or change == newchange
            result.append((name, newchange))
        return result

    def visit_decorated(self, node):
        # "*", "+" or "?" repetition: generate a helper rule for it.
        expansions = node.children[0].visit(self)
        expansions, changes = zip(*expansions)
        expansions, changes = list(expansions), "".join(changes)
        if node.children[1].additional_info == "*":
            name = "_star_symbol%s" % (len(self.maybe_rules), )
            maybe_rule = True
            expansions = [expansions + [name]]
            changes = [changes + ">", changes]
        elif node.children[1].additional_info == "+":
            name = "_plus_symbol%s" % (self.num_plus_symbols, )
            self.num_plus_symbols += 1
            maybe_rule = False
            expansions = [expansions + [name], expansions]
            changes = [changes + ">", changes]
        elif node.children[1].additional_info == "?":
            name = "_maybe_symbol%s" % (len(self.maybe_rules), )
            maybe_rule = True
            expansions = [expansions]
            changes = [changes]
        self.rules.append(Rule(name, expansions))
        self.changes.append(changes)
        if maybe_rule:
            self.maybe_rules[name] = self.rules[-1]
        return [(name, ">")]

    def visit_primary_parens(self, node):
        if len(node.children) == 1:
            return node.children[0].visit(self)
        else:
            return node.children[1].visit(self)

    def visit_primary(self, node):
        if node.children[0].symbol == "QUOTE":
            # Inline string literal: register an implicit token for it.
            from rpython.rlib.parsing.regexparse import unescape
            content = node.children[0].additional_info[1:-1]
            expression = unescape(content)
            name = self.get_literal_name(expression)
            return [(name, " ")]
        else:
            return [(node.children[0].additional_info, " ")]

    def get_literal_name(self, expression):
        # One shared token per distinct literal; literals are prepended so
        # they take precedence over the user-defined token regexes.
        if expression in self.literals:
            return self.literals[expression]
        name = "__%s_%s" % (len(self.literals), expression)
        self.literals[expression] = name
        self.regexs.insert(0, StringExpression(expression))
        self.names.insert(0, name)
        return name

    def get_rules_and_changes(self):
        self.fix_rule_order()
        return self.add_all_possibilities()

    def fix_rule_order(self):
        # The first user-defined rule must become rules[0] (start symbol).
        if self.rules[0].nonterminal != self.first_rule:
            for i, r in enumerate(self.rules):
                if r.nonterminal == self.first_rule:
                    break
            self.rules[i], self.rules[0] = self.rules[0], self.rules[i]
            self.changes[i], self.changes[0] = self.changes[0], self.changes[i]

    def add_all_possibilities(self):
        # Expand every expansion containing '?'/'*' helper symbols into
        # explicit variants with and without each optional symbol.
        all_rules = []
        other_rules = []
        all_changes = []
        other_changes = []
        for rule, changes in zip(self.rules, self.changes):
            if rule.expansions == [[]]:
                all_rules.append(rule)
                all_changes.append([])
                continue
            real_changes = []
            real_expansions = []
            for index, (expansion, change) in enumerate(
                    zip(rule.expansions, changes)):
                maybe_pattern = [symbol in self.maybe_rules
                                     for symbol in expansion]
                n = maybe_pattern.count(True)
                if n == 0:
                    real_expansions.append(expansion)
                    real_changes.append(change)
                    continue
                if n == len(expansion):
                    raise ValueError("Rule %r's expansion needs "
                                     "at least one symbol with >0 repetitions"
                                         % rule.nonterminal)
                # Cut the expansion after each optional symbol and chain the
                # pieces with generated "__<rule>_rest_<i>_<j>" rules.
                slices = []
                start = 0
                for i, (maybe, symbol) in enumerate(
                        zip(maybe_pattern, expansion)):
                    if maybe:
                        slices.append((start, i + 1))
                        start = i + 1
                rest_slice = (start, i + 1)
                name = rule.nonterminal
                for i, (start, stop) in enumerate(slices):
                    nextname = "__%s_rest_%s_%s" % (rule.nonterminal, index, i)
                    if i < len(slices) - 1:
                        new_expansions = [
                            expansion[start: stop] + [nextname],
                            expansion[start: stop - 1] + [nextname]]
                        new_changes = [change[start: stop] + ">",
                                       change[start: stop - 1] + ">"]
                    else:
                        rest_expansion = expansion[slice(*rest_slice)]
                        new_expansions = [
                            expansion[start: stop] + rest_expansion,
                            expansion[start: stop - 1] + rest_expansion]
                        rest_change = change[slice(*rest_slice)]
                        new_changes = [change[start: stop] + rest_change,
                                       change[start: stop - 1] + rest_change]
                    if i == 0:
                        real_expansions += new_expansions
                        real_changes += new_changes
                    else:
                        other_rules.append(Rule(name, new_expansions))
                        other_changes.append(new_changes)
                    name = nextname
            all_rules.append(Rule(rule.nonterminal, real_expansions))
            all_changes.append(real_changes)
        return all_rules + other_rules, all_changes + other_changes
class TransformerMaker(Codebuilder):
    """Generates (as Python source) a ToAST visitor class from grammar
    rules and their change strings, then compiles it with exec.

    The generated visitor simplifies raw parse trees according to the
    '[', '>' and '<' markers recorded by ParserBuilder.
    """

    def __init__(self, rules, changes):
        Codebuilder.__init__(self)
        self.rules = rules
        self.changes = changes
        self.nonterminals = dict.fromkeys([rule.nonterminal for rule in rules])

    def make_transformer(self, print_code=False):
        """Emit, compile and return the generated ToAST class."""
        self.start_block("class ToAST(object):")
        for i in range(len(self.rules)):
            self.create_visit_method(i)
        # transform() is the public entry point of the generated class.
        self.start_block("def transform(self, tree):")
        self.emit("#auto-generated code, don't edit")
        self.emit("assert isinstance(tree, Nonterminal)")
        startsymbol = self.rules[0].nonterminal
        self.emit("assert tree.symbol == %r" % (startsymbol, ))
        self.emit("r = self.visit_%s(tree)" % (startsymbol, ))
        self.emit("assert len(r) == 1")
        self.start_block("if not we_are_translated():")
        self.start_block("try:")
        self.start_block("if py.test.config.option.view:")
        self.emit("r[0].view()")
        self.end_block("option.view")
        self.end_block("try")
        self.start_block("except AttributeError:")
        self.emit("pass")
        self.end_block("except")
        self.end_block("we_are_translated")
        self.emit("return r[0]")
        self.end_block("transform")
        self.end_block("ToAST")
        code = self.get_code()
        if print_code:
            print code
        # Compile the generated source in a namespace exposing the few
        # names the generated code references.
        ns = {"RPythonVisitor": RPythonVisitor, "Nonterminal": Nonterminal,
              "we_are_translated": we_are_translated, "py": py}
        exec py.code.Source(code).compile() in ns
        ToAST = ns["ToAST"]
        ToAST.__module__ = "rpython.rlib.parsing.ebnfparse"
        assert isinstance(ToAST, type)
        assert ToAST.__name__ == "ToAST"
        ToAST.source = code
        ToAST.changes = self.changes
        return ToAST

    def dispatch(self, symbol, expr):
        # Nonterminals are visited recursively; tokens are wrapped in a list.
        if symbol in self.nonterminals:
            return "self.visit_%s(%s)" % (symbol, expr)
        return "[%s]" % (expr, )

    def create_visit_method(self, index):
        # Emit the visit_<nonterminal> method for rule number `index`.
        rule = self.rules[index]
        change = self.changes[index]
        self.start_block("def visit_%s(self, node):" % (rule.nonterminal, ))
        self.emit("#auto-generated code, don't edit")
        if len(change) == 0:
            # Empty production: return the node unchanged.
            self.emit("return [node]")
            self.end_block(rule.nonterminal)
            return
        for expansion, subchange in self.generate_conditions(index):
            if "<" in subchange:
                # '<': the parent is replaced by exactly one child.
                i = subchange.index("<")
                assert subchange.count("<") == 1, (
                    "cannot expand more than one node in rule %s" % (rule, ))
                i = subchange.index("<")
                returnval = self.dispatch(
                    expansion[i], "node.children[%s]" % (i, ))
                self.emit("return " + returnval)
            else:
                self.create_returning_code(expansion, subchange)
        self.end_block(rule.nonterminal)

    def create_returning_code(self, expansion, subchange):
        # Emit the body that rebuilds the node's children according to the
        # per-symbol change characters (' ' keep, '[' drop, '>' splice).
        assert len(expansion) == len(subchange)
        self.emit("children = []")
        for i, (symbol, c) in enumerate(zip(expansion, subchange)):
            if c == "[":
                continue
            expr = self.dispatch(symbol, "node.children[%s]" % (i, ))
            if c == " ":
                self.emit("children.extend(%s)" % (expr, ))
            if c == ">":
                self.emit("expr = %s" % (expr, ))
                self.emit("assert len(expr) == 1")
                self.emit("children.extend(expr[0].children)")
        self.emit("return [Nonterminal(node.symbol, children)]")

    def generate_conditions(self, index):
        # Yield (expansion, change) pairs, emitting if-conditions that
        # discriminate which expansion matched: first by the number of
        # children, then by individual child symbols.
        rule = self.rules[index]
        change = self.changes[index]
        len_partition = {}
        if len(rule.expansions) == 1:
            yield rule.expansions[0], change[0]
            return
        for expansion, subchange in zip(rule.expansions, change):
            len_partition.setdefault(len(expansion), []).append(
                (expansion, subchange))
        len_partition = len_partition.items()
        len_partition.sort()
        last_length = len_partition[-1][0]
        self.emit("length = len(node.children)")
        for length, items in len_partition:
            if length < last_length:
                self.start_block("if length == %s:" % (length, ))
            if len(items) == 1:
                yield items[0]
                if length < last_length:
                    self.end_block("if length ==")
                continue
            # XXX quite bad complexity, might be ok in practice
            while items:
                shorter = False
                for i in range(length):
                    symbols = {}
                    for pos, item in enumerate(items):
                        expansion = item[0]
                        symbol = expansion[i]
                        symbols.setdefault(symbol, []).append((pos, item))
                    symbols = symbols.items()
                    symbols.sort()
                    remove = []
                    for symbol, subitems in symbols:
                        if (len(subitems) == 1 and
                            (len(items) - len(remove)) > 1):
                            self.start_block(
                                "if node.children[%s].symbol == %r:" % (
                                    i, symbol))
                            pos, subitem = subitems[0]
                            yield subitem
                            remove.append(pos)
                            shorter = True
                            self.end_block("if node.children[")
                    remove.sort()
                    for pos in remove[::-1]:
                        items.pop(pos)
                    if shorter:
                        if len(items) == 1:
                            yield items[0]
                            items.pop(0)
                        else:
                            continue
                    break
                # for the remaining items we do a brute force comparison
                # could be even cleverer, but very unlikely to be useful
                assert len(items) != 1
                for expansion, subchange in items:
                    conds = []
                    for i, symbol in enumerate(expansion):
                        conds.append("node.children[%s].symbol == %r" % (
                            i, symbol))
                    self.start_block("if (%s):" % (" and ".join(conds), ))
                    yield expansion, subchange
                    self.end_block("if")
            if length < last_length:
                self.end_block("if length ==")
# generated code between this line and its other occurence
class EBNFToAST(object):
    """Auto-generated visitor turning a raw EBNF parse tree into an AST.

    Helper nonterminals introduced for repetition/option constructs
    (``_plus_symbol*``, ``_star_symbol*``, ``_maybe_symbol*``) are
    flattened into their parent nodes.  This whole class is regenerated
    by the ``__main__`` driver at the bottom of this file -- do not edit
    it by hand.
    """
    def visit_file(self, node):
        #auto-generated code, don't edit
        children = []
        children.extend(self.visit_list(node.children[0]))
        children.extend([node.children[1]])
        return [Nonterminal(node.symbol, children)]
    def visit__plus_symbol0(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 1:
            children = []
            children.extend(self.visit_element(node.children[0]))
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend(self.visit_element(node.children[0]))
        expr = self.visit__plus_symbol0(node.children[1])
        assert len(expr) == 1
        children.extend(expr[0].children)
        return [Nonterminal(node.symbol, children)]
    def visit_list(self, node):
        #auto-generated code, don't edit
        children = []
        expr = self.visit__plus_symbol0(node.children[0])
        assert len(expr) == 1
        children.extend(expr[0].children)
        return [Nonterminal(node.symbol, children)]
    def visit_element(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if node.children[0].symbol == 'production':
            return self.visit_production(node.children[0])
        return self.visit_regex(node.children[0])
    def visit_regex(self, node):
        #auto-generated code, don't edit
        children = []
        children.extend([node.children[0]])
        children.extend([node.children[1]])
        children.extend([node.children[2]])
        children.extend([node.children[3]])
        return [Nonterminal(node.symbol, children)]
    def visit__maybe_symbol0(self, node):
        #auto-generated code, don't edit
        children = []
        children.extend(self.visit_body(node.children[0]))
        return [Nonterminal(node.symbol, children)]
    def visit_production(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 3:
            children = []
            children.extend([node.children[0]])
            children.extend([node.children[1]])
            children.extend([node.children[2]])
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend([node.children[0]])
        children.extend([node.children[1]])
        expr = self.visit__maybe_symbol0(node.children[2])
        assert len(expr) == 1
        children.extend(expr[0].children)
        children.extend([node.children[3]])
        return [Nonterminal(node.symbol, children)]
    def visit__star_symbol1(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 2:
            children = []
            children.extend(self.visit_expansion(node.children[0]))
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend(self.visit_expansion(node.children[0]))
        expr = self.visit__star_symbol1(node.children[2])
        assert len(expr) == 1
        children.extend(expr[0].children)
        return [Nonterminal(node.symbol, children)]
    def visit_body(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 1:
            children = []
            children.extend(self.visit_expansion(node.children[0]))
            return [Nonterminal(node.symbol, children)]
        children = []
        expr = self.visit__star_symbol1(node.children[0])
        assert len(expr) == 1
        children.extend(expr[0].children)
        children.extend(self.visit_expansion(node.children[1]))
        return [Nonterminal(node.symbol, children)]
    def visit__plus_symbol1(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 1:
            children = []
            children.extend(self.visit_decorated(node.children[0]))
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend(self.visit_decorated(node.children[0]))
        expr = self.visit__plus_symbol1(node.children[1])
        assert len(expr) == 1
        children.extend(expr[0].children)
        return [Nonterminal(node.symbol, children)]
    def visit_expansion(self, node):
        #auto-generated code, don't edit
        children = []
        expr = self.visit__plus_symbol1(node.children[0])
        assert len(expr) == 1
        children.extend(expr[0].children)
        return [Nonterminal(node.symbol, children)]
    def visit_decorated(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 1:
            return self.visit_enclosed(node.children[0])
        if node.children[1].symbol == '__3_*':
            children = []
            children.extend(self.visit_enclosed(node.children[0]))
            children.extend([node.children[1]])
            return [Nonterminal(node.symbol, children)]
        if node.children[1].symbol == '__4_+':
            children = []
            children.extend(self.visit_enclosed(node.children[0]))
            children.extend([node.children[1]])
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend(self.visit_enclosed(node.children[0]))
        children.extend([node.children[1]])
        return [Nonterminal(node.symbol, children)]
    def visit_enclosed(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if length == 1:
            return self.visit_primary(node.children[0])
        if node.children[0].symbol == '__10_(':
            return self.visit_expansion(node.children[1])
        if node.children[0].symbol == '__6_[':
            children = []
            children.extend([node.children[0]])
            children.extend(self.visit_expansion(node.children[1]))
            children.extend([node.children[2]])
            return [Nonterminal(node.symbol, children)]
        if node.children[0].symbol == '__8_>':
            children = []
            children.extend([node.children[0]])
            children.extend(self.visit_expansion(node.children[1]))
            children.extend([node.children[2]])
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend([node.children[0]])
        children.extend(self.visit_primary(node.children[1]))
        children.extend([node.children[2]])
        return [Nonterminal(node.symbol, children)]
    def visit_primary(self, node):
        #auto-generated code, don't edit
        length = len(node.children)
        if node.children[0].symbol == 'NONTERMINALNAME':
            children = []
            children.extend([node.children[0]])
            return [Nonterminal(node.symbol, children)]
        if node.children[0].symbol == 'QUOTE':
            children = []
            children.extend([node.children[0]])
            return [Nonterminal(node.symbol, children)]
        children = []
        children.extend([node.children[0]])
        return [Nonterminal(node.symbol, children)]
    def transform(self, tree):
        #auto-generated code, don't edit
        # entry point: *tree* must be a 'file' parse tree; returns the
        # single transformed AST root
        assert isinstance(tree, Nonterminal)
        assert tree.symbol == 'file'
        r = self.visit_file(tree)
        assert len(r) == 1
        if not we_are_translated():
            try:
                # optional graphical dump when running under py.test --view
                if py.test.config.option.view:
                    r[0].view()
            except AttributeError:
                pass
        return r[0]
# Packrat grammar table for the EBNF meta-grammar itself (start symbol
# 'file').  Regenerated by the __main__ driver at the bottom of this
# file -- do not edit by hand.
parser = PackratParser([Rule('file', [['list', 'EOF']]),
    Rule('_plus_symbol0', [['element', '_plus_symbol0'], ['element']]),
    Rule('list', [['_plus_symbol0']]),
    Rule('element', [['regex'], ['production']]),
    Rule('regex', [['SYMBOLNAME', '__0_:', 'QUOTE', '__1_;']]),
    Rule('_maybe_symbol0', [['body']]),
    Rule('production', [['NONTERMINALNAME', '__0_:', '_maybe_symbol0', '__1_;'], ['NONTERMINALNAME', '__0_:', '__1_;']]),
    Rule('_star_symbol1', [['expansion', '__2_|', '_star_symbol1'], ['expansion', '__2_|']]),
    Rule('body', [['_star_symbol1', 'expansion'], ['expansion']]),
    Rule('_plus_symbol1', [['decorated', '_plus_symbol1'], ['decorated']]),
    Rule('expansion', [['_plus_symbol1']]),
    Rule('decorated', [['enclosed', '__3_*'], ['enclosed', '__4_+'], ['enclosed', '__5_?'], ['enclosed']]),
    Rule('enclosed', [['__6_[', 'expansion', '__7_]'], ['__8_>', 'expansion', '__9_<'], ['__9_<', 'primary', '__8_>'], ['__10_(', 'expansion', '__11_)'], ['primary']]),
    Rule('primary', [['NONTERMINALNAME'], ['SYMBOLNAME'], ['QUOTE']])],
    'file')
def recognize(runner, i):
    """Auto-generated DFA recognizer for the EBNF tokenizer.

    Regenerated by the __main__ driver at the bottom of this file -- do
    not edit by hand.  Scans ``runner.text`` from position *i*, recording
    the longest match seen so far in ``runner.last_matched_index`` /
    ``runner.last_matched_state``.  Returns the end position of a match,
    or the bitwise complement ``~i`` of the failure position when no
    (further) token can be matched from here.
    """
    assert i >= 0
    input = runner.text
    state = 0
    while 1:
        # state 0: initial state, dispatch on the first character
        if state == 0:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 0
                return ~i
            if char == '\t':
                state = 1
            elif char == '\n':
                state = 2
            elif char == ' ':
                state = 3
            elif char == '#':
                state = 4
            elif char == '"':
                state = 5
            elif char == "'":
                state = 6
            elif char == ')':
                state = 7
            elif char == '(':
                state = 8
            elif char == '+':
                state = 9
            elif char == '*':
                state = 10
            elif char == ';':
                state = 11
            elif char == ':':
                state = 12
            elif char == '<':
                state = 13
            elif char == '?':
                state = 14
            elif char == '>':
                state = 15
            elif 'A' <= char <= 'Z':
                state = 16
            elif char == '[':
                state = 17
            elif char == ']':
                state = 18
            elif char == '_':
                state = 19
            elif 'a' <= char <= 'z':
                state = 20
            elif char == '|':
                state = 21
            else:
                break
        # state 4: inside a '#' comment, consume everything up to newline
        if state == 4:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 4
                return ~i
            if char == '\n':
                state = 27
            elif '\x00' <= char <= '\t':
                state = 4
                continue
            elif '\x0b' <= char <= '\xff':
                state = 4
                continue
            else:
                break
        # state 5: inside a double-quoted string body
        if state == 5:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 5
                return ~i
            if char == '\\':
                state = 24
            elif char == '"':
                state = 25
            elif '\x00' <= char <= '!':
                state = 5
                continue
            elif '#' <= char <= '[':
                state = 5
                continue
            elif ']' <= char <= '\xff':
                state = 5
                continue
            else:
                break
        if state == 6:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 6
                return ~i
            if char == '"':
                state = 22
            else:
                break
        # state 16: accepting -- SYMBOLNAME (uppercase/underscore run)
        if state == 16:
            runner.last_matched_index = i - 1
            runner.last_matched_state = state
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 16
                return i
            if char == '_':
                state = 16
                continue
            elif 'A' <= char <= 'Z':
                state = 16
                continue
            else:
                break
        # state 19: accepting -- leading underscore(s) of a name
        if state == 19:
            runner.last_matched_index = i - 1
            runner.last_matched_state = state
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 19
                return i
            if 'A' <= char <= 'Z':
                state = 16
                continue
            elif char == '_':
                state = 19
                continue
            elif '0' <= char <= '9':
                state = 20
            elif 'a' <= char <= 'z':
                state = 20
            else:
                break
        # state 20: accepting -- NONTERMINALNAME (lowercase identifier)
        if state == 20:
            runner.last_matched_index = i - 1
            runner.last_matched_state = state
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 20
                return i
            if char == '_':
                state = 20
                continue
            elif '0' <= char <= '9':
                state = 20
                continue
            elif 'a' <= char <= 'z':
                state = 20
                continue
            else:
                break
        if state == 22:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 22
                return ~i
            if char == "'":
                state = 23
            else:
                break
        # state 24: just saw a backslash inside a quoted string
        if state == 24:
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 24
                return ~i
            if char == '\\':
                state = 24
                continue
            elif char == '"':
                state = 26
            elif '\x00' <= char <= '!':
                state = 5
                continue
            elif '#' <= char <= '[':
                state = 5
                continue
            elif ']' <= char <= '\xff':
                state = 5
                continue
            else:
                break
        # state 26: accepting -- escaped quote, string may continue
        if state == 26:
            runner.last_matched_index = i - 1
            runner.last_matched_state = state
            try:
                char = input[i]
                i += 1
            except IndexError:
                runner.state = 26
                return i
            if char == '"':
                state = 25
            elif '\x00' <= char <= '!':
                state = 5
                continue
            elif '#' <= char <= '\xff':
                state = 5
                continue
            else:
                break
        # fell through every block above: *state* is a terminal
        # accepting state with no outgoing transitions
        runner.last_matched_state = state
        runner.last_matched_index = i - 1
        runner.state = state
        if i == len(input):
            return i
        else:
            return ~i
        break
    # a transition failed ("break" above): report failure position
    runner.state = state
    return ~i
lexer = DummyLexer(recognize, DFA(28,
{(0, '\t'): 1,
(0, '\n'): 2,
(0, ' '): 3,
(0, '"'): 5,
(0, '#'): 4,
(0, "'"): 6,
(0, '('): 8,
(0, ')'): 7,
(0, '*'): 10,
(0, '+'): 9,
(0, ':'): 12,
(0, ';'): 11,
(0, '<'): 13,
(0, '>'): 15,
(0, '?'): 14,
(0, 'A'): 16,
(0, 'B'): 16,
(0, 'C'): 16,
(0, 'D'): 16,
(0, 'E'): 16,
(0, 'F'): 16,
(0, 'G'): 16,
(0, 'H'): 16,
(0, 'I'): 16,
(0, 'J'): 16,
(0, 'K'): 16,
(0, 'L'): 16,
(0, 'M'): 16,
(0, 'N'): 16,
(0, 'O'): 16,
(0, 'P'): 16,
(0, 'Q'): 16,
(0, 'R'): 16,
(0, 'S'): 16,
(0, 'T'): 16,
(0, 'U'): 16,
(0, 'V'): 16,
(0, 'W'): 16,
(0, 'X'): 16,
(0, 'Y'): 16,
(0, 'Z'): 16,
(0, '['): 17,
(0, ']'): 18,
(0, '_'): 19,
(0, 'a'): 20,
(0, 'b'): 20,
(0, 'c'): 20,
(0, 'd'): 20,
(0, 'e'): 20,
(0, 'f'): 20,
(0, 'g'): 20,
(0, 'h'): 20,
(0, 'i'): 20,
(0, 'j'): 20,
(0, 'k'): 20,
(0, 'l'): 20,
(0, 'm'): 20,
(0, 'n'): 20,
(0, 'o'): 20,
(0, 'p'): 20,
(0, 'q'): 20,
(0, 'r'): 20,
(0, 's'): 20,
(0, 't'): 20,
(0, 'u'): 20,
(0, 'v'): 20,
(0, 'w'): 20,
(0, 'x'): 20,
(0, 'y'): 20,
(0, 'z'): 20,
(0, '|'): 21,
(4, '\x00'): 4,
(4, '\x01'): 4,
(4, '\x02'): 4,
(4, '\x03'): 4,
(4, '\x04'): 4,
(4, '\x05'): 4,
(4, '\x06'): 4,
(4, '\x07'): 4,
(4, '\x08'): 4,
(4, '\t'): 4,
(4, '\n'): 27,
(4, '\x0b'): 4,
(4, '\x0c'): 4,
(4, '\r'): 4,
(4, '\x0e'): 4,
(4, '\x0f'): 4,
(4, '\x10'): 4,
(4, '\x11'): 4,
(4, '\x12'): 4,
(4, '\x13'): 4,
(4, '\x14'): 4,
(4, '\x15'): 4,
(4, '\x16'): 4,
(4, '\x17'): 4,
(4, '\x18'): 4,
(4, '\x19'): 4,
(4, '\x1a'): 4,
(4, '\x1b'): 4,
(4, '\x1c'): 4,
(4, '\x1d'): 4,
(4, '\x1e'): 4,
(4, '\x1f'): 4,
(4, ' '): 4,
(4, '!'): 4,
(4, '"'): 4,
(4, '#'): 4,
(4, '$'): 4,
(4, '%'): 4,
(4, '&'): 4,
(4, "'"): 4,
(4, '('): 4,
(4, ')'): 4,
(4, '*'): 4,
(4, '+'): 4,
(4, ','): 4,
(4, '-'): 4,
(4, '.'): 4,
(4, '/'): 4,
(4, '0'): 4,
(4, '1'): 4,
(4, '2'): 4,
(4, '3'): 4,
(4, '4'): 4,
(4, '5'): 4,
(4, '6'): 4,
(4, '7'): 4,
(4, '8'): 4,
(4, '9'): 4,
(4, ':'): 4,
(4, ';'): 4,
(4, '<'): 4,
(4, '='): 4,
(4, '>'): 4,
(4, '?'): 4,
(4, '@'): 4,
(4, 'A'): 4,
(4, 'B'): 4,
(4, 'C'): 4,
(4, 'D'): 4,
(4, 'E'): 4,
(4, 'F'): 4,
(4, 'G'): 4,
(4, 'H'): 4,
(4, 'I'): 4,
(4, 'J'): 4,
(4, 'K'): 4,
(4, 'L'): 4,
(4, 'M'): 4,
(4, 'N'): 4,
(4, 'O'): 4,
(4, 'P'): 4,
(4, 'Q'): 4,
(4, 'R'): 4,
(4, 'S'): 4,
(4, 'T'): 4,
(4, 'U'): 4,
(4, 'V'): 4,
(4, 'W'): 4,
(4, 'X'): 4,
(4, 'Y'): 4,
(4, 'Z'): 4,
(4, '['): 4,
(4, '\\'): 4,
(4, ']'): 4,
(4, '^'): 4,
(4, '_'): 4,
(4, '`'): 4,
(4, 'a'): 4,
(4, 'b'): 4,
(4, 'c'): 4,
(4, 'd'): 4,
(4, 'e'): 4,
(4, 'f'): 4,
(4, 'g'): 4,
(4, 'h'): 4,
(4, 'i'): 4,
(4, 'j'): 4,
(4, 'k'): 4,
(4, 'l'): 4,
(4, 'm'): 4,
(4, 'n'): 4,
(4, 'o'): 4,
(4, 'p'): 4,
(4, 'q'): 4,
(4, 'r'): 4,
(4, 's'): 4,
(4, 't'): 4,
(4, 'u'): 4,
(4, 'v'): 4,
(4, 'w'): 4,
(4, 'x'): 4,
(4, 'y'): 4,
(4, 'z'): 4,
(4, '{'): 4,
(4, '|'): 4,
(4, '}'): 4,
(4, '~'): 4,
(4, '\x7f'): 4,
(4, '\x80'): 4,
(4, '\x81'): 4,
(4, '\x82'): 4,
(4, '\x83'): 4,
(4, '\x84'): 4,
(4, '\x85'): 4,
(4, '\x86'): 4,
(4, '\x87'): 4,
(4, '\x88'): 4,
(4, '\x89'): 4,
(4, '\x8a'): 4,
(4, '\x8b'): 4,
(4, '\x8c'): 4,
(4, '\x8d'): 4,
(4, '\x8e'): 4,
(4, '\x8f'): 4,
(4, '\x90'): 4,
(4, '\x91'): 4,
(4, '\x92'): 4,
(4, '\x93'): 4,
(4, '\x94'): 4,
(4, '\x95'): 4,
(4, '\x96'): 4,
(4, '\x97'): 4,
(4, '\x98'): 4,
(4, '\x99'): 4,
(4, '\x9a'): 4,
(4, '\x9b'): 4,
(4, '\x9c'): 4,
(4, '\x9d'): 4,
(4, '\x9e'): 4,
(4, '\x9f'): 4,
(4, '\xa0'): 4,
(4, '\xa1'): 4,
(4, '\xa2'): 4,
(4, '\xa3'): 4,
(4, '\xa4'): 4,
(4, '\xa5'): 4,
(4, '\xa6'): 4,
(4, '\xa7'): 4,
(4, '\xa8'): 4,
(4, '\xa9'): 4,
(4, '\xaa'): 4,
(4, '\xab'): 4,
(4, '\xac'): 4,
(4, '\xad'): 4,
(4, '\xae'): 4,
(4, '\xaf'): 4,
(4, '\xb0'): 4,
(4, '\xb1'): 4,
(4, '\xb2'): 4,
(4, '\xb3'): 4,
(4, '\xb4'): 4,
(4, '\xb5'): 4,
(4, '\xb6'): 4,
(4, '\xb7'): 4,
(4, '\xb8'): 4,
(4, '\xb9'): 4,
(4, '\xba'): 4,
(4, '\xbb'): 4,
(4, '\xbc'): 4,
(4, '\xbd'): 4,
(4, '\xbe'): 4,
(4, '\xbf'): 4,
(4, '\xc0'): 4,
(4, '\xc1'): 4,
(4, '\xc2'): 4,
(4, '\xc3'): 4,
(4, '\xc4'): 4,
(4, '\xc5'): 4,
(4, '\xc6'): 4,
(4, '\xc7'): 4,
(4, '\xc8'): 4,
(4, '\xc9'): 4,
(4, '\xca'): 4,
(4, '\xcb'): 4,
(4, '\xcc'): 4,
(4, '\xcd'): 4,
(4, '\xce'): 4,
(4, '\xcf'): 4,
(4, '\xd0'): 4,
(4, '\xd1'): 4,
(4, '\xd2'): 4,
(4, '\xd3'): 4,
(4, '\xd4'): 4,
(4, '\xd5'): 4,
(4, '\xd6'): 4,
(4, '\xd7'): 4,
(4, '\xd8'): 4,
(4, '\xd9'): 4,
(4, '\xda'): 4,
(4, '\xdb'): 4,
(4, '\xdc'): 4,
(4, '\xdd'): 4,
(4, '\xde'): 4,
(4, '\xdf'): 4,
(4, '\xe0'): 4,
(4, '\xe1'): 4,
(4, '\xe2'): 4,
(4, '\xe3'): 4,
(4, '\xe4'): 4,
(4, '\xe5'): 4,
(4, '\xe6'): 4,
(4, '\xe7'): 4,
(4, '\xe8'): 4,
(4, '\xe9'): 4,
(4, '\xea'): 4,
(4, '\xeb'): 4,
(4, '\xec'): 4,
(4, '\xed'): 4,
(4, '\xee'): 4,
(4, '\xef'): 4,
(4, '\xf0'): 4,
(4, '\xf1'): 4,
(4, '\xf2'): 4,
(4, '\xf3'): 4,
(4, '\xf4'): 4,
(4, '\xf5'): 4,
(4, '\xf6'): 4,
(4, '\xf7'): 4,
(4, '\xf8'): 4,
(4, '\xf9'): 4,
(4, '\xfa'): 4,
(4, '\xfb'): 4,
(4, '\xfc'): 4,
(4, '\xfd'): 4,
(4, '\xfe'): 4,
(4, '\xff'): 4,
(5, '\x00'): 5,
(5, '\x01'): 5,
(5, '\x02'): 5,
(5, '\x03'): 5,
(5, '\x04'): 5,
(5, '\x05'): 5,
(5, '\x06'): 5,
(5, '\x07'): 5,
(5, '\x08'): 5,
(5, '\t'): 5,
(5, '\n'): 5,
(5, '\x0b'): 5,
(5, '\x0c'): 5,
(5, '\r'): 5,
(5, '\x0e'): 5,
(5, '\x0f'): 5,
(5, '\x10'): 5,
(5, '\x11'): 5,
(5, '\x12'): 5,
(5, '\x13'): 5,
(5, '\x14'): 5,
(5, '\x15'): 5,
(5, '\x16'): 5,
(5, '\x17'): 5,
(5, '\x18'): 5,
(5, '\x19'): 5,
(5, '\x1a'): 5,
(5, '\x1b'): 5,
(5, '\x1c'): 5,
(5, '\x1d'): 5,
(5, '\x1e'): 5,
(5, '\x1f'): 5,
(5, ' '): 5,
(5, '!'): 5,
(5, '"'): 25,
(5, '#'): 5,
(5, '$'): 5,
(5, '%'): 5,
(5, '&'): 5,
(5, "'"): 5,
(5, '('): 5,
(5, ')'): 5,
(5, '*'): 5,
(5, '+'): 5,
(5, ','): 5,
(5, '-'): 5,
(5, '.'): 5,
(5, '/'): 5,
(5, '0'): 5,
(5, '1'): 5,
(5, '2'): 5,
(5, '3'): 5,
(5, '4'): 5,
(5, '5'): 5,
(5, '6'): 5,
(5, '7'): 5,
(5, '8'): 5,
(5, '9'): 5,
(5, ':'): 5,
(5, ';'): 5,
(5, '<'): 5,
(5, '='): 5,
(5, '>'): 5,
(5, '?'): 5,
(5, '@'): 5,
(5, 'A'): 5,
(5, 'B'): 5,
(5, 'C'): 5,
(5, 'D'): 5,
(5, 'E'): 5,
(5, 'F'): 5,
(5, 'G'): 5,
(5, 'H'): 5,
(5, 'I'): 5,
(5, 'J'): 5,
(5, 'K'): 5,
(5, 'L'): 5,
(5, 'M'): 5,
(5, 'N'): 5,
(5, 'O'): 5,
(5, 'P'): 5,
(5, 'Q'): 5,
(5, 'R'): 5,
(5, 'S'): 5,
(5, 'T'): 5,
(5, 'U'): 5,
(5, 'V'): 5,
(5, 'W'): 5,
(5, 'X'): 5,
(5, 'Y'): 5,
(5, 'Z'): 5,
(5, '['): 5,
(5, '\\'): 24,
(5, ']'): 5,
(5, '^'): 5,
(5, '_'): 5,
(5, '`'): 5,
(5, 'a'): 5,
(5, 'b'): 5,
(5, 'c'): 5,
(5, 'd'): 5,
(5, 'e'): 5,
(5, 'f'): 5,
(5, 'g'): 5,
(5, 'h'): 5,
(5, 'i'): 5,
(5, 'j'): 5,
(5, 'k'): 5,
(5, 'l'): 5,
(5, 'm'): 5,
(5, 'n'): 5,
(5, 'o'): 5,
(5, 'p'): 5,
(5, 'q'): 5,
(5, 'r'): 5,
(5, 's'): 5,
(5, 't'): 5,
(5, 'u'): 5,
(5, 'v'): 5,
(5, 'w'): 5,
(5, 'x'): 5,
(5, 'y'): 5,
(5, 'z'): 5,
(5, '{'): 5,
(5, '|'): 5,
(5, '}'): 5,
(5, '~'): 5,
(5, '\x7f'): 5,
(5, '\x80'): 5,
(5, '\x81'): 5,
(5, '\x82'): 5,
(5, '\x83'): 5,
(5, '\x84'): 5,
(5, '\x85'): 5,
(5, '\x86'): 5,
(5, '\x87'): 5,
(5, '\x88'): 5,
(5, '\x89'): 5,
(5, '\x8a'): 5,
(5, '\x8b'): 5,
(5, '\x8c'): 5,
(5, '\x8d'): 5,
(5, '\x8e'): 5,
(5, '\x8f'): 5,
(5, '\x90'): 5,
(5, '\x91'): 5,
(5, '\x92'): 5,
(5, '\x93'): 5,
(5, '\x94'): 5,
(5, '\x95'): 5,
(5, '\x96'): 5,
(5, '\x97'): 5,
(5, '\x98'): 5,
(5, '\x99'): 5,
(5, '\x9a'): 5,
(5, '\x9b'): 5,
(5, '\x9c'): 5,
(5, '\x9d'): 5,
(5, '\x9e'): 5,
(5, '\x9f'): 5,
(5, '\xa0'): 5,
(5, '\xa1'): 5,
(5, '\xa2'): 5,
(5, '\xa3'): 5,
(5, '\xa4'): 5,
(5, '\xa5'): 5,
(5, '\xa6'): 5,
(5, '\xa7'): 5,
(5, '\xa8'): 5,
(5, '\xa9'): 5,
(5, '\xaa'): 5,
(5, '\xab'): 5,
(5, '\xac'): 5,
(5, '\xad'): 5,
(5, '\xae'): 5,
(5, '\xaf'): 5,
(5, '\xb0'): 5,
(5, '\xb1'): 5,
(5, '\xb2'): 5,
(5, '\xb3'): 5,
(5, '\xb4'): 5,
(5, '\xb5'): 5,
(5, '\xb6'): 5,
(5, '\xb7'): 5,
(5, '\xb8'): 5,
(5, '\xb9'): 5,
(5, '\xba'): 5,
(5, '\xbb'): 5,
(5, '\xbc'): 5,
(5, '\xbd'): 5,
(5, '\xbe'): 5,
(5, '\xbf'): 5,
(5, '\xc0'): 5,
(5, '\xc1'): 5,
(5, '\xc2'): 5,
(5, '\xc3'): 5,
(5, '\xc4'): 5,
(5, '\xc5'): 5,
(5, '\xc6'): 5,
(5, '\xc7'): 5,
(5, '\xc8'): 5,
(5, '\xc9'): 5,
(5, '\xca'): 5,
(5, '\xcb'): 5,
(5, '\xcc'): 5,
(5, '\xcd'): 5,
(5, '\xce'): 5,
(5, '\xcf'): 5,
(5, '\xd0'): 5,
(5, '\xd1'): 5,
(5, '\xd2'): 5,
(5, '\xd3'): 5,
(5, '\xd4'): 5,
(5, '\xd5'): 5,
(5, '\xd6'): 5,
(5, '\xd7'): 5,
(5, '\xd8'): 5,
(5, '\xd9'): 5,
(5, '\xda'): 5,
(5, '\xdb'): 5,
(5, '\xdc'): 5,
(5, '\xdd'): 5,
(5, '\xde'): 5,
(5, '\xdf'): 5,
(5, '\xe0'): 5,
(5, '\xe1'): 5,
(5, '\xe2'): 5,
(5, '\xe3'): 5,
(5, '\xe4'): 5,
(5, '\xe5'): 5,
(5, '\xe6'): 5,
(5, '\xe7'): 5,
(5, '\xe8'): 5,
(5, '\xe9'): 5,
(5, '\xea'): 5,
(5, '\xeb'): 5,
(5, '\xec'): 5,
(5, '\xed'): 5,
(5, '\xee'): 5,
(5, '\xef'): 5,
(5, '\xf0'): 5,
(5, '\xf1'): 5,
(5, '\xf2'): 5,
(5, '\xf3'): 5,
(5, '\xf4'): 5,
(5, '\xf5'): 5,
(5, '\xf6'): 5,
(5, '\xf7'): 5,
(5, '\xf8'): 5,
(5, '\xf9'): 5,
(5, '\xfa'): 5,
(5, '\xfb'): 5,
(5, '\xfc'): 5,
(5, '\xfd'): 5,
(5, '\xfe'): 5,
(5, '\xff'): 5,
(6, '"'): 22,
(16, 'A'): 16,
(16, 'B'): 16,
(16, 'C'): 16,
(16, 'D'): 16,
(16, 'E'): 16,
(16, 'F'): 16,
(16, 'G'): 16,
(16, 'H'): 16,
(16, 'I'): 16,
(16, 'J'): 16,
(16, 'K'): 16,
(16, 'L'): 16,
(16, 'M'): 16,
(16, 'N'): 16,
(16, 'O'): 16,
(16, 'P'): 16,
(16, 'Q'): 16,
(16, 'R'): 16,
(16, 'S'): 16,
(16, 'T'): 16,
(16, 'U'): 16,
(16, 'V'): 16,
(16, 'W'): 16,
(16, 'X'): 16,
(16, 'Y'): 16,
(16, 'Z'): 16,
(16, '_'): 16,
(19, '0'): 20,
(19, '1'): 20,
(19, '2'): 20,
(19, '3'): 20,
(19, '4'): 20,
(19, '5'): 20,
(19, '6'): 20,
(19, '7'): 20,
(19, '8'): 20,
(19, '9'): 20,
(19, 'A'): 16,
(19, 'B'): 16,
(19, 'C'): 16,
(19, 'D'): 16,
(19, 'E'): 16,
(19, 'F'): 16,
(19, 'G'): 16,
(19, 'H'): 16,
(19, 'I'): 16,
(19, 'J'): 16,
(19, 'K'): 16,
(19, 'L'): 16,
(19, 'M'): 16,
(19, 'N'): 16,
(19, 'O'): 16,
(19, 'P'): 16,
(19, 'Q'): 16,
(19, 'R'): 16,
(19, 'S'): 16,
(19, 'T'): 16,
(19, 'U'): 16,
(19, 'V'): 16,
(19, 'W'): 16,
(19, 'X'): 16,
(19, 'Y'): 16,
(19, 'Z'): 16,
(19, '_'): 19,
(19, 'a'): 20,
(19, 'b'): 20,
(19, 'c'): 20,
(19, 'd'): 20,
(19, 'e'): 20,
(19, 'f'): 20,
(19, 'g'): 20,
(19, 'h'): 20,
(19, 'i'): 20,
(19, 'j'): 20,
(19, 'k'): 20,
(19, 'l'): 20,
(19, 'm'): 20,
(19, 'n'): 20,
(19, 'o'): 20,
(19, 'p'): 20,
(19, 'q'): 20,
(19, 'r'): 20,
(19, 's'): 20,
(19, 't'): 20,
(19, 'u'): 20,
(19, 'v'): 20,
(19, 'w'): 20,
(19, 'x'): 20,
(19, 'y'): 20,
(19, 'z'): 20,
(20, '0'): 20,
(20, '1'): 20,
(20, '2'): 20,
(20, '3'): 20,
(20, '4'): 20,
(20, '5'): 20,
(20, '6'): 20,
(20, '7'): 20,
(20, '8'): 20,
(20, '9'): 20,
(20, '_'): 20,
(20, 'a'): 20,
(20, 'b'): 20,
(20, 'c'): 20,
(20, 'd'): 20,
(20, 'e'): 20,
(20, 'f'): 20,
(20, 'g'): 20,
(20, 'h'): 20,
(20, 'i'): 20,
(20, 'j'): 20,
(20, 'k'): 20,
(20, 'l'): 20,
(20, 'm'): 20,
(20, 'n'): 20,
(20, 'o'): 20,
(20, 'p'): 20,
(20, 'q'): 20,
(20, 'r'): 20,
(20, 's'): 20,
(20, 't'): 20,
(20, 'u'): 20,
(20, 'v'): 20,
(20, 'w'): 20,
(20, 'x'): 20,
(20, 'y'): 20,
(20, 'z'): 20,
(22, "'"): 23,
(24, '\x00'): 5,
(24, '\x01'): 5,
(24, '\x02'): 5,
(24, '\x03'): 5,
(24, '\x04'): 5,
(24, '\x05'): 5,
(24, '\x06'): 5,
(24, '\x07'): 5,
(24, '\x08'): 5,
(24, '\t'): 5,
(24, '\n'): 5,
(24, '\x0b'): 5,
(24, '\x0c'): 5,
(24, '\r'): 5,
(24, '\x0e'): 5,
(24, '\x0f'): 5,
(24, '\x10'): 5,
(24, '\x11'): 5,
(24, '\x12'): 5,
(24, '\x13'): 5,
(24, '\x14'): 5,
(24, '\x15'): 5,
(24, '\x16'): 5,
(24, '\x17'): 5,
(24, '\x18'): 5,
(24, '\x19'): 5,
(24, '\x1a'): 5,
(24, '\x1b'): 5,
(24, '\x1c'): 5,
(24, '\x1d'): 5,
(24, '\x1e'): 5,
(24, '\x1f'): 5,
(24, ' '): 5,
(24, '!'): 5,
(24, '"'): 26,
(24, '#'): 5,
(24, '$'): 5,
(24, '%'): 5,
(24, '&'): 5,
(24, "'"): 5,
(24, '('): 5,
(24, ')'): 5,
(24, '*'): 5,
(24, '+'): 5,
(24, ','): 5,
(24, '-'): 5,
(24, '.'): 5,
(24, '/'): 5,
(24, '0'): 5,
(24, '1'): 5,
(24, '2'): 5,
(24, '3'): 5,
(24, '4'): 5,
(24, '5'): 5,
(24, '6'): 5,
(24, '7'): 5,
(24, '8'): 5,
(24, '9'): 5,
(24, ':'): 5,
(24, ';'): 5,
(24, '<'): 5,
(24, '='): 5,
(24, '>'): 5,
(24, '?'): 5,
(24, '@'): 5,
(24, 'A'): 5,
(24, 'B'): 5,
(24, 'C'): 5,
(24, 'D'): 5,
(24, 'E'): 5,
(24, 'F'): 5,
(24, 'G'): 5,
(24, 'H'): 5,
(24, 'I'): 5,
(24, 'J'): 5,
(24, 'K'): 5,
(24, 'L'): 5,
(24, 'M'): 5,
(24, 'N'): 5,
(24, 'O'): 5,
(24, 'P'): 5,
(24, 'Q'): 5,
(24, 'R'): 5,
(24, 'S'): 5,
(24, 'T'): 5,
(24, 'U'): 5,
(24, 'V'): 5,
(24, 'W'): 5,
(24, 'X'): 5,
(24, 'Y'): 5,
(24, 'Z'): 5,
(24, '['): 5,
(24, '\\'): 24,
(24, ']'): 5,
(24, '^'): 5,
(24, '_'): 5,
(24, '`'): 5,
(24, 'a'): 5,
(24, 'b'): 5,
(24, 'c'): 5,
(24, 'd'): 5,
(24, 'e'): 5,
(24, 'f'): 5,
(24, 'g'): 5,
(24, 'h'): 5,
(24, 'i'): 5,
(24, 'j'): 5,
(24, 'k'): 5,
(24, 'l'): 5,
(24, 'm'): 5,
(24, 'n'): 5,
(24, 'o'): 5,
(24, 'p'): 5,
(24, 'q'): 5,
(24, 'r'): 5,
(24, 's'): 5,
(24, 't'): 5,
(24, 'u'): 5,
(24, 'v'): 5,
(24, 'w'): 5,
(24, 'x'): 5,
(24, 'y'): 5,
(24, 'z'): 5,
(24, '{'): 5,
(24, '|'): 5,
(24, '}'): 5,
(24, '~'): 5,
(24, '\x7f'): 5,
(24, '\x80'): 5,
(24, '\x81'): 5,
(24, '\x82'): 5,
(24, '\x83'): 5,
(24, '\x84'): 5,
(24, '\x85'): 5,
(24, '\x86'): 5,
(24, '\x87'): 5,
(24, '\x88'): 5,
(24, '\x89'): 5,
(24, '\x8a'): 5,
(24, '\x8b'): 5,
(24, '\x8c'): 5,
(24, '\x8d'): 5,
(24, '\x8e'): 5,
(24, '\x8f'): 5,
(24, '\x90'): 5,
(24, '\x91'): 5,
(24, '\x92'): 5,
(24, '\x93'): 5,
(24, '\x94'): 5,
(24, '\x95'): 5,
(24, '\x96'): 5,
(24, '\x97'): 5,
(24, '\x98'): 5,
(24, '\x99'): 5,
(24, '\x9a'): 5,
(24, '\x9b'): 5,
(24, '\x9c'): 5,
(24, '\x9d'): 5,
(24, '\x9e'): 5,
(24, '\x9f'): 5,
(24, '\xa0'): 5,
(24, '\xa1'): 5,
(24, '\xa2'): 5,
(24, '\xa3'): 5,
(24, '\xa4'): 5,
(24, '\xa5'): 5,
(24, '\xa6'): 5,
(24, '\xa7'): 5,
(24, '\xa8'): 5,
(24, '\xa9'): 5,
(24, '\xaa'): 5,
(24, '\xab'): 5,
(24, '\xac'): 5,
(24, '\xad'): 5,
(24, '\xae'): 5,
(24, '\xaf'): 5,
(24, '\xb0'): 5,
(24, '\xb1'): 5,
(24, '\xb2'): 5,
(24, '\xb3'): 5,
(24, '\xb4'): 5,
(24, '\xb5'): 5,
(24, '\xb6'): 5,
(24, '\xb7'): 5,
(24, '\xb8'): 5,
(24, '\xb9'): 5,
(24, '\xba'): 5,
(24, '\xbb'): 5,
(24, '\xbc'): 5,
(24, '\xbd'): 5,
(24, '\xbe'): 5,
(24, '\xbf'): 5,
(24, '\xc0'): 5,
(24, '\xc1'): 5,
(24, '\xc2'): 5,
(24, '\xc3'): 5,
(24, '\xc4'): 5,
(24, '\xc5'): 5,
(24, '\xc6'): 5,
(24, '\xc7'): 5,
(24, '\xc8'): 5,
(24, '\xc9'): 5,
(24, '\xca'): 5,
(24, '\xcb'): 5,
(24, '\xcc'): 5,
(24, '\xcd'): 5,
(24, '\xce'): 5,
(24, '\xcf'): 5,
(24, '\xd0'): 5,
(24, '\xd1'): 5,
(24, '\xd2'): 5,
(24, '\xd3'): 5,
(24, '\xd4'): 5,
(24, '\xd5'): 5,
(24, '\xd6'): 5,
(24, '\xd7'): 5,
(24, '\xd8'): 5,
(24, '\xd9'): 5,
(24, '\xda'): 5,
(24, '\xdb'): 5,
(24, '\xdc'): 5,
(24, '\xdd'): 5,
(24, '\xde'): 5,
(24, '\xdf'): 5,
(24, '\xe0'): 5,
(24, '\xe1'): 5,
(24, '\xe2'): 5,
(24, '\xe3'): 5,
(24, '\xe4'): 5,
(24, '\xe5'): 5,
(24, '\xe6'): 5,
(24, '\xe7'): 5,
(24, '\xe8'): 5,
(24, '\xe9'): 5,
(24, '\xea'): 5,
(24, '\xeb'): 5,
(24, '\xec'): 5,
(24, '\xed'): 5,
(24, '\xee'): 5,
(24, '\xef'): 5,
(24, '\xf0'): 5,
(24, '\xf1'): 5,
(24, '\xf2'): 5,
(24, '\xf3'): 5,
(24, '\xf4'): 5,
(24, '\xf5'): 5,
(24, '\xf6'): 5,
(24, '\xf7'): 5,
(24, '\xf8'): 5,
(24, '\xf9'): 5,
(24, '\xfa'): 5,
(24, '\xfb'): 5,
(24, '\xfc'): 5,
(24, '\xfd'): 5,
(24, '\xfe'): 5,
(24, '\xff'): 5,
(26, '\x00'): 5,
(26, '\x01'): 5,
(26, '\x02'): 5,
(26, '\x03'): 5,
(26, '\x04'): 5,
(26, '\x05'): 5,
(26, '\x06'): 5,
(26, '\x07'): 5,
(26, '\x08'): 5,
(26, '\t'): 5,
(26, '\n'): 5,
(26, '\x0b'): 5,
(26, '\x0c'): 5,
(26, '\r'): 5,
(26, '\x0e'): 5,
(26, '\x0f'): 5,
(26, '\x10'): 5,
(26, '\x11'): 5,
(26, '\x12'): 5,
(26, '\x13'): 5,
(26, '\x14'): 5,
(26, '\x15'): 5,
(26, '\x16'): 5,
(26, '\x17'): 5,
(26, '\x18'): 5,
(26, '\x19'): 5,
(26, '\x1a'): 5,
(26, '\x1b'): 5,
(26, '\x1c'): 5,
(26, '\x1d'): 5,
(26, '\x1e'): 5,
(26, '\x1f'): 5,
(26, ' '): 5,
(26, '!'): 5,
(26, '"'): 25,
(26, '#'): 5,
(26, '$'): 5,
(26, '%'): 5,
(26, '&'): 5,
(26, "'"): 5,
(26, '('): 5,
(26, ')'): 5,
(26, '*'): 5,
(26, '+'): 5,
(26, ','): 5,
(26, '-'): 5,
(26, '.'): 5,
(26, '/'): 5,
(26, '0'): 5,
(26, '1'): 5,
(26, '2'): 5,
(26, '3'): 5,
(26, '4'): 5,
(26, '5'): 5,
(26, '6'): 5,
(26, '7'): 5,
(26, '8'): 5,
(26, '9'): 5,
(26, ':'): 5,
(26, ';'): 5,
(26, '<'): 5,
(26, '='): 5,
(26, '>'): 5,
(26, '?'): 5,
(26, '@'): 5,
(26, 'A'): 5,
(26, 'B'): 5,
(26, 'C'): 5,
(26, 'D'): 5,
(26, 'E'): 5,
(26, 'F'): 5,
(26, 'G'): 5,
(26, 'H'): 5,
(26, 'I'): 5,
(26, 'J'): 5,
(26, 'K'): 5,
(26, 'L'): 5,
(26, 'M'): 5,
(26, 'N'): 5,
(26, 'O'): 5,
(26, 'P'): 5,
(26, 'Q'): 5,
(26, 'R'): 5,
(26, 'S'): 5,
(26, 'T'): 5,
(26, 'U'): 5,
(26, 'V'): 5,
(26, 'W'): 5,
(26, 'X'): 5,
(26, 'Y'): 5,
(26, 'Z'): 5,
(26, '['): 5,
(26, '\\'): 5,
(26, ']'): 5,
(26, '^'): 5,
(26, '_'): 5,
(26, '`'): 5,
(26, 'a'): 5,
(26, 'b'): 5,
(26, 'c'): 5,
(26, 'd'): 5,
(26, 'e'): 5,
(26, 'f'): 5,
(26, 'g'): 5,
(26, 'h'): 5,
(26, 'i'): 5,
(26, 'j'): 5,
(26, 'k'): 5,
(26, 'l'): 5,
(26, 'm'): 5,
(26, 'n'): 5,
(26, 'o'): 5,
(26, 'p'): 5,
(26, 'q'): 5,
(26, 'r'): 5,
(26, 's'): 5,
(26, 't'): 5,
(26, 'u'): 5,
(26, 'v'): 5,
(26, 'w'): 5,
(26, 'x'): 5,
(26, 'y'): 5,
(26, 'z'): 5,
(26, '{'): 5,
(26, '|'): 5,
(26, '}'): 5,
(26, '~'): 5,
(26, '\x7f'): 5,
(26, '\x80'): 5,
(26, '\x81'): 5,
(26, '\x82'): 5,
(26, '\x83'): 5,
(26, '\x84'): 5,
(26, '\x85'): 5,
(26, '\x86'): 5,
(26, '\x87'): 5,
(26, '\x88'): 5,
(26, '\x89'): 5,
(26, '\x8a'): 5,
(26, '\x8b'): 5,
(26, '\x8c'): 5,
(26, '\x8d'): 5,
(26, '\x8e'): 5,
(26, '\x8f'): 5,
(26, '\x90'): 5,
(26, '\x91'): 5,
(26, '\x92'): 5,
(26, '\x93'): 5,
(26, '\x94'): 5,
(26, '\x95'): 5,
(26, '\x96'): 5,
(26, '\x97'): 5,
(26, '\x98'): 5,
(26, '\x99'): 5,
(26, '\x9a'): 5,
(26, '\x9b'): 5,
(26, '\x9c'): 5,
(26, '\x9d'): 5,
(26, '\x9e'): 5,
(26, '\x9f'): 5,
(26, '\xa0'): 5,
(26, '\xa1'): 5,
(26, '\xa2'): 5,
(26, '\xa3'): 5,
(26, '\xa4'): 5,
(26, '\xa5'): 5,
(26, '\xa6'): 5,
(26, '\xa7'): 5,
(26, '\xa8'): 5,
(26, '\xa9'): 5,
(26, '\xaa'): 5,
(26, '\xab'): 5,
(26, '\xac'): 5,
(26, '\xad'): 5,
(26, '\xae'): 5,
(26, '\xaf'): 5,
(26, '\xb0'): 5,
(26, '\xb1'): 5,
(26, '\xb2'): 5,
(26, '\xb3'): 5,
(26, '\xb4'): 5,
(26, '\xb5'): 5,
(26, '\xb6'): 5,
(26, '\xb7'): 5,
(26, '\xb8'): 5,
(26, '\xb9'): 5,
(26, '\xba'): 5,
(26, '\xbb'): 5,
(26, '\xbc'): 5,
(26, '\xbd'): 5,
(26, '\xbe'): 5,
(26, '\xbf'): 5,
(26, '\xc0'): 5,
(26, '\xc1'): 5,
(26, '\xc2'): 5,
(26, '\xc3'): 5,
(26, '\xc4'): 5,
(26, '\xc5'): 5,
(26, '\xc6'): 5,
(26, '\xc7'): 5,
(26, '\xc8'): 5,
(26, '\xc9'): 5,
(26, '\xca'): 5,
(26, '\xcb'): 5,
(26, '\xcc'): 5,
(26, '\xcd'): 5,
(26, '\xce'): 5,
(26, '\xcf'): 5,
(26, '\xd0'): 5,
(26, '\xd1'): 5,
(26, '\xd2'): 5,
(26, '\xd3'): 5,
(26, '\xd4'): 5,
(26, '\xd5'): 5,
(26, '\xd6'): 5,
(26, '\xd7'): 5,
(26, '\xd8'): 5,
(26, '\xd9'): 5,
(26, '\xda'): 5,
(26, '\xdb'): 5,
(26, '\xdc'): 5,
(26, '\xdd'): 5,
(26, '\xde'): 5,
(26, '\xdf'): 5,
(26, '\xe0'): 5,
(26, '\xe1'): 5,
(26, '\xe2'): 5,
(26, '\xe3'): 5,
(26, '\xe4'): 5,
(26, '\xe5'): 5,
(26, '\xe6'): 5,
(26, '\xe7'): 5,
(26, '\xe8'): 5,
(26, '\xe9'): 5,
(26, '\xea'): 5,
(26, '\xeb'): 5,
(26, '\xec'): 5,
(26, '\xed'): 5,
(26, '\xee'): 5,
(26, '\xef'): 5,
(26, '\xf0'): 5,
(26, '\xf1'): 5,
(26, '\xf2'): 5,
(26, '\xf3'): 5,
(26, '\xf4'): 5,
(26, '\xf5'): 5,
(26, '\xf6'): 5,
(26, '\xf7'): 5,
(26, '\xf8'): 5,
(26, '\xf9'): 5,
(26, '\xfa'): 5,
(26, '\xfb'): 5,
(26, '\xfc'): 5,
(26, '\xfd'): 5,
(26, '\xfe'): 5,
(26, '\xff'): 5},
set([1, 2, 3, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 23, 25, 26, 27]),
set([1, 2, 3, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 23, 25, 26, 27]),
['0, 0, 0, final*, 0, final*, start*, 0, final*, 0, 1, final*, start*, 0, 0, 0, 0, 0, 0, start|, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0',
'IGNORE',
'IGNORE',
'IGNORE',
'1, final*, 0, start|, 0, final*, start*, 0, final*, 0, 1, final|, start|, 0, final*, start*, 0, final*, 0, final|, start|, 0, 1, final*, start*, 0',
'1, final*, 0, final*, start*, start|, 0, final|, final*, start*, final*, 0, 0, start|, 0, final*, 0, final*, 0, 1, final|, final*, 0, final|, final*, start*, final*, 0, 0, start|, 0, final*, start|, 0, start*, final*, 0, final*, final|, final*, 0, 1, final*, start*, final*, 0, 0, final|, start|, 0, start|, 0, start*, final*, 0, 1, final|, start|, 0, final*, start*, final*, 0, final*, 1, final|, final*, 0, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, final*, final|, 1, final*, 0, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, 0, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, 1, final|, final*, 0, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, final|, 1, final*, 0, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, final*, 0, 1, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, final*, 0, final|, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, 1, final|, final*, 0, 1, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, final*, 0, 0, final*, final|, 1, final*, 0, final|, start|, final*, 0, 1, final|, start|, 0, final*, start*, final*, 0, final*, 1, final|, final*, 0, 1, final|, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, final*, final|, 1, final*, 0, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, 0, final*, 0, 1, final|, start|, 0, final*, start*, final*, 0, final*, final*, 0, 1, final|, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, final*, final*, 0, final|, start|, 0, 1, 
final*, start*, final*, start*, final*, 0, final|, start|, 0, 0',
'1',
'__11_)',
'__10_(',
'__4_+',
'__3_*',
'__1_;',
'__0_:',
'__9_<',
'__5_?',
'__8_>',
'SYMBOLNAME',
'__6_[',
'__7_]',
'NONTERMINALNAME',
'NONTERMINALNAME',
'__2_|',
'2',
'QUOTE',
'final*, 0, 1, final*, 0, final|, start|, 0, final*, 0, final|, start|, 0, 1, final*, 0, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, 1, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, 1, final*, 0, final|, start|, 0, 0, start|, 0, final*, start*, final*, 0, final|, start|, 0, 1, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, 1, final*, 0, final|, start|, 0, final*, 0, start|, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, 1, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, 1, final*, 0, final|, start|, 0, final*, 0, final|, start|, 0, 1, final*, 0, start|, 0, final*, start*, final*, start*, final*, 0, final|, start|, 0, 1, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, 0, 1, final*, 0, final|, start|, 0, final*, 0, final|, start|, 0, 1, final*, 0, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, 1, 0, final*, 0, 1, final*, 0, final|, start|, 0, final*, 0, start|, 0, final*, 0, final|, start|, 0, 1, final*, start*, final*, start*, final*, 0, final|, start|, 0, 1, 0',
'QUOTE',
'QUOTE',
'IGNORE']), {'IGNORE': None})
# generated code between this line and its other occurence
if __name__ == '__main__':
    # Regenerate the generated section of this module: rebuild the EBNF
    # parser and splice its fresh source back in between the two marker lines.
    f = py.path.local(__file__)
    oldcontent = f.read()
    # The marker is written uppercase here and lowered at runtime so that this
    # line itself does not match when the file content is split on the marker.
    s = "# GENERATED CODE BETWEEN THIS LINE AND ITS OTHER OCCURENCE\n".lower()
    pre, gen, after = oldcontent.split(s)
    parser, lexer, ToAST = make_ebnf_parser()
    transformer = ToAST.source
    # Reassemble: prefix, marker, regenerated transformer/parser/lexer
    # source, marker, suffix.  The generated ToAST class is renamed to
    # EBNFToAST to avoid clashing with the live ToAST symbol.
    newcontent = "%s%s%s\nparser = %r\n%s\n%s%s" % (
        pre, s, transformer.replace("ToAST", "EBNFToAST"),
        parser, lexer.get_dummy_repr(), s, after)
    print newcontent
    f.write(newcontent)
| mit |
tvibliani/odoo | addons/association/__openerp__.py | 260 | 1700 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Associations Management',
'version': '0.1',
'category': 'Specific Industry Applications',
'description': """
This module is to configure modules related to an association.
==============================================================
It installs the profile for associations to manage events, registrations, memberships,
membership products (schemes).
""",
'author': 'OpenERP SA',
'depends': ['base_setup', 'membership', 'event'],
'data': ['security/ir.model.access.csv', 'profile_association.xml'],
'demo': [],
'installable': True,
'auto_install': False,
'website': 'https://www.odoo.com'
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sahiljain/catapult | third_party/gsutil/third_party/boto/boto/manage/task.py | 153 | 6781 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.sdb.db.property import StringProperty, DateTimeProperty, IntegerProperty
from boto.sdb.db.model import Model
import datetime, subprocess, time
from boto.compat import StringIO
def check_hour(val):
    """Validate an hour specification for a Task.

    Accepts the wildcard '*' or a string holding an integer in the
    inclusive range 0-23.  Raises ValueError for anything else (including
    non-numeric strings, via the int() conversion).
    """
    if val == '*':
        return
    hour = int(val)  # raises ValueError for non-numeric input, as before
    if hour < 0 or hour > 23:
        raise ValueError('hour must be "*" or in the range 0-23, got %r' % val)
class Task(Model):

    """
    A scheduled, repeating task that can be executed by any participating servers.
    The scheduling is similar to cron jobs.  Each task has an hour attribute.
    The allowable values for hour are [0-23|*].

    To keep the operation reasonably efficient and not cause excessive polling,
    the minimum granularity of a Task is hourly.  Some examples:

             hour='*' - the task would be executed each hour
             hour='3' - the task would be executed at 3AM GMT each day.
    """
    name = StringProperty()
    # Hour of day (GMT) at which to run: '0'-'23', or '*' for every hour.
    hour = StringProperty(required=True, validator=check_hour, default='*')
    # Shell command executed via subprocess when the task fires.
    command = StringProperty(required=True)
    last_executed = DateTimeProperty()
    # Exit status of the most recent run.
    last_status = IntegerProperty()
    # Combined stdout/stderr of the last run, truncated to fit the property.
    last_output = StringProperty()
    # Id of the SQS message currently scheduled for this task.
    message_id = StringProperty()

    @classmethod
    def start_all(cls, queue_name):
        """Enqueue every persisted Task onto the named SQS queue."""
        for task in cls.all():
            task.start(queue_name)

    def __init__(self, id=None, **kw):
        super(Task, self).__init__(id, **kw)
        # Convenience flags derived from the hour spec; self.now is captured
        # once so that check()/run() reason about a single point in time.
        self.hourly = self.hour == '*'
        self.daily = self.hour != '*'
        self.now = datetime.datetime.utcnow()

    def check(self):
        """
        Determine how long until the next scheduled time for a Task.
        Returns the number of seconds until the next scheduled time or zero
        if the task needs to be run immediately.
        If it's an hourly task and it's never been run, run it now.
        If it's a daily task and it's never been run and the hour is right, run it now.
        """
        boto.log.info('checking Task[%s]-now=%s, last=%s' % (self.name, self.now, self.last_executed))

        if self.hourly and not self.last_executed:
            return 0

        if self.daily and not self.last_executed:
            if int(self.hour) == self.now.hour:
                return 0
            else:
                # NOTE(review): this is the absolute hour difference; it does
                # not account for day wrap-around -- confirm intended.
                return max((int(self.hour) - self.now.hour), (self.now.hour - int(self.hour))) * 60 * 60

        delta = self.now - self.last_executed
        if self.hourly:
            if delta.seconds >= 60 * 60:
                return 0
            else:
                return 60 * 60 - delta.seconds
        else:
            if int(self.hour) == self.now.hour:
                if delta.days >= 1:
                    return 0
                else:
                    return 82800  # 23 hours, just to be safe
            else:
                return max((int(self.hour) - self.now.hour), (self.now.hour - int(self.hour))) * 60 * 60

    def _run(self, msg, vtimeout):
        """Execute self.command, extending the SQS message visibility timeout
        in increments of vtimeout while the subprocess is still running."""
        boto.log.info('Task[%s] - running:%s' % (self.name, self.command))
        log_fp = StringIO()
        process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        nsecs = 5
        current_timeout = vtimeout
        while process.poll() is None:
            boto.log.info('nsecs=%s, timeout=%s' % (nsecs, current_timeout))
            if nsecs >= current_timeout:
                # About to exceed the message's visibility window: push it out
                # so another poller does not pick the task up mid-run.
                current_timeout += vtimeout
                boto.log.info('Task[%s] - setting timeout to %d seconds' % (self.name, current_timeout))
                if msg:
                    msg.change_visibility(current_timeout)
            time.sleep(5)
            nsecs += 5
        t = process.communicate()
        log_fp.write(t[0])
        log_fp.write(t[1])
        boto.log.info('Task[%s] - output: %s' % (self.name, log_fp.getvalue()))
        self.last_executed = self.now
        self.last_status = process.returncode
        # Keep only the first 1023 characters of the output for storage.
        self.last_output = log_fp.getvalue()[0:1023]

    def run(self, msg, vtimeout=60):
        """Run the task now if it is due; otherwise hide the triggering
        message until the next scheduled time."""
        delay = self.check()
        boto.log.info('Task[%s] - delay=%s seconds' % (self.name, delay))
        if delay == 0:
            self._run(msg, vtimeout)
            # Queue a fresh message for the next execution, record its id,
            # then delete the message that triggered this run.
            queue = msg.queue
            new_msg = queue.new_message(self.id)
            new_msg = queue.write(new_msg)
            self.message_id = new_msg.id
            self.put()
            boto.log.info('Task[%s] - new message id=%s' % (self.name, new_msg.id))
            msg.delete()
            boto.log.info('Task[%s] - deleted message %s' % (self.name, msg.id))
        else:
            boto.log.info('new_vtimeout: %d' % delay)
            # Not due yet: make the message invisible until it is.
            msg.change_visibility(delay)

    def start(self, queue_name):
        """Schedule this task by writing its id to the named SQS queue and
        remembering the resulting message id."""
        boto.log.info('Task[%s] - starting with queue: %s' % (self.name, queue_name))
        queue = boto.lookup('sqs', queue_name)
        msg = queue.new_message(self.id)
        msg = queue.write(msg)
        self.message_id = msg.id
        self.put()
        boto.log.info('Task[%s] - start successful' % self.name)
class TaskPoller(object):
    """Polls an SQS queue for Task ids and runs each matching Task."""

    def __init__(self, queue_name):
        self.sqs = boto.connect_sqs()
        self.queue = self.sqs.lookup(queue_name)

    def poll(self, wait=60, vtimeout=60):
        """Loop forever: read task-id messages off the queue and execute the
        corresponding tasks, sleeping `wait` seconds when the queue is empty.
        Messages whose id no longer matches the task's recorded message_id
        are considered extraneous and skipped."""
        while True:
            message = self.queue.read(vtimeout)
            if not message:
                time.sleep(wait)
                continue
            task = Task.get_by_id(message.get_body())
            if not task:
                continue
            if task.message_id and message.id != task.message_id:
                boto.log.info('Task[%s] - found extraneous message, ignoring' % task.name)
                continue
            boto.log.info('Task[%s] - read message %s' % (task.name, message.id))
            task.run(message, vtimeout)
| bsd-3-clause |
tctimmeh/dc-django-base | dcbase/tests/unit/models/testUserProfile.py | 1 | 1456 | from dcbase.apps import TIMEZONE_SESSION_KEY
from django.utils.timezone import get_current_timezone
from dcbase.models import UserProfile
from dcbase.tests.unit import UnitTestCase
from django.utils import translation
from django.utils.translation import LANGUAGE_SESSION_KEY
class TestUserProfile(UnitTestCase):
    """Unit tests for the UserProfile model and its login-time side effects."""

    def test_profileIsAddedWhenUserIsCreated(self):
        # Creating a user should automatically attach a UserProfile.
        user = self.createUser()
        self.assertIsInstance(user.profile, UserProfile)

    def test_profileIsCreatedWithCurrentLanguage(self):
        # The profile captures whatever translation language is active at the
        # moment the user is created.
        language = 'fr'
        translation.activate(language)
        user = self.createUser()
        self.assertEqual(language, user.profile.language)

    def test_loggingInSetsLanguageFromProfile(self):
        initialLanguage = 'en'
        translation.activate(initialLanguage)
        user = self.createUser()
        user.profile.language = 'fr'
        user.profile.save()
        self.logOut()
        # Sanity check: still on the initial language before logging in.
        self.assertEqual(initialLanguage, translation.get_language())
        # Logging in should copy the profile language into the session.
        self.logInAs(user)
        self.assertEqual('fr', self.client.session[LANGUAGE_SESSION_KEY])

    def test_loggingInSetsTimezoneFromProfile(self):
        expected = 'America/Edmonton'
        user = self.createUser()
        user.profile.timezone = expected
        user.profile.save()
        # Sanity check: the test must not already be running in the profile's
        # timezone, or the assertion after login would prove nothing.
        self.assertNotEqual(expected, get_current_timezone())
        # Logging in should copy the profile timezone into the session.
        self.logInAs(user)
        self.assertEqual(expected, self.client.session[TIMEZONE_SESSION_KEY])
| mit |
RaoUmer/django | django/contrib/admin/validation.py | 104 | 20582 | from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.forms.models import (BaseModelForm, BaseModelFormSet, fields_for_model,
_get_foreign_key)
from django.contrib.admin import ListFilter, FieldListFilter
from django.contrib.admin.util import get_fields_from_path, NotRelationField
from django.contrib.admin.options import (flatten_fieldsets, BaseModelAdmin,
HORIZONTAL, VERTICAL)
__all__ = ['validate']
def validate(cls, model):
    """
    Does basic ModelAdmin option validation. Calls custom validation
    classmethod in the end if it is provided in cls. The signature of the
    custom validation classmethod should be: def validate(cls, model).

    Raises ImproperlyConfigured for the first invalid option found.
    """
    # Before we can introspect models, they need to be fully loaded so that
    # inter-relations are set up correctly. We force that here.
    models.get_apps()

    opts = model._meta
    validate_base(cls, model)

    # list_display
    if hasattr(cls, 'list_display'):
        check_isseq(cls, 'list_display', cls.list_display)
        for idx, field in enumerate(cls.list_display):
            # Each entry must be a callable, an attribute of the ModelAdmin,
            # an attribute/method of the model, or a model field (but never a
            # ManyToManyField).
            if not callable(field):
                if not hasattr(cls, field):
                    if not hasattr(model, field):
                        try:
                            opts.get_field(field)
                        except models.FieldDoesNotExist:
                            raise ImproperlyConfigured("%s.list_display[%d], %r is not a callable or an attribute of %r or found in the model %r."
                                % (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
                    else:
                        # getattr(model, field) could be an X_RelatedObjectsDescriptor
                        f = fetch_attr(cls, model, opts, "list_display[%d]" % idx, field)
                        if isinstance(f, models.ManyToManyField):
                            raise ImproperlyConfigured("'%s.list_display[%d]', '%s' is a ManyToManyField which is not supported."
                                % (cls.__name__, idx, field))

    # list_display_links
    if hasattr(cls, 'list_display_links'):
        check_isseq(cls, 'list_display_links', cls.list_display_links)
        for idx, field in enumerate(cls.list_display_links):
            if field not in cls.list_display:
                raise ImproperlyConfigured("'%s.list_display_links[%d]' "
                        "refers to '%s' which is not defined in 'list_display'."
                        % (cls.__name__, idx, field))

    # list_filter
    if hasattr(cls, 'list_filter'):
        check_isseq(cls, 'list_filter', cls.list_filter)
        for idx, item in enumerate(cls.list_filter):
            # There are three options for specifying a filter:
            #   1: 'field' - a basic field filter, possibly w/ relationships (eg, 'field__rel')
            #   2: ('field', SomeFieldListFilter) - a field-based list filter class
            #   3: SomeListFilter - a non-field list filter class
            if callable(item) and not isinstance(item, models.Field):
                # If item is option 3, it should be a ListFilter...
                if not issubclass(item, ListFilter):
                    raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
                            " which is not a descendant of ListFilter."
                            % (cls.__name__, idx, item.__name__))
                # ...  but not a FieldListFilter.
                if issubclass(item, FieldListFilter):
                    raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
                            " which is of type FieldListFilter but is not"
                            " associated with a field name."
                            % (cls.__name__, idx, item.__name__))
            else:
                if isinstance(item, (tuple, list)):
                    # item is option #2
                    field, list_filter_class = item
                    if not issubclass(list_filter_class, FieldListFilter):
                        raise ImproperlyConfigured("'%s.list_filter[%d][1]'"
                                " is '%s' which is not of type FieldListFilter."
                                % (cls.__name__, idx, list_filter_class.__name__))
                else:
                    # item is option #1
                    field = item
                # Validate the field string
                try:
                    get_fields_from_path(model, field)
                except (NotRelationField, FieldDoesNotExist):
                    raise ImproperlyConfigured("'%s.list_filter[%d]' refers to '%s'"
                            " which does not refer to a Field."
                            % (cls.__name__, idx, field))

    # list_per_page = 100
    if hasattr(cls, 'list_per_page') and not isinstance(cls.list_per_page, int):
        raise ImproperlyConfigured("'%s.list_per_page' should be a integer."
                % cls.__name__)

    # list_max_show_all
    if hasattr(cls, 'list_max_show_all') and not isinstance(cls.list_max_show_all, int):
        raise ImproperlyConfigured("'%s.list_max_show_all' should be an integer."
                % cls.__name__)

    # list_editable
    if hasattr(cls, 'list_editable') and cls.list_editable:
        check_isseq(cls, 'list_editable', cls.list_editable)
        for idx, field_name in enumerate(cls.list_editable):
            # Every editable field must be a real, editable model field that
            # is shown in list_display but not used as a link column.
            try:
                field = opts.get_field_by_name(field_name)[0]
            except models.FieldDoesNotExist:
                raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
                    "field, '%s', not defined on %s.%s."
                    % (cls.__name__, idx, field_name, model._meta.app_label, model.__name__))
            if field_name not in cls.list_display:
                raise ImproperlyConfigured("'%s.list_editable[%d]' refers to "
                    "'%s' which is not defined in 'list_display'."
                    % (cls.__name__, idx, field_name))
            if field_name in cls.list_display_links:
                raise ImproperlyConfigured("'%s' cannot be in both '%s.list_editable'"
                    " and '%s.list_display_links'"
                    % (field_name, cls.__name__, cls.__name__))
            if not cls.list_display_links and cls.list_display[0] in cls.list_editable:
                raise ImproperlyConfigured("'%s.list_editable[%d]' refers to"
                    " the first field in list_display, '%s', which can't be"
                    " used unless list_display_links is set."
                    % (cls.__name__, idx, cls.list_display[0]))
            if not field.editable:
                raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
                    "field, '%s', which isn't editable through the admin."
                    % (cls.__name__, idx, field_name))

    # search_fields = ()
    if hasattr(cls, 'search_fields'):
        check_isseq(cls, 'search_fields', cls.search_fields)

    # date_hierarchy = None
    if cls.date_hierarchy:
        f = get_field(cls, model, opts, 'date_hierarchy', cls.date_hierarchy)
        if not isinstance(f, (models.DateField, models.DateTimeField)):
            raise ImproperlyConfigured("'%s.date_hierarchy is "
                    "neither an instance of DateField nor DateTimeField."
                    % cls.__name__)

    # ordering = None
    if cls.ordering:
        check_isseq(cls, 'ordering', cls.ordering)
        for idx, field in enumerate(cls.ordering):
            if field == '?' and len(cls.ordering) != 1:
                raise ImproperlyConfigured("'%s.ordering' has the random "
                        "ordering marker '?', but contains other fields as "
                        "well. Please either remove '?' or the other fields."
                        % cls.__name__)
            if field == '?':
                continue
            if field.startswith('-'):
                field = field[1:]
            # Skip ordering in the format field1__field2 (FIXME: checking
            # this format would be nice, but it's a little fiddly).
            if '__' in field:
                continue
            get_field(cls, model, opts, 'ordering[%d]' % idx, field)

    if hasattr(cls, "readonly_fields"):
        check_readonly_fields(cls, model, opts)

    # list_select_related = False
    # save_as = False
    # save_on_top = False
    for attr in ('list_select_related', 'save_as', 'save_on_top'):
        if not isinstance(getattr(cls, attr), bool):
            raise ImproperlyConfigured("'%s.%s' should be a boolean."
                    % (cls.__name__, attr))

    # inlines = []
    if hasattr(cls, 'inlines'):
        check_isseq(cls, 'inlines', cls.inlines)
        for idx, inline in enumerate(cls.inlines):
            if not issubclass(inline, BaseModelAdmin):
                raise ImproperlyConfigured("'%s.inlines[%d]' does not inherit "
                        "from BaseModelAdmin." % (cls.__name__, idx))
            if not inline.model:
                raise ImproperlyConfigured("'model' is a required attribute "
                        "of '%s.inlines[%d]'." % (cls.__name__, idx))
            if not issubclass(inline.model, models.Model):
                raise ImproperlyConfigured("'%s.inlines[%d].model' does not "
                        "inherit from models.Model." % (cls.__name__, idx))
            # Inlines get the shared base validation plus inline-specific checks.
            validate_base(inline, inline.model)
            validate_inline(inline, cls, model)
def validate_inline(cls, parent, parent_model):
    """
    Validate the inline-specific options of an InlineModelAdmin subclass
    `cls` attached to the ModelAdmin `parent` for `parent_model`.
    """
    # model is already verified to exist and be a Model
    if cls.fk_name:  # default value is None
        f = get_field(cls, cls.model, cls.model._meta, 'fk_name', cls.fk_name)
        if not isinstance(f, models.ForeignKey):
            raise ImproperlyConfigured("'%s.fk_name is not an instance of "
                    "models.ForeignKey." % cls.__name__)

    # can_fail=True: fk may legitimately be None here (presumably for inlines
    # without a direct FK back to the parent, e.g. generic inlines -- confirm).
    fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name, can_fail=True)

    # extra = 3
    if not isinstance(cls.extra, int):
        raise ImproperlyConfigured("'%s.extra' should be a integer."
                % cls.__name__)

    # max_num = None
    max_num = getattr(cls, 'max_num', None)
    if max_num is not None and not isinstance(max_num, int):
        raise ImproperlyConfigured("'%s.max_num' should be an integer or None (default)."
                % cls.__name__)

    # formset
    if hasattr(cls, 'formset') and not issubclass(cls.formset, BaseModelFormSet):
        raise ImproperlyConfigured("'%s.formset' does not inherit from "
                "BaseModelFormSet." % cls.__name__)

    # exclude: the FK linking back to the parent model may not be excluded.
    if hasattr(cls, 'exclude') and cls.exclude:
        if fk and fk.name in cls.exclude:
            raise ImproperlyConfigured("%s cannot exclude the field "
                    "'%s' - this is the foreign key to the parent model "
                    "%s.%s." % (cls.__name__, fk.name, parent_model._meta.app_label, parent_model.__name__))

    if hasattr(cls, "readonly_fields"):
        check_readonly_fields(cls, cls.model, cls.model._meta)
def validate_fields_spec(cls, model, opts, flds, label):
    """
    Validate the fields specification in `flds` from a ModelAdmin subclass
    `cls` for the `model` model. `opts` is `model`'s Meta inner class.
    Use `label` for reporting problems to the user.

    The fields specification can be a ``fields`` option or a ``fields``
    sub-option from a ``fieldsets`` option component.

    Raises ImproperlyConfigured for the first invalid entry found.
    """
    for fields in flds:
        # The entry in fields might be a tuple or a list (fieldsets are often
        # declared with lists). If it is a standalone field, make it into a
        # tuple to make processing easier.  The previous `type(...) != tuple`
        # check wrongly wrapped list entries, treating the whole list as a
        # single field name.
        if not isinstance(fields, (list, tuple)):
            fields = (fields,)
        for field in fields:
            if field in cls.readonly_fields:
                # Stuff can be put in fields that isn't actually a
                # model field if it's in readonly_fields,
                # readonly_fields will handle the validation of such
                # things.
                continue
            check_formfield(cls, model, opts, label, field)
            try:
                f = opts.get_field(field)
            except models.FieldDoesNotExist:
                # If we can't find a field on the model that matches, it could be an
                # extra field on the form; nothing to check so move on to the next field.
                continue
            # M2M fields with a manually-specified 'through' model cannot be
            # edited via a plain form field.
            if isinstance(f, models.ManyToManyField) and not f.rel.through._meta.auto_created:
                raise ImproperlyConfigured("'%s.%s' "
                        "can't include the ManyToManyField field '%s' because "
                        "'%s' manually specifies a 'through' model." % (
                            cls.__name__, label, field, field))
def validate_base(cls, model):
    """
    Validate the options shared by ModelAdmin and InlineModelAdmin
    subclasses: raw_id_fields, fields, fieldsets, exclude, form,
    filter_vertical, filter_horizontal, radio_fields and
    prepopulated_fields.  Raises ImproperlyConfigured on the first problem.
    """
    opts = model._meta

    # raw_id_fields
    if hasattr(cls, 'raw_id_fields'):
        check_isseq(cls, 'raw_id_fields', cls.raw_id_fields)
        for idx, field in enumerate(cls.raw_id_fields):
            f = get_field(cls, model, opts, 'raw_id_fields', field)
            if not isinstance(f, (models.ForeignKey, models.ManyToManyField)):
                raise ImproperlyConfigured("'%s.raw_id_fields[%d]', '%s' must "
                        "be either a ForeignKey or ManyToManyField."
                        % (cls.__name__, idx, field))

    # fields
    if cls.fields:  # default value is None
        check_isseq(cls, 'fields', cls.fields)
        validate_fields_spec(cls, model, opts, cls.fields, 'fields')
        # 'fields' and 'fieldsets' are mutually exclusive ways of declaring
        # the form layout.
        if cls.fieldsets:
            raise ImproperlyConfigured('Both fieldsets and fields are specified in %s.' % cls.__name__)
        if len(cls.fields) > len(set(cls.fields)):
            raise ImproperlyConfigured('There are duplicate field(s) in %s.fields' % cls.__name__)

    # fieldsets: each entry must be a (name, options_dict) pair whose options
    # dict carries a 'fields' key.
    if cls.fieldsets:  # default value is None
        check_isseq(cls, 'fieldsets', cls.fieldsets)
        for idx, fieldset in enumerate(cls.fieldsets):
            check_isseq(cls, 'fieldsets[%d]' % idx, fieldset)
            if len(fieldset) != 2:
                raise ImproperlyConfigured("'%s.fieldsets[%d]' does not "
                        "have exactly two elements." % (cls.__name__, idx))
            check_isdict(cls, 'fieldsets[%d][1]' % idx, fieldset[1])
            if 'fields' not in fieldset[1]:
                raise ImproperlyConfigured("'fields' key is required in "
                        "%s.fieldsets[%d][1] field options dict."
                        % (cls.__name__, idx))
            validate_fields_spec(cls, model, opts, fieldset[1]['fields'], "fieldsets[%d][1]['fields']" % idx)
        flattened_fieldsets = flatten_fieldsets(cls.fieldsets)
        if len(flattened_fieldsets) > len(set(flattened_fieldsets)):
            raise ImproperlyConfigured('There are duplicate field(s) in %s.fieldsets' % cls.__name__)

    # exclude
    if cls.exclude:  # default value is None
        check_isseq(cls, 'exclude', cls.exclude)
        for field in cls.exclude:
            check_formfield(cls, model, opts, 'exclude', field)
            try:
                f = opts.get_field(field)
            except models.FieldDoesNotExist:
                # If we can't find a field on the model that matches,
                # it could be an extra field on the form.
                continue
        if len(cls.exclude) > len(set(cls.exclude)):
            raise ImproperlyConfigured('There are duplicate field(s) in %s.exclude' % cls.__name__)

    # form
    if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm):
        raise ImproperlyConfigured("%s.form does not inherit from "
                "BaseModelForm." % cls.__name__)

    # filter_vertical
    if hasattr(cls, 'filter_vertical'):
        check_isseq(cls, 'filter_vertical', cls.filter_vertical)
        for idx, field in enumerate(cls.filter_vertical):
            f = get_field(cls, model, opts, 'filter_vertical', field)
            if not isinstance(f, models.ManyToManyField):
                raise ImproperlyConfigured("'%s.filter_vertical[%d]' must be "
                    "a ManyToManyField." % (cls.__name__, idx))

    # filter_horizontal
    if hasattr(cls, 'filter_horizontal'):
        check_isseq(cls, 'filter_horizontal', cls.filter_horizontal)
        for idx, field in enumerate(cls.filter_horizontal):
            f = get_field(cls, model, opts, 'filter_horizontal', field)
            if not isinstance(f, models.ManyToManyField):
                raise ImproperlyConfigured("'%s.filter_horizontal[%d]' must be "
                    "a ManyToManyField." % (cls.__name__, idx))

    # radio_fields: keys must be FK or choice fields; values must be one of
    # the admin HORIZONTAL/VERTICAL constants.
    if hasattr(cls, 'radio_fields'):
        check_isdict(cls, 'radio_fields', cls.radio_fields)
        for field, val in cls.radio_fields.items():
            f = get_field(cls, model, opts, 'radio_fields', field)
            if not (isinstance(f, models.ForeignKey) or f.choices):
                raise ImproperlyConfigured("'%s.radio_fields['%s']' "
                        "is neither an instance of ForeignKey nor does "
                        "have choices set." % (cls.__name__, field))
            if not val in (HORIZONTAL, VERTICAL):
                raise ImproperlyConfigured("'%s.radio_fields['%s']' "
                        "is neither admin.HORIZONTAL nor admin.VERTICAL."
                        % (cls.__name__, field))

    # prepopulated_fields
    if hasattr(cls, 'prepopulated_fields'):
        check_isdict(cls, 'prepopulated_fields', cls.prepopulated_fields)
        for field, val in cls.prepopulated_fields.items():
            f = get_field(cls, model, opts, 'prepopulated_fields', field)
            if isinstance(f, (models.DateTimeField, models.ForeignKey,
                models.ManyToManyField)):
                raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
                        "is either a DateTimeField, ForeignKey or "
                        "ManyToManyField. This isn't allowed."
                        % (cls.__name__, field))
            # The value lists the source fields the prepopulation reads from;
            # each must be a real model field.
            check_isseq(cls, "prepopulated_fields['%s']" % field, val)
            for idx, f in enumerate(val):
                get_field(cls, model, opts, "prepopulated_fields['%s'][%d]" % (field, idx), f)
def check_isseq(cls, label, obj):
    """Raise ImproperlyConfigured unless `obj` is a list or a tuple."""
    if isinstance(obj, (list, tuple)):
        return
    raise ImproperlyConfigured("'%s.%s' must be a list or tuple." % (cls.__name__, label))
def check_isdict(cls, label, obj):
    """Raise ImproperlyConfigured unless `obj` is a dictionary."""
    if isinstance(obj, dict):
        return
    raise ImproperlyConfigured("'%s.%s' must be a dictionary." % (cls.__name__, label))
def get_field(cls, model, opts, label, field):
    """Return the model field named `field`, raising ImproperlyConfigured
    (mentioning the option `label`) if the model has no such field."""
    try:
        return opts.get_field(field)
    except models.FieldDoesNotExist:
        message = "'%s.%s' refers to field '%s' that is missing from model '%s.%s'." % (
            cls.__name__, label, field, model._meta.app_label, model.__name__)
        raise ImproperlyConfigured(message)
def check_formfield(cls, model, opts, label, field):
    """Ensure `field` is available on the admin form, raising
    ImproperlyConfigured (mentioning the option `label`) otherwise.

    Looks at the custom form's declared base_fields when one is set,
    falling back to the default fields generated from the model.
    """
    if getattr(cls.form, 'base_fields', None):
        available = cls.form.base_fields
    else:
        available = fields_for_model(model)
    try:
        available[field]
    except KeyError:
        raise ImproperlyConfigured("'%s.%s' refers to field '%s' that "
            "is missing from the form." % (cls.__name__, label, field))
def fetch_attr(cls, model, opts, label, field):
    """Return the model field named `field`, falling back to a plain model
    attribute of the same name; raise ImproperlyConfigured if neither exists."""
    try:
        return opts.get_field(field)
    except models.FieldDoesNotExist:
        # Not a model field -- fall through to attribute lookup.
        pass
    try:
        return getattr(model, field)
    except AttributeError:
        message = "'%s.%s' refers to '%s' that is neither a field, method or property of model '%s.%s'." % (
            cls.__name__, label, field, model._meta.app_label, model.__name__)
        raise ImproperlyConfigured(message)
def check_readonly_fields(cls, model, opts):
    """Validate the readonly_fields option: every entry must be a callable,
    an attribute of the admin class or model, or a model field."""
    check_isseq(cls, "readonly_fields", cls.readonly_fields)
    for idx, field in enumerate(cls.readonly_fields):
        # Callables and existing attributes are always acceptable.
        if callable(field) or hasattr(cls, field) or hasattr(model, field):
            continue
        try:
            opts.get_field(field)
        except models.FieldDoesNotExist:
            raise ImproperlyConfigured("%s.readonly_fields[%d], %r is not a callable or an attribute of %r or found in the model %r."
                % (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
| bsd-3-clause |
nthien/pulp | bindings/pulp/bindings/exceptions.py | 15 | 3895 | """
Defines exception classes to handle server connection and request exceptions
"""
from gettext import gettext as _
class RequestException(Exception):
    """
    Base exception class for all exceptions that originate by the Pulp server. These
    exceptions coming from the server use the standard exception structure and can be parsed
    accordingly.
    """

    def __init__(self, response_body):
        """
        @param response_body: parsed body of the server's error response; the
            well-known keys are popped off and whatever remains is kept as
            extra_data for further classification of the error
        @type  response_body: dict
        """
        Exception.__init__(self)

        self.href = response_body.pop('_href', None)
        self.http_request_method = response_body.pop('http_request_method', None)
        self.http_status = response_body.pop('http_status', None)
        self.error_message = response_body.pop('error_message', None)
        self.exception = response_body.pop('exception', None)
        self.traceback = response_body.pop('traceback', None)

        # Anything not explicitly removed above represents extra data to further
        # classify the exception.
        self.extra_data = response_body

    def __str__(self):
        message_data = {'m': self.http_request_method,
                        'h': self.href,
                        's': self.http_status,
                        'g': self.error_message}
        # Translate the message template first, THEN interpolate the values.
        # The old code interpolated before the gettext call, so the catalog
        # lookup received an already-formatted string and could never match.
        return _('RequestException: %(m)s request on %(h)s failed with %(s)s - %(g)s') % message_data
# Response code = 400
class BadRequestException(RequestException):
    """Raised when the server rejects the request as malformed (HTTP 400)."""
    pass
# Response code = 401
class PermissionsException(RequestException):
    """Raised when the caller is not authorized for the request (HTTP 401)."""
    pass
# Response code = 404
class NotFoundException(RequestException):
    """Raised when the requested resource does not exist (HTTP 404)."""
    pass
# Response code = 409
class ConflictException(RequestException):
    """Raised when the request conflicts with current server state (HTTP 409)."""
    pass
# Response code >= 500
class PulpServerException(RequestException):
    """Raised for Pulp-formatted server-side errors (HTTP 5xx)."""
    pass
# Response code >= 500 and not a Pulp formatted error
class ApacheServerException(Exception):
    """
    If Apache raises the error, it won't be in the standard Pulp format.
    Therefore this class does not subclass RequestException and simply
    stores the string returned from Apache.

    We store the response body given to us with the error, but it's an HTML
    page that basically says stuff broke, so it's not terribly useful. The
    user will still likely need to go to the server to figure out what went
    wrong.
    """

    def __init__(self, message):
        """
        @param message: the response body apache returns with the error
        @type  message: str
        """
        Exception.__init__(self)
        self.message = message
class ClientSSLException(Exception):
    """
    Base class for SSL-related client errors; raised in the event the
    client-side libraries refuse to communicate with the server.
    """
    pass
class ClientCertificateExpiredException(ClientSSLException):
    """
    Raised when the client certificate has expired. The
    client-side libraries will check for this before initiating the request.
    """

    def __init__(self, cert_filename):
        """
        @param cert_filename: path to the expired certificate file
        @type  cert_filename: str
        """
        Exception.__init__(self)
        self.cert_filename = cert_filename
class CertificateVerificationException(ClientSSLException):
    """
    Raised when the client does not trust the authority that signed the server's SSL certificate.
    This could indicate a man-in-the-middle attack, a self-signed certificate, or a certificate
    signed by an untrusted certificate authority.
    """
    pass
class MissingCAPathException(ClientSSLException):
    """
    Raised when the bindings are given a ca_path that either doesn't exist or can't be determined to
    exist due to permissions.
    """
    pass
class ConnectionException(Exception):
    """
    Exception to indicate a less than favorable response from the server.

    The arguments are: [0] the response status as an integer; [1] the
    response message as a dict if we managed to decode it from json,
    or a str if we didn't; [2] potentially a traceback if the server
    response was a python error, otherwise None.
    """
    pass
| gpl-2.0 |
lpramuk/automation-tools | tests/test_log.py | 4 | 5728 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from automation_tools.satellite6.log import LogAnalyzer
@pytest.fixture(autouse=True)
def execute(mocker):
    """Mock fabric's execute function to avoid call through network"""
    patched_execute = mocker.patch('automation_tools.satellite6.log.execute')

    def call_function_for_host(function, host):
        # Mimic fabric.execute: run the task locally and report the host.
        function()
        return host

    patched_execute.side_effect = call_function_for_host
    return patched_execute
def run_mock_helper(mocker, return_value):
    """Patch fabric's run in the log module with a fixed return value."""
    patched_run = mocker.patch('automation_tools.satellite6.log.run')
    patched_run.return_value = return_value
    return patched_run
@pytest.fixture
def run_50(mocker):
    """Mock fabric's run function to avoid call through network, returning
    the string '50' every time run is executed"""
    return run_mock_helper(mocker, '50')
@pytest.fixture
def run_with_error(mocker):
    """Mock fabric's run function to avoid call through network, returning
    a shell 'No such file or directory' error string for every call"""
    return run_mock_helper(
        mocker,
        '/bin/bash: /var/log/foreman-installer/satellite.log: No such file '
        'or directory')
def test_log_analyzer_enter(run_50):
    """Check with __enter__ calls fabric functions to get log files state"""
    analyzer = LogAnalyzer('root@foo.bar')
    analyzer.__enter__()
    # Every tracked log file should be sized once via `wc -l` (mocked to 50).
    expected_state = {
        '/var/log/foreman-installer/satellite.log': 50,
        '/var/log/foreman-installer/capsule.log': 50,
        '/var/log/satellite-installer/satellite-installer.log': 50,
        '/var/log/capsule-installer/capsule-installer.log': 50,
        '/var/log/foreman/production.log': 50,
        '/var/log/foreman-proxy/proxy.log': 50,
        '/var/log/candlepin/candlepin.log': 50,
        '/var/log/messages': 50,
        '/var/log/mongodb/mongodb.log': 50,
        '/var/log/tomcat/catalina.out': 50
    }
    assert analyzer.log_state == expected_state
    assert run_50.call_count == len(expected_state)
    for log_file in expected_state:
        run_50.assert_any_call('wc -l < %s' % log_file, quiet=True)
    # Asserting that calling enter again will calculate the delta
    run_50.return_value = 55
    analyzer._update_log_files_state()  # noqa
    for lines_appended in analyzer.log_state.values():
        assert 5 == lines_appended  # result of 55 - 50
def test_log_analyzer_exit(mocker):
    """Check exit get lines appended on log files"""
    analyzer = LogAnalyzer('root@foo.bar')
    # Mocking
    analyzer._update_log_files_state = mocker.Mock()  # noqa
    # Defining log state with files with and without lines appended
    log_with_lines_appended = {
        '/var/log/candlepin/candlepin.log': 1,
        '/var/log/messages': 2,
        '/var/log/mongodb/mongodb.log': 3,
        '/var/log/tomcat/catalina.out': 4
    }
    log_without_lines_appended = {
        '/var/log/foreman-installer/satellite.log': 0,
        '/var/log/foreman/production.log': -1,
        '/var/log/foreman-proxy/proxy.log': 0,
    }
    analyzer.log_state.update(log_with_lines_appended)
    analyzer.log_state.update(log_without_lines_appended)
    # Defining content which will be returned for files with lines appended
    log_files_content = {
        '/var/log/candlepin/candlepin.log': 'foo',
        '/var/log/messages': 'bar',
        '/var/log/mongodb/mongodb.log': 'baz',
        '/var/log/tomcat/catalina.out': 'blah'
    }

    def tail_side_effect(tail_cmd, quiet):
        # Return the canned content for whichever log file the tail/grep
        # command targets.
        assert quiet
        for log_file, content in log_files_content.items():
            if tail_cmd.endswith(log_file):
                return content

    run_mock = mocker.patch('automation_tools.satellite6.log.run')
    run_mock.side_effect = tail_side_effect
    analyzer.__exit__(None, None, None)
    analyzer._update_log_files_state.assert_called_once_with()  # noqa
    # Only the files that grew should have been grepped for errors.
    assert run_mock.call_count == len(log_with_lines_appended)
    for log_file, lines_appended in log_with_lines_appended.items():
        cmd = (
            'tail -n {lines} {file} | grep -e "ERROR" '
            '-e "EXCEPTION" '
            '-e "returned 1 instead of one of \\[0\\]" '
            '-e "Could not find the inverse association for repository" '
            '-e "undefined method" '
            '{file}'
        )
        run_mock.assert_any_call(
            cmd.format(lines=lines_appended, file=log_file), quiet=True)
def test_log_analyzer_file_not_available(run_with_error):
    """When the remote log files are unavailable, both __enter__ and
    __exit__ must reset every recorded line count to zero.

    ``run_with_error`` is a fixture making the remote command fail.
    """
    initial_state = {
        '/var/log/foreman-installer/satellite.log': 50,
        '/var/log/foreman/production.log': 50,
        '/var/log/foreman-proxy/proxy.log': 50,
        '/var/log/candlepin/candlepin.log': 50,
        '/var/log/messages': 50,
        '/var/log/mongodb/mongodb.log': 50,
        '/var/log/tomcat/catalina.out': 50
    }
    analyzer = LogAnalyzer('root@foo.bar')

    # Entering the context zeroes the state of every (unavailable) file.
    analyzer.log_state = dict(initial_state)
    analyzer.__enter__()
    assert run_with_error.call_count == len(initial_state)
    for appended in analyzer.log_state.values():
        assert appended == 0

    # Exiting the context behaves identically.
    run_with_error.reset_mock()
    analyzer.log_state = dict(initial_state)
    analyzer.__exit__(None, None, None)
    assert run_with_error.call_count == len(initial_state)
    for appended in analyzer.log_state.values():
        assert appended == 0
@pytest.fixture
def print_mock(mocker):
    """Mock the ``_print_wrapper`` function of automation_tools.satellite6.log."""
    return mocker.patch('automation_tools.satellite6.log._print_wrapper')
@pytest.fixture
def save_log_mock(mocker):
    # Docstring previously said "_print_wrapper" (copy-paste error); this
    # fixture actually patches _save_full_log.
    """Mock the ``_save_full_log`` function of automation_tools.satellite6.log."""
    return mocker.patch('automation_tools.satellite6.log._save_full_log')
| gpl-3.0 |
chaudum/crate-viz | src/crate/__init__.py | 1 | 1227 | # -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
# this is a namespace package: prefer setuptools' declare_namespace, and
# fall back to the stdlib pkgutil mechanism when setuptools is absent.
try:
    import pkg_resources
    pkg_resources.declare_namespace(__name__)
except ImportError:
    # No setuptools available; extend __path__ manually instead.
    import pkgutil
    __path__ = pkgutil.extend_path(__path__, __name__)
| apache-2.0 |
JesseLivezey/plankton | pylearn2/scripts/datasets/download_binarized_mnist.py | 44 | 1673 | """
Download script for the unlabeled version of the MNIST dataset, used in
On the Quantitative Analysis of Deep Belief Networks
Salakhutdinov and Murray
http://www.mit.edu/~rsalakhu/papers/dbn_ais.pdf
The MNIST database of handwritten digits
LeCun and Cortes
http://yann.lecun.com/exdb/mnist/
"""
from __future__ import print_function
__authors__ = "Vincent Dumoulin"
__copyright__ = "Copyright 2014, Universite de Montreal"
__credits__ = ["Vincent Dumoulin"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import os
import urllib
import numpy
# The dataset is stored under $PYLEARN2_DATA_PATH/binarized_mnist.
assert 'PYLEARN2_DATA_PATH' in os.environ, "PYLEARN2_DATA_PATH not defined"
mnist_path = os.path.join(os.environ['PYLEARN2_DATA_PATH'], "binarized_mnist")
if not os.path.isdir(mnist_path):
    print("creating path: " + mnist_path)
    os.makedirs(mnist_path)
in_dir = os.listdir(mnist_path)
mnist_files = ["binarized_mnist_train", "binarized_mnist_valid",
               "binarized_mnist_test"]
base_url = "http://www.cs.toronto.edu/~larocheh/public/datasets/" + \
           "binarized_mnist/"
# Download only if any of the three .npy splits is missing.
if not all([f + ".npy" in in_dir for f in mnist_files]) or in_dir == []:
    print("Downloading MNIST data...")
    npy_out = [os.path.join(mnist_path, f + ".npy") for f in mnist_files]
    mnist_url = ["".join([base_url, f, ".amat"]) for f in mnist_files]
    for n_out, m_url in zip(npy_out, mnist_url):
        print("Downloading " + m_url + "...", end='')
        # NOTE(review): urllib.urlretrieve is Python-2-only API; under
        # Python 3 this would need urllib.request.urlretrieve.
        # The .amat text file is parsed by loadtxt and re-saved as .npy.
        numpy.save(n_out, numpy.loadtxt(urllib.urlretrieve(m_url)[0]))
        print(" Done")
    print("Done downloading MNIST")
else:
    print("MNIST files already in PYLEARN2_DATA_PATH")
| bsd-3-clause |
mkaluza/external_chromium_org | tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py | 26 | 3046 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import value
from telemetry.page import page_set
from telemetry.value import list_of_scalar_values
class TestBase(unittest.TestCase):
    """Shared fixture for the merge tests: a PageSet with three pages."""

    def setUp(self):
        self.page_set = page_set.PageSet.FromDict({
            "description": "hello",
            "archive_path": "foo.wpr",
            "pages": [
                {"url": "http://www.bar.com/"},
                {"url": "http://www.baz.com/"},
                {"url": "http://www.foo.com/"}
            ]
        }, os.path.dirname(__file__))

    @property
    def pages(self):
        # Convenience accessor used by subclasses.
        return self.page_set.pages
class ValueTest(TestBase):
    """Tests for merging ListOfScalarValues under the two merge policies."""

    def testListSamePageMergingWithSamePageConcatenatePolicy(self):
        # CONCATENATE: values from the same page are appended together.
        page0 = self.pages[0]
        v0 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [1,2], same_page_merge_policy=value.CONCATENATE)
        v1 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [3,4], same_page_merge_policy=value.CONCATENATE)
        self.assertTrue(v1.IsMergableWith(v0))

        vM = (list_of_scalar_values.ListOfScalarValues.
              MergeLikeValuesFromSamePage([v0, v1]))
        self.assertEquals(page0, vM.page)
        self.assertEquals('x', vM.name)
        self.assertEquals('unit', vM.units)
        self.assertEquals(value.CONCATENATE, vM.same_page_merge_policy)
        self.assertEquals(True, vM.important)
        self.assertEquals([1, 2, 3, 4], vM.values)

    def testListSamePageMergingWithPickFirstPolicy(self):
        # PICK_FIRST: only the first value's list survives the merge.
        page0 = self.pages[0]
        v0 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [1,2], same_page_merge_policy=value.PICK_FIRST)
        v1 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [3,4], same_page_merge_policy=value.PICK_FIRST)
        self.assertTrue(v1.IsMergableWith(v0))

        vM = (list_of_scalar_values.ListOfScalarValues.
              MergeLikeValuesFromSamePage([v0, v1]))
        self.assertEquals(page0, vM.page)
        self.assertEquals('x', vM.name)
        self.assertEquals('unit', vM.units)
        self.assertEquals(value.PICK_FIRST, vM.same_page_merge_policy)
        self.assertEquals(True, vM.important)
        self.assertEquals([1, 2], vM.values)

    def testListDifferentPageMerging(self):
        # Cross-page merging always concatenates and drops the page.
        # NOTE(review): despite the test name, both values are built from
        # page0 (self.pages[1] is never used) — confirm this is intended.
        page0 = self.pages[0]
        v0 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [1, 2], same_page_merge_policy=value.PICK_FIRST)
        v1 = list_of_scalar_values.ListOfScalarValues(
            page0, 'x', 'unit',
            [3, 4], same_page_merge_policy=value.PICK_FIRST)
        self.assertTrue(v1.IsMergableWith(v0))

        vM = (list_of_scalar_values.ListOfScalarValues.
              MergeLikeValuesFromDifferentPages([v0, v1]))
        self.assertEquals(None, vM.page)
        self.assertEquals('x', vM.name)
        self.assertEquals('unit', vM.units)
        self.assertEquals(value.PICK_FIRST, vM.same_page_merge_policy)
        self.assertEquals(True, vM.important)
        self.assertEquals([1, 2, 3, 4], vM.values)
| bsd-3-clause |
zouyapeng/horizon | openstack_dashboard/dashboards/project/data_processing/nodegroup_templates/workflows/create.py | 6 | 11799 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from saharaclient.api import base as api_base
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard.api import network
from openstack_dashboard.api import sahara as saharaclient
from openstack_dashboard.dashboards.project.data_processing.utils \
import helpers
from openstack_dashboard.dashboards.project.data_processing.utils \
import workflow_helpers
from openstack_dashboard.dashboards.project.instances \
import utils as nova_utils
LOG = logging.getLogger(__name__)
class GeneralConfigAction(workflows.Action):
    """Form action holding the general settings of a node group template.

    Static fields cover name/description/flavor/storage; plugin-specific
    fields (processes, floating IP pool, security groups, node parameters)
    are added dynamically in __init__.
    """

    nodegroup_name = forms.CharField(label=_("Template Name"))

    description = forms.CharField(label=_("Description"),
                                  required=False,
                                  widget=forms.Textarea)

    flavor = forms.ChoiceField(label=_("OpenStack Flavor"))

    storage = forms.ChoiceField(
        label=_("Storage location"),
        help_text=_("Choose a storage location"),
        choices=[("ephemeral_drive", "Ephemeral Drive"),
                 ("cinder_volume", "Cinder Volume")],
        widget=forms.Select(attrs={"class": "storage_field"}))

    # Volume fields are only meaningful when storage == "cinder_volume".
    volumes_per_node = forms.IntegerField(
        label=_("Volumes per node"),
        required=False,
        initial=1,
        widget=forms.TextInput(attrs={"class": "volume_per_node_field"})
    )

    volumes_size = forms.IntegerField(
        label=_("Volumes size (GB)"),
        required=False,
        initial=10,
        widget=forms.TextInput(attrs={"class": "volume_size_field"})
    )

    hidden_configure_field = forms.CharField(
        required=False,
        widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"}))

    def __init__(self, request, *args, **kwargs):
        super(GeneralConfigAction, self).__init__(request, *args, **kwargs)

        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        # Build "service:process" choices from the plugin's node processes.
        process_choices = []
        try:
            version_details = saharaclient.plugin_get_version_details(
                request,
                plugin,
                hadoop_version)
            for service, processes in version_details.node_processes.items():
                for process in processes:
                    process_choices.append(
                        (str(service) + ":" + str(process), process))
        except Exception:
            exceptions.handle(request,
                              _("Unable to generate process choices."))

        # Only offer a floating IP pool choice when auto-allocation is off.
        if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
            pools = network.floating_ip_pools_list(request)
            pool_choices = [(pool.id, pool.name) for pool in pools]
            pool_choices.insert(0, (None, "Do not assign floating IPs"))

            self.fields['floating_ip_pool'] = forms.ChoiceField(
                label=_("Floating IP pool"),
                choices=pool_choices,
                required=False)

        self.fields["autogroup"] = forms.BooleanField(
            label=_("Auto Security Group"),
            widget=forms.CheckboxInput(),
            help_text=_("Create security group for this Node Group."),
            required=False)

        groups = network.security_group_list(request)
        security_group_list = [(sg.id, sg.name) for sg in groups]
        self.fields["groups"] = forms.MultipleChoiceField(
            label=_("Security Groups"),
            widget=forms.CheckboxSelectMultiple(),
            help_text=_("Launch instances in these security groups."),
            choices=security_group_list,
            required=False)

        self.fields["processes"] = forms.MultipleChoiceField(
            label=_("Processes"),
            widget=forms.CheckboxSelectMultiple(),
            help_text=_("Processes to be launched in node group"),
            choices=process_choices)

        # Carry the plugin selection through as hidden fields.
        self.fields["plugin_name"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=plugin
        )
        self.fields["hadoop_version"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=hadoop_version
        )

        # One form control per plugin-defined node group parameter.
        node_parameters = hlps.get_general_node_group_configs(plugin,
                                                              hadoop_version)
        for param in node_parameters:
            self.fields[param.name] = workflow_helpers.build_control(param)

    def populate_flavor_choices(self, request, context):
        """Return the available nova flavors, sorted, as choice tuples."""
        flavors = nova_utils.flavor_list(request)
        if flavors:
            return nova_utils.sort_flavor_list(request, flavors)
        return []

    def get_help_text(self):
        # Inject the plugin selection into the help-text template context.
        extra = dict()
        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(self.request))
        extra["plugin_name"] = plugin
        extra["hadoop_version"] = hadoop_version
        return super(GeneralConfigAction, self).get_help_text(extra)

    class Meta:
        name = _("Configure Node Group Template")
        help_text_template = (
            "project/data_processing.nodegroup_templates"
            "/_configure_general_help.html")
class GeneralConfig(workflows.Step):
    """Workflow step wrapping GeneralConfigAction.

    Copies the cleaned form data into the workflow context under a
    ``general_`` prefix.
    """

    action_class = GeneralConfigAction
    contributes = ("general_nodegroup_name", )

    def contribute(self, data, context):
        # Prefix every non-hidden field with "general_", normalizing the
        # literal string "None" to a real None.
        for key, val in data.items():
            if "hidden" in key:
                continue
            if val == "None":
                val = None
            context["general_" + key] = val

        # The multi-valued checkbox field must be read from the raw POST.
        context['general_processes'] = (
            self.workflow.request.POST.getlist("processes"))
        return context
class ConfigureNodegroupTemplate(workflow_helpers.ServiceParametersWorkflow,
                                 workflow_helpers.StatusFormatMixin):
    """Workflow that collects and creates a sahara node group template."""

    slug = "configure_nodegroup_template"
    name = _("Create Node Group Template")
    finalize_button_name = _("Create")
    success_message = _("Created Node Group Template %s")
    name_property = "general_nodegroup_name"
    success_url = "horizon:project:data_processing.nodegroup_templates:index"
    default_steps = (GeneralConfig,)

    def __init__(self, request, context_seed, entry_point, *args, **kwargs):
        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        # Build the per-service parameter tabs before the base workflow
        # initializes its steps.
        general_parameters = hlps.get_general_node_group_configs(
            plugin,
            hadoop_version)
        service_parameters = hlps.get_targeted_node_group_configs(
            plugin,
            hadoop_version)

        self._populate_tabs(general_parameters, service_parameters)

        super(ConfigureNodegroupTemplate, self).__init__(request,
                                                         context_seed,
                                                         entry_point,
                                                         *args, **kwargs)

    def is_valid(self):
        """Validate only the steps whose service has a selected process."""
        missing = self.depends_on - set(self.context.keys())
        if missing:
            raise exceptions.WorkflowValidationError(
                "Unable to complete the workflow. The values %s are "
                "required but not present." % ", ".join(missing))

        # Each checked process is "service:process"; collect the services.
        checked_steps = []
        if "general_processes" in self.context:
            checked_steps = self.context["general_processes"]
        enabled_services = set([])
        for process_name in checked_steps:
            enabled_services.add(str(process_name).split(":")[0])

        steps_valid = True
        for step in self.steps:
            process_name = str(getattr(step, "process_name", None))
            # Skip service steps whose service is not enabled; the general
            # step is always validated.
            if process_name not in enabled_services and \
                    not isinstance(step, GeneralConfig):
                continue
            if not step.action.is_valid():
                steps_valid = False
                step.has_errors = True
        if not steps_valid:
            return steps_valid
        return self.validate(self.context)

    def handle(self, request, context):
        """Create the template via the sahara client.

        Returns True on success, False on an API error; other exceptions
        are handed to horizon's generic handler (and fall through to None).
        """
        try:
            # Strip the "service:" prefix from each selected process.
            processes = []
            for service_process in context["general_processes"]:
                processes.append(str(service_process).split(":")[1])

            configs_dict = (
                workflow_helpers.parse_configs_from_context(
                    context, self.defaults))

            plugin, hadoop_version = (
                workflow_helpers.get_plugin_and_hadoop_version(request))

            # Volumes are only passed along for cinder-backed storage.
            volumes_per_node = None
            volumes_size = None
            if context["general_storage"] == "cinder_volume":
                volumes_per_node = context["general_volumes_per_node"]
                volumes_size = context["general_volumes_size"]

            saharaclient.nodegroup_template_create(
                request,
                name=context["general_nodegroup_name"],
                plugin_name=plugin,
                hadoop_version=hadoop_version,
                description=context["general_description"],
                flavor_id=context["general_flavor"],
                volumes_per_node=volumes_per_node,
                volumes_size=volumes_size,
                node_processes=processes,
                node_configs=configs_dict,
                floating_ip_pool=context.get("general_floating_ip_pool"),
                security_groups=context["general_groups"],
                auto_security_group=context["general_autogroup"])
            return True
        except api_base.APIException as e:
            self.error_description = str(e)
            return False
        except Exception:
            exceptions.handle(request)
class SelectPluginAction(workflows.Action,
                         workflow_helpers.PluginAndVersionMixin):
    """First-page action: choose a sahara plugin and hadoop version.

    The plugin/version choice fields are generated dynamically by the
    PluginAndVersionMixin from the live sahara client.
    """

    hidden_create_field = forms.CharField(
        required=False,
        widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))

    def __init__(self, request, *args, **kwargs):
        super(SelectPluginAction, self).__init__(request, *args, **kwargs)

        sahara = saharaclient.client(request)
        self._generate_plugin_version_fields(sahara)

    class Meta:
        name = _("Select plugin and hadoop version")
        help_text_template = ("project/data_processing.nodegroup_templates"
                              "/_create_general_help.html")
class SelectPlugin(workflows.Step):
    """Step recording the chosen plugin name and hadoop version."""

    action_class = SelectPluginAction
    contributes = ("plugin_name", "hadoop_version")

    def contribute(self, data, context):
        context = super(SelectPlugin, self).contribute(data, context)
        plugin = data.get('plugin_name', None)
        context["plugin_name"] = plugin
        # The version field is named "<plugin>_version" by the mixin that
        # generated the plugin/version form fields.
        context["hadoop_version"] = data.get(plugin + "_version", None)
        return context
class CreateNodegroupTemplate(workflows.Workflow):
    """Entry workflow: pick a plugin/version before configuring a template."""

    slug = "create_nodegroup_template"
    name = _("Create Node Group Template")
    finalize_button_name = _("Create")
    success_message = _("Created")
    failure_message = _("Could not create")
    success_url = "horizon:project:data_processing.nodegroup_templates:index"
    default_steps = (SelectPlugin,)
| apache-2.0 |
carvalhomb/tsmells | fetch/src/snavtofamix/src/common/entity_references/EntityReferences.py | 1 | 10176 | # !/usr/bin/python
# This file is part of snavtofamix (Source Navigator to FAMIX).
#
# snavtofamix is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# snavtofamix is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with snavtofamix; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Copyright 2006,2007 University of Antwerp
# Author(s): Bart Van Rompaey <bart.vanrompaey2@ua.ac.be>,
# Bart Du Bois <bart.dubois@ua.ac.be>
from MultipleLocationEntity import MultipleLocationEntity
from common.ResolveableInformation import TypeReference
##
# Encapsulation of data necessary to refer to an entity.
# The idea is to replace all low-level data from Entities
# by an instance of an EntityReference (or its future subclasses).
##
class EntityReference:
    """Encapsulation of the data necessary to refer to an entity.

    A reference pins down a source location by file name and line number;
    subclasses add naming and scoping information. Attributes start out as
    None and are populated via the setters.
    """

    def __init__(self):
        self.sourceFile = None
        self.lineNr = None

    def getSourceFile(self):
        return self.sourceFile

    def setSourceFile(self, sourceFile):
        self.sourceFile = sourceFile

    def getLineNr(self):
        return self.lineNr

    def setLineNr(self, lineNr):
        self.lineNr = lineNr

    def __eq__(self, other):
        """References are equal iff they have the exact same type, source
        file and line number. Comparing with None (or any other type)
        yields False."""
        isEqual = False
        if (other is not None) and (type(other) == type(self)):
            # `and` short-circuits; the original bitwise `&` evaluated both
            # operands eagerly for the same boolean result.
            isEqual = (other.getSourceFile() == self.getSourceFile()
                       and other.getLineNr() == self.getLineNr())
        return isEqual

    def __repr__(self):
        # repr() replaces the Python-2-only backtick syntax (a SyntaxError
        # on Python 3); one attribute per line for readability.
        return repr(self.__dict__).replace("',", "'\n")
##
# Encapsulation of data required to refer to a named entity
# that is possibly located in a namespace.
##
class NamedEntityReference(EntityReference):
    """An entity reference that additionally carries a name."""

    def __init__(self):
        # NOTE(review): EntityReference.__init__ is deliberately not
        # chained here (preserving the original behavior), so sourceFile
        # and lineNr remain unset until their setters are called.
        self.name = None
        self.namespaceName = None

    def getName(self):
        # An unset name reads as the empty string.
        return "" if self.name is None else self.name

    def setName(self, name):
        self.name = name

    def __eq__(self, other):
        if other is None:
            return False
        return (EntityReference.__eq__(self, other)
                and other.getName() == self.getName())
class NamespaceElementEntityReference(NamedEntityReference):
    """A named entity reference that may be located in a namespace."""

    def getNamespaceName(self):
        # An unset namespace reads as the empty string.
        return "" if self.namespaceName is None else self.namespaceName

    def setNamespaceName(self, namespaceName):
        self.namespaceName = namespaceName

    def __eq__(self, other):
        if other is None:
            return False
        return (NamedEntityReference.__eq__(self, other)
                and other.getNamespaceName() == self.getNamespaceName())
##
# Encapsulation of data specific to refer to an
# attribute or method. This reference does not
# derive from NamespaceElementEntityReference, as it should
# always be subclassed in combination with an
# other superclass that *is* a subclass of NamespaceElementEntityReference.
##
class ClassMemberEntityReference:
    """Mixin with the data needed to refer to a class member (attribute or
    method): the owning class name and its template parameters.

    Not derived from NamespaceElementEntityReference on purpose — it is
    meant to be combined with such a subclass via multiple inheritance.
    """

    def __init__(self):
        self.ownerName = None
        self.ownerTemplateParameters = None

    def getOwnerName(self):
        return self.ownerName if self.ownerName is not None else ""

    def setOwnerName(self, name):
        self.ownerName = name

    def getOwnerTemplateParameters(self):
        params = self.ownerTemplateParameters
        return params if params is not None else ""

    def setOwnerTemplateParameters(self, parameters):
        self.ownerTemplateParameters = parameters

    def __eq__(self, other):
        # Equal iff the other object has the exact same type and matching
        # owner name and template parameters.
        if other is None or type(other) != type(self):
            return False
        return (other.getOwnerName() == self.getOwnerName()
                and (other.getOwnerTemplateParameters()
                     == self.getOwnerTemplateParameters()))
##
# Encapsulation of the information
# required to refer to a function or method.
##
class InvokableEntityReference(NamespaceElementEntityReference):
    """Encapsulation of the information required to refer to a function
    (or, via the MethodEntityReference subclass, a method)."""

    def __init__(self):
        NamespaceElementEntityReference.__init__(self)
        self.formalParameters = None
        self.isAnImplementation = None
        self.locationEntity = None
        self.typeReference = None

    def getFormalParameters(self):
        # An unset parameter list reads as the empty string.
        if self.formalParameters == None:
            return ""
        else:
            return self.formalParameters

    def getBracketLessFormalParameters(self):
        """Return the formal parameters without surrounding parentheses."""
        return self.getFormalParameters().lstrip("(").rstrip(")")

    def setFormalParameters(self, formalParameters):
        # Normalize pointer/array spacing ("int *" -> "int*", "x []" -> "x[]")
        # so equivalent signatures compare equal.
        self.formalParameters = formalParameters.replace(" *", "*").replace(" []", "[]")

    def getSignature(self):
        """Name plus formal parameter list, e.g. "foo(int,char*)"."""
        return self.getName() + self.getFormalParameters()

    def getOwnerName(self):
        # Free functions have no owning class.
        return ""

    def getOwnerTemplateParameters(self):
        return ""

    def isAbstract(self):
        # Free functions are never abstract; overridden for methods.
        return False

    def isImplementation(self):
        return self.isAnImplementation

    def setIsImplementation(self, isImplementation):
        self.isAnImplementation = isImplementation

    def getUniqueName(self):
        """Signature, prefixed with "<namespace>::" when one is set."""
        uniqueName = self.getSignature()
        if self.getNamespaceName() != "":
            uniqueName = self.getNamespaceName() + "::" + uniqueName
        return uniqueName

    ##
    # Set the MultipleLocationEntity instance
    # characterizing the location of the entity
    # refered to in this reference.
    ##
    def setLocation(self, locationEntity):
        self.locationEntity = locationEntity

    ##
    # Return a MultipleLocationEntity instance
    # characterizing the location of the entity
    # refered to in this reference.
    ##
    def getLocation(self):
        return self.locationEntity

    def getTypeReference(self):
        # An unset type reads as an empty TypeReference.
        if self.typeReference == None:
            return TypeReference()
        else:
            return self.typeReference

    def setTypeReference(self, typeReference):
        self.typeReference = typeReference

    ##
    # Verify whether this invokeable entity is equal to a given one.
    # This verification is a.o. used to discover whether an invokeable entity
    # might be the implementation of a declaration that is already
    # in the dictionary.
    ##
    def __eq__(self, other):
        isEqual = (other != None) & (type(other) == type(self))
        if isEqual:
            isEqual = other.getName() == self.getName()
            isEqual = isEqual & (other.getFormalParameters() == self.getFormalParameters())
            isEqual = isEqual & (other.getNamespaceName() == self.getNamespaceName())
            #isEqual = isEqual & (other.getLocation() == self.getLocation())
            #isEqual = isEqual & (other.isImplementation() == self.isImplementation())
            # by ignoring the isImplementation, we ensure that an implementation
            # is equal to a definition and vice versa
            if isEqual:
                haveEqualType = (other.getTypeReference() == self.getTypeReference())
                isEqual = isEqual & haveEqualType
        return isEqual
##
# Encapsulation of the information
# required to refer to a method.
##
class MethodEntityReference(InvokableEntityReference, ClassMemberEntityReference):
    """Encapsulation of the information required to refer to a method:
    an invokable that additionally belongs to an owning class."""

    def __init__(self):
        InvokableEntityReference.__init__(self)
        ClassMemberEntityReference.__init__(self)
        self.isAbstractMethod = None
        self.parentReference = None
        self.modifiers = None
        self.hasClassScope = False

    def isConstructor(self):
        # A method whose name equals its owning class name is a constructor.
        return (self.getName() == self.getOwnerName())

    def setParentReference(self, parentReference):
        self.parentReference = parentReference

    def getParentReference(self):
        # An unset parent reads as an empty TypeReference.
        if self.parentReference == None:
            return TypeReference()
        else:
            return self.parentReference

    # Resolve the owner accessors to the ClassMemberEntityReference side of
    # the diamond (InvokableEntityReference returns "" for both).
    def getOwnerName(self):
        return ClassMemberEntityReference.getOwnerName(self)

    def getOwnerTemplateParameters(self):
        return ClassMemberEntityReference.getOwnerTemplateParameters(self)

    def isAbstract(self):
        return self.isAbstractMethod

    def setIsAbstract(self, isAbstract):
        self.isAbstractMethod = isAbstract

    def setClassScope(self, classScope):
        self.hasClassScope = classScope

    def getClassScope(self):
        return self.hasClassScope

    def setModifiers(self, modifiers):
        self.modifiers = modifiers

    def getModifiers(self):
        return self.modifiers

    def getUniqueName(self):
        """Return "[namespace::]Owner[<T>].signature" for this method."""
        uniqueName = self.getSignature()
        ownerName = self.getOwnerName()
        if self.getOwnerTemplateParameters() != "":
            ownerName += "<" + self.getOwnerTemplateParameters() + ">"
        uniqueName = ownerName + "." + uniqueName
        if self.getNamespaceName() != "":
            uniqueName = self.getNamespaceName() + "::" + uniqueName
        return uniqueName

    def __eq__(self, other):
        # Equal iff both the invokable part and the class-member part match,
        # plus the abstractness flag; location is deliberately ignored.
        isEqual = (other != None) & (type(other) == type(self))
        if isEqual:
            isEqual = InvokableEntityReference.__eq__(self, other)
            isEqual = isEqual & ClassMemberEntityReference.__eq__(self, other)
            isEqual = isEqual & (other.isAbstract() == self.isAbstract())
            #isEqual = isEqual & (other.getLocation() == self.getLocation())
        return isEqual
class AccessReference:
    """Represents a language independent data access.

    The src_* fields describe the accessing method; the dst_* fields
    presumably name the accessed entity and its owner (per the field
    names); sourceFile/start locate the access.
    """

    def __init__(self):
        self.src_class = None        # class owning the accessing method
        self.src_name = None         # name of the accessing method
        self.formalSrcParams = None  # formal parameters of that method
        self.dst_owner = None
        self.dst_name = None
        self.sourceFile = None
        self.start = None

    def getSrcUniqueName(self):
        """Return the accessing method's unique name, e.g. "C.m(int)"."""
        return ''.join([self.src_class, ".", self.src_name,
                        "(", self.formalSrcParams, ")"])
| gpl-2.0 |
sinbazhou/odoo | addons/base_gengo/ir_translation.py | 343 | 4344 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
# Maps Odoo locale codes to a (Gengo language code, display label) pair.
# Note that several locales collapse onto one Gengo code (e.g. fr_FR and
# fr_BE both map to 'fr').
LANG_CODE_MAPPING = {
    'ar_SY': ('ar', 'Arabic'),
    'id_ID': ('id', 'Indonesian'),
    'nl_NL': ('nl', 'Dutch'),
    'fr_CA': ('fr-ca', 'French (Canada)'),
    'pl_PL': ('pl', 'Polish'),
    'zh_TW': ('zh-tw', 'Chinese (Traditional)'),
    'sv_SE': ('sv', 'Swedish'),
    'ko_KR': ('ko', 'Korean'),
    'pt_PT': ('pt', 'Portuguese (Europe)'),
    'en_US': ('en', 'English'),
    'ja_JP': ('ja', 'Japanese'),
    'es_ES': ('es', 'Spanish (Spain)'),
    'zh_CN': ('zh', 'Chinese (Simplified)'),
    'de_DE': ('de', 'German'),
    'fr_FR': ('fr', 'French'),
    'fr_BE': ('fr', 'French'),
    'ru_RU': ('ru', 'Russian'),
    'it_IT': ('it', 'Italian'),
    'pt_BR': ('pt-br', 'Portuguese (Brazil)'),
    'th_TH': ('th', 'Thai'),
    'nb_NO': ('no', 'Norwegian'),
    'ro_RO': ('ro', 'Romanian'),
    'tr_TR': ('tr', 'Turkish'),
    'bg_BG': ('bg', 'Bulgarian'),
    'da_DK': ('da', 'Danish'),
    'en_GB': ('en-gb', 'English (British)'),
    'el_GR': ('el', 'Greek'),
    'vi_VN': ('vi', 'Vietnamese'),
    'he_IL': ('he', 'Hebrew'),
    'hu_HU': ('hu', 'Hungarian'),
    'fi_FI': ('fi', 'Finnish')
}
class ir_translation(osv.Model):
    """Extend ir.translation with Gengo order tracking and service level."""

    _name = "ir.translation"
    _inherit = "ir.translation"

    _columns = {
        'gengo_comment': fields.text("Comments & Activity Linked to Gengo"),
        'order_id': fields.char('Gengo Order ID', size=32),
        "gengo_translation": fields.selection([('machine', 'Translation By Machine'),
                                               ('standard', 'Standard'),
                                               ('pro', 'Pro'),
                                               ('ultra', 'Ultra')], "Gengo Translation Service Level", help='You can select here the service level you want for an automatic translation using Gengo.'),
    }

    def _get_all_supported_languages(self, cr, uid, context=None):
        """Return {gengo target language code: [supported tiers]}.

        Raises an osv error when the Gengo credentials are invalid.
        """
        flag, gengo = self.pool.get('base.gengo.translations').gengo_authentication(cr, uid, context=context)
        if not flag:
            raise osv.except_osv(_('Gengo Authentication Error'), gengo)
        supported_langs = {}
        lang_pair = gengo.getServiceLanguagePairs(lc_src='en')
        if lang_pair['opstat'] == 'ok':
            # Group the available tiers per target language code.
            for g_lang in lang_pair['response']:
                if g_lang['lc_tgt'] not in supported_langs:
                    supported_langs[g_lang['lc_tgt']] = []
                supported_langs[g_lang['lc_tgt']] += [g_lang['tier']]
        return supported_langs

    # NOTE(review): defined inside the class without `self` or
    # @staticmethod — when called on an instance/model, `cr` would receive
    # the record; confirm the intended call convention.
    def _get_gengo_corresponding_language(cr, lang):
        # Fall back to the raw code when the locale is not in the mapping.
        return lang in LANG_CODE_MAPPING and LANG_CODE_MAPPING[lang][0] or lang

    def _get_source_query(self, cr, uid, name, types, lang, source, res_id):
        """Prefer Gengo-translated terms: append an ORDER BY that ranks
        pro > ultra > standard > machine > untranslated (parameterized)."""
        query, params = super(ir_translation, self)._get_source_query(cr, uid, name, types, lang, source, res_id)
        query += """
            ORDER BY
                CASE
                    WHEN gengo_translation=%s then 10
                    WHEN gengo_translation=%s then 20
                    WHEN gengo_translation=%s then 30
                    WHEN gengo_translation=%s then 40
                ELSE 0
                END DESC
        """
        params += ('machine', 'standard', 'ultra', 'pro',)
        return (query, params)
| agpl-3.0 |
sebrandon1/nova | nova/tests/unit/api/openstack/compute/test_consoles.py | 3 | 11416 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid as stdlib_uuid
from oslo_policy import policy as oslo_policy
from oslo_utils import timeutils
import webob
from nova.api.openstack.compute import consoles as consoles_v21
from nova.compute import vm_states
from nova import exception
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import matchers
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
class FakeInstanceDB(object):
    """In-memory stand-in for the instance DB lookups used by these tests.

    The first time an unknown id or uuid is requested, a stub instance is
    fabricated and registered under both keys.
    """

    def __init__(self):
        self.instances_by_id = {}
        self.ids_by_uuid = {}
        self.max_id = 0

    def return_server_by_id(self, context, id):
        # DB-API-compatible lookup by integer id; auto-creates on miss.
        if id not in self.instances_by_id:
            self._add_server(id=id)
        return dict(self.instances_by_id[id])

    def return_server_by_uuid(self, context, uuid):
        # DB-API-compatible lookup by uuid; auto-creates on miss.
        if uuid not in self.ids_by_uuid:
            self._add_server(uuid=uuid)
        return dict(self.instances_by_id[self.ids_by_uuid[uuid]])

    def _add_server(self, id=None, uuid=None):
        # Fabricate a stub instance, defaulting the id to the next free
        # integer and the uuid to a random one.
        new_id = self.max_id + 1 if id is None else id
        new_uuid = str(stdlib_uuid.uuid4()) if uuid is None else uuid
        self.instances_by_id[new_id] = stub_instance(new_id, uuid=new_uuid)
        self.ids_by_uuid[new_uuid] = new_id
        self.max_id = max(self.max_id, new_id)
def stub_instance(id, user_id='fake', project_id='fake', host=None,
                  vm_state=None, task_state=None,
                  reservation_id="", uuid=FAKE_UUID, image_ref="10",
                  flavor_id="1", name=None, key_name='',
                  access_ipv4=None, access_ipv6=None, progress=0):
    """Build a fake instance dict shaped like a nova DB instance row."""
    if host is not None:
        host = str(host)
    # Presence of a key name implies fake key material.
    key_data = 'FAKE' if key_name else ''
    # A non-empty reservation id wins over any explicit name;
    # otherwise fall back to "server<id>".
    if reservation_id != "":
        server_name = "reservation_%s" % (reservation_id,)
    else:
        server_name = name or "server%s" % id
    instance = {
        "id": int(id),
        "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
        "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
        "admin_pass": "",
        "user_id": user_id,
        "project_id": project_id,
        "image_ref": image_ref,
        "kernel_id": "",
        "ramdisk_id": "",
        "launch_index": 0,
        "key_name": key_name,
        "key_data": key_data,
        "vm_state": vm_state or vm_states.BUILDING,
        "task_state": task_state,
        "memory_mb": 0,
        "vcpus": 0,
        "root_gb": 0,
        "hostname": "",
        "host": host,
        "instance_type": {},
        "user_data": "",
        "reservation_id": reservation_id,
        "mac_address": "",
        "launched_at": timeutils.utcnow(),
        "terminated_at": timeutils.utcnow(),
        "availability_zone": "",
        "display_name": server_name,
        "display_description": "",
        "locked": False,
        "metadata": [],
        "access_ip_v4": access_ipv4,
        "access_ip_v6": access_ipv6,
        "uuid": uuid,
        "progress": progress,
    }
    return instance
class ConsolesControllerTestV21(test.NoDBTestCase):
    """Unit tests for the v2.1 os-consoles API controller.

    The DB layer is replaced by FakeInstanceDB and the console API methods
    are stubbed per test, so no database or console service is needed.
    """
    def setUp(self):
        super(ConsolesControllerTestV21, self).setUp()
        self.flags(verbose=True)
        # Route nova DB instance lookups to the in-memory fake.
        self.instance_db = FakeInstanceDB()
        self.stub_out('nova.db.instance_get',
                      self.instance_db.return_server_by_id)
        self.stub_out('nova.db.instance_get_by_uuid',
                      self.instance_db.return_server_by_uuid)
        self.uuid = str(stdlib_uuid.uuid4())
        self.url = '/v2/fake/servers/%s/consoles' % self.uuid
        self._set_up_controller()
    def _set_up_controller(self):
        # Split out so subclasses can substitute a different controller.
        self.controller = consoles_v21.ConsolesController()
    def test_create_console(self):
        def fake_create_console(cons_self, context, instance_id):
            self.assertEqual(instance_id, self.uuid)
            return {}
        self.stub_out('nova.console.api.API.create_console',
                      fake_create_console)
        req = fakes.HTTPRequest.blank(self.url)
        self.controller.create(req, self.uuid, None)
    def test_create_console_unknown_instance(self):
        # An unknown instance must surface as HTTP 404.
        def fake_create_console(cons_self, context, instance_id):
            raise exception.InstanceNotFound(instance_id=instance_id)
        self.stub_out('nova.console.api.API.create_console',
                      fake_create_console)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
                          req, self.uuid, None)
    def test_show_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
            pool = dict(console_type='fake_type',
                        public_hostname='fake_hostname')
            return dict(id=console_id, password='fake_password',
                        port='fake_port', pool=pool, instance_name='inst-0001')
        # The controller flattens pool fields into the console view.
        expected = {'console': {'id': 20,
                                'port': 'fake_port',
                                'host': 'fake_hostname',
                                'password': 'fake_password',
                                'instance_name': 'inst-0001',
                                'console_type': 'fake_type'}}
        self.stub_out('nova.console.api.API.get_console', fake_get_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        res_dict = self.controller.show(req, self.uuid, '20')
        self.assertThat(res_dict, matchers.DictMatches(expected))
    def test_show_console_unknown_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFound(console_id=console_id)
        self.stub_out('nova.console.api.API.get_console', fake_get_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, self.uuid, '20')
    def test_show_console_unknown_instance(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFoundForInstance(
                instance_uuid=instance_id)
        self.stub_out('nova.console.api.API.get_console', fake_get_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, self.uuid, '20')
    def test_list_consoles(self):
        def fake_get_consoles(cons_self, context, instance_id):
            self.assertEqual(instance_id, self.uuid)
            pool1 = dict(console_type='fake_type',
                         public_hostname='fake_hostname')
            cons1 = dict(id=10, password='fake_password',
                         port='fake_port', pool=pool1)
            pool2 = dict(console_type='fake_type2',
                         public_hostname='fake_hostname2')
            cons2 = dict(id=11, password='fake_password2',
                         port='fake_port2', pool=pool2)
            return [cons1, cons2]
        # The index view exposes only id and console_type per console.
        expected = {'consoles':
                    [{'console': {'id': 10, 'console_type': 'fake_type'}},
                     {'console': {'id': 11, 'console_type': 'fake_type2'}}]}
        self.stub_out('nova.console.api.API.get_consoles', fake_get_consoles)
        req = fakes.HTTPRequest.blank(self.url)
        res_dict = self.controller.index(req, self.uuid)
        self.assertThat(res_dict, matchers.DictMatches(expected))
    def test_delete_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
            pool = dict(console_type='fake_type',
                        public_hostname='fake_hostname')
            return dict(id=console_id, password='fake_password',
                        port='fake_port', pool=pool)
        def fake_delete_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
        self.stub_out('nova.console.api.API.get_console', fake_get_console)
        self.stub_out('nova.console.api.API.delete_console',
                      fake_delete_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        self.controller.delete(req, self.uuid, '20')
    def test_delete_console_unknown_console(self):
        def fake_delete_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFound(console_id=console_id)
        self.stub_out('nova.console.api.API.delete_console',
                      fake_delete_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.uuid, '20')
    def test_delete_console_unknown_instance(self):
        def fake_delete_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFoundForInstance(
                instance_uuid=instance_id)
        self.stub_out('nova.console.api.API.delete_console',
                      fake_delete_console)
        req = fakes.HTTPRequest.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.uuid, '20')
    def _test_fail_policy(self, rule, action, data=None):
        # Deny the given policy rule ("!") and verify the action is rejected.
        rules = {
            rule: "!",
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req = fakes.HTTPRequest.blank(self.url + '/20')
        if data is not None:
            self.assertRaises(exception.PolicyNotAuthorized, action,
                              req, self.uuid, data)
        else:
            self.assertRaises(exception.PolicyNotAuthorized, action,
                              req, self.uuid)
    def test_delete_console_fail_policy(self):
        self._test_fail_policy("os_compute_api:os-consoles:delete",
                               self.controller.delete, data='20')
    def test_create_console_fail_policy(self):
        self._test_fail_policy("os_compute_api:os-consoles:create",
                               self.controller.create, data='20')
    def test_index_console_fail_policy(self):
        self._test_fail_policy("os_compute_api:os-consoles:index",
                               self.controller.index)
    def test_show_console_fail_policy(self):
        self._test_fail_policy("os_compute_api:os-consoles:show",
                               self.controller.show, data='20')
| apache-2.0 |
applicationdevm/XlsxWriter | xlsxwriter/test/comparison/test_textbox15.py | 8 | 1119 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None
        base_name = 'textbox15.xlsx'
        comparison_dir = 'xlsxwriter/test/comparison/'
        # Generated output vs. the Excel-authored reference workbook.
        self.got_filename = comparison_dir + '_test_' + base_name
        self.exp_filename = comparison_dir + 'xlsx_files/' + base_name
        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with textbox(s)."""
        workbook = Workbook(self.got_filename)
        sheet = workbook.add_worksheet()
        options = {'align': {'horizontal': 'center'}}
        sheet.insert_textbox('E9', 'This is some text', options)
        workbook.close()
        self.assertExcelEqual()
| bsd-2-clause |
kobejean/tensorflow | tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py | 19 | 6628 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sharded mutable dense hash table."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
from tensorflow.contrib import lookup
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
class ShardedMutableDenseHashTable(lookup.LookupInterface):
  """A sharded version of MutableDenseHashTable.
  It is designed to be interface compatible with LookupInterface and
  MutableDenseHashTable, with the exception of the export method, which is
  replaced by an export_sharded method.
  The _ShardedMutableDenseHashTable keeps `num_shards` MutableDenseHashTable
  internally. The shard is computed via the modulo operation on the key.
  """
  # TODO(andreasst): consider moving this to lookup module
  def __init__(self,
               key_dtype,
               value_dtype,
               default_value,
               empty_key,
               num_shards=1,
               checkpoint=True,
               name='ShardedMutableHashTable'):
    with ops.name_scope(name, 'sharded_mutable_hash_table') as scope:
      super(ShardedMutableDenseHashTable, self).__init__(key_dtype,
                                                         value_dtype, scope)
      table_shards = []
      # One underlying dense hash table per shard, named "-i-of-n".
      for i in range(num_shards):
        table_shards.append(
            lookup.MutableDenseHashTable(
                key_dtype=key_dtype,
                value_dtype=value_dtype,
                default_value=default_value,
                empty_key=empty_key,
                checkpoint=checkpoint,
                name='%s-%d-of-%d' % (name, i + 1, num_shards)))
      self._table_shards = table_shards
      # TODO(andreasst): add a value_shape() method to LookupInterface
      # pylint: disable=protected-access
      self._value_shape = self._table_shards[0]._value_shape
      # pylint: enable=protected-access
  @property
  def _num_shards(self):
    return len(self._table_shards)
  @property
  def table_shards(self):
    return self._table_shards
  def size(self, name=None):
    # Total size is the sum of the per-shard sizes.
    with ops.name_scope(name, 'sharded_mutable_hash_table_size'):
      sizes = [
          self._table_shards[i].size() for i in range(self._num_shards)
      ]
      return math_ops.add_n(sizes)
  def _shard_indices(self, keys):
    """Return an int32 vector assigning each key to a shard (key mod n)."""
    key_shape = keys.get_shape()
    if key_shape.ndims > 1:
      # If keys are a matrix (i.e. a single key is a vector), we use the first
      # element of each key vector to determine the shard.
      keys = array_ops.slice(keys, [0, 0], [key_shape[0].value, 1])
      keys = array_ops.reshape(keys, [-1])
    indices = math_ops.mod(math_ops.abs(keys), self._num_shards)
    return math_ops.cast(indices, dtypes.int32)
  def _check_keys(self, keys):
    # Shapes must be static so partitioning/stitching can be set up.
    if not keys.get_shape().is_fully_defined():
      raise ValueError('Key shape must be fully defined, got %s.' %
                       keys.get_shape())
    if keys.get_shape().ndims != 1 and keys.get_shape().ndims != 2:
      raise ValueError('Expected a vector or matrix for keys, got %s.' %
                       keys.get_shape())
  def lookup(self, keys, name=None):
    """Look up `keys`, partitioning across shards and re-stitching results."""
    if keys.dtype.base_dtype != self._key_dtype:
      raise TypeError('Signature mismatch. Keys must be dtype %s, got %s.' %
                      (self._key_dtype, keys.dtype))
    self._check_keys(keys)
    num_shards = self._num_shards
    if num_shards == 1:
      # Fast path: no partitioning needed.
      return self._table_shards[0].lookup(keys, name=name)
    shard_indices = self._shard_indices(keys)
    # TODO(andreasst): support 'keys' that are not vectors
    key_shards = data_flow_ops.dynamic_partition(keys, shard_indices,
                                                 num_shards)
    value_shards = [
        self._table_shards[i].lookup(key_shards[i], name=name)
        for i in range(num_shards)
    ]
    # dynamic_stitch restores the original key order using the
    # partitioned index vectors.
    num_keys = keys.get_shape().dims[0]
    original_indices = math_ops.range(num_keys)
    partitioned_indices = data_flow_ops.dynamic_partition(original_indices,
                                                          shard_indices,
                                                          num_shards)
    result = data_flow_ops.dynamic_stitch(partitioned_indices, value_shards)
    result.set_shape(
        tensor_shape.TensorShape([num_keys]).concatenate(self._value_shape))
    return result
  def insert(self, keys, values, name=None):
    """Insert `keys`/`values`, routing each pair to its shard."""
    self._check_keys(keys)
    num_shards = self._num_shards
    if num_shards == 1:
      return self._table_shards[0].insert(keys, values, name=name)
    shard_indices = self._shard_indices(keys)
    # TODO(andreasst): support 'keys' that are not vectors
    key_shards = data_flow_ops.dynamic_partition(keys, shard_indices,
                                                 num_shards)
    value_shards = data_flow_ops.dynamic_partition(values, shard_indices,
                                                   num_shards)
    return_values = [
        self._table_shards[i].insert(key_shards[i], value_shards[i], name=name)
        for i in range(num_shards)
    ]
    # Group all per-shard insert ops into a single op.
    return control_flow_ops.group(*return_values)
  def export_sharded(self, name=None):
    """Returns lists of the keys and values tensors in the sharded table.
    Args:
      name: name of the table.
    Returns:
      A pair of lists with the first list containing the key tensors and the
      second list containing the value tensors from each shard.
    """
    keys_list = []
    values_list = []
    for table_shard in self._table_shards:
      exported_keys, exported_values = table_shard.export(name=name)
      keys_list.append(exported_keys)
      values_list.append(exported_values)
    return keys_list, values_list
| apache-2.0 |
DinoCow/airflow | airflow/sensors/web_hdfs_sensor.py | 7 | 1167 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.apache.hdfs.sensors.web_hdfs`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor # noqa
# Emit the deprecation warning at import time; stacklevel=2 attributes the
# warning to the module importing this shim rather than to this file.
warnings.warn(
    "This module is deprecated. Please use `airflow.providers.apache.hdfs.sensors.web_hdfs`.",
    DeprecationWarning,
    stacklevel=2,
)
| apache-2.0 |
ambikeshwar1991/gnuradio-3.7.4 | gr-fft/python/fft/__init__.py | 54 | 1057 | #
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
'''
Fourier-transform blocks and related functions.
'''
import os
# Import the SWIG-generated bindings.  In an installed tree fft_swig is on
# the path; in a build tree it lives under ../../swig, so extend this
# package's __path__ and retry.
try:
    from fft_swig import *
except ImportError:
    dirname, filename = os.path.split(os.path.abspath(__file__))
    __path__.append(os.path.join(dirname, "..", "..", "swig"))
    from fft_swig import *
| gpl-3.0 |
mumuwoyou/vnpy | vn.trader/ibGateway/ibGateway.py | 1 | 26652 | # encoding: UTF-8
'''
ibpy的gateway接入
注意事项:
1. ib api只能获取和操作当前连接后下的单,并且每次重启程序后,之前下的单子收不到
2. ib api的成交也只会推送当前连接后的成交
3. ib api的持仓和账户更新可以订阅成主推模式,因此qryAccount和qryPosition就用不到了
4. 目前只支持股票和期货交易,ib api里期权合约的确定是基于Contract对象的多个字段,比较复杂暂时没做
5. 海外市场的交易规则和国内有很多细节上的不同,所以一些字段类型的映射可能不合理,如果发现问题欢迎指出
'''
import os
import json
import calendar
from datetime import datetime, timedelta
from copy import copy
from PyQt4 import QtGui, QtCore
from ib.ext.Contract import Contract
from ib.ext.Order import Order
from ib.ext.EWrapper import EWrapper
from ib.ext.EClientSocket import EClientSocket
from vtGateway import *
# Mapping dictionaries between vt constants and IB API constants.
# Price type mapping
priceTypeMap = {}
priceTypeMap[PRICETYPE_LIMITPRICE] = 'LMT'
priceTypeMap[PRICETYPE_MARKETPRICE] = 'MKT'
priceTypeMapReverse = {v: k for k, v in priceTypeMap.items()}
# Direction mapping
directionMap = {}
directionMap[DIRECTION_LONG] = 'BUY'
directionMap[DIRECTION_SHORT] = 'SSHORT'
directionMap[DIRECTION_SELL] = 'SELL'
directionMapReverse = {v: k for k, v in directionMap.items()}
directionMapReverse['BOT'] = DIRECTION_LONG
directionMapReverse['SLD'] = DIRECTION_SHORT
# Exchange mapping
exchangeMap = {}
exchangeMap[EXCHANGE_SMART] = 'SMART'
exchangeMap[EXCHANGE_NYMEX] = 'NYMEX'
exchangeMap[EXCHANGE_GLOBEX] = 'GLOBEX'
exchangeMap[EXCHANGE_IDEALPRO] = 'IDEALPRO'
exchangeMapReverse = {v:k for k,v in exchangeMap.items()}
# Order status mapping
orderStatusMap = {}
orderStatusMap[STATUS_NOTTRADED] = 'Submitted'
orderStatusMap[STATUS_ALLTRADED] = 'Filled'
orderStatusMap[STATUS_CANCELLED] = 'Cancelled'
orderStatusMapReverse = {v:k for k,v in orderStatusMap.items()}
# The vt status vocabulary could be extended to cover these later.
orderStatusMapReverse['PendingSubmit'] = STATUS_UNKNOWN
orderStatusMapReverse['PendingCancel'] = STATUS_UNKNOWN
orderStatusMapReverse['PreSubmitted'] = STATUS_UNKNOWN
orderStatusMapReverse['Inactive'] = STATUS_UNKNOWN
# Product class mapping
productClassMap = {}
productClassMap[PRODUCT_EQUITY] = 'STK'
productClassMap[PRODUCT_FUTURES] = 'FUT'
productClassMap[PRODUCT_OPTION] = 'OPT'
productClassMap[PRODUCT_FOREX] = 'CASH'
# Option type mapping.
# BUG FIX: the original rebound optionTypeMap to its own inverse
# (optionTypeMap = {v:k ...}), so the vt->IB lookups in subscribe() and
# sendOrder() always missed and m_right was left empty.  Keep the forward
# map intact and expose the inverse under a separate name.
optionTypeMap = {}
optionTypeMap[OPTION_CALL] = 'CALL'
optionTypeMap[OPTION_PUT] = 'PUT'
optionTypeMapReverse = {v: k for k, v in optionTypeMap.items()}
# Currency mapping.
# BUG FIX: same self-inversion problem as optionTypeMap above.
currencyMap = {}
currencyMap[CURRENCY_USD] = 'USD'
currencyMap[CURRENCY_CNY] = 'CNY'
currencyMapReverse = {v: k for k, v in currencyMap.items()}
# IB tick field id -> VtTickData attribute name
tickFieldMap = {}
tickFieldMap[0] = 'bidVolume1'
tickFieldMap[1] = 'bidPrice1'
tickFieldMap[2] = 'askPrice1'
tickFieldMap[3] = 'askVolume1'
tickFieldMap[4] = 'lastPrice'
tickFieldMap[5] = 'lastVolume'
tickFieldMap[6] = 'highPrice'
tickFieldMap[7] = 'lowPrice'
tickFieldMap[8] = 'volume'
tickFieldMap[9] = 'preClosePrice'
tickFieldMap[14] = 'openPrice'
tickFieldMap[20] = 'openInterest'
# IB account value key -> VtAccountData attribute name
accountKeyMap = {}
accountKeyMap['NetLiquidationByCurrency'] = 'balance'
accountKeyMap['NetLiquidation'] = 'balance'
accountKeyMap['UnrealizedPnL'] = 'positionProfit'
accountKeyMap['AvailableFunds'] = 'available'
accountKeyMap['MaintMarginReq'] = 'margin'
########################################################################
class IbGateway(VtGateway):
    """vn.trader gateway for Interactive Brokers, built on ibpy's
    EClientSocket (outgoing calls) / EWrapper (incoming callbacks) pair.
    """

    #----------------------------------------------------------------------
    def __init__(self, eventEngine, gatewayName='IB'):
        """Constructor"""
        super(IbGateway, self).__init__(eventEngine, gatewayName)
        self.host = EMPTY_STRING        # server address
        self.port = EMPTY_INT           # server port
        self.clientId = EMPTY_INT       # API client id
        self.tickerId = 0               # request id counter for market data subscriptions
        self.tickDict = {}              # tick snapshots, key: tickerId, value: VtTickData
        self.orderId = 0                # order id counter
        self.orderDict = {}             # orders, key: orderId, value: VtOrderData
        self.accountDict = {}           # accounts, key: account name, value: VtAccountData
        self.connected = False          # connection status
        self.wrapper = IbWrapper(self)                  # callback (incoming) interface
        self.connection = EClientSocket(self.wrapper)   # active (outgoing) interface

    #----------------------------------------------------------------------
    def connect(self):
        """Load the JSON connection settings and connect to TWS/IB Gateway."""
        fileName = self.gatewayName + '_connect.json'
        fileName = os.getcwd() + '/ibGateway/' + fileName
        try:
            f = open(fileName)
        except IOError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'读取连接配置出错,请检查'
            self.onLog(log)
            return
        # Parse the settings; close the handle even if parsing fails.
        # (BUG FIX: the original used the py2-only file() builtin and
        # never closed the file object.)
        try:
            setting = json.load(f)
        finally:
            f.close()
        try:
            self.host = str(setting['host'])
            self.port = int(setting['port'])
            self.clientId = int(setting['clientId'])
        except KeyError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'连接配置缺少字段,请检查'
            self.onLog(log)
            return
        # Initiate the connection.
        self.connection.eConnect(self.host, self.port, self.clientId)
        # Query the server time.
        self.connection.reqCurrentTime()
        # Subscribe to pushed account updates.
        self.connection.reqAccountUpdates(True, '')

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to market data for one contract."""
        self.tickerId += 1
        contract = Contract()
        contract.m_symbol = str(subscribeReq.symbol)
        contract.m_exchange = exchangeMap.get(subscribeReq.exchange, '')
        contract.m_secType = productClassMap.get(subscribeReq.productClass, '')
        contract.m_currency = currencyMap.get(subscribeReq.currency, '')
        contract.m_expiry = subscribeReq.expiry
        contract.m_strike = subscribeReq.strikePrice
        contract.m_right = optionTypeMap.get(subscribeReq.optionType, '')
        if contract.m_secType == 'FUT' and not subscribeReq.expiry:
            # Futures without an explicit expiry default to next month.
            dt_obj = datetime.now()
            days = calendar.monthrange(dt_obj.year, dt_obj.month)[1]
            nextMonth = dt_obj + timedelta(days=(days - dt_obj.day + 1))
            contract.m_expiry = nextMonth.strftime('%Y%m')
        self.connection.reqMktData(self.tickerId, contract, '', False)
        # Also request the detailed contract definition.
        self.connection.reqContractDetails(self.tickerId, contract)
        # Create the tick snapshot and cache it under this tickerId.
        tick = VtTickData()
        tick.symbol = subscribeReq.symbol
        tick.exchange = subscribeReq.exchange
        tick.vtSymbol = '.'.join([tick.symbol, tick.exchange])
        tick.gatewayName = self.gatewayName
        # Stash the IB security type on the tick so the wrapper callbacks
        # can special-case forex ('CASH') timestamp handling.
        tick.__setattr__('m_secType', productClassMap.get(subscribeReq.productClass, ''))
        self.tickDict[self.tickerId] = tick

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Place an order; returns nothing (ids are pushed via callbacks)."""
        # Pre-increment the order id, then re-query reqIds() afterwards.
        # This double bookkeeping guards against the nextValidId callback
        # arriving too slowly during rapid consecutive order placement.
        self.orderId += 1
        # Build the contract object.
        contract = Contract()
        contract.m_symbol = str(orderReq.symbol)
        contract.m_exchange = exchangeMap.get(orderReq.exchange, '')
        contract.m_secType = productClassMap.get(orderReq.productClass, '')
        contract.m_currency = currencyMap.get(orderReq.currency, '')
        contract.m_expiry = orderReq.expiry
        contract.m_strike = orderReq.strikePrice
        contract.m_right = optionTypeMap.get(orderReq.optionType, '')
        # Build the order object.
        order = Order()
        order.m_orderId = self.orderId
        order.m_clientId = self.clientId
        order.m_action = directionMap.get(orderReq.direction, '')
        order.m_lmtPrice = orderReq.price
        order.m_totalQuantity = orderReq.volume
        order.m_orderType = priceTypeMap.get(orderReq.priceType, '')
        # Submit the order.
        self.connection.placeOrder(self.orderId, contract, order)
        # Ask the server for the next valid order id.
        self.connection.reqIds(1)

    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel a working order by its id."""
        self.connection.cancelOrder(cancelOrderReq.orderID)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Account query: IB pushes account data, so polling is unnecessary."""
        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'IB接口账户信息提供主推更新,无需查询'
        self.onLog(log)

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Position query: IB pushes position data, so polling is unnecessary."""
        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'IB接口持仓信息提供主推更新,无需查询'
        self.onLog(log)

    #----------------------------------------------------------------------
    def close(self):
        """Disconnect from the server."""
        self.connection.eDisconnect()
########################################################################
class IbWrapper(EWrapper):
"""IB回调接口的实现"""
#----------------------------------------------------------------------
    def __init__(self, gateway):
        """Constructor: keep references to the gateway and its shared caches."""
        super(IbWrapper, self).__init__()
        self.connectionStatus = False           # connection status
        self.gateway = gateway                  # owning gateway object
        self.gatewayName = gateway.gatewayName  # gateway name
        self.tickDict = gateway.tickDict        # tick snapshots, key: tickerId, value: VtTickData
        self.orderDict = gateway.orderDict      # orders, key: orderId, value: VtOrderData
        self.accountDict = gateway.accountDict  # accounts, key: account name, value: VtAccountData
#----------------------------------------------------------------------
def tickPrice(self, tickerId, field, price, canAutoExecute):
"""行情推送(价格相关)"""
if field in tickFieldMap:
tick = self.tickDict[tickerId]
key = tickFieldMap[field]
tick.__setattr__(key, price)
# 外汇单独设置时间, tickString 没有返回外汇时间
if tick.m_secType == 'CASH':
dt_obj = datetime.now()
tick.time = dt_obj.strftime('%H:%M:%S.%f')
tick.date = dt_obj.strftime('%Y%m%d')
# 行情数据更新
newtick = copy(tick)
self.gateway.onTick(newtick)
else:
print field
#----------------------------------------------------------------------
def tickSize(self, tickerId, field, size):
"""行情推送(量相关)"""
if field in tickFieldMap:
tick = self.tickDict[tickerId]
key = tickFieldMap[field]
tick.__setattr__(key, size)
# 外汇单独设置时间, tickString 没有返回外汇时间
if tick.m_secType == 'CASH':
dt_obj = datetime.now()
tick.time = dt_obj.strftime('%H:%M:%S.%f')
tick.date = dt_obj.strftime('%Y%m%d')
# 行情数据更新
newtick = copy(tick)
self.gateway.onTick(newtick)
else:
print field
#----------------------------------------------------------------------
    def tickOptionComputation(self, tickerId, field, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice):
        """Market data push for option greeks; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def tickGeneric(self, tickerId, tickType, value):
        """Market data push for generic numeric fields; not used by this gateway."""
        pass
#----------------------------------------------------------------------
def tickString(self, tickerId, tickType, value):
"""行情推送,特殊字段相关"""
if tickType == 45:
dt_obj = datetime.fromtimestamp(int(value))
tick = self.tickDict[tickerId]
tick.time = dt_obj.strftime('%H:%M:%S.%f')
tick.date = dt_obj.strftime('%Y%m%d')
# 这里使用copy的目的是为了保证推送到事件系统中的对象
# 不会被当前的API线程修改,否则可能出现多线程数据同步错误
newtick = copy(tick)
self.gateway.onTick(newtick)
#----------------------------------------------------------------------
    def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, impliedFuture, holdDays, futureExpiry, dividendImpact, dividendsToExpiry):
        """Market data push for EFP (exchange-for-physical) fields; not used by this gateway."""
        pass
#----------------------------------------------------------------------
def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld):
"""报单成交回报"""
orderId = str(orderId)
if orderId in self.orderDict:
od = self.orderDict[orderId]
else:
od = VtOrderData() # od代表orderData
od.orderID = orderId
od.vtOrderID = '.'.join([self.gatewayName, orderId])
od.gatewayName = self.gatewayName
self.orderDict[orderId] = od
od.status = orderStatusMapReverse.get(status, STATUS_UNKNOWN)
od.tradedVolume = filled
newod = copy(od)
self.gateway.onOrder(newod)
#----------------------------------------------------------------------
    def openOrder(self, orderId, contract, order, orderState):
        """Open-order callback: record the order's static fields and push it."""
        orderId = str(orderId)  # orderId arrives as an integer
        if orderId in self.orderDict:
            od = self.orderDict[orderId]
        else:
            # First sighting of this order: create and cache the record.
            od = VtOrderData()  # od stands for orderData
            od.orderID = orderId
            od.vtOrderID = '.'.join([self.gatewayName, orderId])
            od.symbol = contract.m_symbol
            od.exchange = exchangeMapReverse.get(contract.m_exchange, '')
            od.vtSymbol = '.'.join([od.symbol, od.exchange])
            od.gatewayName = self.gatewayName
            self.orderDict[orderId] = od
        od.direction = directionMapReverse.get(order.m_action, '')
        od.price = order.m_lmtPrice
        od.totalVolume = order.m_totalQuantity
        # Push a copy so the event queue never sees the mutable cache entry.
        newod = copy(od)
        self.gateway.onOrder(newod)
#----------------------------------------------------------------------
    def openOrderEnd(self):
        """End-of-open-orders marker; not used by this gateway."""
        pass
#----------------------------------------------------------------------
    def updateAccountValue(self, key, value, currency, accountName):
        """Account value callback: update one field of the cached account.

        Fields are updated one at a time; updates that carry no currency
        are ignored.  Pushing happens later in updateAccountTime().
        """
        if currency:
            # Accounts are keyed as "<accountName>.<currency>".
            name = '.'.join([accountName, currency])
            if name in self.accountDict:
                account = self.accountDict[name]
            else:
                account = VtAccountData()
                account.accountID = name
                account.vtAccountID = name
                account.gatewayName = self.gatewayName
                self.accountDict[name] = account
            # Only keys with a vt equivalent are recorded.
            if key in accountKeyMap:
                k = accountKeyMap[key]
                account.__setattr__(k, float(value))
#----------------------------------------------------------------------
    def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName):
        """Portfolio callback: convert one holding to VtPositionData and push."""
        pos = VtPositionData()
        pos.symbol = contract.m_symbol
        pos.exchange = exchangeMapReverse.get(contract.m_exchange, contract.m_exchange)
        pos.vtSymbol = '.'.join([pos.symbol, pos.exchange])
        # IB reports net positions (no separate long/short legs).
        pos.direction = DIRECTION_NET
        pos.position = position
        pos.price = averageCost
        pos.vtPositionName = pos.vtSymbol
        pos.gatewayName = self.gatewayName
        self.gateway.onPosition(pos)
#----------------------------------------------------------------------
def updateAccountTime(self, timeStamp):
"""更新账户数据的时间"""
# 推送数据
for account in self.accountDict.values():
newaccount = copy(account)
self.gateway.onAccount(newaccount)
#----------------------------------------------------------------------
    def accountDownloadEnd(self, accountName):
        """End-of-account-download marker; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def nextValidId(self, orderId):
        """Record the next valid order id pushed by the server."""
        self.gateway.orderId = orderId
#----------------------------------------------------------------------
def contractDetails(self, reqId, contractDetails):
"""合约查询回报"""
contract = VtContractData()
contract.gatewayName = self.gatewayName
contract.symbol = contractDetails.m_summary.m_symbol
contract.exchange = contractDetails.m_summary.m_exchange
contract.vtSymbol = '.'.join([contract.symbol, contract.exchange])
contract.name = contractDetails.m_summary.m_localSymbol.decode('UTF-8')
# 合约类型
if contractDetails.m_summary.m_secType == 'STK':
contract.productClass = PRODUCT_EQUITY
elif contractDetails.m_summary.m_secType == 'CASH':
contract.productClass = PRODUCT_FOREX
elif contractDetails.m_summary.m_secType == 'FUT':
contract.productClass = PRODUCT_FUTURES
elif contractDetails.m_summary.m_secType == 'OPT':
contract.productClass = PRODUCT_OPTION
else:
contract.productClass = PRODUCT_UNKNOWN
# 推送
self.gateway.onContract(contract)
#----------------------------------------------------------------------
    def bondContractDetails(self, reqId, contractDetails):
        """Bond contract details callback; not used by this gateway."""
    #----------------------------------------------------------------------
    def contractDetailsEnd(self, reqId):
        """End of a contract-details request: log completion."""
        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'交易合约信息获取完成'
        self.gateway.onLog(log)
#----------------------------------------------------------------------
    def execDetails(self, reqId, contract, execution):
        """Execution (fill) callback: convert to VtTradeData and push."""
        trade = VtTradeData()
        trade.gatewayName = self.gatewayName
        trade.tradeID = execution.m_execId
        trade.vtTradeID = '.'.join([self.gatewayName, trade.tradeID])
        trade.symbol = contract.m_symbol
        trade.exchange = exchangeMapReverse.get(contract.m_exchange, '')
        trade.vtSymbol = '.'.join([trade.symbol, trade.exchange])
        trade.orderID = str(execution.m_orderId)
        # m_side is 'BOT'/'SLD'; directionMapReverse translates both.
        trade.direction = directionMapReverse.get(execution.m_side, '')
        trade.price = execution.m_price
        trade.volume = execution.m_shares
        trade.tradeTime = execution.m_time
        self.gateway.onTrade(trade)
#----------------------------------------------------------------------
    def execDetailsEnd(self, reqId):
        """End-of-executions marker; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def updateMktDepth(self, tickerId, position, operation, side, price, size):
        """Market depth update; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def updateMktDepthL2(self, tickerId, position, marketMaker, operation, side, price, size):
        """Level-2 market depth update; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def updateNewsBulletin(self, msgId, msgType, message, origExchange):
        """News bulletin push; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def managedAccounts(self, accountsList):
        """List of managed accounts; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def receiveFA(self, faDataType, xml):
        """Financial-advisor configuration push; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def historicalData(self, reqId, date, open, high, low, close, volume, count, WAP, hasGaps):
        """Historical bar data push; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def scannerParameters(self, xml):
        """Market scanner parameters; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection, legsStr):
        """Market scanner result row; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def scannerDataEnd(self, reqId):
        """End-of-scanner-results marker; not used by this gateway."""
        pass
    #----------------------------------------------------------------------
    def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count):
        """Real-time 5-second bar push; not used by this gateway."""
        pass
#----------------------------------------------------------------------
def currentTime(self, time):
""" generated source for method currentTime """
dt_obj = datetime.fromtimestamp(time)
t = dt_obj.strftime("%Y-%m-%d %H:%M:%S.%f")
self.connectionStatus = True
self.gateway.connected = True
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = (u'IB接口连接成功,当前服务器时间 %s' %t)
self.gateway.onLog(log)
#----------------------------------------------------------------------
def fundamentalData(self, reqId, data):
""" generated source for method fundamentalData """
pass
#----------------------------------------------------------------------
def deltaNeutralValidation(self, reqId, underComp):
""" generated source for method deltaNeutralValidation """
pass
#----------------------------------------------------------------------
def tickSnapshotEnd(self, reqId):
""" generated source for method tickSnapshotEnd """
pass
#----------------------------------------------------------------------
def marketDataType(self, reqId, marketDataType):
""" generated source for method marketDataType """
pass
#----------------------------------------------------------------------
def commissionReport(self, commissionReport):
""" generated source for method commissionReport """
pass
#----------------------------------------------------------------------
def position(self, account, contract, pos, avgCost):
""" generated source for method position """
pass
#----------------------------------------------------------------------
def positionEnd(self):
""" generated source for method positionEnd """
pass
#----------------------------------------------------------------------
def accountSummary(self, reqId, account, tag, value, currency):
""" generated source for method accountSummary """
pass
#----------------------------------------------------------------------
def accountSummaryEnd(self, reqId):
""" generated source for method accountSummaryEnd """
pass
#----------------------------------------------------------------------
def error(self, id=None, errorCode=None, errorMsg=None):
"""错误回报"""
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = errorCode
err.errorMsg = errorMsg
self.gateway.onError(err)
#----------------------------------------------------------------------
def error_0(self, strval=None):
"""错误回报(单一字符串)"""
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorMsg = strval
self.gateway.onError(err)
#----------------------------------------------------------------------
def error_1(self, id=None, errorCode=None, errorMsg=None):
"""错误回报(字符串和代码)"""
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = errorCode
err.errorMsg = errorMsg
self.gateway.onError(err)
#----------------------------------------------------------------------
def connectionClosed(self):
"""连接断开"""
self.connectionStatus = False
self.gateway.connected = False
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = (u'IB接口连接断开')
self.gateway.onLog(log)
| mit |
lucychambers/lucychambers.github.io | .bundle/ruby/2.0.0/gems/pygments.rb-0.6.0/vendor/pygments-main/tests/test_cfm.py | 34 | 1433 | # -*- coding: utf-8 -*-
"""
Basic ColdfusionHtmlLexer Test
~~~~~~~~~~~~~~~~~
:copyright: Copyright 2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import unittest
import os
from pygments.token import Token
from pygments.lexers import ColdfusionHtmlLexer
class ColdfusionHtmlLexerTest(unittest.TestCase):
    """Token-stream checks for ColdfusionHtmlLexer comment handling."""

    def setUp(self):
        self.lexer = ColdfusionHtmlLexer()

    def _check(self, fragment, expected):
        # Lex *fragment* and compare the full token stream.
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))

    def testBasicComment(self):
        """A simple <!--- ---> CF comment lexes as multiline comment tokens."""
        expected = [
            (Token.Text, u''),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' cfcomment '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Text, u'\n'),
        ]
        self._check(u'<!--- cfcomment --->', expected)

    def testNestedComment(self):
        """Nested CF comments keep inner delimiters as comment tokens."""
        expected = [
            (Token.Text, u''),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' nested '),
            (Token.Comment.Multiline, u'<!---'),
            (Token.Comment.Multiline, u' cfcomment '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Comment.Multiline, u' '),
            (Token.Comment.Multiline, u'--->'),
            (Token.Text, u'\n'),
        ]
        self._check(u'<!--- nested <!--- cfcomment ---> --->', expected)
| gpl-2.0 |
KanagiMiss/cl-auto-reply | gae/lib/requests/packages/chardet/big5freq.py | 3133 | 82594 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Threshold used by the Big5 distribution analysis; per the header comments
# above, roughly 25% of the ideal distribution ratio (2.98).
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Char to FreqOrder table
# presumably the entry count of Big5CharToFreqOrder below — TODO confirm
BIG5_TABLE_SIZE = 5376
Big5CharToFreqOrder = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
#Everything below is of no interest for detection purpose
2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
13968,13969,13970,13971,13972) #13973
# flake8: noqa
| mit |
wildjan/Flask | Work/Trivia - Module 5/env/Lib/site-packages/pip/_vendor/requests/packages/urllib3/request.py | 567 | 5808 | # urllib3/request.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from .filepost import encode_multipart_formdata
__all__ = ['RequestMethods']
class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or
    www-form-urlencoded (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    # Verbs whose fields belong in the query string; every other verb gets
    # its fields encoded into the request body (see :meth:`request`).
    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw):  # Abstract
        # Bug fix: the original raised ``NotImplemented`` -- the comparison
        # *sentinel*, which is not callable and not an exception -- producing
        # a confusing TypeError. ``NotImplementedError`` is the proper
        # exception for an abstract method.
        raise NotImplementedError("Classes extending RequestMethods must "
                                  "implement their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE,
        etc.
        """
        if fields:
            url += '?' + urlencode(fields)
        return self.urlopen(method, url, **urlopen_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH,
        etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's
        reasonably safe to use it in other times too. However, it may break
        request signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple
        where the MIME type is optional. For example: ::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of
        the tuple) is optional but recommended to best mimick behavior of
        browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary
        string which is used to compose the body of the request. The random
        boundary string can be explicitly set with the ``multipart_boundary``
        parameter.
        """
        if encode_multipart:
            body, content_type = encode_multipart_formdata(
                fields or {}, boundary=multipart_boundary)
        else:
            body, content_type = (urlencode(fields or {}),
                                  'application/x-www-form-urlencoded')

        if headers is None:
            headers = self.headers

        # Seed with the computed Content-Type, then let caller-supplied
        # headers override it -- same merge order as the original code.
        headers_ = {'Content-Type': content_type}
        headers_.update(headers)

        return self.urlopen(method, url, body=body, headers=headers_,
                            **urlopen_kw)
| apache-2.0 |
fermat618/pida | pida-plugins/rfc/rfc.py | 1 | 10272 | # -*- coding: utf-8 -*-
# Copyright (c) 2007 The PIDA Project
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import os
import gtk
import re
import urllib
import pkgutil
from pygtkhelpers.gthreads import GeneratorTask, gcall
from pygtkhelpers.ui.objectlist import ObjectList, Column
# PIDA Imports
from pida.core.environment import home
from pida.core.service import Service
from pida.core.actions import ActionsConfig
from pida.core.features import FeaturesConfig
from pida.core.actions import TYPE_REMEMBER_TOGGLE, TYPE_NORMAL
from pida.ui.views import PidaView, WindowConfig
# locale
from pida.core.locale import Locale
# Translation setup for this plugin: bind gettext to the conventional
# ``_`` alias used for all user-visible strings below.
locale = Locale('rfc')
_ = locale.gettext
class RfcItem(object):
    """A single entry of the RFC index.

    :param number: RFC number as a string (e.g. ``'2616'``).
    :param data: raw index text for the entry; the text preceding the first
        parenthesised ``(...)`` group becomes :attr:`description` (if no
        such group exists, the whole string is used).
    """

    def __init__(self, number='0000', data=''):
        self.number = number
        self.data = data
        # Split on the first "(...)" group; everything before it is the
        # human-readable description. Fixed: use a raw string for the regex
        # and don't shadow the ``list`` builtin with the result.
        parts = re.split(r'\(([^\(]*)\)', data)
        self.description = parts[0]
class RfcView(PidaView):
    """Dockable PIDA view listing the RFC index.

    Builds a vertical layout of: toolbar, filter entry, RFC list, and a
    (normally hidden) download progress bar. Double-clicking a row asks the
    service to open that RFC; typing in the entry filters the list.
    """

    key = 'rfc.list'
    label_text = 'RFC'

    def create_ui(self):
        # Order matters: widgets are packed top-to-bottom into this vbox
        # by the create_* helpers called below.
        self._vbox = gtk.VBox(spacing=3)
        self._vbox.set_border_width(6)
        self.add_main_widget(self._vbox)
        self.create_toolbar()
        self.create_searchbar()
        self.create_list()
        self.create_progressbar()
        self._vbox.show_all()

    def create_searchbar(self):
        # A "Filter:" label plus an entry; each keystroke re-filters the list.
        h = gtk.HBox()
        self._search_description = gtk.Entry()
        self._search_description.connect('changed', self._on_search_changed)
        l = gtk.Label()
        l.set_text(_('Filter : '))
        h.pack_start(l, expand=False)
        h.pack_start(self._search_description)
        self._vbox.pack_start(h, expand=False)
        self._search_description.show_all()

    def create_toolbar(self):
        # Toolbar actions come from the service's action group; the layout
        # is loaded from the packaged UI definition file.
        self._uim = gtk.UIManager()
        self._uim.insert_action_group(self.svc.get_action_group(), 0)
        uidef_data = pkgutil.get_data(__name__, 'uidef/rfc-toolbar.xml')
        self._uim.add_ui_from_string(uidef_data)
        self._uim.ensure_update()
        self._toolbar = self._uim.get_toplevels('toolbar')[0]
        self._toolbar.set_style(gtk.TOOLBAR_ICONS)
        self._toolbar.set_icon_size(gtk.ICON_SIZE_SMALL_TOOLBAR)
        self._vbox.pack_start(self._toolbar, expand=False)
        self._toolbar.show_all()

    def create_list(self):
        # Two-column list of RfcItem objects (number, description) inside a
        # scrolled window; row activation opens the RFC.
        self._list = ObjectList(
            [
                Column('number', title=_('Number')),
                Column('description', title=_('Description'))
            ]
        )
        self._scroll = gtk.ScrolledWindow()
        self._scroll.add(self._list)
        self._list.connect('item-activated', self._on_list_double_click)
        self._vbox.pack_start(self._scroll)
        self._list.show_all()

    def create_progressbar(self):
        # Hidden by default; shown only while the index download runs
        # (see show_progressbar / update_progressbar).
        self._progressbar = gtk.ProgressBar()
        self._progressbar.set_text(_('Download RFC Index'))
        self._vbox.pack_start(self._progressbar, expand=False)
        self._progressbar.set_no_show_all(True)
        self._progressbar.hide()

    def update_progressbar(self, current, max):
        # NOTE: ``max`` shadows the builtin; kept for interface stability.
        # Guard avoids division by zero (or a meaningless fraction) when the
        # total is unknown or trivially small.
        if max > 1:
            self._progressbar.set_fraction(float(current) / float(max))

    def show_progressbar(self, show):
        # Re-enable show_all propagation before toggling visibility.
        self._progressbar.set_no_show_all(False)
        if show:
            self._progressbar.show()
        else:
            self._progressbar.hide()

    def set_items(self, items):
        # Append an iterable of RfcItem objects to the list view.
        self._list.extend(items)

    def clear(self):
        # Remove all rows from the list view.
        self._list.clear()

    def can_be_closed(self):
        # Closing the view unchecks the "show RFC" toggle action.
        self.svc.get_action('show_rfc').set_active(False)

    def _on_list_double_click(self, ot, item):
        # Ask the service to browse to the activated RFC by number.
        self.svc.browse(id=item.number)

    def _on_search_changed(self, w):
        # Live-filter the list with the current entry text.
        self.svc.filter(self._search_description.get_text())
class RfcActions(ActionsConfig):
    """Menu/toolbar actions exposed by the RFC service."""
    def create_actions(self):
        # Toggle that shows/hides the RFC view; state is remembered
        # across sessions and shared with the window config.
        RfcWindowConfig.action = self.create_action(
            'show_rfc',
            TYPE_REMEMBER_TOGGLE,
            _('Rfc Viewer'),
            _('Show the rfc'),
            '',
            self.on_show_rfc,
            '',
        )
        # Re-parse the already-downloaded index file.
        self.create_action(
            'rfc_refreshindex',
            TYPE_NORMAL,
            _('Refresh RFC Index'),
            _('Refresh RFC Index'),
            gtk.STOCK_REFRESH,
            self.on_rfc_refreshindex,
        )
        # Fetch a fresh copy of the index from the IETF site.
        self.create_action(
            'rfc_downloadindex',
            TYPE_NORMAL,
            _('Download RFC Index'),
            _('Download RFC Index'),
            gtk.STOCK_GO_DOWN,
            self.on_rfc_downloadindex,
        )
    def on_show_rfc(self, action):
        if action.get_active():
            self.svc.show_rfc()
        else:
            self.svc.hide_rfc()
    def on_rfc_downloadindex(self, action):
        self.svc.download_index()
    def on_rfc_refreshindex(self, action):
        self.svc.refresh_index()
class RfcWindowConfig(WindowConfig):
    """Window registration for the RFC view; reuses the view's identity
    so window state is stored under the same key."""
    key = RfcView.key
    label_text = RfcView.label_text
class RfcFeaturesConfig(FeaturesConfig):
    """Registers the RFC window configuration with the window service."""
    def subscribe_all_foreign(self):
        self.subscribe_foreign('window', 'window-config',
            RfcWindowConfig)
# Service class
class Rfc(Service):
    """Fetch the RFC index and browse RFCs inside PIDA.

    The index is downloaded from the IETF site to ``rfc-index.txt`` in the
    PIDA home directory, parsed in a background :class:`GeneratorTask`,
    and displayed in an :class:`RfcView`.
    """

    actions_config = RfcActions
    features_config = RfcFeaturesConfig

    # Remote index location and the per-RFC browse URL prefix.
    url_rfcindex = 'http://www.ietf.org/download/rfc-index.txt'
    url_rfctmpl = 'http://tools.ietf.org/html/rfc'
    # Download chunk size in bytes.
    buffer_len = 16384

    def start(self):
        # Local copy of the downloaded index file.
        self._file = home()/'rfc-index.txt'
        self._view = RfcView(self)
        self._has_loaded = False
        # Parsed RfcItem objects currently known.
        self.list = []
        self.counter = 0
        # Background download task, if any.
        self.task = None
        # Monotonic id used to discard results of stale filter requests.
        self._filter_id = 0
        self.is_refresh = False

    def show_rfc(self):
        """Dock the RFC view, parsing the index the first time."""
        self.boss.cmd('window', 'add_view', paned='Plugin', view=self._view)
        if not self._has_loaded:
            self._has_loaded = True
        if not self.is_refresh:
            gcall(self.refresh_index)
            self.is_refresh = True

    def hide_rfc(self):
        self.boss.cmd('window', 'remove_view', view=self._view)

    def download_index(self):
        """Download the RFC index in the background, then re-parse it."""
        # Cancel any download already in progress.
        if self.task is not None:
            self.task.stop()
        def _download_index_finished():
            # Runs in the GUI thread after the generator is exhausted.
            self._view.show_progressbar(False)
            self.get_action('rfc_downloadindex').set_sensitive(True)
            self.boss.cmd('notify', 'notify', title=_('RFC'), data=_('Index download completed'))
            gcall(self.refresh_index)
        self.task = GeneratorTask(self._download_index,
                                  _download_index_finished)
        self.task.start()

    def refresh_index(self):
        """Re-parse the on-disk index file into the view, in the background."""
        def _refresh_index_finished():
            self._view.set_items(self.list)
        def _refresh_index_add(item):
            self.list.append(item)
        def _refresh_index():
            # Index entries are paragraphs separated by blank lines.
            # Everything before entry '0001' is preamble and is skipped
            # (zap is True); withdrawn numbers ("Not Issued.") are dropped.
            try:
                fp = self._file.open()
            except IOError:
                # No local index yet; the user must download it first.
                return
            data = ''
            zap = True
            for line in fp:
                line = line.rstrip('\n')
                data += line.strip(' ') + ' '
                if line == '':
                    t = data.split(' ', 1)
                    if not zap:
                        if data != '' and t[1].strip(' ') != 'Not Issued.':
                            yield RfcItem(number=t[0], data=t[1])
                        data = ''
                    elif t[0] == '0001':
                        # First real entry: stop zapping, keep accumulating
                        # so this paragraph is emitted at the next blank line.
                        zap = False
                    else:
                        data = ''
            fp.close()
        self.list = []
        self._view.clear()
        task = GeneratorTask(_refresh_index, _refresh_index_add, _refresh_index_finished)
        task.start()

    def filter(self, pattern):
        """Asynchronously filter the visible list by regex *pattern*."""
        # Bump the request id so an older, slower filter cannot clobber
        # the results of a newer one.
        self._filter_id += 1
        gcall(self._filter, pattern, self._filter_id)

    def _filter(self, pattern, req_id):
        if pattern == '':
            if self._filter_id == req_id:
                self._view.set_items(self.list)
        else:
            r = re.compile(pattern, re.IGNORECASE)
            matches = [item for item in self.list if r.search(item.data)]
            # Apply only if no newer filter request superseded this one.
            if self._filter_id == req_id:
                self._view.set_items(matches)

    def _download_index(self):
        """Generator run by GeneratorTask: stream the index to disk."""
        self.get_action('rfc_downloadindex').set_sensitive(False)
        self._view.show_progressbar(True)
        sock = urllib.urlopen(self.url_rfcindex)
        # Bug fix: the path attribute set in start() is self._file, not
        # self._filename -- the old name raised AttributeError here.
        fp = self._file.open('w')
        progress_max = 0
        progress_current = 0
        if sock.headers.has_key('content-length'):
            progress_max = int(sock.headers.getheader('content-length'))
        try:
            # Copy in buffer_len chunks, reporting progress to the GUI
            # thread via gcall().
            while True:
                buffer = sock.read(self.buffer_len)
                if buffer == '':
                    break
                fp.write(buffer)
                progress_current += len(buffer)
                gcall(self._view.update_progressbar, progress_current,
                      progress_max)
        finally:
            sock.close()
            fp.close()
        yield None

    def browse(self, id):
        """Open RFC *id* (a number string) in the embedded web browser."""
        self.boss.cmd('browseweb', 'browse', url=(self.url_rfctmpl + id))

    def stop(self):
        # Abort any in-flight download and hide the view on shutdown.
        if self.task is not None:
            self.task.stop()
        if self.get_action('show_rfc').get_active():
            self.hide_rfc()
# Required Service attribute for service loading
# (PIDA discovers plugins by importing the module and reading `Service`).
Service = Rfc
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| gpl-2.0 |
TeamOrion-Devices/kernel_lge_msm8974 | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file whose unwind info to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)
# Allow overriding the readelf binary via the READELF environment variable.
readelf = os.getenv("READELF", "readelf")
# Matches a function header in `readelf -u` output: "<name>: [0xSTART-0xEND]".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches a region line carrying its length in slots: "... rlen=N".
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
num_funcs = 0
num_errors = 0
# `func` is False until the first function header is seen.
func = False
slots = 0
rlen_sum = 0
# Parse `readelf -u` output: a header line starts a new function; rlen
# lines accumulate region lengths for the current one.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # Check the previous function before starting the new one.
        check_func(func, slots, rlen_sum)
        func  = m.group(1)
        start = long(m.group(2), 16)
        end   = long(m.group(3), 16)
        # IA-64 bundles are 16 bytes and hold 3 instruction slots each.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Flush the last function seen (no trailing header follows it).
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
paulhayes/fofix | src/Video.py | 2 | 9281 | ##################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import pygame
import os
from OpenGL.GL import *
from OpenGL.GL.ARB.multisample import *
from PIL import Image
import Log
import struct
from Language import _
class Video:
  """Wraps pygame display setup for the game: OpenGL mode selection with
  graceful fallbacks (drop antialiasing, then drop to a default windowed
  resolution), window icon handling, and screensaver suppression."""
  def __init__(self, caption = "Game", icon = None):
    self.screen       = None
    self.caption      = caption
    self.icon         = icon
    self.fullscreen   = False
    self.flags        = True
    self.multisamples = 0
    # Human-readable setup errors accumulated by the fallback paths.
    self.error        = []
    # Set to True when we had to fall back to the default 800x600 window.
    self.default      = False
  def setMode(self, resolution, fullscreen = False, flags = pygame.OPENGL | pygame.DOUBLEBUF,
              multisamples = 0):
    # Create (or recreate) the display surface.  On failure this retries
    # first without antialiasing, then at the default windowed resolution.
    if fullscreen:
      flags |= pygame.FULLSCREEN
    self.flags        = flags
    self.fullscreen   = fullscreen
    self.multisamples = multisamples
    try:
      pygame.display.quit()
    except:
      pass
    pygame.display.init()
    # Request a 32-bit RGBA framebuffer.
    pygame.display.gl_set_attribute(pygame.GL_RED_SIZE,   8)
    pygame.display.gl_set_attribute(pygame.GL_GREEN_SIZE, 8)
    pygame.display.gl_set_attribute(pygame.GL_BLUE_SIZE,  8)
    pygame.display.gl_set_attribute(pygame.GL_ALPHA_SIZE, 8)
    if multisamples:
      pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 1);
      pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES, multisamples);
    # evilynux - Setting window icon for platforms other than MS Windows.
    #            pygame claims that some window manager requires this to be
    #            before display.set_mode(), so there it is!
    #            Note: For the MS Windows icon, see below.
    if not os.name == "nt" and self.icon != None:
      pygame.display.set_icon(pygame.image.load(self.icon))
    try:
      self.screen = pygame.display.set_mode(resolution, flags)
    except Exception, e:
      errortype = str(e)
      if "video mode" in errortype:
        self.resolutionReset()
      else: # "Couldn't find matching GLX visual"
        self.multisampleReset(resolution)
    pygame.display.set_caption(self.caption)
    pygame.mouse.set_visible(False)
    #stump: fix the window icon under Windows
    # We would use pygame.display.set_icon(), but due to what appears to be
    # a bug in SDL, the alpha channel is lost and some parts of the image are
    # corrupted.  As a result, we go the long way and load and set the icon
    # by hand to work around the bug.
    if os.name == 'nt':
      import win32api
      import win32gui
      import win32con
      hwnd = pygame.display.get_wm_info()['window']
      # The Windows icon functions want the byte order in memory to be "BGRx"
      # for some reason.  Use the alpha channel as a placeholder for the "x"
      # and swap the channels to fit.  Also turn the image upside down as the
      # API wants.
      icon = Image.open(self.icon)
      iconFixedUp = Image.merge('RGBA', [icon.split()[i] for i in (2, 1, 0, 3)]).transpose(Image.FLIP_TOP_BOTTOM)
      # Scale the images to the icon sizes needed.
      bigIcon = iconFixedUp.resize((32, 32), Image.BICUBIC)
      smallIcon = iconFixedUp.resize((16, 16), Image.BICUBIC)
      # The icon resources hold two bitmaps: the first is 32-bit pixel data
      # (which for some reason doesn't hold the alpha, though we used the alpha
      # channel to fill up that space - the fourth channel is ignored) and the
      # second is a 1-bit alpha mask.  For some reason, we need to invert the
      # alpha channel before turning it into the mask.
      bigIconColorData = bigIcon.tostring()
      bigIconMaskData = bigIcon.split()[3].point(lambda p: 255 - p).convert('1').tostring()
      smallIconColorData = smallIcon.tostring()
      smallIconMaskData = smallIcon.split()[3].point(lambda p: 255 - p).convert('1').tostring()
      # Put together icon resource structures - a header, then the pixel data,
      # then the alpha mask.  See documentation for the BITMAPINFOHEADER
      # structure for what the fields mean.  Not all fields are used -
      # http://msdn.microsoft.com/en-us/library/ms997538.aspx says which ones
      # don't matter and says to set them to zero.  We double the height for
      # reasons mentioned on that page, too.
      bigIconData = struct.pack('<LllHHLLllLL', 40, 32, 64, 1, 32, 0, len(bigIconColorData+bigIconMaskData), 0, 0, 0, 0) + bigIconColorData + bigIconMaskData
      smallIconData = struct.pack('<LllHHLLllLL', 40, 16, 32, 1, 32, 0, len(smallIconColorData+smallIconMaskData), 0, 0, 0, 0) + smallIconColorData + smallIconMaskData
      # Finally actually create the icons from the icon resource structures.
      hIconBig = win32gui.CreateIconFromResource(bigIconData, True, 0x00030000)
      hIconSmall = win32gui.CreateIconFromResource(smallIconData, True, 0x00030000)
      # And set the window's icon to our fresh new icon handles.
      win32api.SendMessage(hwnd, win32con.WM_SETICON, win32con.ICON_BIG, hIconBig)
      win32api.SendMessage(hwnd, win32con.WM_SETICON, win32con.ICON_SMALL, hIconSmall)
    if multisamples:
      try:
        glEnable(GL_MULTISAMPLE_ARB)
      except:
        pass
    return bool(self.screen)
  def screenError(self):
    # Terminal failure: log and re-raise the active exception.
    Log.error("Video setup failed. Make sure your graphics card supports 32-bit display modes.")
    raise
  def resolutionReset(self):
    # Fallback 2: retry windowed at 800x600, recording a user-visible error.
    Log.warn("Video setup failed. Trying default windowed resolution.")
    self.error.append(_("Video setup failed with your resolution settings, and so were reset to defaults."))
    if self.fullscreen:
      self.flags ^= pygame.FULLSCREEN
      self.fullscreen = False
    try:
      self.screen = pygame.display.set_mode((800,600), self.flags)
      self.default = True
    except Exception:
      if self.multisamples:
        self.multisampleReset((800, 600))
      else:
        self.screenError()
  def multisampleReset(self, resolution):
    # Fallback 1: retry the same resolution with antialiasing disabled.
    Log.warn("Video setup failed. Trying without antialiasing.")
    pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 0)
    pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES, 0)
    self.multisamples = 0
    try:
      self.screen = pygame.display.set_mode(resolution, self.flags)
    except Exception, e:
      if "video mode" in str(e):
        self.resolutionReset()
      else:
        self.screenError()
  def toggleFullscreen(self):
    # NOTE(review): does not update self.fullscreen; callers appear to
    # track the state themselves -- confirm before relying on it.
    assert self.screen
    return pygame.display.toggle_fullscreen()
  def flip(self):
    pygame.display.flip()
  def getVideoModes(self):
    return pygame.display.list_modes()
  #stump
  def disableScreensaver(self):
    # Disable the Windows screensaver/power-off timers for the session,
    # registering atexit hooks to restore the previous values.
    if os.name == 'nt':
      # See the DisableScreensaver and RestoreScreensaver functions in
      # modules/video_output/msw/common.c in the source code for VLC.
      import win32gui
      import win32con
      import atexit
      Log.debug('Disabling screensaver.')
      old_lowpowertimeout = win32gui.SystemParametersInfo(win32con.SPI_GETLOWPOWERTIMEOUT)
      if old_lowpowertimeout != 0:
        atexit.register(lambda: win32gui.SystemParametersInfo(win32con.SPI_SETLOWPOWERTIMEOUT, old_lowpowertimeout))
        win32gui.SystemParametersInfo(win32con.SPI_SETLOWPOWERTIMEOUT, 0)
      old_powerofftimeout = win32gui.SystemParametersInfo(win32con.SPI_GETPOWEROFFTIMEOUT)
      if old_powerofftimeout != 0:
        atexit.register(lambda: win32gui.SystemParametersInfo(win32con.SPI_SETPOWEROFFTIMEOUT, old_powerofftimeout))
        win32gui.SystemParametersInfo(win32con.SPI_SETPOWEROFFTIMEOUT, 0)
      old_screensavetimeout = win32gui.SystemParametersInfo(win32con.SPI_GETSCREENSAVETIMEOUT)
      if old_screensavetimeout != 0:
        atexit.register(lambda: win32gui.SystemParametersInfo(win32con.SPI_SETSCREENSAVETIMEOUT, old_screensavetimeout))
        win32gui.SystemParametersInfo(win32con.SPI_SETSCREENSAVETIMEOUT, 0)
    else:
      Log.debug('Screensaver disabling is not implemented on this platform.')
| gpl-2.0 |
DaniilLeksin/gc | wx/richtext.py | 1 | 185239 | # This file was created automatically by SWIG 1.3.29.
# Don't modify this file, modify the SWIG interface instead.
import _richtext
import new
new_instancemethod = new.instancemethod
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'PySwigObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: allows adding new attributes (static=0).
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Route attribute reads through the generated getter table;
    # "thisown" reports whether Python owns the underlying C++ object.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError,name
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shim for very old Pythons without new-style classes:
# fall back to a dummy old-style base when types.ObjectType is missing.
import types
try:
    _object = types.ObjectType
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
del types
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import _windows
import _core
import _controls
wx = _core
__docfilter__ = wx.__DocFilter(globals())
#---------------------------------------------------------------------------
# SWIG-generated mirrors of the wxRichText C++ enums/flags: file types,
# layout/draw flags, SetStyle/Insert flags, hit-test results, text-box
# attribute flags, measurement units and position flags.
RICHTEXT_TYPE_ANY = _richtext.RICHTEXT_TYPE_ANY
RICHTEXT_TYPE_TEXT = _richtext.RICHTEXT_TYPE_TEXT
RICHTEXT_TYPE_XML = _richtext.RICHTEXT_TYPE_XML
RICHTEXT_TYPE_HTML = _richtext.RICHTEXT_TYPE_HTML
RICHTEXT_TYPE_RTF = _richtext.RICHTEXT_TYPE_RTF
RICHTEXT_TYPE_PDF = _richtext.RICHTEXT_TYPE_PDF
RICHTEXT_FIXED_WIDTH = _richtext.RICHTEXT_FIXED_WIDTH
RICHTEXT_FIXED_HEIGHT = _richtext.RICHTEXT_FIXED_HEIGHT
RICHTEXT_VARIABLE_WIDTH = _richtext.RICHTEXT_VARIABLE_WIDTH
RICHTEXT_VARIABLE_HEIGHT = _richtext.RICHTEXT_VARIABLE_HEIGHT
RICHTEXT_LAYOUT_SPECIFIED_RECT = _richtext.RICHTEXT_LAYOUT_SPECIFIED_RECT
RICHTEXT_DRAW_IGNORE_CACHE = _richtext.RICHTEXT_DRAW_IGNORE_CACHE
RICHTEXT_FORMATTED = _richtext.RICHTEXT_FORMATTED
RICHTEXT_UNFORMATTED = _richtext.RICHTEXT_UNFORMATTED
RICHTEXT_CACHE_SIZE = _richtext.RICHTEXT_CACHE_SIZE
RICHTEXT_HEIGHT_ONLY = _richtext.RICHTEXT_HEIGHT_ONLY
RICHTEXT_SETSTYLE_NONE = _richtext.RICHTEXT_SETSTYLE_NONE
RICHTEXT_SETSTYLE_WITH_UNDO = _richtext.RICHTEXT_SETSTYLE_WITH_UNDO
RICHTEXT_SETSTYLE_OPTIMIZE = _richtext.RICHTEXT_SETSTYLE_OPTIMIZE
RICHTEXT_SETSTYLE_PARAGRAPHS_ONLY = _richtext.RICHTEXT_SETSTYLE_PARAGRAPHS_ONLY
RICHTEXT_SETSTYLE_CHARACTERS_ONLY = _richtext.RICHTEXT_SETSTYLE_CHARACTERS_ONLY
RICHTEXT_SETSTYLE_RENUMBER = _richtext.RICHTEXT_SETSTYLE_RENUMBER
RICHTEXT_SETSTYLE_SPECIFY_LEVEL = _richtext.RICHTEXT_SETSTYLE_SPECIFY_LEVEL
RICHTEXT_SETSTYLE_RESET = _richtext.RICHTEXT_SETSTYLE_RESET
RICHTEXT_SETSTYLE_REMOVE = _richtext.RICHTEXT_SETSTYLE_REMOVE
RICHTEXT_INSERT_NONE = _richtext.RICHTEXT_INSERT_NONE
RICHTEXT_INSERT_WITH_PREVIOUS_PARAGRAPH_STYLE = _richtext.RICHTEXT_INSERT_WITH_PREVIOUS_PARAGRAPH_STYLE
RICHTEXT_INSERT_INTERACTIVE = _richtext.RICHTEXT_INSERT_INTERACTIVE
TEXT_ATTR_KEEP_FIRST_PARA_STYLE = _richtext.TEXT_ATTR_KEEP_FIRST_PARA_STYLE
RICHTEXT_HITTEST_NONE = _richtext.RICHTEXT_HITTEST_NONE
RICHTEXT_HITTEST_BEFORE = _richtext.RICHTEXT_HITTEST_BEFORE
RICHTEXT_HITTEST_AFTER = _richtext.RICHTEXT_HITTEST_AFTER
RICHTEXT_HITTEST_ON = _richtext.RICHTEXT_HITTEST_ON
RICHTEXT_HITTEST_OUTSIDE = _richtext.RICHTEXT_HITTEST_OUTSIDE
RICHTEXT_HITTEST_NO_NESTED_OBJECTS = _richtext.RICHTEXT_HITTEST_NO_NESTED_OBJECTS
RICHTEXT_HITTEST_NO_FLOATING_OBJECTS = _richtext.RICHTEXT_HITTEST_NO_FLOATING_OBJECTS
TEXT_BOX_ATTR_FLOAT = _richtext.TEXT_BOX_ATTR_FLOAT
TEXT_BOX_ATTR_CLEAR = _richtext.TEXT_BOX_ATTR_CLEAR
TEXT_BOX_ATTR_COLLAPSE_BORDERS = _richtext.TEXT_BOX_ATTR_COLLAPSE_BORDERS
TEXT_BOX_ATTR_VERTICAL_ALIGNMENT = _richtext.TEXT_BOX_ATTR_VERTICAL_ALIGNMENT
TEXT_BOX_ATTR_BOX_STYLE_NAME = _richtext.TEXT_BOX_ATTR_BOX_STYLE_NAME
TEXT_ATTR_UNITS_TENTHS_MM = _richtext.TEXT_ATTR_UNITS_TENTHS_MM
TEXT_ATTR_UNITS_PIXELS = _richtext.TEXT_ATTR_UNITS_PIXELS
TEXT_ATTR_UNITS_PERCENTAGE = _richtext.TEXT_ATTR_UNITS_PERCENTAGE
TEXT_ATTR_UNITS_POINTS = _richtext.TEXT_ATTR_UNITS_POINTS
TEXT_ATTR_UNITS_MASK = _richtext.TEXT_ATTR_UNITS_MASK
TEXT_BOX_ATTR_POSITION_STATIC = _richtext.TEXT_BOX_ATTR_POSITION_STATIC
TEXT_BOX_ATTR_POSITION_RELATIVE = _richtext.TEXT_BOX_ATTR_POSITION_RELATIVE
TEXT_BOX_ATTR_POSITION_ABSOLUTE = _richtext.TEXT_BOX_ATTR_POSITION_ABSOLUTE
TEXT_BOX_ATTR_POSITION_MASK = _richtext.TEXT_BOX_ATTR_POSITION_MASK
#---------------------------------------------------------------------------
class TextAttrDimension(object):
    """SWIG proxy of the C++ wxTextAttrDimension class.

    A single dimension value with its measurement units and presence
    flags; all methods delegate to the _richtext extension module.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self) -> TextAttrDimension
        __init__(self, int value, int units=TEXT_ATTR_UNITS_TENTHS_MM) -> TextAttrDimension
        """
        _richtext.TextAttrDimension_swiginit(self,_richtext.new_TextAttrDimension(*args))
    __swig_destroy__ = _richtext.delete_TextAttrDimension
    __del__ = lambda self : None;
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.TextAttrDimension_Reset(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, TextAttrDimension dim) -> bool"""
        return _richtext.TextAttrDimension_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, TextAttrDimension dim, TextAttrDimension compareWith=None) -> bool"""
        return _richtext.TextAttrDimension_Apply(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """
        CollectCommonAttributes(self, TextAttrDimension attr, TextAttrDimension clashingAttr,
            TextAttrDimension absentAttr)
        """
        return _richtext.TextAttrDimension_CollectCommonAttributes(*args, **kwargs)
    def __eq__(*args, **kwargs):
        """__eq__(self, TextAttrDimension dim) -> bool"""
        return _richtext.TextAttrDimension___eq__(*args, **kwargs)
    def GetValue(*args, **kwargs):
        """GetValue(self) -> int"""
        return _richtext.TextAttrDimension_GetValue(*args, **kwargs)
    def GetValueMM(*args, **kwargs):
        """GetValueMM(self) -> float"""
        return _richtext.TextAttrDimension_GetValueMM(*args, **kwargs)
    def SetValueMM(*args, **kwargs):
        """SetValueMM(self, float value)"""
        return _richtext.TextAttrDimension_SetValueMM(*args, **kwargs)
    def SetValue(*args):
        """
        SetValue(self, int value)
        SetValue(self, int value, TextAttrDimensionFlags flags)
        SetValue(self, TextAttrDimension dim)
        """
        return _richtext.TextAttrDimension_SetValue(*args)
    def GetUnits(*args, **kwargs):
        """GetUnits(self) -> int"""
        return _richtext.TextAttrDimension_GetUnits(*args, **kwargs)
    def SetUnits(*args, **kwargs):
        """SetUnits(self, int units)"""
        return _richtext.TextAttrDimension_SetUnits(*args, **kwargs)
    def GetPosition(*args, **kwargs):
        """GetPosition(self) -> int"""
        return _richtext.TextAttrDimension_GetPosition(*args, **kwargs)
    def SetPosition(*args, **kwargs):
        """SetPosition(self, int pos)"""
        return _richtext.TextAttrDimension_SetPosition(*args, **kwargs)
    def GetFlags(*args, **kwargs):
        """GetFlags(self) -> TextAttrDimensionFlags"""
        return _richtext.TextAttrDimension_GetFlags(*args, **kwargs)
    def SetFlags(*args, **kwargs):
        """SetFlags(self, TextAttrDimensionFlags flags)"""
        return _richtext.TextAttrDimension_SetFlags(*args, **kwargs)
    # Direct access to the underlying C++ members.
    m_value = property(_richtext.TextAttrDimension_m_value_get, _richtext.TextAttrDimension_m_value_set)
    m_flags = property(_richtext.TextAttrDimension_m_flags_get, _richtext.TextAttrDimension_m_flags_set)
_richtext.TextAttrDimension_swigregister(TextAttrDimension)
class TextAttrDimensions(object):
    """SWIG proxy of the C++ wxTextAttrDimensions class.

    Bundles four TextAttrDimension values (left/top/right/bottom), as
    used for margins, padding and positions.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> TextAttrDimensions"""
        _richtext.TextAttrDimensions_swiginit(self,_richtext.new_TextAttrDimensions(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_TextAttrDimensions
    __del__ = lambda self : None;
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.TextAttrDimensions_Reset(*args, **kwargs)
    def __eq__(*args, **kwargs):
        """__eq__(self, TextAttrDimensions dims) -> bool"""
        return _richtext.TextAttrDimensions___eq__(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, TextAttrDimensions dims) -> bool"""
        return _richtext.TextAttrDimensions_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, TextAttrDimensions dims, TextAttrDimensions compareWith=None) -> bool"""
        return _richtext.TextAttrDimensions_Apply(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """
        CollectCommonAttributes(self, TextAttrDimensions attr, TextAttrDimensions clashingAttr,
            TextAttrDimensions absentAttr)
        """
        return _richtext.TextAttrDimensions_CollectCommonAttributes(*args, **kwargs)
    def RemoveStyle(*args, **kwargs):
        """RemoveStyle(self, TextAttrDimensions attr) -> bool"""
        return _richtext.TextAttrDimensions_RemoveStyle(*args, **kwargs)
    def GetLeft(*args, **kwargs):
        """GetLeft(self) -> TextAttrDimension"""
        return _richtext.TextAttrDimensions_GetLeft(*args, **kwargs)
    def GetRight(*args, **kwargs):
        """GetRight(self) -> TextAttrDimension"""
        return _richtext.TextAttrDimensions_GetRight(*args, **kwargs)
    def GetTop(*args, **kwargs):
        """GetTop(self) -> TextAttrDimension"""
        return _richtext.TextAttrDimensions_GetTop(*args, **kwargs)
    def GetBottom(*args, **kwargs):
        """GetBottom(self) -> TextAttrDimension"""
        return _richtext.TextAttrDimensions_GetBottom(*args, **kwargs)
    def IsValid(*args, **kwargs):
        """IsValid(self) -> bool"""
        return _richtext.TextAttrDimensions_IsValid(*args, **kwargs)
    # Direct access to the underlying C++ members.
    m_left = property(_richtext.TextAttrDimensions_m_left_get, _richtext.TextAttrDimensions_m_left_set)
    m_top = property(_richtext.TextAttrDimensions_m_top_get, _richtext.TextAttrDimensions_m_top_set)
    m_right = property(_richtext.TextAttrDimensions_m_right_get, _richtext.TextAttrDimensions_m_right_set)
    m_bottom = property(_richtext.TextAttrDimensions_m_bottom_get, _richtext.TextAttrDimensions_m_bottom_set)
_richtext.TextAttrDimensions_swigregister(TextAttrDimensions)
class TextAttrDimensionConverter(object):
    """SWIG proxy of the C++ wxTextAttrDimensionConverter class.

    Converts TextAttrDimension values between pixels and tenths of a
    millimetre for a given device resolution (DC or ppi) and scale.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, DC dc, double scale=1.0, Size parentSize=DefaultSize) -> TextAttrDimensionConverter
        __init__(self, int ppi, double scale=1.0, Size parentSize=DefaultSize) -> TextAttrDimensionConverter
        """
        _richtext.TextAttrDimensionConverter_swiginit(self,_richtext.new_TextAttrDimensionConverter(*args))
    __swig_destroy__ = _richtext.delete_TextAttrDimensionConverter
    __del__ = lambda self : None;
    def GetPixels(*args, **kwargs):
        """GetPixels(self, TextAttrDimension dim, int direction=HORIZONTAL) -> int"""
        return _richtext.TextAttrDimensionConverter_GetPixels(*args, **kwargs)
    def GetTenthsMM(*args, **kwargs):
        """GetTenthsMM(self, TextAttrDimension dim) -> int"""
        return _richtext.TextAttrDimensionConverter_GetTenthsMM(*args, **kwargs)
    def ConvertTenthsMMToPixels(*args, **kwargs):
        """ConvertTenthsMMToPixels(self, int units) -> int"""
        return _richtext.TextAttrDimensionConverter_ConvertTenthsMMToPixels(*args, **kwargs)
    def ConvertPixelsToTenthsMM(*args, **kwargs):
        """ConvertPixelsToTenthsMM(self, int pixels) -> int"""
        return _richtext.TextAttrDimensionConverter_ConvertPixelsToTenthsMM(*args, **kwargs)
    # Direct access to the underlying C++ members.
    m_ppi = property(_richtext.TextAttrDimensionConverter_m_ppi_get, _richtext.TextAttrDimensionConverter_m_ppi_set)
    m_scale = property(_richtext.TextAttrDimensionConverter_m_scale_get, _richtext.TextAttrDimensionConverter_m_scale_set)
    m_parentSize = property(_richtext.TextAttrDimensionConverter_m_parentSize_get, _richtext.TextAttrDimensionConverter_m_parentSize_set)
_richtext.TextAttrDimensionConverter_swigregister(TextAttrDimensionConverter)
# SWIG-generated mirrors of the wxTextBoxAttr enums: border styles and
# widths, float/clear behaviour, border collapsing and vertical alignment.
TEXT_BOX_ATTR_BORDER_NONE = _richtext.TEXT_BOX_ATTR_BORDER_NONE
TEXT_BOX_ATTR_BORDER_SOLID = _richtext.TEXT_BOX_ATTR_BORDER_SOLID
TEXT_BOX_ATTR_BORDER_DOTTED = _richtext.TEXT_BOX_ATTR_BORDER_DOTTED
TEXT_BOX_ATTR_BORDER_DASHED = _richtext.TEXT_BOX_ATTR_BORDER_DASHED
TEXT_BOX_ATTR_BORDER_DOUBLE = _richtext.TEXT_BOX_ATTR_BORDER_DOUBLE
TEXT_BOX_ATTR_BORDER_GROOVE = _richtext.TEXT_BOX_ATTR_BORDER_GROOVE
TEXT_BOX_ATTR_BORDER_RIDGE = _richtext.TEXT_BOX_ATTR_BORDER_RIDGE
TEXT_BOX_ATTR_BORDER_INSET = _richtext.TEXT_BOX_ATTR_BORDER_INSET
TEXT_BOX_ATTR_BORDER_OUTSET = _richtext.TEXT_BOX_ATTR_BORDER_OUTSET
TEXT_BOX_ATTR_BORDER_STYLE = _richtext.TEXT_BOX_ATTR_BORDER_STYLE
TEXT_BOX_ATTR_BORDER_COLOUR = _richtext.TEXT_BOX_ATTR_BORDER_COLOUR
TEXT_BOX_ATTR_BORDER_THIN = _richtext.TEXT_BOX_ATTR_BORDER_THIN
TEXT_BOX_ATTR_BORDER_MEDIUM = _richtext.TEXT_BOX_ATTR_BORDER_MEDIUM
TEXT_BOX_ATTR_BORDER_THICK = _richtext.TEXT_BOX_ATTR_BORDER_THICK
TEXT_BOX_ATTR_FLOAT_NONE = _richtext.TEXT_BOX_ATTR_FLOAT_NONE
TEXT_BOX_ATTR_FLOAT_LEFT = _richtext.TEXT_BOX_ATTR_FLOAT_LEFT
TEXT_BOX_ATTR_FLOAT_RIGHT = _richtext.TEXT_BOX_ATTR_FLOAT_RIGHT
TEXT_BOX_ATTR_CLEAR_NONE = _richtext.TEXT_BOX_ATTR_CLEAR_NONE
TEXT_BOX_ATTR_CLEAR_LEFT = _richtext.TEXT_BOX_ATTR_CLEAR_LEFT
TEXT_BOX_ATTR_CLEAR_RIGHT = _richtext.TEXT_BOX_ATTR_CLEAR_RIGHT
TEXT_BOX_ATTR_CLEAR_BOTH = _richtext.TEXT_BOX_ATTR_CLEAR_BOTH
TEXT_BOX_ATTR_COLLAPSE_NONE = _richtext.TEXT_BOX_ATTR_COLLAPSE_NONE
TEXT_BOX_ATTR_COLLAPSE_FULL = _richtext.TEXT_BOX_ATTR_COLLAPSE_FULL
TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_NONE = _richtext.TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_NONE
TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_TOP = _richtext.TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_TOP
TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_CENTRE = _richtext.TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_CENTRE
TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_BOTTOM = _richtext.TEXT_BOX_ATTR_VERTICAL_ALIGNMENT_BOTTOM
class TextAttrBorder(object):
    """SWIG proxy of the C++ wxTextAttrBorder class.

    One border edge: style, colour and width, with presence flags.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> TextAttrBorder"""
        _richtext.TextAttrBorder_swiginit(self,_richtext.new_TextAttrBorder(*args, **kwargs))
    def __eq__(*args, **kwargs):
        """__eq__(self, TextAttrBorder border) -> bool"""
        return _richtext.TextAttrBorder___eq__(*args, **kwargs)
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.TextAttrBorder_Reset(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, TextAttrBorder border) -> bool"""
        return _richtext.TextAttrBorder_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, TextAttrBorder border, TextAttrBorder compareWith=None) -> bool"""
        return _richtext.TextAttrBorder_Apply(*args, **kwargs)
    def RemoveStyle(*args, **kwargs):
        """RemoveStyle(self, TextAttrBorder attr) -> bool"""
        return _richtext.TextAttrBorder_RemoveStyle(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """CollectCommonAttributes(self, TextAttrBorder attr, TextAttrBorder clashingAttr, TextAttrBorder absentAttr)"""
        return _richtext.TextAttrBorder_CollectCommonAttributes(*args, **kwargs)
    def SetStyle(*args, **kwargs):
        """SetStyle(self, int style)"""
        return _richtext.TextAttrBorder_SetStyle(*args, **kwargs)
    def GetStyle(*args, **kwargs):
        """GetStyle(self) -> int"""
        return _richtext.TextAttrBorder_GetStyle(*args, **kwargs)
    def SetColour(*args):
        """
        SetColour(self, unsigned long colour)
        SetColour(self, Colour colour)
        """
        return _richtext.TextAttrBorder_SetColour(*args)
    def GetColourLong(*args, **kwargs):
        """GetColourLong(self) -> unsigned long"""
        return _richtext.TextAttrBorder_GetColourLong(*args, **kwargs)
    def GetColour(*args, **kwargs):
        """GetColour(self) -> Colour"""
        return _richtext.TextAttrBorder_GetColour(*args, **kwargs)
    def GetWidth(*args):
        """
        GetWidth(self) -> TextAttrDimension
        GetWidth(self) -> TextAttrDimension
        """
        return _richtext.TextAttrBorder_GetWidth(*args)
    def SetWidth(*args):
        """
        SetWidth(self, TextAttrDimension width)
        SetWidth(self, int value, int units=TEXT_ATTR_UNITS_TENTHS_MM)
        """
        return _richtext.TextAttrBorder_SetWidth(*args)
    def HasStyle(*args, **kwargs):
        """HasStyle(self) -> bool"""
        return _richtext.TextAttrBorder_HasStyle(*args, **kwargs)
    def HasColour(*args, **kwargs):
        """HasColour(self) -> bool"""
        return _richtext.TextAttrBorder_HasColour(*args, **kwargs)
    def HasWidth(*args, **kwargs):
        """HasWidth(self) -> bool"""
        return _richtext.TextAttrBorder_HasWidth(*args, **kwargs)
    def IsValid(*args, **kwargs):
        """IsValid(self) -> bool"""
        return _richtext.TextAttrBorder_IsValid(*args, **kwargs)
    def MakeValid(*args, **kwargs):
        """MakeValid(self)"""
        return _richtext.TextAttrBorder_MakeValid(*args, **kwargs)
    def GetFlags(*args, **kwargs):
        """GetFlags(self) -> int"""
        return _richtext.TextAttrBorder_GetFlags(*args, **kwargs)
    def SetFlags(*args, **kwargs):
        """SetFlags(self, int flags)"""
        return _richtext.TextAttrBorder_SetFlags(*args, **kwargs)
    def AddFlag(*args, **kwargs):
        """AddFlag(self, int flag)"""
        return _richtext.TextAttrBorder_AddFlag(*args, **kwargs)
    def RemoveFlag(*args, **kwargs):
        """RemoveFlag(self, int flag)"""
        return _richtext.TextAttrBorder_RemoveFlag(*args, **kwargs)
    # Direct access to the underlying C++ members.
    m_borderStyle = property(_richtext.TextAttrBorder_m_borderStyle_get, _richtext.TextAttrBorder_m_borderStyle_set)
    m_borderColour = property(_richtext.TextAttrBorder_m_borderColour_get, _richtext.TextAttrBorder_m_borderColour_set)
    m_borderWidth = property(_richtext.TextAttrBorder_m_borderWidth_get, _richtext.TextAttrBorder_m_borderWidth_set)
    m_flags = property(_richtext.TextAttrBorder_m_flags_get, _richtext.TextAttrBorder_m_flags_set)
_richtext.TextAttrBorder_swigregister(TextAttrBorder)
# SWIG-generated proxy.  Represents the four borders (left/right/top/bottom)
# of a rich text box; the individual sides are `TextAttrBorder` objects
# exposed both as Get* accessors and as m_* member properties.
class TextAttrBorders(object):
    """Proxy of C++ TextAttrBorders class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """__init__(self) -> TextAttrBorders"""
        _richtext.TextAttrBorders_swiginit(self,_richtext.new_TextAttrBorders(*args, **kwargs))
    def __eq__(*args, **kwargs):
        """__eq__(self, TextAttrBorders borders) -> bool"""
        return _richtext.TextAttrBorders___eq__(*args, **kwargs)
    # The Set* methods below apply the given value to all four borders at once.
    # NOTE(review): inferred from the C++ naming convention -- confirm against
    # the wxTextAttrBorders documentation.
    def SetStyle(*args, **kwargs):
        """SetStyle(self, int style)"""
        return _richtext.TextAttrBorders_SetStyle(*args, **kwargs)
    def SetColour(*args):
        """
        SetColour(self, unsigned long colour)
        SetColour(self, Colour colour)
        """
        return _richtext.TextAttrBorders_SetColour(*args)
    def SetWidth(*args):
        """
        SetWidth(self, TextAttrDimension width)
        SetWidth(self, int value, int units=TEXT_ATTR_UNITS_TENTHS_MM)
        """
        return _richtext.TextAttrBorders_SetWidth(*args)
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.TextAttrBorders_Reset(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, TextAttrBorders borders) -> bool"""
        return _richtext.TextAttrBorders_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, TextAttrBorders borders, TextAttrBorders compareWith=None) -> bool"""
        return _richtext.TextAttrBorders_Apply(*args, **kwargs)
    def RemoveStyle(*args, **kwargs):
        """RemoveStyle(self, TextAttrBorders attr) -> bool"""
        return _richtext.TextAttrBorders_RemoveStyle(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """
        CollectCommonAttributes(self, TextAttrBorders attr, TextAttrBorders clashingAttr, 
            TextAttrBorders absentAttr)
        """
        return _richtext.TextAttrBorders_CollectCommonAttributes(*args, **kwargs)
    def IsValid(*args, **kwargs):
        """IsValid(self) -> bool"""
        return _richtext.TextAttrBorders_IsValid(*args, **kwargs)
    # Overloaded accessors: SWIG collapses the C++ const/non-const pair into
    # the duplicated signatures seen in the docstrings.
    def GetLeft(*args):
        """
        GetLeft(self) -> TextAttrBorder
        GetLeft(self) -> TextAttrBorder
        """
        return _richtext.TextAttrBorders_GetLeft(*args)
    def GetRight(*args):
        """
        GetRight(self) -> TextAttrBorder
        GetRight(self) -> TextAttrBorder
        """
        return _richtext.TextAttrBorders_GetRight(*args)
    def GetTop(*args):
        """
        GetTop(self) -> TextAttrBorder
        GetTop(self) -> TextAttrBorder
        """
        return _richtext.TextAttrBorders_GetTop(*args)
    def GetBottom(*args):
        """
        GetBottom(self) -> TextAttrBorder
        GetBottom(self) -> TextAttrBorder
        """
        return _richtext.TextAttrBorders_GetBottom(*args)
    # Direct access to the underlying C++ member variables.
    m_left = property(_richtext.TextAttrBorders_m_left_get, _richtext.TextAttrBorders_m_left_set)
    m_right = property(_richtext.TextAttrBorders_m_right_get, _richtext.TextAttrBorders_m_right_set)
    m_top = property(_richtext.TextAttrBorders_m_top_get, _richtext.TextAttrBorders_m_top_set)
    m_bottom = property(_richtext.TextAttrBorders_m_bottom_get, _richtext.TextAttrBorders_m_bottom_set)
_richtext.TextAttrBorders_swigregister(TextAttrBorders)
# SWIG-generated proxy.  Bundles the box-model attributes of a rich text
# object: margins, padding, position, size, borders, outline, float/clear
# behaviour and vertical alignment, plus a flags word recording which of
# these have been explicitly set.
class TextBoxAttr(object):
    """Proxy of C++ TextBoxAttr class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args): 
        """
        __init__(self) -> TextBoxAttr
        __init__(self, TextBoxAttr attr) -> TextBoxAttr
        """
        _richtext.TextBoxAttr_swiginit(self,_richtext.new_TextBoxAttr(*args))
    def Init(*args, **kwargs):
        """Init(self)"""
        return _richtext.TextBoxAttr_Init(*args, **kwargs)
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.TextBoxAttr_Reset(*args, **kwargs)
    def __eq__(*args, **kwargs):
        """__eq__(self, TextBoxAttr attr) -> bool"""
        return _richtext.TextBoxAttr___eq__(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, TextBoxAttr attr) -> bool"""
        return _richtext.TextBoxAttr_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, TextBoxAttr style, TextBoxAttr compareWith=None) -> bool"""
        return _richtext.TextBoxAttr_Apply(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """CollectCommonAttributes(self, TextBoxAttr attr, TextBoxAttr clashingAttr, TextBoxAttr absentAttr)"""
        return _richtext.TextBoxAttr_CollectCommonAttributes(*args, **kwargs)
    def RemoveStyle(*args, **kwargs):
        """RemoveStyle(self, TextBoxAttr attr) -> bool"""
        return _richtext.TextBoxAttr_RemoveStyle(*args, **kwargs)
    # Flags record which attributes have been explicitly set on this object.
    def SetFlags(*args, **kwargs):
        """SetFlags(self, int flags)"""
        return _richtext.TextBoxAttr_SetFlags(*args, **kwargs)
    def GetFlags(*args, **kwargs):
        """GetFlags(self) -> int"""
        return _richtext.TextBoxAttr_GetFlags(*args, **kwargs)
    def HasFlag(*args, **kwargs):
        """HasFlag(self, int flag) -> bool"""
        return _richtext.TextBoxAttr_HasFlag(*args, **kwargs)
    def RemoveFlag(*args, **kwargs):
        """RemoveFlag(self, int flag)"""
        return _richtext.TextBoxAttr_RemoveFlag(*args, **kwargs)
    def AddFlag(*args, **kwargs):
        """AddFlag(self, int flag)"""
        return _richtext.TextBoxAttr_AddFlag(*args, **kwargs)
    # Float/clear behaviour (analogous to CSS float/clear for boxes).
    def GetFloatMode(*args, **kwargs):
        """GetFloatMode(self) -> int"""
        return _richtext.TextBoxAttr_GetFloatMode(*args, **kwargs)
    def SetFloatMode(*args, **kwargs):
        """SetFloatMode(self, int mode)"""
        return _richtext.TextBoxAttr_SetFloatMode(*args, **kwargs)
    def HasFloatMode(*args, **kwargs):
        """HasFloatMode(self) -> bool"""
        return _richtext.TextBoxAttr_HasFloatMode(*args, **kwargs)
    def IsFloating(*args, **kwargs):
        """IsFloating(self) -> bool"""
        return _richtext.TextBoxAttr_IsFloating(*args, **kwargs)
    def GetClearMode(*args, **kwargs):
        """GetClearMode(self) -> int"""
        return _richtext.TextBoxAttr_GetClearMode(*args, **kwargs)
    def SetClearMode(*args, **kwargs):
        """SetClearMode(self, int mode)"""
        return _richtext.TextBoxAttr_SetClearMode(*args, **kwargs)
    def HasClearMode(*args, **kwargs):
        """HasClearMode(self) -> bool"""
        return _richtext.TextBoxAttr_HasClearMode(*args, **kwargs)
    def GetCollapseBorders(*args, **kwargs):
        """GetCollapseBorders(self) -> int"""
        return _richtext.TextBoxAttr_GetCollapseBorders(*args, **kwargs)
    def SetCollapseBorders(*args, **kwargs):
        """SetCollapseBorders(self, int collapse)"""
        return _richtext.TextBoxAttr_SetCollapseBorders(*args, **kwargs)
    def HasCollapseBorders(*args, **kwargs):
        """HasCollapseBorders(self) -> bool"""
        return _richtext.TextBoxAttr_HasCollapseBorders(*args, **kwargs)
    def GetVerticalAlignment(*args, **kwargs):
        """GetVerticalAlignment(self) -> int"""
        return _richtext.TextBoxAttr_GetVerticalAlignment(*args, **kwargs)
    def SetVerticalAlignment(*args, **kwargs):
        """SetVerticalAlignment(self, int verticalAlignment)"""
        return _richtext.TextBoxAttr_SetVerticalAlignment(*args, **kwargs)
    def HasVerticalAlignment(*args, **kwargs):
        """HasVerticalAlignment(self) -> bool"""
        return _richtext.TextBoxAttr_HasVerticalAlignment(*args, **kwargs)
    # Box-model accessors.  The duplicated signatures in the docstrings are
    # SWIG's rendering of the C++ const/non-const overload pairs.
    def GetMargins(*args):
        """
        GetMargins(self) -> TextAttrDimensions
        GetMargins(self) -> TextAttrDimensions
        """
        return _richtext.TextBoxAttr_GetMargins(*args)
    def GetLeftMargin(*args):
        """
        GetLeftMargin(self) -> TextAttrDimension
        GetLeftMargin(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetLeftMargin(*args)
    def GetRightMargin(*args):
        """
        GetRightMargin(self) -> TextAttrDimension
        GetRightMargin(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetRightMargin(*args)
    def GetTopMargin(*args):
        """
        GetTopMargin(self) -> TextAttrDimension
        GetTopMargin(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetTopMargin(*args)
    def GetBottomMargin(*args):
        """
        GetBottomMargin(self) -> TextAttrDimension
        GetBottomMargin(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetBottomMargin(*args)
    def GetPosition(*args):
        """
        GetPosition(self) -> TextAttrDimensions
        GetPosition(self) -> TextAttrDimensions
        """
        return _richtext.TextBoxAttr_GetPosition(*args)
    def GetLeft(*args):
        """
        GetLeft(self) -> TextAttrDimension
        GetLeft(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetLeft(*args)
    def GetRight(*args):
        """
        GetRight(self) -> TextAttrDimension
        GetRight(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetRight(*args)
    def GetTop(*args):
        """
        GetTop(self) -> TextAttrDimension
        GetTop(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetTop(*args)
    def GetBottom(*args):
        """
        GetBottom(self) -> TextAttrDimension
        GetBottom(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetBottom(*args)
    def GetPadding(*args):
        """
        GetPadding(self) -> TextAttrDimensions
        GetPadding(self) -> TextAttrDimensions
        """
        return _richtext.TextBoxAttr_GetPadding(*args)
    def GetLeftPadding(*args):
        """
        GetLeftPadding(self) -> TextAttrDimension
        GetLeftPadding(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetLeftPadding(*args)
    def GetRightPadding(*args):
        """
        GetRightPadding(self) -> TextAttrDimension
        GetRightPadding(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetRightPadding(*args)
    def GetTopPadding(*args):
        """
        GetTopPadding(self) -> TextAttrDimension
        GetTopPadding(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetTopPadding(*args)
    def GetBottomPadding(*args):
        """
        GetBottomPadding(self) -> TextAttrDimension
        GetBottomPadding(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetBottomPadding(*args)
    def GetBorder(*args):
        """
        GetBorder(self) -> TextAttrBorders
        GetBorder(self) -> TextAttrBorders
        """
        return _richtext.TextBoxAttr_GetBorder(*args)
    def GetLeftBorder(*args):
        """
        GetLeftBorder(self) -> TextAttrBorder
        GetLeftBorder(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetLeftBorder(*args)
    def GetTopBorder(*args):
        """
        GetTopBorder(self) -> TextAttrBorder
        GetTopBorder(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetTopBorder(*args)
    def GetRightBorder(*args):
        """
        GetRightBorder(self) -> TextAttrBorder
        GetRightBorder(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetRightBorder(*args)
    def GetBottomBorder(*args):
        """
        GetBottomBorder(self) -> TextAttrBorder
        GetBottomBorder(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetBottomBorder(*args)
    def GetOutline(*args):
        """
        GetOutline(self) -> TextAttrBorders
        GetOutline(self) -> TextAttrBorders
        """
        return _richtext.TextBoxAttr_GetOutline(*args)
    def GetLeftOutline(*args):
        """
        GetLeftOutline(self) -> TextAttrBorder
        GetLeftOutline(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetLeftOutline(*args)
    def GetTopOutline(*args):
        """
        GetTopOutline(self) -> TextAttrBorder
        GetTopOutline(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetTopOutline(*args)
    def GetRightOutline(*args):
        """
        GetRightOutline(self) -> TextAttrBorder
        GetRightOutline(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetRightOutline(*args)
    def GetBottomOutline(*args):
        """
        GetBottomOutline(self) -> TextAttrBorder
        GetBottomOutline(self) -> TextAttrBorder
        """
        return _richtext.TextBoxAttr_GetBottomOutline(*args)
    def GetSize(*args):
        """
        GetSize(self) -> wxTextAttrSize
        GetSize(self) -> wxTextAttrSize
        """
        return _richtext.TextBoxAttr_GetSize(*args)
    def SetSize(*args, **kwargs):
        """SetSize(self, wxTextAttrSize sz)"""
        return _richtext.TextBoxAttr_SetSize(*args, **kwargs)
    def GetWidth(*args):
        """
        GetWidth(self) -> TextAttrDimension
        GetWidth(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetWidth(*args)
    def GetHeight(*args):
        """
        GetHeight(self) -> TextAttrDimension
        GetHeight(self) -> TextAttrDimension
        """
        return _richtext.TextBoxAttr_GetHeight(*args)
    # Named box style, used to look the box up in a style sheet.
    def GetBoxStyleName(*args, **kwargs):
        """GetBoxStyleName(self) -> String"""
        return _richtext.TextBoxAttr_GetBoxStyleName(*args, **kwargs)
    def SetBoxStyleName(*args, **kwargs):
        """SetBoxStyleName(self, String name)"""
        return _richtext.TextBoxAttr_SetBoxStyleName(*args, **kwargs)
    def HasBoxStyleName(*args, **kwargs):
        """HasBoxStyleName(self) -> bool"""
        return _richtext.TextBoxAttr_HasBoxStyleName(*args, **kwargs)
    # Direct access to the underlying C++ member variables.
    m_flags = property(_richtext.TextBoxAttr_m_flags_get, _richtext.TextBoxAttr_m_flags_set)
    m_margins = property(_richtext.TextBoxAttr_m_margins_get, _richtext.TextBoxAttr_m_margins_set)
    m_padding = property(_richtext.TextBoxAttr_m_padding_get, _richtext.TextBoxAttr_m_padding_set)
    m_position = property(_richtext.TextBoxAttr_m_position_get, _richtext.TextBoxAttr_m_position_set)
    m_size = property(_richtext.TextBoxAttr_m_size_get, _richtext.TextBoxAttr_m_size_set)
    m_border = property(_richtext.TextBoxAttr_m_border_get, _richtext.TextBoxAttr_m_border_set)
    m_outline = property(_richtext.TextBoxAttr_m_outline_get, _richtext.TextBoxAttr_m_outline_set)
    m_floatMode = property(_richtext.TextBoxAttr_m_floatMode_get, _richtext.TextBoxAttr_m_floatMode_set)
    m_clearMode = property(_richtext.TextBoxAttr_m_clearMode_get, _richtext.TextBoxAttr_m_clearMode_set)
    m_collapseMode = property(_richtext.TextBoxAttr_m_collapseMode_get, _richtext.TextBoxAttr_m_collapseMode_set)
    m_verticalAlignment = property(_richtext.TextBoxAttr_m_verticalAlignment_get, _richtext.TextBoxAttr_m_verticalAlignment_set)
    m_boxStyleName = property(_richtext.TextBoxAttr_m_boxStyleName_get, _richtext.TextBoxAttr_m_boxStyleName_set)
_richtext.TextBoxAttr_swigregister(TextBoxAttr)
#---------------------------------------------------------------------------
# SWIG-generated proxy.  Extends the plain TextAttr with a TextBoxAttr so
# that box-model properties (margins, borders, floats, ...) can be carried
# alongside character/paragraph attributes.
class RichTextAttr(_controls.TextAttr):
    """Proxy of C++ RichTextAttr class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args): 
        """
        __init__(self, TextAttr attr) -> RichTextAttr
        __init__(self, RichTextAttr attr) -> RichTextAttr
        __init__(self) -> RichTextAttr
        """
        _richtext.RichTextAttr_swiginit(self,_richtext.new_RichTextAttr(*args))
    __swig_destroy__ = _richtext.delete_RichTextAttr
    __del__ = lambda self : None;
    def Copy(*args, **kwargs):
        """Copy(self, RichTextAttr attr)"""
        return _richtext.RichTextAttr_Copy(*args, **kwargs)
    def __eq__(*args, **kwargs):
        """__eq__(self, RichTextAttr attr) -> bool"""
        return _richtext.RichTextAttr___eq__(*args, **kwargs)
    def EqPartial(*args, **kwargs):
        """EqPartial(self, RichTextAttr attr) -> bool"""
        return _richtext.RichTextAttr_EqPartial(*args, **kwargs)
    def Apply(*args, **kwargs):
        """Apply(self, RichTextAttr style, RichTextAttr compareWith=None) -> bool"""
        return _richtext.RichTextAttr_Apply(*args, **kwargs)
    def CollectCommonAttributes(*args, **kwargs):
        """CollectCommonAttributes(self, RichTextAttr attr, RichTextAttr clashingAttr, RichTextAttr absentAttr)"""
        return _richtext.RichTextAttr_CollectCommonAttributes(*args, **kwargs)
    def RemoveStyle(*args, **kwargs):
        """RemoveStyle(self, RichTextAttr attr) -> bool"""
        return _richtext.RichTextAttr_RemoveStyle(*args, **kwargs)
    def GetTextBoxAttr(*args):
        """
        GetTextBoxAttr(self) -> TextBoxAttr
        GetTextBoxAttr(self) -> TextBoxAttr
        """
        return _richtext.RichTextAttr_GetTextBoxAttr(*args)
    def SetTextBoxAttr(*args, **kwargs):
        """SetTextBoxAttr(self, TextBoxAttr attr)"""
        return _richtext.RichTextAttr_SetTextBoxAttr(*args, **kwargs)
    m_textBoxAttr = property(_richtext.RichTextAttr_m_textBoxAttr_get, _richtext.RichTextAttr_m_textBoxAttr_set)
_richtext.RichTextAttr_swigregister(RichTextAttr)
# SWIG-generated proxy.  A table used by the rich text buffer to look up
# (and presumably cache -- confirm against wxRichTextFontTable docs) Font
# objects matching a given attribute specification.
class RichTextFontTable(_core.Object):
    """Proxy of C++ RichTextFontTable class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """__init__(self) -> RichTextFontTable"""
        _richtext.RichTextFontTable_swiginit(self,_richtext.new_RichTextFontTable(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextFontTable
    __del__ = lambda self : None;
    def IsOk(*args, **kwargs):
        """IsOk(self) -> bool"""
        return _richtext.RichTextFontTable_IsOk(*args, **kwargs)
    def FindFont(*args, **kwargs):
        """FindFont(self, RichTextAttr fontSpec) -> Font"""
        return _richtext.RichTextFontTable_FindFont(*args, **kwargs)
    def Clear(*args, **kwargs):
        """Clear(self)"""
        return _richtext.RichTextFontTable_Clear(*args, **kwargs)
_richtext.RichTextFontTable_swigregister(RichTextFontTable)
class RichTextRange(object):
    """
    RichTextRange is a data structure that represents a range of text
    within a `RichTextCtrl`.  It simply contains integer ``start`` and
    ``end`` properties and a few operations useful for dealing with
    ranges.  In most places in wxPython where a RichTextRange is expected a
    2-tuple containing (start, end) can be used instead.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """
        __init__(self, long start=0, long end=0) -> RichTextRange

        Creates a new range object.
        """
        _richtext.RichTextRange_swiginit(self,_richtext.new_RichTextRange(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextRange
    __del__ = lambda self : None;
    def __eq__(*args, **kwargs):
        """
        __eq__(self, PyObject other) -> bool

        Test for equality of RichTextRange objects.
        """
        return _richtext.RichTextRange___eq__(*args, **kwargs)
    def __sub__(*args, **kwargs):
        """__sub__(self, RichTextRange range) -> RichTextRange"""
        return _richtext.RichTextRange___sub__(*args, **kwargs)
    def __add__(*args, **kwargs):
        """__add__(self, RichTextRange range) -> RichTextRange"""
        return _richtext.RichTextRange___add__(*args, **kwargs)
    def SetRange(*args, **kwargs):
        """SetRange(self, long start, long end)"""
        return _richtext.RichTextRange_SetRange(*args, **kwargs)
    def SetStart(*args, **kwargs):
        """SetStart(self, long start)"""
        return _richtext.RichTextRange_SetStart(*args, **kwargs)
    def GetStart(*args, **kwargs):
        """GetStart(self) -> long"""
        return _richtext.RichTextRange_GetStart(*args, **kwargs)
    start = property(GetStart, SetStart)
    def SetEnd(*args, **kwargs):
        """SetEnd(self, long end)"""
        return _richtext.RichTextRange_SetEnd(*args, **kwargs)
    def GetEnd(*args, **kwargs):
        """GetEnd(self) -> long"""
        return _richtext.RichTextRange_GetEnd(*args, **kwargs)
    end = property(GetEnd, SetEnd)
    def IsOutside(*args, **kwargs):
        """
        IsOutside(self, RichTextRange range) -> bool

        Returns true if this range is completely outside 'range'
        """
        return _richtext.RichTextRange_IsOutside(*args, **kwargs)
    def IsWithin(*args, **kwargs):
        """
        IsWithin(self, RichTextRange range) -> bool

        Returns true if this range is completely within 'range'
        """
        return _richtext.RichTextRange_IsWithin(*args, **kwargs)
    def Contains(*args, **kwargs):
        """
        Contains(self, long pos) -> bool

        Returns true if the given position is within this range. Allow for the
        possibility of an empty range - assume the position is within this
        empty range.
        """
        return _richtext.RichTextRange_Contains(*args, **kwargs)
    def LimitTo(*args, **kwargs):
        """
        LimitTo(self, RichTextRange range) -> bool

        Limit this range to be within 'range'
        """
        return _richtext.RichTextRange_LimitTo(*args, **kwargs)
    def GetLength(*args, **kwargs):
        """
        GetLength(self) -> long

        Gets the length of the range
        """
        return _richtext.RichTextRange_GetLength(*args, **kwargs)
    def Swap(*args, **kwargs):
        """
        Swap(self)

        Swaps the start and end
        """
        return _richtext.RichTextRange_Swap(*args, **kwargs)
    def ToInternal(*args, **kwargs):
        """
        ToInternal(self) -> RichTextRange

        Convert to internal form: (n, n) is the range of a single character.
        """
        return _richtext.RichTextRange_ToInternal(*args, **kwargs)
    def FromInternal(*args, **kwargs):
        """
        FromInternal(self) -> RichTextRange

        Convert from internal to public API form: (n, n+1) is the range of a
        single character.
        """
        return _richtext.RichTextRange_FromInternal(*args, **kwargs)
    def Get(*args, **kwargs):
        """
        Get() -> (start,end)

        Returns the start and end properties as a tuple.
        """
        return _richtext.RichTextRange_Get(*args, **kwargs)
    # Hand-written helpers making RichTextRange behave like a (start, end)
    # 2-tuple: indexing, unpacking, str()/repr() and pickling all go through
    # Get(), which returns the pair as a Python tuple.
    def __str__(self):                   return str(self.Get())
    def __repr__(self):                  return 'RichTextRange'+str(self.Get())
    def __len__(self):                   return len(self.Get())
    def __getitem__(self, index):        return self.Get()[index]
    def __setitem__(self, index, val):
        if index == 0: self.start = val
        elif index == 1: self.end = val
        else: raise IndexError
    # A range is falsy only when it is exactly (0, 0).
    def __nonzero__(self):               return self.Get() != (0,0)
    __safe_for_unpickling__ = True
    def __reduce__(self):                return (RichTextRange, self.Get())
    End = property(GetEnd,SetEnd,doc="See `GetEnd` and `SetEnd`") 
    Length = property(GetLength,doc="See `GetLength`") 
    Start = property(GetStart,SetStart,doc="See `GetStart` and `SetStart`") 
_richtext.RichTextRange_swigregister(RichTextRange)
# SWIG-generated proxy.  Carries per-draw state for a rich text buffer and
# resolves "virtual" attributes -- attributes supplied at draw time for an
# object rather than stored on it.
class RichTextDrawingContext(_core.Object):
    """Proxy of C++ RichTextDrawingContext class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """__init__(self, RichTextBuffer buffer) -> RichTextDrawingContext"""
        _richtext.RichTextDrawingContext_swiginit(self,_richtext.new_RichTextDrawingContext(*args, **kwargs))
    def Init(*args, **kwargs):
        """Init(self)"""
        return _richtext.RichTextDrawingContext_Init(*args, **kwargs)
    def HasVirtualAttributes(*args, **kwargs):
        """HasVirtualAttributes(self, RichTextObject obj) -> bool"""
        return _richtext.RichTextDrawingContext_HasVirtualAttributes(*args, **kwargs)
    def GetVirtualAttributes(*args, **kwargs):
        """GetVirtualAttributes(self, RichTextObject obj) -> RichTextAttr"""
        return _richtext.RichTextDrawingContext_GetVirtualAttributes(*args, **kwargs)
    def ApplyVirtualAttributes(*args, **kwargs):
        """ApplyVirtualAttributes(self, RichTextAttr attr, RichTextObject obj) -> bool"""
        return _richtext.RichTextDrawingContext_ApplyVirtualAttributes(*args, **kwargs)
    m_buffer = property(_richtext.RichTextDrawingContext_m_buffer_get, _richtext.RichTextDrawingContext_m_buffer_set)
_richtext.RichTextDrawingContext_swigregister(RichTextDrawingContext)
# Module-level constants exported from the C++ side via SWIG's global
# variable table (cvar).
cvar = _richtext.cvar
RICHTEXT_ALL = cvar.RICHTEXT_ALL
RICHTEXT_NONE = cvar.RICHTEXT_NONE
class RichTextObject(_core.Object):
    """
    This is the base class for all drawable objects in a `RichTextCtrl`.

    The data displayed in a `RichTextCtrl` is handled by `RichTextBuffer`,
    and a `RichTextCtrl` always has one such buffer.

    The content is represented by a hierarchy of objects, all derived from
    `RichTextObject`. An object might be an image, a fragment of text, a
    paragraph, or a whole buffer. Objects store an attribute object
    containing style information; a paragraph object can contain both
    paragraph and character information, but content objects such as text
    can only store character information. The final style displayed in the
    control or in a printout is a combination of base style, paragraph
    style and content (character) style.

    The top of the hierarchy is the buffer, a kind of
    `RichTextParagraphLayoutBox`, containing further `RichTextParagraph`
    objects, each of which can include text, images and potentially other
    types of objects.

    Each object maintains a range (start and end position) measured from
    the start of the main parent object.

    When Layout is called on an object, it is given a size which the
    object must limit itself to, or one or more flexible directions
    (vertical or horizontal). So, for example, a centred paragraph is
    given the page width to play with (minus any margins), but can extend
    indefinitely in the vertical direction. The implementation of Layout
    caches the calculated size and position.

    When the buffer is modified, a range is invalidated (marked as
    requiring layout), so that only the minimum amount of layout is
    performed.

    A paragraph of pure text with the same style contains just one further
    object, a `RichTextPlainText` object. When styling is applied to part
    of this object, the object is decomposed into separate objects, one
    object for each different character style. So each object within a
    paragraph always has just one attribute object to denote its character
    style. Of course, this can lead to fragmentation after a lot of edit
    operations, potentially leading to several objects with the same style
    where just one would do. So a Defragment function is called when
    updating the control's display, to ensure that the minimum number of
    objects is used.

    To implement your own RichTextObjects in Python you must derive a
    class from `PyRichTextObject`, which has been instrumented to forward
    the virtual C++ method calls to the Python methods in the derived
    class. (This class hasn't been implemented yet!)
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Abstract base: instances are only ever created by the C++ side.
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextObject
    __del__ = lambda self : None;
    def Draw(*args, **kwargs):
        """
        Draw(self, DC dc, RichTextDrawingContext context, RichTextRange range, 
            wxRichTextSelection selection, Rect rect, 
            int descent, int style) -> bool
        """
        return _richtext.RichTextObject_Draw(*args, **kwargs)
    def Layout(*args, **kwargs):
        """
        Layout(self, DC dc, RichTextDrawingContext context, Rect rect, Rect parentRect, 
            int style) -> bool
        """
        return _richtext.RichTextObject_Layout(*args, **kwargs)
    def HitTest(*args, **kwargs):
        """
        HitTest(self, DC dc, RichTextDrawingContext context, Point pt, long OUTPUT, 
            RichTextObject obj, RichTextObject contextObj, 
            int flags=0) -> int
        """
        return _richtext.RichTextObject_HitTest(*args, **kwargs)
    def FindPosition(*args, **kwargs):
        """
        FindPosition(self, DC dc, RichTextDrawingContext context, long index, 
            Point OUTPUT, int OUTPUT, bool forceLineStart) -> bool
        """
        return _richtext.RichTextObject_FindPosition(*args, **kwargs)
    def GetBestSize(*args, **kwargs):
        """GetBestSize(self) -> Size"""
        return _richtext.RichTextObject_GetBestSize(*args, **kwargs)
    def GetRangeSize(*args, **kwargs):
        """
        GetRangeSize(self, RichTextRange range, Size OUTPUT, int OUTPUT, DC dc, 
            RichTextDrawingContext context, int flags, 
            Point position=wxPoint(0,0)) -> bool
        """
        return _richtext.RichTextObject_GetRangeSize(*args, **kwargs)
    def DoSplit(*args, **kwargs):
        """DoSplit(self, long pos) -> RichTextObject"""
        return _richtext.RichTextObject_DoSplit(*args, **kwargs)
    def CalculateRange(*args, **kwargs):
        """CalculateRange(self, long start, long OUTPUT)"""
        return _richtext.RichTextObject_CalculateRange(*args, **kwargs)
    def DeleteRange(*args, **kwargs):
        """DeleteRange(self, RichTextRange range) -> bool"""
        return _richtext.RichTextObject_DeleteRange(*args, **kwargs)
    def IsEmpty(*args, **kwargs):
        """IsEmpty(self) -> bool"""
        return _richtext.RichTextObject_IsEmpty(*args, **kwargs)
    def IsFloatable(*args, **kwargs):
        """IsFloatable(self) -> bool"""
        return _richtext.RichTextObject_IsFloatable(*args, **kwargs)
    def IsFloating(*args, **kwargs):
        """IsFloating(self) -> bool"""
        return _richtext.RichTextObject_IsFloating(*args, **kwargs)
    def GetFloatDirection(*args, **kwargs):
        """GetFloatDirection(self) -> int"""
        return _richtext.RichTextObject_GetFloatDirection(*args, **kwargs)
    def GetTextForRange(*args, **kwargs):
        """GetTextForRange(self, RichTextRange range) -> String"""
        return _richtext.RichTextObject_GetTextForRange(*args, **kwargs)
    def CanMerge(*args, **kwargs):
        """CanMerge(self, RichTextObject object, RichTextDrawingContext context) -> bool"""
        return _richtext.RichTextObject_CanMerge(*args, **kwargs)
    # Hand-written wrapper (not the plain SWIG delegate): on a successful
    # merge the C++ side has absorbed `obj`, so ownership of the now-empty
    # proxy is handed back to Python to avoid a double-delete.
    def Merge(self, obj, context):
        """Merge(self, RichTextObject object, RichTextDrawingContext context) -> bool"""
        val = _richtext.RichTextObject_Merge(self, obj, context)
        if val:
            obj.this.own(True)
        return val

    def Dump(*args, **kwargs):
        """Dump(self) -> String"""
        return _richtext.RichTextObject_Dump(*args, **kwargs)
    def CanEditProperties(*args, **kwargs):
        """CanEditProperties(self) -> bool"""
        return _richtext.RichTextObject_CanEditProperties(*args, **kwargs)
    def EditProperties(*args, **kwargs):
        """EditProperties(self, Window parent, RichTextBuffer buffer) -> bool"""
        return _richtext.RichTextObject_EditProperties(*args, **kwargs)
    def ImportFromXML(*args, **kwargs):
        """
        ImportFromXML(self, RichTextBuffer buffer, wxXmlNode node, RichTextXMLHandler handler, 
            bool recurse) -> bool
        """
        return _richtext.RichTextObject_ImportFromXML(*args, **kwargs)
    def ExportXML(*args):
        """
        ExportXML(self, wxOutputStream stream, int indent, RichTextXMLHandler handler) -> bool
        ExportXML(self, wxXmlNode parent, RichTextXMLHandler handler) -> bool
        """
        return _richtext.RichTextObject_ExportXML(*args)
    def UsesParagraphAttributes(*args, **kwargs):
        """UsesParagraphAttributes(self) -> bool"""
        return _richtext.RichTextObject_UsesParagraphAttributes(*args, **kwargs)
    def GetXMLNodeName(*args, **kwargs):
        """GetXMLNodeName(self) -> String"""
        return _richtext.RichTextObject_GetXMLNodeName(*args, **kwargs)
    def GetCachedSize(*args, **kwargs):
        """GetCachedSize(self) -> Size"""
        return _richtext.RichTextObject_GetCachedSize(*args, **kwargs)
    def SetCachedSize(*args, **kwargs):
        """SetCachedSize(self, Size sz)"""
        return _richtext.RichTextObject_SetCachedSize(*args, **kwargs)
    CachedSize = property(GetCachedSize,SetCachedSize) 
    def GetPosition(*args, **kwargs):
        """GetPosition(self) -> Point"""
        return _richtext.RichTextObject_GetPosition(*args, **kwargs)
    def SetPosition(*args, **kwargs):
        """SetPosition(self, Point pos)"""
        return _richtext.RichTextObject_SetPosition(*args, **kwargs)
    Position = property(GetPosition,SetPosition) 
    def GetRect(*args, **kwargs):
        """GetRect(self) -> Rect"""
        return _richtext.RichTextObject_GetRect(*args, **kwargs)
    Rect = property(GetRect) 
    def SetRange(*args, **kwargs):
        """SetRange(self, RichTextRange range)"""
        return _richtext.RichTextObject_SetRange(*args, **kwargs)
    def GetRange(*args, **kwargs):
        """GetRange(self) -> RichTextRange"""
        return _richtext.RichTextObject_GetRange(*args, **kwargs)
    Range = property(GetRange,SetRange) 
    def IsComposite(*args, **kwargs):
        """IsComposite(self) -> bool"""
        return _richtext.RichTextObject_IsComposite(*args, **kwargs)
    def GetParent(*args, **kwargs):
        """GetParent(self) -> RichTextObject"""
        return _richtext.RichTextObject_GetParent(*args, **kwargs)
    def SetParent(*args, **kwargs):
        """SetParent(self, RichTextObject parent)"""
        return _richtext.RichTextObject_SetParent(*args, **kwargs)
    Parent = property(GetParent,SetParent) 
    def SetSameMargins(*args, **kwargs):
        """SetSameMargins(self, int margin)"""
        return _richtext.RichTextObject_SetSameMargins(*args, **kwargs)
    def SetMargins(*args, **kwargs):
        """SetMargins(self, int leftMargin, int rightMargin, int topMargin, int bottomMargin)"""
        return _richtext.RichTextObject_SetMargins(*args, **kwargs)
    def GetLeftMargin(*args, **kwargs):
        """GetLeftMargin(self) -> int"""
        return _richtext.RichTextObject_GetLeftMargin(*args, **kwargs)
    def GetRightMargin(*args, **kwargs):
        """GetRightMargin(self) -> int"""
        return _richtext.RichTextObject_GetRightMargin(*args, **kwargs)
    def GetTopMargin(*args, **kwargs):
        """GetTopMargin(self) -> int"""
        return _richtext.RichTextObject_GetTopMargin(*args, **kwargs)
    def GetBottomMargin(*args, **kwargs):
        """GetBottomMargin(self) -> int"""
        return _richtext.RichTextObject_GetBottomMargin(*args, **kwargs)
    def SetAttributes(*args, **kwargs):
        """SetAttributes(self, RichTextAttr attr)"""
        return _richtext.RichTextObject_SetAttributes(*args, **kwargs)
    def GetAttributes(*args, **kwargs):
        """GetAttributes(self) -> RichTextAttr"""
        return _richtext.RichTextObject_GetAttributes(*args, **kwargs)
    Attributes = property(GetAttributes,SetAttributes) 
    def SetDescent(*args, **kwargs):
        """SetDescent(self, int descent)"""
        return _richtext.RichTextObject_SetDescent(*args, **kwargs)
    def GetDescent(*args, **kwargs):
        """GetDescent(self) -> int"""
        return _richtext.RichTextObject_GetDescent(*args, **kwargs)
    Descent = property(GetDescent,SetDescent) 
    def GetBuffer(*args, **kwargs):
        """GetBuffer(self) -> RichTextBuffer"""
        return _richtext.RichTextObject_GetBuffer(*args, **kwargs)
    def Clone(*args, **kwargs):
        """Clone(self) -> RichTextObject"""
        return _richtext.RichTextObject_Clone(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextObject obj)"""
        return _richtext.RichTextObject_Copy(*args, **kwargs)
    def Reference(*args, **kwargs):
        """Reference(self)"""
        return _richtext.RichTextObject_Reference(*args, **kwargs)
    def Dereference(*args, **kwargs):
        """Dereference(self)"""
        return _richtext.RichTextObject_Dereference(*args, **kwargs)
    def ConvertTenthsMMToPixelsDC(*args, **kwargs):
        """ConvertTenthsMMToPixelsDC(self, DC dc, int units) -> int"""
        return _richtext.RichTextObject_ConvertTenthsMMToPixelsDC(*args, **kwargs)
    # The following are SWIG's rendering of C++ static member functions:
    # defined as plain functions, then rebound as staticmethods below.
    def ConvertTenthsMMToPixels(*args, **kwargs):
        """ConvertTenthsMMToPixels(int ppi, int units, double scale=1.0) -> int"""
        return _richtext.RichTextObject_ConvertTenthsMMToPixels(*args, **kwargs)
    ConvertTenthsMMToPixels = staticmethod(ConvertTenthsMMToPixels)
    def ConvertPixelsToTenthsMM(*args):
        """
        ConvertPixelsToTenthsMM(DC dc, int pixels) -> int
        ConvertPixelsToTenthsMM(int ppi, int pixels, double scale=1.0) -> int
        """
        return _richtext.RichTextObject_ConvertPixelsToTenthsMM(*args)
    ConvertPixelsToTenthsMM = staticmethod(ConvertPixelsToTenthsMM)
    def DrawBoxAttributes(*args, **kwargs):
        """
        DrawBoxAttributes(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect boxRect, 
            int flags=0) -> bool
        """
        return _richtext.RichTextObject_DrawBoxAttributes(*args, **kwargs)
    DrawBoxAttributes = staticmethod(DrawBoxAttributes)
    def DrawBorder(*args, **kwargs):
        """
        DrawBorder(DC dc, RichTextBuffer buffer, TextAttrBorders attr, 
            Rect rect, int flags=0) -> bool
        """
        return _richtext.RichTextObject_DrawBorder(*args, **kwargs)
    DrawBorder = staticmethod(DrawBorder)
def GetBoxRects(*args, **kwargs):
"""
GetBoxRects(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect marginRect,
Rect borderRect, Rect contentRect,
Rect paddingRect, Rect outlineRect) -> bool
"""
return _richtext.RichTextObject_GetBoxRects(*args, **kwargs)
GetBoxRects = staticmethod(GetBoxRects)
def GetTotalMargin(*args, **kwargs):
"""
GetTotalMargin(DC dc, RichTextBuffer buffer, RichTextAttr attr, int leftMargin,
int rightMargin, int topMargin,
int bottomMargin) -> bool
"""
return _richtext.RichTextObject_GetTotalMargin(*args, **kwargs)
GetTotalMargin = staticmethod(GetTotalMargin)
def AdjustAvailableSpace(*args, **kwargs):
"""
AdjustAvailableSpace(DC dc, RichTextBuffer buffer, RichTextAttr parentAttr,
RichTextAttr childAttr, Rect availableParentSpace,
Rect availableContainerSpace) -> Rect
"""
return _richtext.RichTextObject_AdjustAvailableSpace(*args, **kwargs)
AdjustAvailableSpace = staticmethod(AdjustAvailableSpace)
# Register the Python proxy with SWIG's runtime, then expose module-level
# aliases for RichTextObject's static members (SWIG emits both spellings:
# RichTextObject.X(...) and RichTextObject_X(...)).
_richtext.RichTextObject_swigregister(RichTextObject)
def RichTextObject_ConvertTenthsMMToPixels(*args, **kwargs):
  """RichTextObject_ConvertTenthsMMToPixels(int ppi, int units, double scale=1.0) -> int"""
  return _richtext.RichTextObject_ConvertTenthsMMToPixels(*args, **kwargs)
def RichTextObject_ConvertPixelsToTenthsMM(*args):
  """
  ConvertPixelsToTenthsMM(DC dc, int pixels) -> int
  RichTextObject_ConvertPixelsToTenthsMM(int ppi, int pixels, double scale=1.0) -> int
  """
  return _richtext.RichTextObject_ConvertPixelsToTenthsMM(*args)
def RichTextObject_DrawBoxAttributes(*args, **kwargs):
  """
  RichTextObject_DrawBoxAttributes(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect boxRect,
      int flags=0) -> bool
  """
  return _richtext.RichTextObject_DrawBoxAttributes(*args, **kwargs)
def RichTextObject_DrawBorder(*args, **kwargs):
  """
  RichTextObject_DrawBorder(DC dc, RichTextBuffer buffer, TextAttrBorders attr,
      Rect rect, int flags=0) -> bool
  """
  return _richtext.RichTextObject_DrawBorder(*args, **kwargs)
def RichTextObject_GetBoxRects(*args, **kwargs):
  """
  RichTextObject_GetBoxRects(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect marginRect,
      Rect borderRect, Rect contentRect,
      Rect paddingRect, Rect outlineRect) -> bool
  """
  return _richtext.RichTextObject_GetBoxRects(*args, **kwargs)
def RichTextObject_GetTotalMargin(*args, **kwargs):
  """
  RichTextObject_GetTotalMargin(DC dc, RichTextBuffer buffer, RichTextAttr attr, int leftMargin,
      int rightMargin, int topMargin,
      int bottomMargin) -> bool
  """
  return _richtext.RichTextObject_GetTotalMargin(*args, **kwargs)
def RichTextObject_AdjustAvailableSpace(*args, **kwargs):
  """
  RichTextObject_AdjustAvailableSpace(DC dc, RichTextBuffer buffer, RichTextAttr parentAttr,
      RichTextAttr childAttr, Rect availableParentSpace,
      Rect availableContainerSpace) -> Rect
  """
  return _richtext.RichTextObject_AdjustAvailableSpace(*args, **kwargs)
# SWIG proxy over the C++ list iterator. The Python constructor is disabled
# (it raises); instances are produced by RichTextObjectList.__iter__.
class RichTextObjectList_iterator(object):
    """This class serves as an iterator for a wxRichTextObjectList object."""
    # thisown forwards to this.own(): whether Python owns (and will delete)
    # the underlying C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextObjectList_iterator
    __del__ = lambda self : None;
    def next(*args, **kwargs):
        """next(self) -> RichTextObject"""
        return _richtext.RichTextObjectList_iterator_next(*args, **kwargs)
_richtext.RichTextObjectList_iterator_swigregister(RichTextObjectList_iterator)
class RichTextObjectList(object):
    """
    This class wraps a wxList-based class and gives it a Python
    sequence-like interface. Sequence operations supported are length,
    index access and iteration.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextObjectList
    __del__ = lambda self : None;
    def __len__(*args, **kwargs):
        """__len__(self) -> size_t"""
        return _richtext.RichTextObjectList___len__(*args, **kwargs)
    def __getitem__(*args, **kwargs):
        """__getitem__(self, size_t index) -> RichTextObject"""
        return _richtext.RichTextObjectList___getitem__(*args, **kwargs)
    def __contains__(*args, **kwargs):
        """__contains__(self, RichTextObject obj) -> bool"""
        return _richtext.RichTextObjectList___contains__(*args, **kwargs)
    def __iter__(*args, **kwargs):
        """__iter__(self) -> RichTextObjectList_iterator"""
        return _richtext.RichTextObjectList___iter__(*args, **kwargs)
    def index(*args, **kwargs):
        """index(self, RichTextObject obj) -> int"""
        return _richtext.RichTextObjectList_index(*args, **kwargs)
    # Deliberately shadows the generic _swig_repr assigned above with a
    # friendlier representation that materializes the sequence contents.
    def __repr__(self):
        return "wxRichTextObjectList: " + repr(list(self))
_richtext.RichTextObjectList_swigregister(RichTextObjectList)
class RichTextCompositeObject(RichTextObject):
    """Objects of this class can contain other rich text objects."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextCompositeObject
    __del__ = lambda self : None;
    def GetChildren(*args, **kwargs):
        """GetChildren(self) -> RichTextObjectList"""
        return _richtext.RichTextCompositeObject_GetChildren(*args, **kwargs)
    def GetChildCount(*args, **kwargs):
        """GetChildCount(self) -> size_t"""
        return _richtext.RichTextCompositeObject_GetChildCount(*args, **kwargs)
    def GetChild(*args, **kwargs):
        """GetChild(self, size_t n) -> RichTextObject"""
        return _richtext.RichTextCompositeObject_GetChild(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextCompositeObject obj)"""
        return _richtext.RichTextCompositeObject_Copy(*args, **kwargs)
    def AppendChild(*args, **kwargs):
        """AppendChild(self, RichTextObject child) -> size_t"""
        return _richtext.RichTextCompositeObject_AppendChild(*args, **kwargs)
    def InsertChild(*args, **kwargs):
        """InsertChild(self, RichTextObject child, RichTextObject inFrontOf) -> bool"""
        return _richtext.RichTextCompositeObject_InsertChild(*args, **kwargs)
    def RemoveChild(self, child, deleteChild=False):
        # Forward to the C++ RemoveChild, then adjust SWIG ownership.
        # NOTE(review): ownership is toggled on *self*, not on *child*; one
        # would expect the removed child's thisown flag to change when the
        # child is not deleted — confirm against the upstream SWIG typemap.
        val = _richtext.RichTextCompositeObject_RemoveChild(self, child, deleteChild)
        self.this.own(not deleteChild)
        return val
    def DeleteChildren(*args, **kwargs):
        """DeleteChildren(self) -> bool"""
        return _richtext.RichTextCompositeObject_DeleteChildren(*args, **kwargs)
    def Defragment(*args, **kwargs):
        """Defragment(self, RichTextDrawingContext context, RichTextRange range=wxRICHTEXT_ALL) -> bool"""
        return _richtext.RichTextCompositeObject_Defragment(*args, **kwargs)
_richtext.RichTextCompositeObject_swigregister(RichTextCompositeObject)
# SWIG proxy for wxRichTextParagraphLayoutBox: a composite object that lays
# out paragraphs. Every method forwards to the _richtext C extension; the
# docstrings record the wrapped C++ signatures.
class RichTextParagraphLayoutBox(RichTextCompositeObject):
    """This box knows how to lay out paragraphs."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, RichTextObject parent=None) -> RichTextParagraphLayoutBox
        __init__(self, RichTextParagraphLayoutBox obj) -> RichTextParagraphLayoutBox

        This box knows how to lay out paragraphs.
        """
        _richtext.RichTextParagraphLayoutBox_swiginit(self,_richtext.new_RichTextParagraphLayoutBox(*args))
    __swig_destroy__ = _richtext.delete_RichTextParagraphLayoutBox
    __del__ = lambda self : None;
    def SetRichTextCtrl(*args, **kwargs):
        """SetRichTextCtrl(self, RichTextCtrl ctrl)"""
        return _richtext.RichTextParagraphLayoutBox_SetRichTextCtrl(*args, **kwargs)
    def GetRichTextCtrl(*args, **kwargs):
        """GetRichTextCtrl(self) -> RichTextCtrl"""
        return _richtext.RichTextParagraphLayoutBox_GetRichTextCtrl(*args, **kwargs)
    def SetPartialParagraph(*args, **kwargs):
        """SetPartialParagraph(self, bool partialPara)"""
        return _richtext.RichTextParagraphLayoutBox_SetPartialParagraph(*args, **kwargs)
    def GetPartialParagraph(*args, **kwargs):
        """GetPartialParagraph(self) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_GetPartialParagraph(*args, **kwargs)
    def GetStyleSheet(*args, **kwargs):
        """GetStyleSheet(self) -> wxRichTextStyleSheet"""
        return _richtext.RichTextParagraphLayoutBox_GetStyleSheet(*args, **kwargs)
    def DrawFloats(*args, **kwargs):
        """
        DrawFloats(self, DC dc, RichTextDrawingContext context, RichTextRange range, 
            wxRichTextSelection selection, Rect rect, 
            int descent, int style)
        """
        return _richtext.RichTextParagraphLayoutBox_DrawFloats(*args, **kwargs)
    def MoveAnchoredObjectToParagraph(*args, **kwargs):
        """MoveAnchoredObjectToParagraph(self, RichTextParagraph from, RichTextParagraph to, RichTextObject obj)"""
        return _richtext.RichTextParagraphLayoutBox_MoveAnchoredObjectToParagraph(*args, **kwargs)
    def Init(*args, **kwargs):
        """Init(self)"""
        return _richtext.RichTextParagraphLayoutBox_Init(*args, **kwargs)
    def Clear(*args, **kwargs):
        """Clear(self)"""
        return _richtext.RichTextParagraphLayoutBox_Clear(*args, **kwargs)
    def Reset(*args, **kwargs):
        """Reset(self)"""
        return _richtext.RichTextParagraphLayoutBox_Reset(*args, **kwargs)
    def AddParagraph(*args, **kwargs):
        """AddParagraph(self, String text, RichTextAttr paraStyle=None) -> RichTextRange"""
        return _richtext.RichTextParagraphLayoutBox_AddParagraph(*args, **kwargs)
    def AddImage(*args, **kwargs):
        """AddImage(self, Image image, RichTextAttr paraStyle=None) -> RichTextRange"""
        return _richtext.RichTextParagraphLayoutBox_AddImage(*args, **kwargs)
    def AddParagraphs(*args, **kwargs):
        """AddParagraphs(self, String text, RichTextAttr paraStyle=None) -> RichTextRange"""
        return _richtext.RichTextParagraphLayoutBox_AddParagraphs(*args, **kwargs)
    def GetLineAtPosition(*args, **kwargs):
        """GetLineAtPosition(self, long pos, bool caretPosition=False) -> RichTextLine"""
        return _richtext.RichTextParagraphLayoutBox_GetLineAtPosition(*args, **kwargs)
    def GetLineAtYPosition(*args, **kwargs):
        """GetLineAtYPosition(self, int y) -> RichTextLine"""
        return _richtext.RichTextParagraphLayoutBox_GetLineAtYPosition(*args, **kwargs)
    def GetParagraphAtPosition(*args, **kwargs):
        """GetParagraphAtPosition(self, long pos, bool caretPosition=False) -> RichTextParagraph"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphAtPosition(*args, **kwargs)
    def GetLineSizeAtPosition(*args, **kwargs):
        """GetLineSizeAtPosition(self, long pos, bool caretPosition=False) -> Size"""
        return _richtext.RichTextParagraphLayoutBox_GetLineSizeAtPosition(*args, **kwargs)
    def GetVisibleLineNumber(*args, **kwargs):
        """GetVisibleLineNumber(self, long pos, bool caretPosition=False, bool startOfLine=False) -> long"""
        return _richtext.RichTextParagraphLayoutBox_GetVisibleLineNumber(*args, **kwargs)
    def GetLineForVisibleLineNumber(*args, **kwargs):
        """GetLineForVisibleLineNumber(self, long lineNumber) -> RichTextLine"""
        return _richtext.RichTextParagraphLayoutBox_GetLineForVisibleLineNumber(*args, **kwargs)
    def GetLeafObjectAtPosition(*args, **kwargs):
        """GetLeafObjectAtPosition(self, long position) -> RichTextObject"""
        return _richtext.RichTextParagraphLayoutBox_GetLeafObjectAtPosition(*args, **kwargs)
    def GetParagraphAtLine(*args, **kwargs):
        """GetParagraphAtLine(self, long paragraphNumber) -> RichTextParagraph"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphAtLine(*args, **kwargs)
    def GetParagraphForLine(*args, **kwargs):
        """GetParagraphForLine(self, RichTextLine line) -> RichTextParagraph"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphForLine(*args, **kwargs)
    def GetParagraphLength(*args, **kwargs):
        """GetParagraphLength(self, long paragraphNumber) -> int"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphLength(*args, **kwargs)
    def GetParagraphCount(*args, **kwargs):
        """GetParagraphCount(self) -> int"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphCount(*args, **kwargs)
    def GetLineCount(*args, **kwargs):
        """GetLineCount(self) -> int"""
        return _richtext.RichTextParagraphLayoutBox_GetLineCount(*args, **kwargs)
    def GetParagraphText(*args, **kwargs):
        """GetParagraphText(self, long paragraphNumber) -> String"""
        return _richtext.RichTextParagraphLayoutBox_GetParagraphText(*args, **kwargs)
    def XYToPosition(*args, **kwargs):
        """XYToPosition(self, long x, long y) -> long"""
        return _richtext.RichTextParagraphLayoutBox_XYToPosition(*args, **kwargs)
    def PositionToXY(*args, **kwargs):
        """PositionToXY(self, long pos, long x, long y) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_PositionToXY(*args, **kwargs)
    def SetStyle(*args, **kwargs):
        """SetStyle(self, RichTextRange range, RichTextAttr style, int flags=RICHTEXT_SETSTYLE_WITH_UNDO) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_SetStyle(*args, **kwargs)
    def GetStyle(*args, **kwargs):
        """GetStyle(self, long position, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_GetStyle(*args, **kwargs)
    def GetUncombinedStyle(*args, **kwargs):
        """GetUncombinedStyle(self, long position, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_GetUncombinedStyle(*args, **kwargs)
    def DoGetStyle(*args, **kwargs):
        """DoGetStyle(self, long position, RichTextAttr style, bool combineStyles=True) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_DoGetStyle(*args, **kwargs)
    def GetStyleForRange(*args, **kwargs):
        """GetStyleForRange(self, RichTextRange range, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_GetStyleForRange(*args, **kwargs)
    def CollectStyle(*args, **kwargs):
        """
        CollectStyle(self, RichTextAttr currentStyle, RichTextAttr style, RichTextAttr clashingAttr, 
            RichTextAttr absentAttr) -> bool
        """
        return _richtext.RichTextParagraphLayoutBox_CollectStyle(*args, **kwargs)
    # The list-style methods below take *args only (no **kwargs) because the
    # underlying C++ functions are overloaded; SWIG dispatches on the
    # positional argument types.
    def SetListStyle(*args):
        """
        SetListStyle(self, RichTextRange range, wxRichTextListStyleDefinition def, 
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, int startFrom=1, 
            int specifiedLevel=-1) -> bool
        SetListStyle(self, RichTextRange range, String defName, int flags=RICHTEXT_SETSTYLE_WITH_UNDO, 
            int startFrom=1, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextParagraphLayoutBox_SetListStyle(*args)
    def ClearListStyle(*args, **kwargs):
        """ClearListStyle(self, RichTextRange range, int flags=RICHTEXT_SETSTYLE_WITH_UNDO) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_ClearListStyle(*args, **kwargs)
    def NumberList(*args):
        """
        NumberList(self, RichTextRange range, wxRichTextListStyleDefinition def=None, 
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, 
            int startFrom=1, int specifiedLevel=-1) -> bool
        NumberList(self, RichTextRange range, String defName, int flags=RICHTEXT_SETSTYLE_WITH_UNDO, 
            int startFrom=1, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextParagraphLayoutBox_NumberList(*args)
    def PromoteList(*args):
        """
        PromoteList(self, int promoteBy, RichTextRange range, wxRichTextListStyleDefinition def=None, 
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, 
            int specifiedLevel=-1) -> bool
        PromoteList(self, int promoteBy, RichTextRange range, String defName, 
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextParagraphLayoutBox_PromoteList(*args)
    def DoNumberList(*args, **kwargs):
        """
        DoNumberList(self, RichTextRange range, RichTextRange promotionRange, 
            int promoteBy, wxRichTextListStyleDefinition def, 
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, int startFrom=1, 
            int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextParagraphLayoutBox_DoNumberList(*args, **kwargs)
    def FindNextParagraphNumber(*args, **kwargs):
        """FindNextParagraphNumber(self, RichTextParagraph previousParagraph, RichTextAttr attr) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_FindNextParagraphNumber(*args, **kwargs)
    def HasCharacterAttributes(*args, **kwargs):
        """HasCharacterAttributes(self, RichTextRange range, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_HasCharacterAttributes(*args, **kwargs)
    def HasParagraphAttributes(*args, **kwargs):
        """HasParagraphAttributes(self, RichTextRange range, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_HasParagraphAttributes(*args, **kwargs)
    def InsertFragment(*args, **kwargs):
        """InsertFragment(self, long position, RichTextParagraphLayoutBox fragment) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_InsertFragment(*args, **kwargs)
    def CopyFragment(*args, **kwargs):
        """CopyFragment(self, RichTextRange range, RichTextParagraphLayoutBox fragment) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_CopyFragment(*args, **kwargs)
    def ApplyStyleSheet(*args, **kwargs):
        """ApplyStyleSheet(self, wxRichTextStyleSheet styleSheet) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_ApplyStyleSheet(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextParagraphLayoutBox obj)"""
        return _richtext.RichTextParagraphLayoutBox_Copy(*args, **kwargs)
    def UpdateRanges(*args, **kwargs):
        """UpdateRanges(self)"""
        return _richtext.RichTextParagraphLayoutBox_UpdateRanges(*args, **kwargs)
    def GetText(*args, **kwargs):
        """GetText(self) -> String"""
        return _richtext.RichTextParagraphLayoutBox_GetText(*args, **kwargs)
    def SetDefaultStyle(*args, **kwargs):
        """SetDefaultStyle(self, RichTextAttr style) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_SetDefaultStyle(*args, **kwargs)
    def GetDefaultStyle(*args, **kwargs):
        """GetDefaultStyle(self) -> RichTextAttr"""
        return _richtext.RichTextParagraphLayoutBox_GetDefaultStyle(*args, **kwargs)
    def SetBasicStyle(*args, **kwargs):
        """SetBasicStyle(self, RichTextAttr style)"""
        return _richtext.RichTextParagraphLayoutBox_SetBasicStyle(*args, **kwargs)
    def GetBasicStyle(*args, **kwargs):
        """GetBasicStyle(self) -> RichTextAttr"""
        return _richtext.RichTextParagraphLayoutBox_GetBasicStyle(*args, **kwargs)
    def Invalidate(*args, **kwargs):
        """Invalidate(self, RichTextRange invalidRange=wxRICHTEXT_ALL)"""
        return _richtext.RichTextParagraphLayoutBox_Invalidate(*args, **kwargs)
    def UpdateFloatingObjects(*args, **kwargs):
        """UpdateFloatingObjects(self, Rect availableRect, RichTextObject untilObj=None) -> bool"""
        return _richtext.RichTextParagraphLayoutBox_UpdateFloatingObjects(*args, **kwargs)
    def GetInvalidRange(*args, **kwargs):
        """GetInvalidRange(self, bool wholeParagraphs=False) -> RichTextRange"""
        return _richtext.RichTextParagraphLayoutBox_GetInvalidRange(*args, **kwargs)
    def GetFloatCollector(*args, **kwargs):
        """GetFloatCollector(self) -> wxRichTextFloatCollector"""
        return _richtext.RichTextParagraphLayoutBox_GetFloatCollector(*args, **kwargs)
_richtext.RichTextParagraphLayoutBox_swigregister(RichTextParagraphLayoutBox)
# SWIG proxy for wxRichTextBox, a composite rich text object.
class RichTextBox(RichTextCompositeObject):
    """Proxy of C++ RichTextBox class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, RichTextObject parent=None) -> RichTextBox
        __init__(self, RichTextBox obj) -> RichTextBox
        """
        _richtext.RichTextBox_swiginit(self,_richtext.new_RichTextBox(*args))
    def Copy(*args, **kwargs):
        """Copy(self, RichTextBox obj)"""
        return _richtext.RichTextBox_Copy(*args, **kwargs)
_richtext.RichTextBox_swigregister(RichTextBox)
# SWIG proxy for wxRichTextLine. Ranges are stored as offsets relative to
# the owning paragraph; the Absolute* accessors return document coordinates.
class RichTextLine(object):
    """
    This object represents a line in a paragraph, and stores offsets from
    the start of the paragraph representing the start and end positions of
    the line.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """
        __init__(self, RichTextParagraph parent) -> RichTextLine

        This object represents a line in a paragraph, and stores offsets from
        the start of the paragraph representing the start and end positions of
        the line.
        """
        _richtext.RichTextLine_swiginit(self,_richtext.new_RichTextLine(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextLine
    __del__ = lambda self : None;
    def SetRange(*args, **kwargs):
        """SetRange(self, RichTextRange range)"""
        return _richtext.RichTextLine_SetRange(*args, **kwargs)
    def GetParent(*args, **kwargs):
        """GetParent(self) -> RichTextParagraph"""
        return _richtext.RichTextLine_GetParent(*args, **kwargs)
    def GetRange(*args, **kwargs):
        """GetRange(self) -> RichTextRange"""
        return _richtext.RichTextLine_GetRange(*args, **kwargs)
    def GetAbsoluteRange(*args, **kwargs):
        """GetAbsoluteRange(self) -> RichTextRange"""
        return _richtext.RichTextLine_GetAbsoluteRange(*args, **kwargs)
    def GetSize(*args, **kwargs):
        """GetSize(self) -> Size"""
        return _richtext.RichTextLine_GetSize(*args, **kwargs)
    def SetSize(*args, **kwargs):
        """SetSize(self, Size sz)"""
        return _richtext.RichTextLine_SetSize(*args, **kwargs)
    def GetPosition(*args, **kwargs):
        """GetPosition(self) -> Point"""
        return _richtext.RichTextLine_GetPosition(*args, **kwargs)
    def SetPosition(*args, **kwargs):
        """SetPosition(self, Point pos)"""
        return _richtext.RichTextLine_SetPosition(*args, **kwargs)
    def GetAbsolutePosition(*args, **kwargs):
        """GetAbsolutePosition(self) -> Point"""
        return _richtext.RichTextLine_GetAbsolutePosition(*args, **kwargs)
    def GetRect(*args, **kwargs):
        """GetRect(self) -> Rect"""
        return _richtext.RichTextLine_GetRect(*args, **kwargs)
    def SetDescent(*args, **kwargs):
        """SetDescent(self, int descent)"""
        return _richtext.RichTextLine_SetDescent(*args, **kwargs)
    def GetDescent(*args, **kwargs):
        """GetDescent(self) -> int"""
        return _richtext.RichTextLine_GetDescent(*args, **kwargs)
    def Init(*args, **kwargs):
        """Init(self, RichTextParagraph parent)"""
        return _richtext.RichTextLine_Init(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextLine obj)"""
        return _richtext.RichTextLine_Copy(*args, **kwargs)
    def Clone(*args, **kwargs):
        """Clone(self) -> RichTextLine"""
        return _richtext.RichTextLine_Clone(*args, **kwargs)
_richtext.RichTextLine_swigregister(RichTextLine)
# SWIG proxy for wxRichTextParagraph. The InitDefaultTabs/ClearDefaultTabs/
# GetDefaultTabs helpers are defined once and re-bound via staticmethod(),
# so they are callable on both the class and instances.
class RichTextParagraph(RichTextBox):
    """
    This object represents a single paragraph (or in a straight text
    editor, a line).
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """
        __init__(self, String text, RichTextObject parent=None, RichTextAttr paraStyle=None, 
            RichTextAttr charStyle=None) -> RichTextParagraph

        This object represents a single paragraph (or in a straight text
        editor, a line).
        """
        _richtext.RichTextParagraph_swiginit(self,_richtext.new_RichTextParagraph(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextParagraph
    __del__ = lambda self : None;
    def GetLines(*args, **kwargs):
        """GetLines(self) -> wxRichTextLineList"""
        return _richtext.RichTextParagraph_GetLines(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextParagraph obj)"""
        return _richtext.RichTextParagraph_Copy(*args, **kwargs)
    def ClearLines(*args, **kwargs):
        """ClearLines(self)"""
        return _richtext.RichTextParagraph_ClearLines(*args, **kwargs)
    def ApplyParagraphStyle(*args, **kwargs):
        """ApplyParagraphStyle(self, RichTextLine line, RichTextAttr attr, Rect rect, DC dc)"""
        return _richtext.RichTextParagraph_ApplyParagraphStyle(*args, **kwargs)
    def InsertText(*args, **kwargs):
        """InsertText(self, long pos, String text) -> bool"""
        return _richtext.RichTextParagraph_InsertText(*args, **kwargs)
    def SplitAt(*args, **kwargs):
        """SplitAt(self, long pos, RichTextObject previousObject=None) -> RichTextObject"""
        return _richtext.RichTextParagraph_SplitAt(*args, **kwargs)
    def MoveToList(*args, **kwargs):
        """MoveToList(self, RichTextObject obj, wxList list)"""
        return _richtext.RichTextParagraph_MoveToList(*args, **kwargs)
    def MoveFromList(*args, **kwargs):
        """MoveFromList(self, wxList list)"""
        return _richtext.RichTextParagraph_MoveFromList(*args, **kwargs)
    def GetContiguousPlainText(*args, **kwargs):
        """GetContiguousPlainText(self, String text, RichTextRange range, bool fromStart=True) -> bool"""
        return _richtext.RichTextParagraph_GetContiguousPlainText(*args, **kwargs)
    def FindWrapPosition(*args, **kwargs):
        """
        FindWrapPosition(self, RichTextRange range, DC dc, RichTextDrawingContext context, 
            int availableSpace, long wrapPosition, 
            wxArrayInt partialExtents) -> bool
        """
        return _richtext.RichTextParagraph_FindWrapPosition(*args, **kwargs)
    def FindObjectAtPosition(*args, **kwargs):
        """FindObjectAtPosition(self, long position) -> RichTextObject"""
        return _richtext.RichTextParagraph_FindObjectAtPosition(*args, **kwargs)
    def GetBulletText(*args, **kwargs):
        """GetBulletText(self) -> String"""
        return _richtext.RichTextParagraph_GetBulletText(*args, **kwargs)
    def AllocateLine(*args, **kwargs):
        """AllocateLine(self, int pos) -> RichTextLine"""
        return _richtext.RichTextParagraph_AllocateLine(*args, **kwargs)
    def ClearUnusedLines(*args, **kwargs):
        """ClearUnusedLines(self, int lineCount) -> bool"""
        return _richtext.RichTextParagraph_ClearUnusedLines(*args, **kwargs)
    def GetCombinedAttributes(*args, **kwargs):
        """GetCombinedAttributes(self, RichTextAttr contentStyle=None) -> RichTextAttr"""
        return _richtext.RichTextParagraph_GetCombinedAttributes(*args, **kwargs)
    def GetFirstLineBreakPosition(*args, **kwargs):
        """GetFirstLineBreakPosition(self, long pos) -> long"""
        return _richtext.RichTextParagraph_GetFirstLineBreakPosition(*args, **kwargs)
    def InitDefaultTabs(*args, **kwargs):
        """InitDefaultTabs()"""
        return _richtext.RichTextParagraph_InitDefaultTabs(*args, **kwargs)
    InitDefaultTabs = staticmethod(InitDefaultTabs)
    def ClearDefaultTabs(*args, **kwargs):
        """ClearDefaultTabs()"""
        return _richtext.RichTextParagraph_ClearDefaultTabs(*args, **kwargs)
    ClearDefaultTabs = staticmethod(ClearDefaultTabs)
    def GetDefaultTabs(*args, **kwargs):
        """GetDefaultTabs() -> wxArrayInt"""
        return _richtext.RichTextParagraph_GetDefaultTabs(*args, **kwargs)
    GetDefaultTabs = staticmethod(GetDefaultTabs)
_richtext.RichTextParagraph_swigregister(RichTextParagraph)
# Module-level aliases for RichTextParagraph's static members (the usual
# second spelling SWIG emits alongside the staticmethod bindings above).
def RichTextParagraph_InitDefaultTabs(*args):
  """RichTextParagraph_InitDefaultTabs()"""
  return _richtext.RichTextParagraph_InitDefaultTabs(*args)
def RichTextParagraph_ClearDefaultTabs(*args):
  """RichTextParagraph_ClearDefaultTabs()"""
  return _richtext.RichTextParagraph_ClearDefaultTabs(*args)
def RichTextParagraph_GetDefaultTabs(*args):
  """RichTextParagraph_GetDefaultTabs() -> wxArrayInt"""
  return _richtext.RichTextParagraph_GetDefaultTabs(*args)
# SWIG proxy for wxRichTextPlainText: a leaf object holding a run of text.
class RichTextPlainText(RichTextObject):
    """This object represents a single piece of text."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs): 
        """
        __init__(self, String text=wxEmptyString, RichTextObject parent=None, 
            RichTextAttr style=None) -> RichTextPlainText

        This object represents a single piece of text.
        """
        _richtext.RichTextPlainText_swiginit(self,_richtext.new_RichTextPlainText(*args, **kwargs))
    def GetFirstLineBreakPosition(*args, **kwargs):
        """GetFirstLineBreakPosition(self, long pos) -> long"""
        return _richtext.RichTextPlainText_GetFirstLineBreakPosition(*args, **kwargs)
    def GetText(*args, **kwargs):
        """GetText(self) -> String"""
        return _richtext.RichTextPlainText_GetText(*args, **kwargs)
    def SetText(*args, **kwargs):
        """SetText(self, String text)"""
        return _richtext.RichTextPlainText_SetText(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextPlainText obj)"""
        return _richtext.RichTextPlainText_Copy(*args, **kwargs)
_richtext.RichTextPlainText_swigregister(RichTextPlainText)
# SWIG proxy for wxRichTextImage. The image data lives in an image block;
# a rendered Bitmap is cached (see *ImageCache methods) until reset.
class RichTextImage(RichTextObject):
    """This object represents an image."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args): 
        """
        __init__(self, RichTextObject parent=None) -> RichTextImage
        __init__(self, Image image, RichTextObject parent=None, RichTextAttr charStyle=None) -> RichTextImage
        __init__(self, wxRichTextImageBlock imageBlock, RichTextObject parent=None, 
            RichTextAttr charStyle=None) -> RichTextImage
        __init__(self, RichTextImage obj) -> RichTextImage

        This object represents an image.
        """
        _richtext.RichTextImage_swiginit(self,_richtext.new_RichTextImage(*args))
    def GetImageCache(*args, **kwargs):
        """GetImageCache(self) -> Bitmap"""
        return _richtext.RichTextImage_GetImageCache(*args, **kwargs)
    def SetImageCache(*args, **kwargs):
        """SetImageCache(self, Bitmap bitmap)"""
        return _richtext.RichTextImage_SetImageCache(*args, **kwargs)
    def ResetImageCache(*args, **kwargs):
        """ResetImageCache(self)"""
        return _richtext.RichTextImage_ResetImageCache(*args, **kwargs)
    def GetImageBlock(*args, **kwargs):
        """GetImageBlock(self) -> wxRichTextImageBlock"""
        return _richtext.RichTextImage_GetImageBlock(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextImage obj)"""
        return _richtext.RichTextImage_Copy(*args, **kwargs)
    def LoadImageCache(*args, **kwargs):
        """LoadImageCache(self, DC dc, bool resetCache=False) -> bool"""
        return _richtext.RichTextImage_LoadImageCache(*args, **kwargs)
_richtext.RichTextImage_swigregister(RichTextImage)
# SWIG proxy over the C++ file-handler-list iterator. The Python
# constructor is disabled; instances come from RichTextFileHandlerList.__iter__.
class RichTextFileHandlerList_iterator(object):
    """This class serves as an iterator for a wxRichTextFileHandlerList object."""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextFileHandlerList_iterator
    __del__ = lambda self : None;
    def next(*args, **kwargs):
        """next(self) -> RichTextFileHandler"""
        return _richtext.RichTextFileHandlerList_iterator_next(*args, **kwargs)
_richtext.RichTextFileHandlerList_iterator_swigregister(RichTextFileHandlerList_iterator)
class RichTextFileHandlerList(object):
    """
    This class wraps a wxList-based class and gives it a Python
    sequence-like interface. Sequence operations supported are length,
    index access and iteration.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextFileHandlerList
    __del__ = lambda self : None;
    def __len__(*args, **kwargs):
        """__len__(self) -> size_t"""
        return _richtext.RichTextFileHandlerList___len__(*args, **kwargs)
    def __getitem__(*args, **kwargs):
        """__getitem__(self, size_t index) -> RichTextFileHandler"""
        return _richtext.RichTextFileHandlerList___getitem__(*args, **kwargs)
    def __contains__(*args, **kwargs):
        """__contains__(self, RichTextFileHandler obj) -> bool"""
        return _richtext.RichTextFileHandlerList___contains__(*args, **kwargs)
    def __iter__(*args, **kwargs):
        """__iter__(self) -> RichTextFileHandlerList_iterator"""
        return _richtext.RichTextFileHandlerList___iter__(*args, **kwargs)
    # Deliberately shadows the generic _swig_repr assigned above with a
    # friendlier representation that materializes the sequence contents.
    def __repr__(self):
        return "wxRichTextFileHandlerList: " + repr(list(self))
_richtext.RichTextFileHandlerList_swigregister(RichTextFileHandlerList)
class RichTextBuffer(RichTextParagraphLayoutBox):
    """This is a kind of box, used to represent the whole buffer."""
    # SWIG-generated proxy: every method below forwards verbatim to the
    # _richtext C extension via *args/**kwargs dispatch; docstrings carry
    # the generated C++ signatures.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self) -> RichTextBuffer

        This is a kind of box, used to represent the whole buffer.
        """
        _richtext.RichTextBuffer_swiginit(self,_richtext.new_RichTextBuffer(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextBuffer
    __del__ = lambda self : None;
    def GetCommandProcessor(*args, **kwargs):
        """GetCommandProcessor(self) -> wxCommandProcessor"""
        return _richtext.RichTextBuffer_GetCommandProcessor(*args, **kwargs)
    # -- style sheet management --
    def SetStyleSheet(*args, **kwargs):
        """SetStyleSheet(self, wxRichTextStyleSheet styleSheet)"""
        return _richtext.RichTextBuffer_SetStyleSheet(*args, **kwargs)
    def SetStyleSheetAndNotify(*args, **kwargs):
        """SetStyleSheetAndNotify(self, wxRichTextStyleSheet sheet) -> bool"""
        return _richtext.RichTextBuffer_SetStyleSheetAndNotify(*args, **kwargs)
    def PushStyleSheet(*args, **kwargs):
        """PushStyleSheet(self, wxRichTextStyleSheet styleSheet) -> bool"""
        return _richtext.RichTextBuffer_PushStyleSheet(*args, **kwargs)
    def PopStyleSheet(*args, **kwargs):
        """PopStyleSheet(self) -> wxRichTextStyleSheet"""
        return _richtext.RichTextBuffer_PopStyleSheet(*args, **kwargs)
    def GetFontTable(*args, **kwargs):
        """GetFontTable(self) -> RichTextFontTable"""
        return _richtext.RichTextBuffer_GetFontTable(*args, **kwargs)
    def SetFontTable(*args, **kwargs):
        """SetFontTable(self, RichTextFontTable table)"""
        return _richtext.RichTextBuffer_SetFontTable(*args, **kwargs)
    def Init(*args, **kwargs):
        """Init(self)"""
        return _richtext.RichTextBuffer_Init(*args, **kwargs)
    def ResetAndClearCommands(*args, **kwargs):
        """ResetAndClearCommands(self)"""
        return _richtext.RichTextBuffer_ResetAndClearCommands(*args, **kwargs)
    # -- loading and saving --
    def LoadFile(*args, **kwargs):
        """LoadFile(self, String filename, int type=RICHTEXT_TYPE_ANY) -> bool"""
        return _richtext.RichTextBuffer_LoadFile(*args, **kwargs)
    def SaveFile(*args, **kwargs):
        """SaveFile(self, String filename, int type=RICHTEXT_TYPE_ANY) -> bool"""
        return _richtext.RichTextBuffer_SaveFile(*args, **kwargs)
    def LoadStream(*args, **kwargs):
        """LoadStream(self, InputStream stream, int type=RICHTEXT_TYPE_ANY) -> bool"""
        return _richtext.RichTextBuffer_LoadStream(*args, **kwargs)
    def SaveStream(*args, **kwargs):
        """SaveStream(self, wxOutputStream stream, int type=RICHTEXT_TYPE_ANY) -> bool"""
        return _richtext.RichTextBuffer_SaveStream(*args, **kwargs)
    def SetHandlerFlags(*args, **kwargs):
        """SetHandlerFlags(self, int flags)"""
        return _richtext.RichTextBuffer_SetHandlerFlags(*args, **kwargs)
    def GetHandlerFlags(*args, **kwargs):
        """GetHandlerFlags(self) -> int"""
        return _richtext.RichTextBuffer_GetHandlerFlags(*args, **kwargs)
    # -- undo/redo batching and suppression --
    def BeginBatchUndo(*args, **kwargs):
        """BeginBatchUndo(self, String cmdName) -> bool"""
        return _richtext.RichTextBuffer_BeginBatchUndo(*args, **kwargs)
    def EndBatchUndo(*args, **kwargs):
        """EndBatchUndo(self) -> bool"""
        return _richtext.RichTextBuffer_EndBatchUndo(*args, **kwargs)
    def BatchingUndo(*args, **kwargs):
        """BatchingUndo(self) -> bool"""
        return _richtext.RichTextBuffer_BatchingUndo(*args, **kwargs)
    def SubmitAction(*args, **kwargs):
        """SubmitAction(self, RichTextAction action) -> bool"""
        return _richtext.RichTextBuffer_SubmitAction(*args, **kwargs)
    def GetBatchedCommand(*args, **kwargs):
        """GetBatchedCommand(self) -> RichTextCommand"""
        return _richtext.RichTextBuffer_GetBatchedCommand(*args, **kwargs)
    def BeginSuppressUndo(*args, **kwargs):
        """BeginSuppressUndo(self) -> bool"""
        return _richtext.RichTextBuffer_BeginSuppressUndo(*args, **kwargs)
    def EndSuppressUndo(*args, **kwargs):
        """EndSuppressUndo(self) -> bool"""
        return _richtext.RichTextBuffer_EndSuppressUndo(*args, **kwargs)
    def SuppressingUndo(*args, **kwargs):
        """SuppressingUndo(self) -> bool"""
        return _richtext.RichTextBuffer_SuppressingUndo(*args, **kwargs)
    # -- clipboard --
    def CopyToClipboard(*args, **kwargs):
        """CopyToClipboard(self, RichTextRange range) -> bool"""
        return _richtext.RichTextBuffer_CopyToClipboard(*args, **kwargs)
    def PasteFromClipboard(*args, **kwargs):
        """PasteFromClipboard(self, long position) -> bool"""
        return _richtext.RichTextBuffer_PasteFromClipboard(*args, **kwargs)
    def CanPasteFromClipboard(*args, **kwargs):
        """CanPasteFromClipboard(self) -> bool"""
        return _richtext.RichTextBuffer_CanPasteFromClipboard(*args, **kwargs)
    # -- stacked style helpers: Begin*/End* pairs push and pop attributes --
    def BeginStyle(*args, **kwargs):
        """BeginStyle(self, RichTextAttr style) -> bool"""
        return _richtext.RichTextBuffer_BeginStyle(*args, **kwargs)
    def EndStyle(*args, **kwargs):
        """EndStyle(self) -> bool"""
        return _richtext.RichTextBuffer_EndStyle(*args, **kwargs)
    def EndAllStyles(*args, **kwargs):
        """EndAllStyles(self) -> bool"""
        return _richtext.RichTextBuffer_EndAllStyles(*args, **kwargs)
    def ClearStyleStack(*args, **kwargs):
        """ClearStyleStack(self)"""
        return _richtext.RichTextBuffer_ClearStyleStack(*args, **kwargs)
    def GetStyleStackSize(*args, **kwargs):
        """GetStyleStackSize(self) -> size_t"""
        return _richtext.RichTextBuffer_GetStyleStackSize(*args, **kwargs)
    def BeginBold(*args, **kwargs):
        """BeginBold(self) -> bool"""
        return _richtext.RichTextBuffer_BeginBold(*args, **kwargs)
    def EndBold(*args, **kwargs):
        """EndBold(self) -> bool"""
        return _richtext.RichTextBuffer_EndBold(*args, **kwargs)
    def BeginItalic(*args, **kwargs):
        """BeginItalic(self) -> bool"""
        return _richtext.RichTextBuffer_BeginItalic(*args, **kwargs)
    def EndItalic(*args, **kwargs):
        """EndItalic(self) -> bool"""
        return _richtext.RichTextBuffer_EndItalic(*args, **kwargs)
    def BeginUnderline(*args, **kwargs):
        """BeginUnderline(self) -> bool"""
        return _richtext.RichTextBuffer_BeginUnderline(*args, **kwargs)
    def EndUnderline(*args, **kwargs):
        """EndUnderline(self) -> bool"""
        return _richtext.RichTextBuffer_EndUnderline(*args, **kwargs)
    def BeginFontSize(*args, **kwargs):
        """BeginFontSize(self, int pointSize) -> bool"""
        return _richtext.RichTextBuffer_BeginFontSize(*args, **kwargs)
    def EndFontSize(*args, **kwargs):
        """EndFontSize(self) -> bool"""
        return _richtext.RichTextBuffer_EndFontSize(*args, **kwargs)
    def BeginFont(*args, **kwargs):
        """BeginFont(self, Font font) -> bool"""
        return _richtext.RichTextBuffer_BeginFont(*args, **kwargs)
    def EndFont(*args, **kwargs):
        """EndFont(self) -> bool"""
        return _richtext.RichTextBuffer_EndFont(*args, **kwargs)
    def BeginTextColour(*args, **kwargs):
        """BeginTextColour(self, Colour colour) -> bool"""
        return _richtext.RichTextBuffer_BeginTextColour(*args, **kwargs)
    def EndTextColour(*args, **kwargs):
        """EndTextColour(self) -> bool"""
        return _richtext.RichTextBuffer_EndTextColour(*args, **kwargs)
    def BeginAlignment(*args, **kwargs):
        """BeginAlignment(self, int alignment) -> bool"""
        return _richtext.RichTextBuffer_BeginAlignment(*args, **kwargs)
    def EndAlignment(*args, **kwargs):
        """EndAlignment(self) -> bool"""
        return _richtext.RichTextBuffer_EndAlignment(*args, **kwargs)
    def BeginLeftIndent(*args, **kwargs):
        """BeginLeftIndent(self, int leftIndent, int leftSubIndent=0) -> bool"""
        return _richtext.RichTextBuffer_BeginLeftIndent(*args, **kwargs)
    def EndLeftIndent(*args, **kwargs):
        """EndLeftIndent(self) -> bool"""
        return _richtext.RichTextBuffer_EndLeftIndent(*args, **kwargs)
    def BeginRightIndent(*args, **kwargs):
        """BeginRightIndent(self, int rightIndent) -> bool"""
        return _richtext.RichTextBuffer_BeginRightIndent(*args, **kwargs)
    def EndRightIndent(*args, **kwargs):
        """EndRightIndent(self) -> bool"""
        return _richtext.RichTextBuffer_EndRightIndent(*args, **kwargs)
    def BeginParagraphSpacing(*args, **kwargs):
        """BeginParagraphSpacing(self, int before, int after) -> bool"""
        return _richtext.RichTextBuffer_BeginParagraphSpacing(*args, **kwargs)
    def EndParagraphSpacing(*args, **kwargs):
        """EndParagraphSpacing(self) -> bool"""
        return _richtext.RichTextBuffer_EndParagraphSpacing(*args, **kwargs)
    def BeginLineSpacing(*args, **kwargs):
        """BeginLineSpacing(self, int lineSpacing) -> bool"""
        return _richtext.RichTextBuffer_BeginLineSpacing(*args, **kwargs)
    def EndLineSpacing(*args, **kwargs):
        """EndLineSpacing(self) -> bool"""
        return _richtext.RichTextBuffer_EndLineSpacing(*args, **kwargs)
    def BeginNumberedBullet(*args, **kwargs):
        """
        BeginNumberedBullet(self, int bulletNumber, int leftIndent, int leftSubIndent, 
            int bulletStyle=wxTEXT_ATTR_BULLET_STYLE_ARABIC|wxTEXT_ATTR_BULLET_STYLE_PERIOD) -> bool
        """
        return _richtext.RichTextBuffer_BeginNumberedBullet(*args, **kwargs)
    def EndNumberedBullet(*args, **kwargs):
        """EndNumberedBullet(self) -> bool"""
        return _richtext.RichTextBuffer_EndNumberedBullet(*args, **kwargs)
    def BeginSymbolBullet(*args, **kwargs):
        """BeginSymbolBullet(self, String symbol, int leftIndent, int leftSubIndent, int bulletStyle=TEXT_ATTR_BULLET_STYLE_SYMBOL) -> bool"""
        return _richtext.RichTextBuffer_BeginSymbolBullet(*args, **kwargs)
    def EndSymbolBullet(*args, **kwargs):
        """EndSymbolBullet(self) -> bool"""
        return _richtext.RichTextBuffer_EndSymbolBullet(*args, **kwargs)
    def BeginStandardBullet(*args, **kwargs):
        """
        BeginStandardBullet(self, String bulletName, int leftIndent, int leftSubIndent, 
            int bulletStyle=TEXT_ATTR_BULLET_STYLE_STANDARD) -> bool
        """
        return _richtext.RichTextBuffer_BeginStandardBullet(*args, **kwargs)
    def EndStandardBullet(*args, **kwargs):
        """EndStandardBullet(self) -> bool"""
        return _richtext.RichTextBuffer_EndStandardBullet(*args, **kwargs)
    def BeginCharacterStyle(*args, **kwargs):
        """BeginCharacterStyle(self, String characterStyle) -> bool"""
        return _richtext.RichTextBuffer_BeginCharacterStyle(*args, **kwargs)
    def EndCharacterStyle(*args, **kwargs):
        """EndCharacterStyle(self) -> bool"""
        return _richtext.RichTextBuffer_EndCharacterStyle(*args, **kwargs)
    def BeginParagraphStyle(*args, **kwargs):
        """BeginParagraphStyle(self, String paragraphStyle) -> bool"""
        return _richtext.RichTextBuffer_BeginParagraphStyle(*args, **kwargs)
    def EndParagraphStyle(*args, **kwargs):
        """EndParagraphStyle(self) -> bool"""
        return _richtext.RichTextBuffer_EndParagraphStyle(*args, **kwargs)
    def BeginListStyle(*args, **kwargs):
        """BeginListStyle(self, String listStyle, int level=1, int number=1) -> bool"""
        return _richtext.RichTextBuffer_BeginListStyle(*args, **kwargs)
    def EndListStyle(*args, **kwargs):
        """EndListStyle(self) -> bool"""
        return _richtext.RichTextBuffer_EndListStyle(*args, **kwargs)
    def BeginURL(*args, **kwargs):
        """BeginURL(self, String url, String characterStyle=wxEmptyString) -> bool"""
        return _richtext.RichTextBuffer_BeginURL(*args, **kwargs)
    def EndURL(*args, **kwargs):
        """EndURL(self) -> bool"""
        return _richtext.RichTextBuffer_EndURL(*args, **kwargs)
    # -- event handler chain --
    def AddEventHandler(*args, **kwargs):
        """AddEventHandler(self, EvtHandler handler) -> bool"""
        return _richtext.RichTextBuffer_AddEventHandler(*args, **kwargs)
    def RemoveEventHandler(*args, **kwargs):
        """RemoveEventHandler(self, EvtHandler handler, bool deleteHandler=False) -> bool"""
        return _richtext.RichTextBuffer_RemoveEventHandler(*args, **kwargs)
    def ClearEventHandlers(*args, **kwargs):
        """ClearEventHandlers(self)"""
        return _richtext.RichTextBuffer_ClearEventHandlers(*args, **kwargs)
    def SendEvent(*args, **kwargs):
        """SendEvent(self, Event event, bool sendToAll=True) -> bool"""
        return _richtext.RichTextBuffer_SendEvent(*args, **kwargs)
    def Copy(*args, **kwargs):
        """Copy(self, RichTextBuffer obj)"""
        return _richtext.RichTextBuffer_Copy(*args, **kwargs)
    # -- undoable editing operations --
    def InsertParagraphsWithUndo(*args, **kwargs):
        """
        InsertParagraphsWithUndo(self, long pos, RichTextParagraphLayoutBox paragraphs, RichTextCtrl ctrl, 
            int flags=0) -> bool
        """
        return _richtext.RichTextBuffer_InsertParagraphsWithUndo(*args, **kwargs)
    def InsertTextWithUndo(*args, **kwargs):
        """InsertTextWithUndo(self, long pos, String text, RichTextCtrl ctrl, int flags=0) -> bool"""
        return _richtext.RichTextBuffer_InsertTextWithUndo(*args, **kwargs)
    def InsertNewlineWithUndo(*args, **kwargs):
        """InsertNewlineWithUndo(self, long pos, RichTextCtrl ctrl, int flags=0) -> bool"""
        return _richtext.RichTextBuffer_InsertNewlineWithUndo(*args, **kwargs)
    def InsertImageWithUndo(*args, **kwargs):
        """
        InsertImageWithUndo(self, long pos, wxRichTextImageBlock imageBlock, RichTextCtrl ctrl, 
            int flags=0) -> bool
        """
        return _richtext.RichTextBuffer_InsertImageWithUndo(*args, **kwargs)
    def DeleteRangeWithUndo(*args, **kwargs):
        """DeleteRangeWithUndo(self, RichTextRange range, RichTextCtrl ctrl) -> bool"""
        return _richtext.RichTextBuffer_DeleteRangeWithUndo(*args, **kwargs)
    def Modify(*args, **kwargs):
        """Modify(self, bool modify=True)"""
        return _richtext.RichTextBuffer_Modify(*args, **kwargs)
    def IsModified(*args, **kwargs):
        """IsModified(self) -> bool"""
        return _richtext.RichTextBuffer_IsModified(*args, **kwargs)
    def GetStyleForNewParagraph(*args, **kwargs):
        """
        GetStyleForNewParagraph(self, RichTextBuffer buffer, long pos, bool caretPosition=False, 
            bool lookUpNewParaStyle=False) -> RichTextAttr
        """
        return _richtext.RichTextBuffer_GetStyleForNewParagraph(*args, **kwargs)
    # -- static file-handler registry; each def is rebound via
    #    staticmethod() immediately after it (SWIG pattern), and each is
    #    also exposed as a RichTextBuffer_* module-level function below --
    def GetHandlers(*args, **kwargs):
        """GetHandlers() -> wxRichTextFileHandlerList_t"""
        return _richtext.RichTextBuffer_GetHandlers(*args, **kwargs)
    GetHandlers = staticmethod(GetHandlers)
    def AddHandler(*args, **kwargs):
        """AddHandler(RichTextFileHandler handler)"""
        return _richtext.RichTextBuffer_AddHandler(*args, **kwargs)
    AddHandler = staticmethod(AddHandler)
    def InsertHandler(*args, **kwargs):
        """InsertHandler(RichTextFileHandler handler)"""
        return _richtext.RichTextBuffer_InsertHandler(*args, **kwargs)
    InsertHandler = staticmethod(InsertHandler)
    def RemoveHandler(*args, **kwargs):
        """RemoveHandler(String name) -> bool"""
        return _richtext.RichTextBuffer_RemoveHandler(*args, **kwargs)
    RemoveHandler = staticmethod(RemoveHandler)
    def FindHandlerByName(*args, **kwargs):
        """FindHandlerByName(String name) -> RichTextFileHandler"""
        return _richtext.RichTextBuffer_FindHandlerByName(*args, **kwargs)
    FindHandlerByName = staticmethod(FindHandlerByName)
    def FindHandlerByExtension(*args, **kwargs):
        """FindHandlerByExtension(String extension, int imageType) -> RichTextFileHandler"""
        return _richtext.RichTextBuffer_FindHandlerByExtension(*args, **kwargs)
    FindHandlerByExtension = staticmethod(FindHandlerByExtension)
    def FindHandlerByFilename(*args, **kwargs):
        """FindHandlerByFilename(String filename, int imageType) -> RichTextFileHandler"""
        return _richtext.RichTextBuffer_FindHandlerByFilename(*args, **kwargs)
    FindHandlerByFilename = staticmethod(FindHandlerByFilename)
    def FindHandlerByType(*args, **kwargs):
        """FindHandlerByType(int imageType) -> RichTextFileHandler"""
        return _richtext.RichTextBuffer_FindHandlerByType(*args, **kwargs)
    FindHandlerByType = staticmethod(FindHandlerByType)
    def GetExtWildcard(*args, **kwargs):
        """
        GetExtWildcard(bool combine=False, bool save=False) --> (wildcards, types)

        Gets a wildcard string for the file dialog based on all the currently
        loaded richtext file handlers, and a list that can be used to map
        those filter types to the file handler type.
        """
        return _richtext.RichTextBuffer_GetExtWildcard(*args, **kwargs)
    GetExtWildcard = staticmethod(GetExtWildcard)
    def CleanUpHandlers(*args, **kwargs):
        """CleanUpHandlers()"""
        return _richtext.RichTextBuffer_CleanUpHandlers(*args, **kwargs)
    CleanUpHandlers = staticmethod(CleanUpHandlers)
    def InitStandardHandlers(*args, **kwargs):
        """InitStandardHandlers()"""
        return _richtext.RichTextBuffer_InitStandardHandlers(*args, **kwargs)
    InitStandardHandlers = staticmethod(InitStandardHandlers)
    # -- static renderer and bullet-metric accessors --
    def GetRenderer(*args, **kwargs):
        """GetRenderer() -> RichTextRenderer"""
        return _richtext.RichTextBuffer_GetRenderer(*args, **kwargs)
    GetRenderer = staticmethod(GetRenderer)
    def SetRenderer(*args, **kwargs):
        """SetRenderer(RichTextRenderer renderer)"""
        return _richtext.RichTextBuffer_SetRenderer(*args, **kwargs)
    SetRenderer = staticmethod(SetRenderer)
    def GetBulletRightMargin(*args, **kwargs):
        """GetBulletRightMargin() -> int"""
        return _richtext.RichTextBuffer_GetBulletRightMargin(*args, **kwargs)
    GetBulletRightMargin = staticmethod(GetBulletRightMargin)
    def SetBulletRightMargin(*args, **kwargs):
        """SetBulletRightMargin(int margin)"""
        return _richtext.RichTextBuffer_SetBulletRightMargin(*args, **kwargs)
    SetBulletRightMargin = staticmethod(SetBulletRightMargin)
    def GetBulletProportion(*args, **kwargs):
        """GetBulletProportion() -> float"""
        return _richtext.RichTextBuffer_GetBulletProportion(*args, **kwargs)
    GetBulletProportion = staticmethod(GetBulletProportion)
    def SetBulletProportion(*args, **kwargs):
        """SetBulletProportion(float prop)"""
        return _richtext.RichTextBuffer_SetBulletProportion(*args, **kwargs)
    SetBulletProportion = staticmethod(SetBulletProportion)
    def GetScale(*args, **kwargs):
        """GetScale(self) -> double"""
        return _richtext.RichTextBuffer_GetScale(*args, **kwargs)
    def SetScale(*args, **kwargs):
        """SetScale(self, double scale)"""
        return _richtext.RichTextBuffer_SetScale(*args, **kwargs)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextBuffer_swigregister(RichTextBuffer)
def RichTextBuffer_GetHandlers(*args):
  """RichTextBuffer_GetHandlers() -> wxRichTextFileHandlerList_t"""
  # Module-level alias for the static RichTextBuffer.GetHandlers.
  return _richtext.RichTextBuffer_GetHandlers(*args)
def RichTextBuffer_AddHandler(*args, **kwargs):
  """RichTextBuffer_AddHandler(RichTextFileHandler handler)"""
  # Module-level alias for the static RichTextBuffer.AddHandler.
  return _richtext.RichTextBuffer_AddHandler(*args, **kwargs)
def RichTextBuffer_InsertHandler(*args, **kwargs):
  """RichTextBuffer_InsertHandler(RichTextFileHandler handler)"""
  # Module-level alias for the static RichTextBuffer.InsertHandler.
  return _richtext.RichTextBuffer_InsertHandler(*args, **kwargs)
def RichTextBuffer_RemoveHandler(*args, **kwargs):
  """RichTextBuffer_RemoveHandler(String name) -> bool"""
  # Module-level alias for the static RichTextBuffer.RemoveHandler.
  return _richtext.RichTextBuffer_RemoveHandler(*args, **kwargs)
def RichTextBuffer_FindHandlerByName(*args, **kwargs):
  """RichTextBuffer_FindHandlerByName(String name) -> RichTextFileHandler"""
  # Module-level alias for the static RichTextBuffer.FindHandlerByName.
  return _richtext.RichTextBuffer_FindHandlerByName(*args, **kwargs)
def RichTextBuffer_FindHandlerByExtension(*args, **kwargs):
  """RichTextBuffer_FindHandlerByExtension(String extension, int imageType) -> RichTextFileHandler"""
  # Module-level alias for the static RichTextBuffer.FindHandlerByExtension.
  return _richtext.RichTextBuffer_FindHandlerByExtension(*args, **kwargs)
def RichTextBuffer_FindHandlerByFilename(*args, **kwargs):
  """RichTextBuffer_FindHandlerByFilename(String filename, int imageType) -> RichTextFileHandler"""
  # Module-level alias for the static RichTextBuffer.FindHandlerByFilename.
  return _richtext.RichTextBuffer_FindHandlerByFilename(*args, **kwargs)
def RichTextBuffer_FindHandlerByType(*args, **kwargs):
  """RichTextBuffer_FindHandlerByType(int imageType) -> RichTextFileHandler"""
  # Module-level alias for the static RichTextBuffer.FindHandlerByType.
  return _richtext.RichTextBuffer_FindHandlerByType(*args, **kwargs)
def RichTextBuffer_GetExtWildcard(*args, **kwargs):
  """
  RichTextBuffer_GetExtWildcard(bool combine=False, bool save=False) --> (wildcards, types)

  Gets a wildcard string for the file dialog based on all the currently
  loaded richtext file handlers, and a list that can be used to map
  those filter types to the file handler type.
  """
  # Module-level alias for the static RichTextBuffer.GetExtWildcard.
  # (Docstring previously showed a stale `self` parameter copied from the
  # method version; this is a free function.)
  return _richtext.RichTextBuffer_GetExtWildcard(*args, **kwargs)
def RichTextBuffer_CleanUpHandlers(*args):
  """RichTextBuffer_CleanUpHandlers()"""
  # Module-level alias for the static RichTextBuffer.CleanUpHandlers.
  return _richtext.RichTextBuffer_CleanUpHandlers(*args)
def RichTextBuffer_InitStandardHandlers(*args):
  """RichTextBuffer_InitStandardHandlers()"""
  # Module-level alias for the static RichTextBuffer.InitStandardHandlers.
  return _richtext.RichTextBuffer_InitStandardHandlers(*args)
def RichTextBuffer_GetRenderer(*args):
  """RichTextBuffer_GetRenderer() -> RichTextRenderer"""
  # Module-level alias for the static RichTextBuffer.GetRenderer.
  return _richtext.RichTextBuffer_GetRenderer(*args)
def RichTextBuffer_SetRenderer(*args, **kwargs):
  """RichTextBuffer_SetRenderer(RichTextRenderer renderer)"""
  # Module-level alias for the static RichTextBuffer.SetRenderer.
  return _richtext.RichTextBuffer_SetRenderer(*args, **kwargs)
def RichTextBuffer_GetBulletRightMargin(*args):
  """RichTextBuffer_GetBulletRightMargin() -> int"""
  # Module-level alias for the static RichTextBuffer.GetBulletRightMargin.
  return _richtext.RichTextBuffer_GetBulletRightMargin(*args)
def RichTextBuffer_SetBulletRightMargin(*args, **kwargs):
  """RichTextBuffer_SetBulletRightMargin(int margin)"""
  # Module-level alias for the static RichTextBuffer.SetBulletRightMargin.
  return _richtext.RichTextBuffer_SetBulletRightMargin(*args, **kwargs)
def RichTextBuffer_GetBulletProportion(*args):
  """RichTextBuffer_GetBulletProportion() -> float"""
  # Module-level alias for the static RichTextBuffer.GetBulletProportion.
  return _richtext.RichTextBuffer_GetBulletProportion(*args)
def RichTextBuffer_SetBulletProportion(*args, **kwargs):
  """RichTextBuffer_SetBulletProportion(float prop)"""
  # Module-level alias for the static RichTextBuffer.SetBulletProportion.
  return _richtext.RichTextBuffer_SetBulletProportion(*args, **kwargs)
#---------------------------------------------------------------------------
# File-handler behavior flags, re-exported from the _richtext C extension;
# passed via SetHandlerFlags to control loading/saving.
RICHTEXT_HANDLER_INCLUDE_STYLESHEET = _richtext.RICHTEXT_HANDLER_INCLUDE_STYLESHEET
RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY = _richtext.RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY
RICHTEXT_HANDLER_SAVE_IMAGES_TO_FILES = _richtext.RICHTEXT_HANDLER_SAVE_IMAGES_TO_FILES
RICHTEXT_HANDLER_SAVE_IMAGES_TO_BASE64 = _richtext.RICHTEXT_HANDLER_SAVE_IMAGES_TO_BASE64
RICHTEXT_HANDLER_NO_HEADER_FOOTER = _richtext.RICHTEXT_HANDLER_NO_HEADER_FOOTER
RICHTEXT_HANDLER_CONVERT_FACENAMES = _richtext.RICHTEXT_HANDLER_CONVERT_FACENAMES
class RichTextFileHandler(_core.Object):
    """Base class for file handlers"""
    # SWIG proxy for the abstract C++ base; not constructible from Python,
    # only concrete subclasses (e.g. RichTextPlainTextHandler) are.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextFileHandler
    __del__ = lambda self : None;
    def LoadStream(*args, **kwargs):
        """LoadStream(self, RichTextBuffer buffer, InputStream stream) -> bool"""
        return _richtext.RichTextFileHandler_LoadStream(*args, **kwargs)
    def SaveStream(*args, **kwargs):
        """SaveStream(self, RichTextBuffer buffer, wxOutputStream stream) -> bool"""
        return _richtext.RichTextFileHandler_SaveStream(*args, **kwargs)
    def LoadFile(*args, **kwargs):
        """LoadFile(self, RichTextBuffer buffer, String filename) -> bool"""
        return _richtext.RichTextFileHandler_LoadFile(*args, **kwargs)
    def SaveFile(*args, **kwargs):
        """SaveFile(self, RichTextBuffer buffer, String filename) -> bool"""
        return _richtext.RichTextFileHandler_SaveFile(*args, **kwargs)
    def CanHandle(*args, **kwargs):
        """CanHandle(self, String filename) -> bool"""
        return _richtext.RichTextFileHandler_CanHandle(*args, **kwargs)
    def CanSave(*args, **kwargs):
        """CanSave(self) -> bool"""
        return _richtext.RichTextFileHandler_CanSave(*args, **kwargs)
    def CanLoad(*args, **kwargs):
        """CanLoad(self) -> bool"""
        return _richtext.RichTextFileHandler_CanLoad(*args, **kwargs)
    def IsVisible(*args, **kwargs):
        """IsVisible(self) -> bool"""
        return _richtext.RichTextFileHandler_IsVisible(*args, **kwargs)
    def SetVisible(*args, **kwargs):
        """SetVisible(self, bool visible)"""
        return _richtext.RichTextFileHandler_SetVisible(*args, **kwargs)
    # Getter/setter pairs below are also exposed as Python properties.
    def SetName(*args, **kwargs):
        """SetName(self, String name)"""
        return _richtext.RichTextFileHandler_SetName(*args, **kwargs)
    def GetName(*args, **kwargs):
        """GetName(self) -> String"""
        return _richtext.RichTextFileHandler_GetName(*args, **kwargs)
    Name = property(GetName,SetName)
    def SetExtension(*args, **kwargs):
        """SetExtension(self, String ext)"""
        return _richtext.RichTextFileHandler_SetExtension(*args, **kwargs)
    def GetExtension(*args, **kwargs):
        """GetExtension(self) -> String"""
        return _richtext.RichTextFileHandler_GetExtension(*args, **kwargs)
    Extension = property(GetExtension,SetExtension)
    def SetType(*args, **kwargs):
        """SetType(self, int type)"""
        return _richtext.RichTextFileHandler_SetType(*args, **kwargs)
    def GetType(*args, **kwargs):
        """GetType(self) -> int"""
        return _richtext.RichTextFileHandler_GetType(*args, **kwargs)
    Type = property(GetType,SetType)
    def SetFlags(*args, **kwargs):
        """SetFlags(self, int flags)"""
        return _richtext.RichTextFileHandler_SetFlags(*args, **kwargs)
    def GetFlags(*args, **kwargs):
        """GetFlags(self) -> int"""
        return _richtext.RichTextFileHandler_GetFlags(*args, **kwargs)
    Flags = property(GetFlags,SetFlags)
    def SetEncoding(*args, **kwargs):
        """SetEncoding(self, String encoding)"""
        return _richtext.RichTextFileHandler_SetEncoding(*args, **kwargs)
    def GetEncoding(*args, **kwargs):
        """GetEncoding(self) -> String"""
        return _richtext.RichTextFileHandler_GetEncoding(*args, **kwargs)
    Encoding = property(GetEncoding,SetEncoding)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextFileHandler_swigregister(RichTextFileHandler)
class RichTextPlainTextHandler(RichTextFileHandler):
    """Proxy of C++ RichTextPlainTextHandler class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String name=TextName, String ext=TextExt, int type=RICHTEXT_TYPE_TEXT) -> RichTextPlainTextHandler"""
        _richtext.RichTextPlainTextHandler_swiginit(self,_richtext.new_RichTextPlainTextHandler(*args, **kwargs))
_richtext.RichTextPlainTextHandler_swigregister(RichTextPlainTextHandler)
# Default constructor-argument strings re-exported from the SWIG global
# variable table (cvar).
TextName = cvar.TextName
TextExt = cvar.TextExt
#---------------------------------------------------------------------------
class RichTextRenderer(_core.Object):
    """Proxy of C++ RichTextRenderer class"""
    # Abstract bullet-rendering interface; not constructible from Python —
    # use RichTextStdRenderer or an instance from RichTextBuffer.GetRenderer.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _richtext.delete_RichTextRenderer
    __del__ = lambda self : None;
    def DrawStandardBullet(*args, **kwargs):
        """
        DrawStandardBullet(self, RichTextParagraph paragraph, DC dc, RichTextAttr attr, 
            Rect rect) -> bool
        """
        return _richtext.RichTextRenderer_DrawStandardBullet(*args, **kwargs)
    def DrawTextBullet(*args, **kwargs):
        """
        DrawTextBullet(self, RichTextParagraph paragraph, DC dc, RichTextAttr attr, 
            Rect rect, String text) -> bool
        """
        return _richtext.RichTextRenderer_DrawTextBullet(*args, **kwargs)
    def DrawBitmapBullet(*args, **kwargs):
        """
        DrawBitmapBullet(self, RichTextParagraph paragraph, DC dc, RichTextAttr attr, 
            Rect rect) -> bool
        """
        return _richtext.RichTextRenderer_DrawBitmapBullet(*args, **kwargs)
    def EnumerateStandardBulletNames(*args, **kwargs):
        """EnumerateStandardBulletNames(self, wxArrayString bulletNames) -> bool"""
        return _richtext.RichTextRenderer_EnumerateStandardBulletNames(*args, **kwargs)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextRenderer_swigregister(RichTextRenderer)
class RichTextStdRenderer(RichTextRenderer):
    """Proxy of C++ RichTextStdRenderer class"""
    # Concrete, default-constructible renderer subclass.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> RichTextStdRenderer"""
        _richtext.RichTextStdRenderer_swiginit(self,_richtext.new_RichTextStdRenderer(*args, **kwargs))
_richtext.RichTextStdRenderer_swigregister(RichTextStdRenderer)
#---------------------------------------------------------------------------
# RichTextCtrl window-style and key-modifier flag constants, re-exported
# from the _richtext C extension.
RE_READONLY = _richtext.RE_READONLY
RE_MULTILINE = _richtext.RE_MULTILINE
RE_CENTER_CARET = _richtext.RE_CENTER_CARET
RE_CENTRE_CARET = _richtext.RE_CENTRE_CARET
RICHTEXT_SHIFT_DOWN = _richtext.RICHTEXT_SHIFT_DOWN
RICHTEXT_CTRL_DOWN = _richtext.RICHTEXT_CTRL_DOWN
RICHTEXT_ALT_DOWN = _richtext.RICHTEXT_ALT_DOWN
class RichTextCtrl(_core.Control,_core.TextCtrlIface,_windows.ScrollHelper):
"""Proxy of C++ RichTextCtrl class"""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args, **kwargs):
"""
__init__(self, Window parent, int id=-1, String value=EmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=RE_MULTILINE, Validator validator=DefaultValidator,
String name=RichTextCtrlNameStr) -> RichTextCtrl
"""
_richtext.RichTextCtrl_swiginit(self,_richtext.new_RichTextCtrl(*args, **kwargs))
self._setOORInfo(self)
def Create(*args, **kwargs):
"""
Create(self, Window parent, int id=-1, String value=EmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=RE_MULTILINE, Validator validator=DefaultValidator,
String name=RichTextCtrlNameStr) -> bool
"""
return _richtext.RichTextCtrl_Create(*args, **kwargs)
def GetValue(*args, **kwargs):
"""GetValue(self) -> String"""
return _richtext.RichTextCtrl_GetValue(*args, **kwargs)
def IsSingleLine(*args, **kwargs):
"""IsSingleLine(self) -> bool"""
return _richtext.RichTextCtrl_IsSingleLine(*args, **kwargs)
def IsMultiLine(*args, **kwargs):
"""IsMultiLine(self) -> bool"""
return _richtext.RichTextCtrl_IsMultiLine(*args, **kwargs)
def GetFilename(*args, **kwargs):
"""GetFilename(self) -> String"""
return _richtext.RichTextCtrl_GetFilename(*args, **kwargs)
def SetFilename(*args, **kwargs):
"""SetFilename(self, String filename)"""
return _richtext.RichTextCtrl_SetFilename(*args, **kwargs)
def SetDelayedLayoutThreshold(*args, **kwargs):
"""
SetDelayedLayoutThreshold(self, long threshold)
Set the threshold in character positions for doing layout optimization
during sizing.
"""
return _richtext.RichTextCtrl_SetDelayedLayoutThreshold(*args, **kwargs)
def GetDelayedLayoutThreshold(*args, **kwargs):
"""
GetDelayedLayoutThreshold(self) -> long
Get the threshold in character positions for doing layout optimization
during sizing.
"""
return _richtext.RichTextCtrl_GetDelayedLayoutThreshold(*args, **kwargs)
def GetFullLayoutRequired(*args, **kwargs):
"""GetFullLayoutRequired(self) -> bool"""
return _richtext.RichTextCtrl_GetFullLayoutRequired(*args, **kwargs)
def SetFullLayoutRequired(*args, **kwargs):
"""SetFullLayoutRequired(self, bool b)"""
return _richtext.RichTextCtrl_SetFullLayoutRequired(*args, **kwargs)
def GetFullLayoutTime(*args, **kwargs):
"""GetFullLayoutTime(self) -> wxLongLong"""
return _richtext.RichTextCtrl_GetFullLayoutTime(*args, **kwargs)
def SetFullLayoutTime(*args, **kwargs):
"""SetFullLayoutTime(self, wxLongLong t)"""
return _richtext.RichTextCtrl_SetFullLayoutTime(*args, **kwargs)
def GetFullLayoutSavedPosition(*args, **kwargs):
"""GetFullLayoutSavedPosition(self) -> long"""
return _richtext.RichTextCtrl_GetFullLayoutSavedPosition(*args, **kwargs)
    # NOTE(review): this file is SWIG-generated; hand edits are lost when the
    # bindings are regenerated. Every method below is a thin delegate to the
    # C++ implementation in the _richtext extension module.
    # --- Layout position, text/URL cursors, drag state, context menu and
    # --- selection-anchor accessors.
    def SetFullLayoutSavedPosition(*args, **kwargs):
        """SetFullLayoutSavedPosition(self, long p)"""
        return _richtext.RichTextCtrl_SetFullLayoutSavedPosition(*args, **kwargs)
    def ForceDelayedLayout(*args, **kwargs):
        """ForceDelayedLayout(self)"""
        return _richtext.RichTextCtrl_ForceDelayedLayout(*args, **kwargs)
    def SetTextCursor(*args, **kwargs):
        """
        SetTextCursor(self, Cursor cursor)
        Set text cursor
        """
        return _richtext.RichTextCtrl_SetTextCursor(*args, **kwargs)
    def GetTextCursor(*args, **kwargs):
        """
        GetTextCursor(self) -> Cursor
        Get text cursor
        """
        return _richtext.RichTextCtrl_GetTextCursor(*args, **kwargs)
    def SetURLCursor(*args, **kwargs):
        """
        SetURLCursor(self, Cursor cursor)
        Set URL cursor
        """
        return _richtext.RichTextCtrl_SetURLCursor(*args, **kwargs)
    def GetURLCursor(*args, **kwargs):
        """
        GetURLCursor(self) -> Cursor
        Get URL cursor
        """
        return _richtext.RichTextCtrl_GetURLCursor(*args, **kwargs)
    def GetCaretAtLineStart(*args, **kwargs):
        """GetCaretAtLineStart(self) -> bool"""
        return _richtext.RichTextCtrl_GetCaretAtLineStart(*args, **kwargs)
    def SetCaretAtLineStart(*args, **kwargs):
        """SetCaretAtLineStart(self, bool atStart)"""
        return _richtext.RichTextCtrl_SetCaretAtLineStart(*args, **kwargs)
    def GetDragging(*args, **kwargs):
        """GetDragging(self) -> bool"""
        return _richtext.RichTextCtrl_GetDragging(*args, **kwargs)
    def SetDragging(*args, **kwargs):
        """SetDragging(self, bool dragging)"""
        return _richtext.RichTextCtrl_SetDragging(*args, **kwargs)
    def GetPreDrag(*args, **kwargs):
        """GetPreDrag(self) -> bool"""
        return _richtext.RichTextCtrl_GetPreDrag(*args, **kwargs)
    def SetPreDrag(*args, **kwargs):
        """SetPreDrag(self, bool pd)"""
        return _richtext.RichTextCtrl_SetPreDrag(*args, **kwargs)
    def GetDragStartPoint(*args, **kwargs):
        """GetDragStartPoint(self) -> Point"""
        return _richtext.RichTextCtrl_GetDragStartPoint(*args, **kwargs)
    def SetDragStartPoint(*args, **kwargs):
        """SetDragStartPoint(self, Point sp)"""
        return _richtext.RichTextCtrl_SetDragStartPoint(*args, **kwargs)
    def GetDragStartTime(*args, **kwargs):
        """GetDragStartTime(self) -> DateTime"""
        return _richtext.RichTextCtrl_GetDragStartTime(*args, **kwargs)
    def SetDragStartTime(*args, **kwargs):
        """SetDragStartTime(self, DateTime st)"""
        return _richtext.RichTextCtrl_SetDragStartTime(*args, **kwargs)
    def GetBufferBitmap(*args, **kwargs):
        """GetBufferBitmap(self) -> Bitmap"""
        return _richtext.RichTextCtrl_GetBufferBitmap(*args, **kwargs)
    def GetContextMenu(*args, **kwargs):
        """GetContextMenu(self) -> Menu"""
        return _richtext.RichTextCtrl_GetContextMenu(*args, **kwargs)
    def SetContextMenu(*args, **kwargs):
        """SetContextMenu(self, Menu menu)"""
        return _richtext.RichTextCtrl_SetContextMenu(*args, **kwargs)
    def GetSelectionAnchor(*args, **kwargs):
        """GetSelectionAnchor(self) -> long"""
        return _richtext.RichTextCtrl_GetSelectionAnchor(*args, **kwargs)
    def SetSelectionAnchor(*args, **kwargs):
        """SetSelectionAnchor(self, long anchor)"""
        return _richtext.RichTextCtrl_SetSelectionAnchor(*args, **kwargs)
    # --- File load/save and I/O handler configuration (SWIG-generated
    # --- delegates to the _richtext extension).
    def LoadFile(*args, **kwargs):
        """
        LoadFile(self, String file, int type=RICHTEXT_TYPE_ANY) -> bool
        Load the contents of the document from the given filename.
        """
        return _richtext.RichTextCtrl_LoadFile(*args, **kwargs)
    def SaveFile(*args, **kwargs):
        """
        SaveFile(self, String file=EmptyString, int type=RICHTEXT_TYPE_ANY) -> bool
        Save the contents of the document to the given filename, or if the
        empty string is passed then to the filename set with `SetFilename`.
        """
        return _richtext.RichTextCtrl_SaveFile(*args, **kwargs)
    def SetHandlerFlags(*args, **kwargs):
        """
        SetHandlerFlags(self, int flags)
        Set the handler flags, controlling loading and saving.
        """
        return _richtext.RichTextCtrl_SetHandlerFlags(*args, **kwargs)
    def GetHandlerFlags(*args, **kwargs):
        """
        GetHandlerFlags(self) -> int
        Get the handler flags, controlling loading and saving.
        """
        return _richtext.RichTextCtrl_GetHandlerFlags(*args, **kwargs)
    def SetMaxLength(*args, **kwargs):
        """
        SetMaxLength(self, unsigned long len)
        Set the max number of characters which may be entered in a single line
        text control.
        """
        return _richtext.RichTextCtrl_SetMaxLength(*args, **kwargs)
    # --- Style get/set over ranges, list styling and hit testing
    # --- (SWIG-generated delegates to the _richtext extension).
    def SetStyle(*args, **kwargs):
        """
        SetStyle(self, RichTextRange range, RichTextAttr style) -> bool
        Set the style for the text in ``range`` to ``style``
        """
        return _richtext.RichTextCtrl_SetStyle(*args, **kwargs)
    def GetStyle(*args, **kwargs):
        """
        GetStyle(self, long position, RichTextAttr style) -> bool
        Retrieve the style used at the given position. Copies the style
        values at ``position`` into the ``style`` parameter and returns ``True``
        if successful. Returns ``False`` otherwise.
        """
        return _richtext.RichTextCtrl_GetStyle(*args, **kwargs)
    def GetStyleForRange(*args, **kwargs):
        """
        GetStyleForRange(self, RichTextRange range, RichTextAttr style) -> bool
        Get the common set of styles for the range
        """
        return _richtext.RichTextCtrl_GetStyleForRange(*args, **kwargs)
    def SetStyleEx(*args, **kwargs):
        """
        SetStyleEx(self, RichTextRange range, RichTextAttr style, int flags=RICHTEXT_SETSTYLE_WITH_UNDO) -> bool
        Extended style setting operation with flags including:
        RICHTEXT_SETSTYLE_WITH_UNDO, RICHTEXT_SETSTYLE_OPTIMIZE,
        RICHTEXT_SETSTYLE_PARAGRAPHS_ONLY, RICHTEXT_SETSTYLE_CHARACTERS_ONLY
        """
        return _richtext.RichTextCtrl_SetStyleEx(*args, **kwargs)
    def GetUncombinedStyle(*args, **kwargs):
        """
        GetUncombinedStyle(self, long position, RichTextAttr style) -> bool
        Get the content (uncombined) attributes for this position. Copies the
        style values at ``position`` into the ``style`` parameter and returns
        ``True`` if successful. Returns ``False`` otherwise.
        """
        return _richtext.RichTextCtrl_GetUncombinedStyle(*args, **kwargs)
    def SetDefaultStyle(*args, **kwargs):
        """
        SetDefaultStyle(self, RichTextAttr style) -> bool
        Set the style used by default for the rich text document.
        """
        return _richtext.RichTextCtrl_SetDefaultStyle(*args, **kwargs)
    def GetDefaultStyle(*args, **kwargs):
        """
        GetDefaultStyle(self) -> RichTextAttr
        Retrieves a copy of the default style object.
        """
        return _richtext.RichTextCtrl_GetDefaultStyle(*args, **kwargs)
    def SetListStyle(*args, **kwargs):
        """
        SetListStyle(self, RichTextRange range, String defName, int flags=RICHTEXT_SETSTYLE_WITH_UNDO,
            int startFrom=1, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextCtrl_SetListStyle(*args, **kwargs)
    def ClearListStyle(*args, **kwargs):
        """ClearListStyle(self, RichTextRange range, int flags=RICHTEXT_SETSTYLE_WITH_UNDO) -> bool"""
        return _richtext.RichTextCtrl_ClearListStyle(*args, **kwargs)
    def NumberList(*args, **kwargs):
        """
        NumberList(self, RichTextRange range, String defName, int flags=RICHTEXT_SETSTYLE_WITH_UNDO,
            int startFrom=1, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextCtrl_NumberList(*args, **kwargs)
    def PromoteList(*args, **kwargs):
        """
        PromoteList(self, int promoteBy, RichTextRange range, String defName,
            int flags=RICHTEXT_SETSTYLE_WITH_UNDO, int specifiedLevel=-1) -> bool
        """
        return _richtext.RichTextCtrl_PromoteList(*args, **kwargs)
    def Delete(*args, **kwargs):
        """Delete(self, RichTextRange range) -> bool"""
        return _richtext.RichTextCtrl_Delete(*args, **kwargs)
    def HitTestXY(*args, **kwargs):
        """
        HitTestXY(self, Point pt) --> (result, col, row)
        Returns the column and row of the given point in pixels. Note that
        ``pt`` should be given in device coordinates, and not be adjusted for
        the client area origin nor for scrolling. The return value is a tuple
        of the hit test result and the column and row values.
        """
        # (docstring previously said ``HitTestRC``; corrected to the actual
        # method name.)
        return _richtext.RichTextCtrl_HitTestXY(*args, **kwargs)
    def FindContainerAtPoint(*args, **kwargs):
        """
        FindContainerAtPoint(self, Point pt, long position, int hit, RichTextObject hitObj,
            int flags=0) -> RichTextParagraphLayoutBox
        """
        return _richtext.RichTextCtrl_FindContainerAtPoint(*args, **kwargs)
    # --- Selection removal and content insertion: images, newlines, line
    # --- breaks (SWIG-generated delegates to the _richtext extension).
    def DeleteSelection(*args, **kwargs):
        """
        DeleteSelection(self)
        Remove the current selection.
        """
        return _richtext.RichTextCtrl_DeleteSelection(*args, **kwargs)
    def CanDeleteSelection(*args, **kwargs):
        """
        CanDeleteSelection(self) -> bool
        Returns ``True`` if the selection can be removed from the document.
        """
        return _richtext.RichTextCtrl_CanDeleteSelection(*args, **kwargs)
    def HasSelection(*args, **kwargs):
        """HasSelection(self) -> bool"""
        return _richtext.RichTextCtrl_HasSelection(*args, **kwargs)
    def WriteImage(*args, **kwargs):
        """
        WriteImage(self, Image image, int bitmapType=BITMAP_TYPE_PNG) -> bool
        Write an image at the current insertion point. Supply optional type to
        use for internal and file storage of the raw data.
        """
        return _richtext.RichTextCtrl_WriteImage(*args, **kwargs)
    def WriteBitmap(*args, **kwargs):
        """
        WriteBitmap(self, Bitmap bitmap, int bitmapType=BITMAP_TYPE_PNG) -> bool
        Write a bitmap at the current insertion point. Supply optional type to
        use for internal and file storage of the raw data.
        """
        return _richtext.RichTextCtrl_WriteBitmap(*args, **kwargs)
    def WriteImageFile(*args, **kwargs):
        """
        WriteImageFile(self, String filename, int bitmapType) -> bool
        Load an image from file and write at the current insertion point.
        """
        return _richtext.RichTextCtrl_WriteImageFile(*args, **kwargs)
    def WriteImageBlock(*args, **kwargs):
        """
        WriteImageBlock(self, wxRichTextImageBlock imageBlock) -> bool
        Write an image block at the current insertion point.
        """
        return _richtext.RichTextCtrl_WriteImageBlock(*args, **kwargs)
    def Newline(*args, **kwargs):
        """
        Newline(self) -> bool
        Insert a newline (actually paragraph) at the current insertion point.
        """
        return _richtext.RichTextCtrl_Newline(*args, **kwargs)
    def LineBreak(*args, **kwargs):
        """
        LineBreak(self) -> bool
        Insert a line break at the current insertion point.
        """
        return _richtext.RichTextCtrl_LineBreak(*args, **kwargs)
    # --- Basic (overall) style and the Begin*/End* stack of character
    # --- attributes: bold, italic, underline, font size, font, colour.
    # --- Each Begin* pushes an attribute applied to subsequently written
    # --- text until the matching End* (SWIG-generated delegates).
    def SetBasicStyle(*args, **kwargs):
        """SetBasicStyle(self, RichTextAttr style)"""
        return _richtext.RichTextCtrl_SetBasicStyle(*args, **kwargs)
    def GetBasicStyle(*args, **kwargs):
        """
        GetBasicStyle(self) -> RichTextAttr
        Get basic (overall) style
        """
        return _richtext.RichTextCtrl_GetBasicStyle(*args, **kwargs)
    def BeginStyle(*args, **kwargs):
        """
        BeginStyle(self, RichTextAttr style) -> bool
        Begin using a style
        """
        return _richtext.RichTextCtrl_BeginStyle(*args, **kwargs)
    def EndStyle(*args, **kwargs):
        """
        EndStyle(self) -> bool
        End the style
        """
        return _richtext.RichTextCtrl_EndStyle(*args, **kwargs)
    def EndAllStyles(*args, **kwargs):
        """
        EndAllStyles(self) -> bool
        End all styles
        """
        return _richtext.RichTextCtrl_EndAllStyles(*args, **kwargs)
    def BeginBold(*args, **kwargs):
        """
        BeginBold(self) -> bool
        Begin using bold
        """
        return _richtext.RichTextCtrl_BeginBold(*args, **kwargs)
    def EndBold(*args, **kwargs):
        """
        EndBold(self) -> bool
        End using bold
        """
        return _richtext.RichTextCtrl_EndBold(*args, **kwargs)
    def BeginItalic(*args, **kwargs):
        """
        BeginItalic(self) -> bool
        Begin using italic
        """
        return _richtext.RichTextCtrl_BeginItalic(*args, **kwargs)
    def EndItalic(*args, **kwargs):
        """
        EndItalic(self) -> bool
        End using italic
        """
        return _richtext.RichTextCtrl_EndItalic(*args, **kwargs)
    def BeginUnderline(*args, **kwargs):
        """
        BeginUnderline(self) -> bool
        Begin using underline
        """
        return _richtext.RichTextCtrl_BeginUnderline(*args, **kwargs)
    def EndUnderline(*args, **kwargs):
        """
        EndUnderline(self) -> bool
        End using underline
        """
        return _richtext.RichTextCtrl_EndUnderline(*args, **kwargs)
    def BeginFontSize(*args, **kwargs):
        """
        BeginFontSize(self, int pointSize) -> bool
        Begin using point size
        """
        return _richtext.RichTextCtrl_BeginFontSize(*args, **kwargs)
    def EndFontSize(*args, **kwargs):
        """
        EndFontSize(self) -> bool
        End using point size
        """
        return _richtext.RichTextCtrl_EndFontSize(*args, **kwargs)
    def BeginFont(*args, **kwargs):
        """
        BeginFont(self, Font font) -> bool
        Begin using this font
        """
        return _richtext.RichTextCtrl_BeginFont(*args, **kwargs)
    def EndFont(*args, **kwargs):
        """
        EndFont(self) -> bool
        End using a font
        """
        return _richtext.RichTextCtrl_EndFont(*args, **kwargs)
    def BeginTextColour(*args, **kwargs):
        """
        BeginTextColour(self, Colour colour) -> bool
        Begin using this colour
        """
        return _richtext.RichTextCtrl_BeginTextColour(*args, **kwargs)
    def EndTextColour(*args, **kwargs):
        """
        EndTextColour(self) -> bool
        End using a colour
        """
        return _richtext.RichTextCtrl_EndTextColour(*args, **kwargs)
    # --- Begin*/End* stack of paragraph attributes: alignment, indents,
    # --- spacing and the three bullet flavours (numbered, symbol, standard).
    # --- (SWIG-generated delegates to the _richtext extension.)
    def BeginAlignment(*args, **kwargs):
        """
        BeginAlignment(self, int alignment) -> bool
        Begin using alignment
        """
        return _richtext.RichTextCtrl_BeginAlignment(*args, **kwargs)
    def EndAlignment(*args, **kwargs):
        """
        EndAlignment(self) -> bool
        End alignment
        """
        return _richtext.RichTextCtrl_EndAlignment(*args, **kwargs)
    def BeginLeftIndent(*args, **kwargs):
        """
        BeginLeftIndent(self, int leftIndent, int leftSubIndent=0) -> bool
        Begin left indent
        """
        return _richtext.RichTextCtrl_BeginLeftIndent(*args, **kwargs)
    def EndLeftIndent(*args, **kwargs):
        """
        EndLeftIndent(self) -> bool
        End left indent
        """
        return _richtext.RichTextCtrl_EndLeftIndent(*args, **kwargs)
    def BeginRightIndent(*args, **kwargs):
        """
        BeginRightIndent(self, int rightIndent) -> bool
        Begin right indent
        """
        return _richtext.RichTextCtrl_BeginRightIndent(*args, **kwargs)
    def EndRightIndent(*args, **kwargs):
        """
        EndRightIndent(self) -> bool
        End right indent
        """
        return _richtext.RichTextCtrl_EndRightIndent(*args, **kwargs)
    def BeginParagraphSpacing(*args, **kwargs):
        """
        BeginParagraphSpacing(self, int before, int after) -> bool
        Begin paragraph spacing
        """
        return _richtext.RichTextCtrl_BeginParagraphSpacing(*args, **kwargs)
    def EndParagraphSpacing(*args, **kwargs):
        """
        EndParagraphSpacing(self) -> bool
        End paragraph spacing
        """
        return _richtext.RichTextCtrl_EndParagraphSpacing(*args, **kwargs)
    def BeginLineSpacing(*args, **kwargs):
        """
        BeginLineSpacing(self, int lineSpacing) -> bool
        Begin line spacing
        """
        return _richtext.RichTextCtrl_BeginLineSpacing(*args, **kwargs)
    def EndLineSpacing(*args, **kwargs):
        """
        EndLineSpacing(self) -> bool
        End line spacing
        """
        return _richtext.RichTextCtrl_EndLineSpacing(*args, **kwargs)
    def BeginNumberedBullet(*args, **kwargs):
        """
        BeginNumberedBullet(self, int bulletNumber, int leftIndent, int leftSubIndent,
            int bulletStyle=wxTEXT_ATTR_BULLET_STYLE_ARABIC|wxTEXT_ATTR_BULLET_STYLE_PERIOD) -> bool
        Begin numbered bullet
        """
        return _richtext.RichTextCtrl_BeginNumberedBullet(*args, **kwargs)
    def EndNumberedBullet(*args, **kwargs):
        """
        EndNumberedBullet(self) -> bool
        End numbered bullet
        """
        return _richtext.RichTextCtrl_EndNumberedBullet(*args, **kwargs)
    def BeginSymbolBullet(*args, **kwargs):
        """
        BeginSymbolBullet(self, String symbol, int leftIndent, int leftSubIndent, int bulletStyle=TEXT_ATTR_BULLET_STYLE_SYMBOL) -> bool
        Begin symbol bullet
        """
        return _richtext.RichTextCtrl_BeginSymbolBullet(*args, **kwargs)
    def EndSymbolBullet(*args, **kwargs):
        """
        EndSymbolBullet(self) -> bool
        End symbol bullet
        """
        return _richtext.RichTextCtrl_EndSymbolBullet(*args, **kwargs)
    def BeginStandardBullet(*args, **kwargs):
        """
        BeginStandardBullet(self, String bulletName, int leftIndent, int leftSubIndent,
            int bulletStyle=TEXT_ATTR_BULLET_STYLE_STANDARD) -> bool
        Begin standard bullet
        """
        return _richtext.RichTextCtrl_BeginStandardBullet(*args, **kwargs)
    def EndStandardBullet(*args, **kwargs):
        """
        EndStandardBullet(self) -> bool
        End standard bullet
        """
        return _richtext.RichTextCtrl_EndStandardBullet(*args, **kwargs)
    # --- Begin*/End* for named (style-sheet) character, paragraph and list
    # --- styles, and for URLs (SWIG-generated delegates).
    def BeginCharacterStyle(*args, **kwargs):
        """
        BeginCharacterStyle(self, String characterStyle) -> bool
        Begin named character style
        """
        return _richtext.RichTextCtrl_BeginCharacterStyle(*args, **kwargs)
    def EndCharacterStyle(*args, **kwargs):
        """
        EndCharacterStyle(self) -> bool
        End named character style
        """
        return _richtext.RichTextCtrl_EndCharacterStyle(*args, **kwargs)
    def BeginParagraphStyle(*args, **kwargs):
        """
        BeginParagraphStyle(self, String paragraphStyle) -> bool
        Begin named paragraph style
        """
        return _richtext.RichTextCtrl_BeginParagraphStyle(*args, **kwargs)
    def EndParagraphStyle(*args, **kwargs):
        """
        EndParagraphStyle(self) -> bool
        End named paragraph style
        """
        # (docstring previously said "character style"; corrected to match
        # the method.)
        return _richtext.RichTextCtrl_EndParagraphStyle(*args, **kwargs)
    def BeginListStyle(*args, **kwargs):
        """
        BeginListStyle(self, String listStyle, int level=1, int number=1) -> bool
        Begin named list style.
        """
        return _richtext.RichTextCtrl_BeginListStyle(*args, **kwargs)
    def EndListStyle(*args, **kwargs):
        """
        EndListStyle(self) -> bool
        End named list style.
        """
        return _richtext.RichTextCtrl_EndListStyle(*args, **kwargs)
    def BeginURL(*args, **kwargs):
        """
        BeginURL(self, String url, String characterStyle=wxEmptyString) -> bool
        Begin URL.
        """
        return _richtext.RichTextCtrl_BeginURL(*args, **kwargs)
    def EndURL(*args, **kwargs):
        """
        EndURL(self) -> bool
        End URL.
        """
        return _richtext.RichTextCtrl_EndURL(*args, **kwargs)
    # --- Selection-range access (note external vs internal range formats)
    # --- and append-style content helpers (SWIG-generated delegates).
    def SetDefaultStyleToCursorStyle(*args, **kwargs):
        """
        SetDefaultStyleToCursorStyle(self) -> bool
        Sets the default style to the style under the cursor
        """
        return _richtext.RichTextCtrl_SetDefaultStyleToCursorStyle(*args, **kwargs)
    def SelectNone(*args, **kwargs):
        """
        SelectNone(self)
        Clear the selection
        """
        return _richtext.RichTextCtrl_SelectNone(*args, **kwargs)
    def SelectWord(*args, **kwargs):
        """
        SelectWord(self, long position) -> bool
        Select the word at the given character position
        """
        return _richtext.RichTextCtrl_SelectWord(*args, **kwargs)
    def GetSelectionRange(*args, **kwargs):
        """
        GetSelectionRange(self) -> RichTextRange
        Get the selection range in character positions.
        """
        return _richtext.RichTextCtrl_GetSelectionRange(*args, **kwargs)
    def SetSelectionRange(*args, **kwargs):
        """
        SetSelectionRange(self, RichTextRange range)
        Set the selection range in character positions. The end point of range
        is specified as the last character position of the span of text, plus
        one. So, for example, to set the selection for a character at position
        5, use the range (5,6).
        """
        return _richtext.RichTextCtrl_SetSelectionRange(*args, **kwargs)
    def GetInternalSelectionRange(*args, **kwargs):
        """
        GetInternalSelectionRange(self) -> RichTextRange
        Get the selection range in character positions. The range is in
        internal format, i.e. a single character selection is denoted by (n,n).
        """
        return _richtext.RichTextCtrl_GetInternalSelectionRange(*args, **kwargs)
    def SetInternalSelectionRange(*args, **kwargs):
        """
        SetInternalSelectionRange(self, RichTextRange range)
        Set the selection range in character positions. The range is in
        internal format, i.e. a single character selection is denoted by (n,n).
        """
        return _richtext.RichTextCtrl_SetInternalSelectionRange(*args, **kwargs)
    def AddParagraph(*args, **kwargs):
        """
        AddParagraph(self, String text) -> RichTextRange
        Add a new paragraph of text to the end of the buffer
        """
        return _richtext.RichTextCtrl_AddParagraph(*args, **kwargs)
    def AddImage(*args, **kwargs):
        """
        AddImage(self, Image image) -> RichTextRange
        Add an image
        """
        return _richtext.RichTextCtrl_AddImage(*args, **kwargs)
    # --- Layout and caret navigation. Per the generated signatures, the
    # --- ``flags`` arguments default to 0 and the movement counts to 1.
    # NOTE(review): WordLeft/WordRight docstrings call their count parameter
    # ``noPages`` — presumably a copy of the Page* signatures in the SWIG
    # interface; the count is a word count. Verify against the C++ headers.
    def LayoutContent(*args, **kwargs):
        """
        LayoutContent(self, bool onlyVisibleRect=False) -> bool
        Layout the buffer: which we must do before certain operations, such as
        setting the caret position.
        """
        return _richtext.RichTextCtrl_LayoutContent(*args, **kwargs)
    def MoveCaret(*args, **kwargs):
        """
        MoveCaret(self, long pos, bool showAtLineStart=False) -> bool
        Move the caret to the given character position
        """
        return _richtext.RichTextCtrl_MoveCaret(*args, **kwargs)
    def MoveRight(*args, **kwargs):
        """
        MoveRight(self, int noPositions=1, int flags=0) -> bool
        Move right
        """
        return _richtext.RichTextCtrl_MoveRight(*args, **kwargs)
    def MoveLeft(*args, **kwargs):
        """
        MoveLeft(self, int noPositions=1, int flags=0) -> bool
        Move left
        """
        return _richtext.RichTextCtrl_MoveLeft(*args, **kwargs)
    def MoveUp(*args, **kwargs):
        """
        MoveUp(self, int noLines=1, int flags=0) -> bool
        Move up
        """
        return _richtext.RichTextCtrl_MoveUp(*args, **kwargs)
    def MoveDown(*args, **kwargs):
        """
        MoveDown(self, int noLines=1, int flags=0) -> bool
        Move down
        """
        return _richtext.RichTextCtrl_MoveDown(*args, **kwargs)
    def MoveToLineEnd(*args, **kwargs):
        """
        MoveToLineEnd(self, int flags=0) -> bool
        Move to the end of the line
        """
        return _richtext.RichTextCtrl_MoveToLineEnd(*args, **kwargs)
    def MoveToLineStart(*args, **kwargs):
        """
        MoveToLineStart(self, int flags=0) -> bool
        Move to the start of the line
        """
        return _richtext.RichTextCtrl_MoveToLineStart(*args, **kwargs)
    def MoveToParagraphEnd(*args, **kwargs):
        """
        MoveToParagraphEnd(self, int flags=0) -> bool
        Move to the end of the paragraph
        """
        return _richtext.RichTextCtrl_MoveToParagraphEnd(*args, **kwargs)
    def MoveToParagraphStart(*args, **kwargs):
        """
        MoveToParagraphStart(self, int flags=0) -> bool
        Move to the start of the paragraph
        """
        return _richtext.RichTextCtrl_MoveToParagraphStart(*args, **kwargs)
    def MoveHome(*args, **kwargs):
        """
        MoveHome(self, int flags=0) -> bool
        Move to the start of the buffer
        """
        return _richtext.RichTextCtrl_MoveHome(*args, **kwargs)
    def MoveEnd(*args, **kwargs):
        """
        MoveEnd(self, int flags=0) -> bool
        Move to the end of the buffer
        """
        return _richtext.RichTextCtrl_MoveEnd(*args, **kwargs)
    def PageUp(*args, **kwargs):
        """
        PageUp(self, int noPages=1, int flags=0) -> bool
        Move n pages up
        """
        return _richtext.RichTextCtrl_PageUp(*args, **kwargs)
    def PageDown(*args, **kwargs):
        """
        PageDown(self, int noPages=1, int flags=0) -> bool
        Move n pages down
        """
        return _richtext.RichTextCtrl_PageDown(*args, **kwargs)
    def WordLeft(*args, **kwargs):
        """
        WordLeft(self, int noPages=1, int flags=0) -> bool
        Move n words left
        """
        return _richtext.RichTextCtrl_WordLeft(*args, **kwargs)
    def WordRight(*args, **kwargs):
        """
        WordRight(self, int noPages=1, int flags=0) -> bool
        Move n words right
        """
        return _richtext.RichTextCtrl_WordRight(*args, **kwargs)
    # --- Buffer access and undo-history batching/suppression. Begin/End
    # --- pairs bracket command groups (SWIG-generated delegates).
    def GetBuffer(*args, **kwargs):
        """
        GetBuffer(self) -> RichTextBuffer
        Returns the buffer associated with the control.
        """
        return _richtext.RichTextCtrl_GetBuffer(*args, **kwargs)
    def BeginBatchUndo(*args, **kwargs):
        """
        BeginBatchUndo(self, String cmdName) -> bool
        Start batching undo history for commands
        """
        return _richtext.RichTextCtrl_BeginBatchUndo(*args, **kwargs)
    def EndBatchUndo(*args, **kwargs):
        """
        EndBatchUndo(self) -> bool
        End batching undo history for commands.
        """
        return _richtext.RichTextCtrl_EndBatchUndo(*args, **kwargs)
    def BatchingUndo(*args, **kwargs):
        """
        BatchingUndo(self) -> bool
        Are we batching undo history for commands?
        """
        return _richtext.RichTextCtrl_BatchingUndo(*args, **kwargs)
    def BeginSuppressUndo(*args, **kwargs):
        """
        BeginSuppressUndo(self) -> bool
        Start suppressing undo history for commands.
        """
        return _richtext.RichTextCtrl_BeginSuppressUndo(*args, **kwargs)
    def EndSuppressUndo(*args, **kwargs):
        """
        EndSuppressUndo(self) -> bool
        End suppressing undo history for commands.
        """
        return _richtext.RichTextCtrl_EndSuppressUndo(*args, **kwargs)
    def SuppressingUndo(*args, **kwargs):
        """
        SuppressingUndo(self) -> bool
        Are we suppressing undo history for commands?
        """
        return _richtext.RichTextCtrl_SuppressingUndo(*args, **kwargs)
    # --- Attribute queries over the selection (for toolbar/button updating)
    # --- and the Apply*ToSelection mutators (SWIG-generated delegates).
    def HasCharacterAttributes(*args, **kwargs):
        """
        HasCharacterAttributes(self, RichTextRange range, RichTextAttr style) -> bool
        Test if this whole range has character attributes of the specified
        kind. If any of the attributes are different within the range, the
        test fails. You can use this to implement, for example, bold button
        updating. ``style`` must have flags indicating which attributes are of
        interest.
        """
        return _richtext.RichTextCtrl_HasCharacterAttributes(*args, **kwargs)
    def HasParagraphAttributes(*args, **kwargs):
        """
        HasParagraphAttributes(self, RichTextRange range, RichTextAttr style) -> bool
        Test if this whole range has paragraph attributes of the specified
        kind. If any of the attributes are different within the range, the
        test fails. You can use this to implement, for example, centering
        button updating. style must have flags indicating which attributes are
        of interest.
        """
        return _richtext.RichTextCtrl_HasParagraphAttributes(*args, **kwargs)
    def IsSelectionBold(*args, **kwargs):
        """
        IsSelectionBold(self) -> bool
        Is all of the selection bold?
        """
        return _richtext.RichTextCtrl_IsSelectionBold(*args, **kwargs)
    def IsSelectionItalics(*args, **kwargs):
        """
        IsSelectionItalics(self) -> bool
        Is all of the selection italics?
        """
        return _richtext.RichTextCtrl_IsSelectionItalics(*args, **kwargs)
    def IsSelectionUnderlined(*args, **kwargs):
        """
        IsSelectionUnderlined(self) -> bool
        Is all of the selection underlined?
        """
        return _richtext.RichTextCtrl_IsSelectionUnderlined(*args, **kwargs)
    def DoesSelectionHaveTextEffectFlag(*args, **kwargs):
        """DoesSelectionHaveTextEffectFlag(self, int flag) -> bool"""
        return _richtext.RichTextCtrl_DoesSelectionHaveTextEffectFlag(*args, **kwargs)
    def IsSelectionAligned(*args, **kwargs):
        """
        IsSelectionAligned(self, int alignment) -> bool
        Is all of the selection aligned according to the specified flag?
        """
        return _richtext.RichTextCtrl_IsSelectionAligned(*args, **kwargs)
    def ApplyBoldToSelection(*args, **kwargs):
        """
        ApplyBoldToSelection(self) -> bool
        Apply bold to the selection
        """
        return _richtext.RichTextCtrl_ApplyBoldToSelection(*args, **kwargs)
    def ApplyItalicToSelection(*args, **kwargs):
        """
        ApplyItalicToSelection(self) -> bool
        Apply italic to the selection
        """
        return _richtext.RichTextCtrl_ApplyItalicToSelection(*args, **kwargs)
    def ApplyUnderlineToSelection(*args, **kwargs):
        """
        ApplyUnderlineToSelection(self) -> bool
        Apply underline to the selection
        """
        return _richtext.RichTextCtrl_ApplyUnderlineToSelection(*args, **kwargs)
    def ApplyTextEffectToSelection(*args, **kwargs):
        """ApplyTextEffectToSelection(self, int flags) -> bool"""
        return _richtext.RichTextCtrl_ApplyTextEffectToSelection(*args, **kwargs)
    def ApplyAlignmentToSelection(*args, **kwargs):
        """
        ApplyAlignmentToSelection(self, int alignment) -> bool
        Apply alignment to the selection
        """
        return _richtext.RichTextCtrl_ApplyAlignmentToSelection(*args, **kwargs)
    def ApplyStyle(*args, **kwargs):
        """
        ApplyStyle(self, wxRichTextStyleDefinition def) -> bool
        Apply a named style to the selection
        """
        return _richtext.RichTextCtrl_ApplyStyle(*args, **kwargs)
    # --- Style-sheet stack management, context-menu helpers, and the
    # --- Pythonic property aliases for the getter/setter pairs above.
    def SetStyleSheet(*args, **kwargs):
        """
        SetStyleSheet(self, wxRichTextStyleSheet styleSheet)
        Set style sheet, if any.
        """
        return _richtext.RichTextCtrl_SetStyleSheet(*args, **kwargs)
    def GetStyleSheet(*args, **kwargs):
        """GetStyleSheet(self) -> wxRichTextStyleSheet"""
        return _richtext.RichTextCtrl_GetStyleSheet(*args, **kwargs)
    def PushStyleSheet(*args, **kwargs):
        """
        PushStyleSheet(self, wxRichTextStyleSheet styleSheet) -> bool
        Push style sheet to top of stack
        """
        return _richtext.RichTextCtrl_PushStyleSheet(*args, **kwargs)
    def PopStyleSheet(*args, **kwargs):
        """
        PopStyleSheet(self) -> wxRichTextStyleSheet
        Pop style sheet from top of stack
        """
        return _richtext.RichTextCtrl_PopStyleSheet(*args, **kwargs)
    def ApplyStyleSheet(*args, **kwargs):
        """
        ApplyStyleSheet(self, wxRichTextStyleSheet styleSheet=None) -> bool
        Apply the style sheet to the buffer, for example if the styles have
        changed.
        """
        return _richtext.RichTextCtrl_ApplyStyleSheet(*args, **kwargs)
    def ShowContextMenu(*args, **kwargs):
        """ShowContextMenu(self, Menu menu, Point pt, bool addPropertyCommands=True) -> bool"""
        return _richtext.RichTextCtrl_ShowContextMenu(*args, **kwargs)
    def PrepareContextMenu(*args, **kwargs):
        """PrepareContextMenu(self, Menu menu, Point pt, bool addPropertyCommands=True) -> int"""
        return _richtext.RichTextCtrl_PrepareContextMenu(*args, **kwargs)
    # Property aliases over the generated accessors; Buffer is read-only
    # (no setter supplied).
    Buffer = property(GetBuffer)
    DelayedLayoutThreshold = property(GetDelayedLayoutThreshold,SetDelayedLayoutThreshold)
    Filename = property(GetFilename,SetFilename)
    InternalSelectionRange = property(GetInternalSelectionRange,SetInternalSelectionRange)
    SelectionRange = property(GetSelectionRange,SetSelectionRange)
    StyleSheet = property(GetStyleSheet,SetStyleSheet)
    TextCursor = property(GetTextCursor,SetTextCursor)
    URLCursor = property(GetURLCursor,SetURLCursor)
    # --- Lower-level implementation hooks: scrollbar setup, keyboard
    # --- navigation, caret positioning and coordinate conversion. Mostly
    # --- used internally by the control (SWIG-generated delegates).
    def SetupScrollbars(*args, **kwargs):
        """SetupScrollbars(self, bool atTop=False)"""
        return _richtext.RichTextCtrl_SetupScrollbars(*args, **kwargs)
    def KeyboardNavigate(*args, **kwargs):
        """KeyboardNavigate(self, int keyCode, int flags) -> bool"""
        return _richtext.RichTextCtrl_KeyboardNavigate(*args, **kwargs)
    def PositionCaret(*args, **kwargs):
        """PositionCaret(self)"""
        return _richtext.RichTextCtrl_PositionCaret(*args, **kwargs)
    def ExtendSelection(*args, **kwargs):
        """ExtendSelection(self, long oldPosition, long newPosition, int flags) -> bool"""
        return _richtext.RichTextCtrl_ExtendSelection(*args, **kwargs)
    def ScrollIntoView(*args, **kwargs):
        """ScrollIntoView(self, long position, int keyCode) -> bool"""
        return _richtext.RichTextCtrl_ScrollIntoView(*args, **kwargs)
    def SetCaretPosition(*args, **kwargs):
        """SetCaretPosition(self, long position, bool showAtLineStart=False)"""
        return _richtext.RichTextCtrl_SetCaretPosition(*args, **kwargs)
    def GetCaretPosition(*args, **kwargs):
        """GetCaretPosition(self) -> long"""
        return _richtext.RichTextCtrl_GetCaretPosition(*args, **kwargs)
    def GetAdjustedCaretPosition(*args, **kwargs):
        """GetAdjustedCaretPosition(self, long caretPos) -> long"""
        return _richtext.RichTextCtrl_GetAdjustedCaretPosition(*args, **kwargs)
    def MoveCaretForward(*args, **kwargs):
        """MoveCaretForward(self, long oldPosition)"""
        return _richtext.RichTextCtrl_MoveCaretForward(*args, **kwargs)
    def MoveCaretBack(*args, **kwargs):
        """MoveCaretBack(self, long oldPosition)"""
        return _richtext.RichTextCtrl_MoveCaretBack(*args, **kwargs)
    def GetCaretPositionForIndex(*args, **kwargs):
        """GetCaretPositionForIndex(self, long position, Rect rect) -> bool"""
        return _richtext.RichTextCtrl_GetCaretPositionForIndex(*args, **kwargs)
    def GetVisibleLineForCaretPosition(*args, **kwargs):
        """GetVisibleLineForCaretPosition(self, long caretPosition) -> RichTextLine"""
        return _richtext.RichTextCtrl_GetVisibleLineForCaretPosition(*args, **kwargs)
    def GetCommandProcessor(*args, **kwargs):
        """GetCommandProcessor(self) -> wxCommandProcessor"""
        return _richtext.RichTextCtrl_GetCommandProcessor(*args, **kwargs)
    def DeleteSelectedContent(*args, **kwargs):
        """DeleteSelectedContent(self, long OUTPUT) -> bool"""
        return _richtext.RichTextCtrl_DeleteSelectedContent(*args, **kwargs)
    def GetPhysicalPoint(*args, **kwargs):
        """GetPhysicalPoint(self, Point ptLogical) -> Point"""
        return _richtext.RichTextCtrl_GetPhysicalPoint(*args, **kwargs)
    def GetLogicalPoint(*args, **kwargs):
        """GetLogicalPoint(self, Point ptPhysical) -> Point"""
        return _richtext.RichTextCtrl_GetLogicalPoint(*args, **kwargs)
    def FindNextWordPosition(*args, **kwargs):
        """FindNextWordPosition(self, int direction=1) -> long"""
        return _richtext.RichTextCtrl_FindNextWordPosition(*args, **kwargs)
    def IsPositionVisible(*args, **kwargs):
        """IsPositionVisible(self, long pos) -> bool"""
        return _richtext.RichTextCtrl_IsPositionVisible(*args, **kwargs)
    def GetFirstVisiblePosition(*args, **kwargs):
        """GetFirstVisiblePosition(self) -> long"""
        return _richtext.RichTextCtrl_GetFirstVisiblePosition(*args, **kwargs)
    def GetCaretPositionForDefaultStyle(*args, **kwargs):
        """GetCaretPositionForDefaultStyle(self) -> long"""
        return _richtext.RichTextCtrl_GetCaretPositionForDefaultStyle(*args, **kwargs)
    def SetCaretPositionForDefaultStyle(*args, **kwargs):
        """SetCaretPositionForDefaultStyle(self, long pos)"""
        return _richtext.RichTextCtrl_SetCaretPositionForDefaultStyle(*args, **kwargs)
    def IsDefaultStyleShowing(*args, **kwargs):
        """IsDefaultStyleShowing(self) -> bool"""
        return _richtext.RichTextCtrl_IsDefaultStyleShowing(*args, **kwargs)
    def SetAndShowDefaultStyle(*args, **kwargs):
        """SetAndShowDefaultStyle(self, RichTextAttr attr)"""
        return _richtext.RichTextCtrl_SetAndShowDefaultStyle(*args, **kwargs)
    def GetFirstVisiblePoint(*args, **kwargs):
        """GetFirstVisiblePoint(self) -> Point"""
        return _richtext.RichTextCtrl_GetFirstVisiblePoint(*args, **kwargs)
    def GetScrollPageSize(*args, **kwargs):
        """GetScrollPageSize(self, int orient) -> int"""
        return _richtext.RichTextCtrl_GetScrollPageSize(*args, **kwargs)
    def SetScrollPageSize(*args, **kwargs):
        """SetScrollPageSize(self, int orient, int pageSize)"""
        return _richtext.RichTextCtrl_SetScrollPageSize(*args, **kwargs)
    def SetScrollRate(*args, **kwargs):
        """SetScrollRate(self, int xstep, int ystep)"""
        return _richtext.RichTextCtrl_SetScrollRate(*args, **kwargs)
    # --- Scrolled-window style helpers: view start, scaling, scrolled/
    # --- unscrolled coordinate translation, target rect, and the final
    # --- state accessors (SWIG-generated delegates).
    def GetViewStart(*args, **kwargs):
        """
        GetViewStart() -> (x,y)
        Get the view start
        """
        return _richtext.RichTextCtrl_GetViewStart(*args, **kwargs)
    def SetScale(*args, **kwargs):
        """SetScale(self, double xs, double ys)"""
        return _richtext.RichTextCtrl_SetScale(*args, **kwargs)
    def GetScaleX(*args, **kwargs):
        """GetScaleX(self) -> double"""
        return _richtext.RichTextCtrl_GetScaleX(*args, **kwargs)
    def GetScaleY(*args, **kwargs):
        """GetScaleY(self) -> double"""
        return _richtext.RichTextCtrl_GetScaleY(*args, **kwargs)
    # The two Calc* methods take *args only (no **kwargs): SWIG dispatches
    # between the Point and (x, y) overloads on positional arguments.
    def CalcScrolledPosition(*args):
        """
        CalcScrolledPosition(self, Point pt) -> Point
        CalcScrolledPosition(int x, int y) -> (sx, sy)
        Translate between scrolled and unscrolled coordinates.
        """
        return _richtext.RichTextCtrl_CalcScrolledPosition(*args)
    def CalcUnscrolledPosition(*args):
        """
        CalcUnscrolledPosition(self, Point pt) -> Point
        CalcUnscrolledPosition(int x, int y) -> (ux, uy)
        Translate between scrolled and unscrolled coordinates.
        """
        return _richtext.RichTextCtrl_CalcUnscrolledPosition(*args)
    def SetTargetRect(*args, **kwargs):
        """SetTargetRect(self, Rect rect)"""
        return _richtext.RichTextCtrl_SetTargetRect(*args, **kwargs)
    def GetTargetRect(*args, **kwargs):
        """GetTargetRect(self) -> Rect"""
        return _richtext.RichTextCtrl_GetTargetRect(*args, **kwargs)
    def IsEmpty(*args, **kwargs):
        """
        IsEmpty(self) -> bool
        Returns True if the value in the text field is empty.
        """
        return _richtext.RichTextCtrl_IsEmpty(*args, **kwargs)
    def SetModified(*args, **kwargs):
        """SetModified(self, bool modified)"""
        return _richtext.RichTextCtrl_SetModified(*args, **kwargs)
# Register the proxy class with SWIG's runtime so C++ objects of this type
# are wrapped as RichTextCtrl, and expose the default window name constant
# from the extension's cvar table.
_richtext.RichTextCtrl_swigregister(RichTextCtrl)
RichTextCtrlNameStr = cvar.RichTextCtrlNameStr
def PreRichTextCtrl(*args, **kwargs):
    """PreRichTextCtrl() -> RichTextCtrl"""
    # Forward straight to the wrapped constructor instead of staging the
    # result in a temporary; used for wx-style two-phase creation.
    return _richtext.new_PreRichTextCtrl(*args, **kwargs)
#---------------------------------------------------------------------------
# Event type constants re-exported from the SWIG wrapper module.
wxEVT_COMMAND_RICHTEXT_LEFT_CLICK = _richtext.wxEVT_COMMAND_RICHTEXT_LEFT_CLICK
wxEVT_COMMAND_RICHTEXT_RIGHT_CLICK = _richtext.wxEVT_COMMAND_RICHTEXT_RIGHT_CLICK
wxEVT_COMMAND_RICHTEXT_MIDDLE_CLICK = _richtext.wxEVT_COMMAND_RICHTEXT_MIDDLE_CLICK
wxEVT_COMMAND_RICHTEXT_LEFT_DCLICK = _richtext.wxEVT_COMMAND_RICHTEXT_LEFT_DCLICK
wxEVT_COMMAND_RICHTEXT_RETURN = _richtext.wxEVT_COMMAND_RICHTEXT_RETURN
wxEVT_COMMAND_RICHTEXT_CHARACTER = _richtext.wxEVT_COMMAND_RICHTEXT_CHARACTER
wxEVT_COMMAND_RICHTEXT_DELETE = _richtext.wxEVT_COMMAND_RICHTEXT_DELETE
wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGING = _richtext.wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGING
wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGED = _richtext.wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGED
wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACING = _richtext.wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACING
wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACED = _richtext.wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACED
wxEVT_COMMAND_RICHTEXT_CONTENT_INSERTED = _richtext.wxEVT_COMMAND_RICHTEXT_CONTENT_INSERTED
wxEVT_COMMAND_RICHTEXT_CONTENT_DELETED = _richtext.wxEVT_COMMAND_RICHTEXT_CONTENT_DELETED
wxEVT_COMMAND_RICHTEXT_STYLE_CHANGED = _richtext.wxEVT_COMMAND_RICHTEXT_STYLE_CHANGED
wxEVT_COMMAND_RICHTEXT_SELECTION_CHANGED = _richtext.wxEVT_COMMAND_RICHTEXT_SELECTION_CHANGED
wxEVT_COMMAND_RICHTEXT_BUFFER_RESET = _richtext.wxEVT_COMMAND_RICHTEXT_BUFFER_RESET
# Event binder objects for use with wx.EvtHandler.Bind / wx.Window.Bind.
EVT_RICHTEXT_LEFT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_RICHTEXT_LEFT_CLICK, 1)
EVT_RICHTEXT_RIGHT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_RICHTEXT_RIGHT_CLICK, 1)
EVT_RICHTEXT_MIDDLE_CLICK = wx.PyEventBinder(wxEVT_COMMAND_RICHTEXT_MIDDLE_CLICK, 1)
EVT_RICHTEXT_LEFT_DCLICK = wx.PyEventBinder(wxEVT_COMMAND_RICHTEXT_LEFT_DCLICK, 1)
EVT_RICHTEXT_RETURN = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_RETURN, 1)
EVT_RICHTEXT_CHARACTER = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_CHARACTER, 1)
EVT_RICHTEXT_DELETE = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_DELETE, 1)
EVT_RICHTEXT_STYLESHEET_CHANGING = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGING, 1)
EVT_RICHTEXT_STYLESHEET_CHANGED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_STYLESHEET_CHANGED, 1)
EVT_RICHTEXT_STYLESHEET_REPLACING = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACING, 1)
EVT_RICHTEXT_STYLESHEET_REPLACED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_STYLESHEET_REPLACED, 1)
EVT_RICHTEXT_CONTENT_INSERTED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_CONTENT_INSERTED, 1)
EVT_RICHTEXT_CONTENT_DELETED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_CONTENT_DELETED, 1)
EVT_RICHTEXT_STYLE_CHANGED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_STYLE_CHANGED, 1)
EVT_RICHTEXT_SELECTION_CHANGED = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_SELECTION_CHANGED, 1)
EVT_RICHTEXT_BUFFER_RESET = wx.PyEventBinder( wxEVT_COMMAND_RICHTEXT_BUFFER_RESET, 1)
class RichTextEvent(_core.NotifyEvent):
    """Proxy of C++ RichTextEvent class.
    SWIG-generated event wrapper: every accessor delegates to the C++
    implementation in the _richtext extension module.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, EventType commandType=wxEVT_NULL, int winid=0) -> RichTextEvent"""
        _richtext.RichTextEvent_swiginit(self,_richtext.new_RichTextEvent(*args, **kwargs))
    def GetPosition(*args, **kwargs):
        """GetPosition(self) -> int"""
        return _richtext.RichTextEvent_GetPosition(*args, **kwargs)
    def SetPosition(*args, **kwargs):
        """SetPosition(self, int n)"""
        return _richtext.RichTextEvent_SetPosition(*args, **kwargs)
    def GetFlags(*args, **kwargs):
        """GetFlags(self) -> int"""
        return _richtext.RichTextEvent_GetFlags(*args, **kwargs)
    def SetFlags(*args, **kwargs):
        """SetFlags(self, int flags)"""
        return _richtext.RichTextEvent_SetFlags(*args, **kwargs)
    def GetOldStyleSheet(*args, **kwargs):
        """GetOldStyleSheet(self) -> wxRichTextStyleSheet"""
        return _richtext.RichTextEvent_GetOldStyleSheet(*args, **kwargs)
    def SetOldStyleSheet(*args, **kwargs):
        """SetOldStyleSheet(self, wxRichTextStyleSheet sheet)"""
        return _richtext.RichTextEvent_SetOldStyleSheet(*args, **kwargs)
    def GetNewStyleSheet(*args, **kwargs):
        """GetNewStyleSheet(self) -> wxRichTextStyleSheet"""
        return _richtext.RichTextEvent_GetNewStyleSheet(*args, **kwargs)
    def SetNewStyleSheet(*args, **kwargs):
        """SetNewStyleSheet(self, wxRichTextStyleSheet sheet)"""
        return _richtext.RichTextEvent_SetNewStyleSheet(*args, **kwargs)
    def GetRange(*args, **kwargs):
        """GetRange(self) -> RichTextRange"""
        return _richtext.RichTextEvent_GetRange(*args, **kwargs)
    def SetRange(*args, **kwargs):
        """SetRange(self, RichTextRange range)"""
        return _richtext.RichTextEvent_SetRange(*args, **kwargs)
    def GetCharacter(*args, **kwargs):
        """GetCharacter(self) -> wxChar"""
        return _richtext.RichTextEvent_GetCharacter(*args, **kwargs)
    def SetCharacter(*args, **kwargs):
        """SetCharacter(self, wxChar ch)"""
        return _richtext.RichTextEvent_SetCharacter(*args, **kwargs)
    # Attribute-style aliases for the getter/setter pairs above.
    Flags = property(GetFlags,SetFlags)
    Index = property(GetPosition,SetPosition)
    OldStyleSheet = property(GetOldStyleSheet,SetOldStyleSheet)
    NewStyleSheet = property(GetNewStyleSheet,SetNewStyleSheet)
    Range = property(GetRange,SetRange)
    Character = property(GetCharacter,SetCharacter)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextEvent_swigregister(RichTextEvent)
#---------------------------------------------------------------------------
class RichTextHTMLHandler(RichTextFileHandler):
    """Proxy of C++ RichTextHTMLHandler class.
    SWIG-generated file handler for saving rich text buffers as HTML;
    all methods delegate to the _richtext extension module.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String name=HtmlName, String ext=HtmlExt, int type=RICHTEXT_TYPE_HTML) -> RichTextHTMLHandler"""
        _richtext.RichTextHTMLHandler_swiginit(self,_richtext.new_RichTextHTMLHandler(*args, **kwargs))
    def SetTemporaryImageLocations(*args, **kwargs):
        """
        SetTemporaryImageLocations(self, wxArrayString locations)
        Set the list of image locations generated by the last operation
        """
        return _richtext.RichTextHTMLHandler_SetTemporaryImageLocations(*args, **kwargs)
    def GetTemporaryImageLocations(*args, **kwargs):
        """
        GetTemporaryImageLocations(self) -> wxArrayString
        Get the list of image locations generated by the last operation
        """
        return _richtext.RichTextHTMLHandler_GetTemporaryImageLocations(*args, **kwargs)
    TemporaryImageLocations = property(GetTemporaryImageLocations,SetTemporaryImageLocations)
    def ClearTemporaryImageLocations(*args, **kwargs):
        """
        ClearTemporaryImageLocations(self)
        Clear the image locations generated by the last operation
        """
        return _richtext.RichTextHTMLHandler_ClearTemporaryImageLocations(*args, **kwargs)
    def DeleteTemporaryImages(*args, **kwargs):
        """
        DeleteTemporaryImages(self) -> bool
        Delete the in-memory or temporary files generated by the last operation
        """
        return _richtext.RichTextHTMLHandler_DeleteTemporaryImages(*args, **kwargs)
    def SetFileCounter(*args, **kwargs):
        """
        SetFileCounter(int counter)
        Reset the file counter, in case, for example, the same names are required each
        time
        """
        return _richtext.RichTextHTMLHandler_SetFileCounter(*args, **kwargs)
    # Rebind as a static method: the C++ function takes no instance.
    SetFileCounter = staticmethod(SetFileCounter)
    def SetTempDir(*args, **kwargs):
        """
        SetTempDir(self, String tempDir)
        Set the directory for storing temporary files. If empty, the system temporary
        directory will be used.
        """
        return _richtext.RichTextHTMLHandler_SetTempDir(*args, **kwargs)
    def GetTempDir(*args, **kwargs):
        """
        GetTempDir(self) -> String
        Get the directory for storing temporary files. If empty, the system temporary
        directory will be used.
        """
        return _richtext.RichTextHTMLHandler_GetTempDir(*args, **kwargs)
    TempDir = property(GetTempDir,SetTempDir)
    def SetFontSizeMapping(*args, **kwargs):
        """
        SetFontSizeMapping(self, wxArrayInt fontSizeMapping)
        Set mapping from point size to HTML font size. There should be 7 elements, one
        for each HTML font size, each element specifying the maximum point size for
        that HTML font size. E.g. 8, 10, 13, 17, 22, 29, 100
        """
        return _richtext.RichTextHTMLHandler_SetFontSizeMapping(*args, **kwargs)
    def GetFontSizeMapping(*args, **kwargs):
        """
        GetFontSizeMapping(self) -> wxArrayInt
        Get mapping deom point size to HTML font size.
        """
        return _richtext.RichTextHTMLHandler_GetFontSizeMapping(*args, **kwargs)
    FontSizeMapping = property(GetFontSizeMapping,SetFontSizeMapping)
# Register the proxy class and re-export the default name/extension strings.
_richtext.RichTextHTMLHandler_swigregister(RichTextHTMLHandler)
HtmlName = cvar.HtmlName
HtmlExt = cvar.HtmlExt
# Module-level alias for the static method above (SWIG convention: static
# members are also exposed as free functions).
def RichTextHTMLHandler_SetFileCounter(*args, **kwargs):
  """
    RichTextHTMLHandler_SetFileCounter(int counter)
    Reset the file counter, in case, for example, the same names are required each
    time
    """
  return _richtext.RichTextHTMLHandler_SetFileCounter(*args, **kwargs)
#---------------------------------------------------------------------------
class RichTextXMLHandler(RichTextFileHandler):
    """Proxy of C++ RichTextXMLHandler class.
    SWIG-generated file handler for the native rich text XML format.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String name=XmlName, String ext=XmlExt, int type=RICHTEXT_TYPE_XML) -> RichTextXMLHandler"""
        _richtext.RichTextXMLHandler_swiginit(self,_richtext.new_RichTextXMLHandler(*args, **kwargs))
# Register the proxy class and re-export the default name/extension strings.
_richtext.RichTextXMLHandler_swigregister(RichTextXMLHandler)
XmlName = cvar.XmlName
XmlExt = cvar.XmlExt
#---------------------------------------------------------------------------
# Printing-related constants (page selection / header-footer placement)
# re-exported from the SWIG wrapper module.
RICHTEXT_PRINT_MAX_PAGES = _richtext.RICHTEXT_PRINT_MAX_PAGES
RICHTEXT_PAGE_ODD = _richtext.RICHTEXT_PAGE_ODD
RICHTEXT_PAGE_EVEN = _richtext.RICHTEXT_PAGE_EVEN
RICHTEXT_PAGE_ALL = _richtext.RICHTEXT_PAGE_ALL
RICHTEXT_PAGE_LEFT = _richtext.RICHTEXT_PAGE_LEFT
RICHTEXT_PAGE_CENTRE = _richtext.RICHTEXT_PAGE_CENTRE
RICHTEXT_PAGE_RIGHT = _richtext.RICHTEXT_PAGE_RIGHT
class RichTextPrintout(_windows.Printout):
    """Proxy of C++ RichTextPrintout class.
    SWIG-generated printout object that renders a RichTextBuffer; all
    methods delegate to the _richtext extension module.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String title=wxT("Printout")) -> RichTextPrintout"""
        _richtext.RichTextPrintout_swiginit(self,_richtext.new_RichTextPrintout(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextPrintout
    __del__ = lambda self : None;
    def SetRichTextBuffer(*args, **kwargs):
        """SetRichTextBuffer(self, RichTextBuffer buffer)"""
        return _richtext.RichTextPrintout_SetRichTextBuffer(*args, **kwargs)
    def GetRichTextBuffer(*args, **kwargs):
        """GetRichTextBuffer(self) -> RichTextBuffer"""
        return _richtext.RichTextPrintout_GetRichTextBuffer(*args, **kwargs)
    def SetHeaderFooterData(*args, **kwargs):
        """SetHeaderFooterData(self, wxRichTextHeaderFooterData data)"""
        return _richtext.RichTextPrintout_SetHeaderFooterData(*args, **kwargs)
    def GetHeaderFooterData(*args, **kwargs):
        """GetHeaderFooterData(self) -> wxRichTextHeaderFooterData"""
        return _richtext.RichTextPrintout_GetHeaderFooterData(*args, **kwargs)
    def SetMargins(*args, **kwargs):
        """SetMargins(self, int top=254, int bottom=254, int left=254, int right=254)"""
        return _richtext.RichTextPrintout_SetMargins(*args, **kwargs)
    def CalculateScaling(*args, **kwargs):
        """CalculateScaling(self, DC dc, Rect textRect, Rect headerRect, Rect footerRect)"""
        return _richtext.RichTextPrintout_CalculateScaling(*args, **kwargs)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextPrintout_swigregister(RichTextPrintout)
class RichTextPrinting(_core.Object):
    """Proxy of C++ RichTextPrinting class.
    SWIG-generated convenience object for previewing/printing rich text
    buffers and files; all methods delegate to the _richtext module.
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String name=wxT("Printing"), Window parentWindow=None) -> RichTextPrinting"""
        _richtext.RichTextPrinting_swiginit(self,_richtext.new_RichTextPrinting(*args, **kwargs))
    __swig_destroy__ = _richtext.delete_RichTextPrinting
    __del__ = lambda self : None;
    # --- Preview / print entry points ---
    def PreviewFile(*args, **kwargs):
        """PreviewFile(self, String richTextFile) -> bool"""
        return _richtext.RichTextPrinting_PreviewFile(*args, **kwargs)
    def PreviewBuffer(*args, **kwargs):
        """PreviewBuffer(self, RichTextBuffer buffer) -> bool"""
        return _richtext.RichTextPrinting_PreviewBuffer(*args, **kwargs)
    def PrintFile(*args, **kwargs):
        """PrintFile(self, String richTextFile) -> bool"""
        return _richtext.RichTextPrinting_PrintFile(*args, **kwargs)
    def PrintBuffer(*args, **kwargs):
        """PrintBuffer(self, RichTextBuffer buffer) -> bool"""
        return _richtext.RichTextPrinting_PrintBuffer(*args, **kwargs)
    def PageSetup(*args, **kwargs):
        """PageSetup(self)"""
        return _richtext.RichTextPrinting_PageSetup(*args, **kwargs)
    # --- Header / footer configuration ---
    def SetHeaderFooterData(*args, **kwargs):
        """SetHeaderFooterData(self, wxRichTextHeaderFooterData data)"""
        return _richtext.RichTextPrinting_SetHeaderFooterData(*args, **kwargs)
    def GetHeaderFooterData(*args, **kwargs):
        """GetHeaderFooterData(self) -> wxRichTextHeaderFooterData"""
        return _richtext.RichTextPrinting_GetHeaderFooterData(*args, **kwargs)
    def SetHeaderText(*args, **kwargs):
        """SetHeaderText(self, String text, int page=RICHTEXT_PAGE_ALL, int location=RICHTEXT_PAGE_CENTRE)"""
        return _richtext.RichTextPrinting_SetHeaderText(*args, **kwargs)
    def GetHeaderText(*args, **kwargs):
        """GetHeaderText(self, int page=RICHTEXT_PAGE_EVEN, int location=RICHTEXT_PAGE_CENTRE) -> String"""
        return _richtext.RichTextPrinting_GetHeaderText(*args, **kwargs)
    def SetFooterText(*args, **kwargs):
        """SetFooterText(self, String text, int page=RICHTEXT_PAGE_ALL, int location=RICHTEXT_PAGE_CENTRE)"""
        return _richtext.RichTextPrinting_SetFooterText(*args, **kwargs)
    def GetFooterText(*args, **kwargs):
        """GetFooterText(self, int page=RICHTEXT_PAGE_EVEN, int location=RICHTEXT_PAGE_CENTRE) -> String"""
        return _richtext.RichTextPrinting_GetFooterText(*args, **kwargs)
    def SetShowOnFirstPage(*args, **kwargs):
        """SetShowOnFirstPage(self, bool show)"""
        return _richtext.RichTextPrinting_SetShowOnFirstPage(*args, **kwargs)
    def SetHeaderFooterFont(*args, **kwargs):
        """SetHeaderFooterFont(self, Font font)"""
        return _richtext.RichTextPrinting_SetHeaderFooterFont(*args, **kwargs)
    def SetHeaderFooterTextColour(*args, **kwargs):
        """SetHeaderFooterTextColour(self, Colour font)"""
        return _richtext.RichTextPrinting_SetHeaderFooterTextColour(*args, **kwargs)
    # --- Print / page-setup data access ---
    def GetPrintData(*args, **kwargs):
        """GetPrintData(self) -> PrintData"""
        return _richtext.RichTextPrinting_GetPrintData(*args, **kwargs)
    def GetPageSetupData(*args, **kwargs):
        """GetPageSetupData(self) -> PageSetupDialogData"""
        return _richtext.RichTextPrinting_GetPageSetupData(*args, **kwargs)
    def SetPrintData(*args, **kwargs):
        """SetPrintData(self, PrintData printData)"""
        return _richtext.RichTextPrinting_SetPrintData(*args, **kwargs)
    def SetPageSetupData(*args, **kwargs):
        """SetPageSetupData(self, wxPageSetupData pageSetupData)"""
        return _richtext.RichTextPrinting_SetPageSetupData(*args, **kwargs)
    def SetRichTextBufferPreview(*args, **kwargs):
        """SetRichTextBufferPreview(self, RichTextBuffer buf)"""
        return _richtext.RichTextPrinting_SetRichTextBufferPreview(*args, **kwargs)
    def GetRichTextBufferPreview(*args, **kwargs):
        """GetRichTextBufferPreview(self) -> RichTextBuffer"""
        return _richtext.RichTextPrinting_GetRichTextBufferPreview(*args, **kwargs)
    def SetRichTextBufferPrinting(*args, **kwargs):
        """SetRichTextBufferPrinting(self, RichTextBuffer buf)"""
        return _richtext.RichTextPrinting_SetRichTextBufferPrinting(*args, **kwargs)
    def GetRichTextBufferPrinting(*args, **kwargs):
        """GetRichTextBufferPrinting(self) -> RichTextBuffer"""
        return _richtext.RichTextPrinting_GetRichTextBufferPrinting(*args, **kwargs)
    def SetParentWindow(*args, **kwargs):
        """SetParentWindow(self, Window parent)"""
        return _richtext.RichTextPrinting_SetParentWindow(*args, **kwargs)
    def GetParentWindow(*args, **kwargs):
        """GetParentWindow(self) -> Window"""
        return _richtext.RichTextPrinting_GetParentWindow(*args, **kwargs)
    def SetTitle(*args, **kwargs):
        """SetTitle(self, String title)"""
        return _richtext.RichTextPrinting_SetTitle(*args, **kwargs)
    def GetTitle(*args, **kwargs):
        """GetTitle(self) -> String"""
        return _richtext.RichTextPrinting_GetTitle(*args, **kwargs)
    def SetPreviewRect(*args, **kwargs):
        """SetPreviewRect(self, Rect rect)"""
        return _richtext.RichTextPrinting_SetPreviewRect(*args, **kwargs)
    def GetPreviewRect(*args, **kwargs):
        """GetPreviewRect(self) -> Rect"""
        return _richtext.RichTextPrinting_GetPreviewRect(*args, **kwargs)
# Register the proxy class with the SWIG runtime.
_richtext.RichTextPrinting_swigregister(RichTextPrinting)
| apache-2.0 |
ArcherCraftStore/ArcherVMPeridot | Python/Lib/xml/sax/__init__.py | 237 | 3503 | """Simple API for XML (SAX) implementation for Python.
This module provides an implementation of the SAX 2 interface;
information about the Java version of the interface can be found at
http://www.megginson.com/SAX/. The Python version of the interface is
documented at <...>.
This package contains the following modules:
handler -- Base classes and constants which define the SAX 2 API for
the 'client-side' of SAX for Python.
saxutils -- Implementation of the convenience classes commonly used to
work with SAX.
xmlreader -- Base classes and constants which define the SAX 2 API for
the parsers used with SAX for Python.
expatreader -- Driver that allows use of the Expat parser with SAX.
"""
from .xmlreader import InputSource
from .handler import ContentHandler, ErrorHandler
from ._exceptions import SAXException, SAXNotRecognizedException, \
SAXParseException, SAXNotSupportedException, \
SAXReaderNotAvailable
def parse(source, handler, errorHandler=ErrorHandler()):
    """Parse an XML document from a SAX InputSource, file name, or stream.

    Events are delivered to *handler* (a ContentHandler).  Errors are
    reported to *errorHandler*; for consistency with ``parseString``, a
    fresh ``ErrorHandler`` is substituted when ``None`` is passed.
    """
    if errorHandler is None:
        errorHandler = ErrorHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(errorHandler)
    parser.parse(source)
def parseString(string, handler, errorHandler=ErrorHandler()):
    """Parse an XML document held entirely in memory.

    *string* may be ``bytes`` (as before) or, as a backward-compatible
    generalization, ``str``; a character stream is used for the latter.
    Events are delivered to *handler*; errors go to *errorHandler* (a
    fresh ``ErrorHandler`` is used when ``None`` is passed).
    """
    import io
    if errorHandler is None:
        errorHandler = ErrorHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(errorHandler)
    inpsrc = InputSource()
    if isinstance(string, str):
        inpsrc.setCharacterStream(io.StringIO(string))
    else:
        inpsrc.setByteStream(io.BytesIO(string))
    parser.parse(inpsrc)
# this is the parser list used by the make_parser function if no
# alternatives are given as parameters to the function
default_parser_list = ["xml.sax.expatreader"]
# tell modulefinder that importing sax potentially imports expatreader
_false = 0
if _false:
    import xml.sax.expatreader
import os, sys
# The default driver list can be overridden from the environment.
if "PY_SAX_PARSER" in os.environ:
    default_parser_list = os.environ["PY_SAX_PARSER"].split(",")
del os
# On Jython the driver list may instead come from the system registry.
_key = "python.xml.sax.parser"
if sys.platform[:4] == "java" and sys.registry.containsKey(_key):
    default_parser_list = sys.registry.getProperty(_key).split(",")
def make_parser(parser_list = []):
    """Creates and returns a SAX parser.

    Creates the first parser it is able to instantiate of the ones
    given in the list created by doing parser_list +
    default_parser_list.  The lists must contain the names of Python
    modules containing both a SAX parser and a create_parser function."""
    candidates = parser_list + default_parser_list
    for parser_name in candidates:
        try:
            return _create_parser(parser_name)
        except ImportError:
            import sys
            if parser_name in sys.modules:
                # The driver module itself was found, but importing it
                # failed unexpectedly -- propagate the original error.
                raise
        except SAXReaderNotAvailable:
            # This driver knows it cannot work in the current
            # environment; move on to the next candidate.
            continue
    raise SAXReaderNotAvailable("No parsers found", None)
# --- Internal utility methods used by make_parser
if sys.platform[ : 4] == "java":
    # Jython: load the driver module through the org.python import machinery.
    def _create_parser(parser_name):
        from org.python.core import imp
        drv_module = imp.importName(parser_name, 0, globals())
        return drv_module.create_parser()
else:
    # CPython: import the driver module by name and call its factory.
    def _create_parser(parser_name):
        drv_module = __import__(parser_name,{},{},['create_parser'])
        return drv_module.create_parser()
# Keep the module namespace clean; sys was only needed for the checks above.
del sys
| apache-2.0 |
django-oscar/django-oscar-sagepay-direct | oscar_sagepay/dashboard/views.py | 1 | 1351 | from django.views.generic import ListView, DetailView
from django.db.models import Q
from oscar_sagepay import models
from . import forms
class Transactions(ListView):
    """Paginated dashboard list of Sage Pay request/response records.
    An optional search term (querystring field handled by
    ``forms.TransactionSearch``) filters on the vendor TX code and the
    Sage Pay transaction ID.
    """
    model = models.RequestResponse
    context_object_name = 'transactions'
    template_name = 'sagepay/dashboard/request_list.html'
    paginate_by = 20
    form_class = forms.TransactionSearch
    # Last search term; set in get_queryset() and exposed to the template.
    query = None
    def get(self, request, *args, **kwargs):
        # Bind the search form before delegating to ListView.get(), which
        # builds the queryset (get_queryset below reads self.form).
        self.form = self.form_class(request.GET)
        return super(Transactions, self).get(request, *args, **kwargs)
    def get_queryset(self):
        # Allow txns to be filtered by matching against the vendor code and the
        # Sagepay TX ID.
        qs = super(Transactions, self).get_queryset()
        if self.form.is_valid():
            self.query = self.form.cleaned_data['q']
            filters = (Q(vendor_tx_code__contains=self.query) |
                       Q(tx_id__contains=self.query))
            qs = qs.filter(filters)
        return qs
    def get_context_data(self, **kwargs):
        # Expose the bound form and the active search term to the template.
        ctx = super(Transactions, self).get_context_data(**kwargs)
        ctx['form'] = self.form
        ctx['query'] = self.query
        return ctx
return ctx
class Transaction(DetailView):
    """Detail page for a single Sage Pay request/response record."""
    model = models.RequestResponse
    context_object_name = 'txn'
    template_name = 'sagepay/dashboard/request_detail.html'
| bsd-3-clause |
barbour-em/osf.io | tasks.py | 1 | 23428 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Invoke tasks. To run a task, run ``$ invoke <COMMAND>``. To see a list of
commands, run ``$ invoke --list``.
"""
import os
import sys
import code
import platform
import subprocess
import logging
from invoke import task, run
from website import settings
# Silence invoke's own logging; define repo root and the optional wheelhouse
# directory (taken from the WHEELHOUSE environment variable) used by the
# pip-related tasks below.
logging.getLogger('invoke').setLevel(logging.CRITICAL)
HERE = os.path.dirname(os.path.abspath(__file__))
WHEELHOUSE_PATH = os.environ.get('WHEELHOUSE')
def get_bin_path():
    """Return the directory that holds the running Python interpreter."""
    bin_dir, _ = os.path.split(sys.executable)
    return bin_dir
def bin_prefix(cmd):
    """Return *cmd* prefixed with the current interpreter's bin directory."""
    prefix = get_bin_path()
    return os.path.join(prefix, cmd)
# Prefer the rednose nosetests plugin when it is importable.
try:
    __import__('rednose')
except ImportError:
    TEST_CMD = 'nosetests'
else:
    TEST_CMD = 'nosetests --rednose'
@task
def server(host=None, port=5000, debug=True, live=False):
    """Run the app server.
    With ``--live`` the app is served through livereload, watching the
    public static directory; otherwise the built-in dev server runs
    (threaded when debugging, reloading when the asset hash file changes).
    """
    from website.app import init_app
    app = init_app(set_backends=True, routes=True)
    # Publish the chosen port on the global settings object.
    settings.API_SERVER_PORT = port
    if live:
        from livereload import Server
        server = Server(app.wsgi_app)
        server.watch(os.path.join(HERE, 'website', 'static', 'public'))
        server.serve(port=port)
    else:
        app.run(host=host, port=port, debug=debug, threaded=debug, extra_files=[settings.ASSET_HASH_PATH])
@task
def apiserver(port=8000, live=False):
    """Run the API server."""
    # Append the livereload argument only when requested.
    suffix = ' livereload' if live else ''
    run('python manage.py runserver {}{}'.format(port, suffix), echo=True)
SHELL_BANNER = """
{version}
+--------------------------------------------------+
|cccccccccccccccccccccccccccccccccccccccccccccccccc|
|ccccccccccccccccccccccOOOOOOOccccccccccccccccccccc|
|ccccccccccccccccccccOOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccOOOOOOOOOOOOccccccccccccccccccc|
|cccccccccOOOOOOOcccOOOOOOOOOOOOcccOOOOOOOccccccccc|
|cccccccOOOOOOOOOOccOOOOOsssOOOOcOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOccOOssssssOOccOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOOcOssssssssOcOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOsOcOssssssOOOOOOOOOOOOOOOccccccc|
|cccccccOOOOOOOOOOOssccOOOOOOcOssOOOOOOOOOOcccccccc|
|cccccccccOOOOOOOsssOccccccccccOssOOOOOOOcccccccccc|
|cccccOOOccccOOssssOccccccccccccOssssOccccOOOcccccc|
|ccOOOOOOOOOOOOOccccccccccccccccccccOOOOOOOOOOOOccc|
|cOOOOOOOOssssssOcccccccccccccccccOOssssssOOOOOOOOc|
|cOOOOOOOssssssssOccccccccccccccccOsssssssOOOOOOOOc|
|cOOOOOOOOsssssssOccccccccccccccccOsssssssOOOOOOOOc|
|cOOOOOOOOOssssOOccccccccccccccccccOsssssOOOOOOOOcc|
|cccOOOOOOOOOOOOOOOccccccccccccccOOOOOOOOOOOOOOOccc|
|ccccccccccccOOssssOOccccccccccOssssOOOcccccccccccc|
|ccccccccOOOOOOOOOssOccccOOcccOsssOOOOOOOOccccccccc|
|cccccccOOOOOOOOOOOsOcOOssssOcOssOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOOOOsssssssOcOOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOOcOssssssssOcOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOcccOssssssOcccOOOOOOOOOOOccccccc|
|ccccccccOOOOOOOOOcccOOOOOOOOOOcccOOOOOOOOOcccccccc|
|ccccccccccOOOOcccccOOOOOOOOOOOcccccOOOOccccccccccc|
|ccccccccccccccccccccOOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccccOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccccccOOOOccccccccccccccccccccccc|
|cccccccccccccccccccccccccccccccccccccccccccccccccc|
+--------------------------------------------------+
Welcome to the OSF Python Shell. Happy hacking!
Available variables:
{context}
"""
def make_shell_context():
    """Build the namespace dict injected into the interactive OSF shell."""
    from modularodm import Q
    from framework.auth import User, Auth
    from framework.mongo import database
    from website.app import init_app
    from website.project.model import Node
    from website import models  # all models
    from website import settings
    import requests
    app = init_app()
    # Names made available to the shell user (listed in the banner).
    context = {
        'app': app,
        'db': database,
        'User': User,
        'Auth': Auth,
        'Node': Node,
        'Q': Q,
        'models': models,
        'run_tests': test,
        'rget': requests.get,
        'rpost': requests.post,
        'rdelete': requests.delete,
        'rput': requests.put,
        'settings': settings,
    }
    try:  # Add a fake factory for generating fake names, emails, etc.
        from faker import Factory
        fake = Factory.create()
        context['fake'] = fake
    except ImportError:
        # faker is optional; the shell works without it.
        pass
    return context
def format_context(context):
    """Render a shell-context dict as one ``name: repr(value)`` line per entry.

    The previous implementation built each line with
    ``format(**locals())``, which silently depended on whatever happened
    to be in the local namespace; the substitutions are now explicit.
    """
    return '\n'.join(
        '{0}: {1!r}'.format(name, obj) for name, obj in context.items()
    )
# Shell command adapted from Flask-Script. See NOTICE for license info.
@task
def shell():
    """Start an interactive OSF shell (IPython when available)."""
    context = make_shell_context()
    banner = SHELL_BANNER.format(version=sys.version,
        context=format_context(context)
    )
    try:
        try:
            # 0.10.x
            from IPython.Shell import IPShellEmbed
            ipshell = IPShellEmbed(banner=banner)
            ipshell(global_ns={}, local_ns=context)
        except ImportError:
            # 0.12+
            from IPython import embed
            embed(banner1=banner, user_ns=context)
        return
    except ImportError:
        # IPython not installed at all.
        pass
    # fallback to basic python shell
    code.interact(banner, local=context)
    return
@task(aliases=['mongo'])
def mongoserver(daemon=False, config=None):
    """Run the mongod process.

    When no config file is given, a platform-specific TokuMX config path
    is used if the platform is recognized.
    """
    if not config:
        platform_configs = {
            'darwin': '/usr/local/etc/tokumx.conf',  # default for homebrew install
            'linux': '/etc/tokumx.conf',
        }
        # Renamed from ``platform``, which shadowed the module-level
        # ``import platform``.
        sys_platform = str(sys.platform).lower()
        config = platform_configs.get(sys_platform)
    port = settings.DB_PORT
    cmd = 'mongod --port {0}'.format(port)
    if config:
        cmd += ' --config {0}'.format(config)
    if daemon:
        cmd += " --fork"
    run(cmd, echo=True)
@task(aliases=['mongoshell'])
def mongoclient():
    """Run the mongo shell for the OSF database."""
    # Connect to the configured database on the configured port.
    cmd = "mongo {db} --port {port}".format(db=settings.DB_NAME, port=settings.DB_PORT)
    run(cmd, pty=True)
@task
def mongodump(path):
    """Back up the contents of the running OSF database to *path*.

    Credentials from settings are appended when configured.
    """
    db = settings.DB_NAME
    port = settings.DB_PORT
    # NOTE: the previous version passed ``pty=True`` to ``str.format``,
    # where it was silently ignored; it has been removed.
    cmd = "mongodump --db {db} --port {port} --out {path}".format(
        db=db,
        port=port,
        path=path)
    if settings.DB_USER:
        cmd += ' --username {0}'.format(settings.DB_USER)
    if settings.DB_PASS:
        cmd += ' --password {0}'.format(settings.DB_PASS)
    run(cmd, echo=True)
    print()
    print("To restore from the dumped database, run `invoke mongorestore {0}`".format(
        os.path.join(path, settings.DB_NAME)))
@task
def mongorestore(path, drop=False):
    """Restores the running OSF database with the contents of the database at
    the location given its argument.
    By default, the contents of the specified database are added to
    the existing database. The `--drop` option will cause the existing database
    to be dropped.
    A caveat: if you `invoke mongodump {path}`, you must restore with
    `invoke mongorestore {path}/{settings.DB_NAME}, as that's where the
    database dump will be stored.
    """
    db = settings.DB_NAME
    port = settings.DB_PORT
    # NOTE: the previous version passed ``pty=True`` to ``str.format``,
    # where it was silently ignored; it has been removed.
    cmd = "mongorestore --db {db} --port {port}".format(
        db=db,
        port=port)
    if settings.DB_USER:
        cmd += ' --username {0}'.format(settings.DB_USER)
    if settings.DB_PASS:
        cmd += ' --password {0}'.format(settings.DB_PASS)
    if drop:
        cmd += " --drop"
    cmd += " " + path
    run(cmd, echo=True)
@task
def sharejs(host=None, port=None, db_host=None, db_port=None, db_name=None, cors_allow_origin=None):
    """Start a local ShareJS server."""
    # Configuration is handed to the node process via environment variables;
    # only explicitly-provided values are set.
    if host:
        os.environ['SHAREJS_SERVER_HOST'] = host
    if port:
        os.environ['SHAREJS_SERVER_PORT'] = port
    if db_host:
        os.environ['SHAREJS_DB_HOST'] = db_host
    if db_port:
        os.environ['SHAREJS_DB_PORT'] = db_port
    if db_name:
        os.environ['SHAREJS_DB_NAME'] = db_name
    if cors_allow_origin:
        os.environ['SHAREJS_CORS_ALLOW_ORIGIN'] = cors_allow_origin
    if settings.SENTRY_DSN:
        os.environ['SHAREJS_SENTRY_DSN'] = settings.SENTRY_DSN
    share_server = os.path.join(settings.ADDON_PATH, 'wiki', 'shareServer.js')
    run("node {0}".format(share_server))
@task(aliases=['celery'])
def celery_worker(level="debug"):
    """Run the Celery process."""
    # Launch the worker for the framework.tasks app at the requested log level.
    run(bin_prefix('celery worker -A framework.tasks -l {0}'.format(level)))
@task
def rabbitmq():
    """Start a local rabbitmq server.
    NOTE: this is for development only. The production environment should start
    the server as a daemon.
    """
    # Runs in the foreground with a pseudo-terminal attached.
    run("rabbitmq-server", pty=True)
@task(aliases=['elastic'])
def elasticsearch():
    """Start a local elasticsearch server
    NOTE: Requires that elasticsearch is installed. See README for instructions
    """
    # The redundant function-local ``import platform`` was removed; the
    # module is already imported at module scope.
    if platform.linux_distribution()[0] == 'Ubuntu':
        run("sudo service elasticsearch start")
    elif platform.system() == 'Darwin':  # Mac OSX
        run('elasticsearch')
    else:
        print("Your system is not recognized, you will have to start elasticsearch manually")
@task
def migrate_search(delete=False, index=settings.ELASTIC_INDEX):
    '''Migrate the search-enabled models.'''
    # NOTE: the ``index`` default is captured from settings at import time.
    from website.search_migration.migrate import migrate
    migrate(delete, index=index)
@task
def mailserver(port=1025):
    """Run a SMTP test server."""
    # Use the stdlib debugging SMTP server bound to localhost.
    run(bin_prefix('python -m smtpd -n -c DebuggingServer localhost:{port}'.format(port=port)), pty=True)
@task(aliases=['flake8'])
def flake():
    # Lint the whole repository with flake8, echoing the command.
    run('flake8 .', echo=True)
def pip_install(req_file):
    """Return the proper 'pip install' command for installing the dependencies
    defined in ``req_file``.
    """
    cmd = 'pip install --exists-action w --upgrade -r {} '.format(req_file)
    if WHEELHOUSE_PATH:
        # Install exclusively from the local wheelhouse when one is configured.
        cmd += ' --no-index --find-links={}'.format(WHEELHOUSE_PATH)
    return bin_prefix(cmd)
@task(aliases=['req'])
def requirements(addons=False, release=False, dev=False):
    """Install python dependencies.
    Examples:
        inv requirements --dev
        inv requirements --addons
        inv requirements --release
    """
    if release or addons:
        addon_requirements()
    # "release" takes precedence
    if release:
        req_file = os.path.join(HERE, 'requirements', 'release.txt')
    elif dev:  # then dev requirements
        req_file = os.path.join(HERE, 'requirements', 'dev.txt')
    else:  # then base requirements
        req_file = os.path.join(HERE, 'requirements.txt')
    run(pip_install(req_file), echo=True)
@task
def test_module(module=None, verbosity=2):
    """Helper for running tests.
    """
    # Allow selecting specific submodule
    module_fmt = ' '.join(module) if isinstance(module, list) else module
    args = " --verbosity={0} -s {1}".format(verbosity, module_fmt)
    # Use pty so the process buffers "correctly"
    run(bin_prefix(TEST_CMD) + args, pty=True)
@task
def test_osf():
    """Run the OSF test suite."""
    # Delegates to test_module with the top-level tests/ directory.
    test_module(module="tests/")
@task
def test_addons():
    """Run all the tests in the addons directory.
    """
    # One test path per requested addon.
    modules = [
        os.path.join(settings.BASE_PATH, 'addons', addon)
        for addon in settings.ADDONS_REQUESTED
    ]
    test_module(module=modules)
@task
def test(all=False, syntax=False):
    """Alias of `invoke test_osf`.
    """
    if syntax:
        flake()
    # Dispatch to the full sweep or just the core OSF suite.
    target = test_all if all else test_osf
    target()
@task
def test_all(syntax=False):
    """Full sweep: optional lint, Python suites, then a single JS test run."""
    if syntax:
        flake()
    test_osf()
    test_addons()
    # Single headless karma run.
    karma(single=True, browsers='PhantomJS')
@task
def karma(single=False, sauce=False, browsers=None):
    """Run JS tests with Karma. Requires Chrome to be installed."""
    karma_bin = os.path.join(
        HERE, 'node_modules', 'karma', 'bin', 'karma'
    )
    cmd = '{} start'.format(karma_bin)
    if sauce:
        # Use the Sauce Labs configuration instead of the default one.
        cmd += ' karma.saucelabs.conf.js'
    if single:
        cmd += ' --single-run'
    # Use browsers if specified on the command-line, otherwise default
    # what's specified in karma.conf.js
    if browsers:
        cmd += ' --browsers {}'.format(browsers)
    run(cmd, echo=True)
@task
def wheelhouse(addons=False, release=False, dev=False):
    """Build wheels for the selected requirements set into WHEELHOUSE_PATH."""
    if release:
        req_file = os.path.join(HERE, 'requirements', 'release.txt')
    elif dev:
        req_file = os.path.join(HERE, 'requirements', 'dev.txt')
    else:
        req_file = os.path.join(HERE, 'requirements.txt')
    cmd = 'pip wheel --find-links={} -r {} --wheel-dir={}'.format(WHEELHOUSE_PATH, req_file, WHEELHOUSE_PATH)
    run(cmd, pty=True)
    if not addons:
        return
    # Also build wheels for each addon that ships its own requirements.txt.
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory)
        if os.path.isdir(path):
            req_file = os.path.join(path, 'requirements.txt')
            if os.path.exists(req_file):
                cmd = 'pip wheel --find-links={} -r {} --wheel-dir={}'.format(WHEELHOUSE_PATH, req_file, WHEELHOUSE_PATH)
                run(cmd, pty=True)
@task
def addon_requirements():
    """Install all addon requirements.

    Scans every directory under settings.ADDON_PATH and pip-installs its
    requirements.txt when present; addons without one are skipped.
    """
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory)
        if os.path.isdir(path):
            requirements_file = os.path.join(path, 'requirements.txt')
            # Bug fix: the original probed for the file with a bare open()
            # whose handle was never closed (leaked one fd per addon).
            # Test for existence directly instead, consistent with the
            # ``wheelhouse`` task's os.path.exists check.
            if os.path.isfile(requirements_file):
                print('Installing requirements for {0}'.format(directory))
                cmd = 'pip install --exists-action w --upgrade -r {0}'.format(requirements_file)
                if WHEELHOUSE_PATH:
                    cmd += ' --no-index --find-links={}'.format(WHEELHOUSE_PATH)
                run(bin_prefix(cmd))
    print('Finished')
@task
def encryption(owner=None):
    """Generate GnuPG key.

    For local development:
    > invoke encryption
    On Linode:
    > sudo env/bin/invoke encryption --owner www-data
    """
    if not settings.USE_GNUPG:
        print('GnuPG is not enabled. No GnuPG key will be generated.')
        return
    # Imported lazily so this task module loads even when gnupg is absent.
    import gnupg
    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME, gpgbinary=settings.GNUPG_BINARY)
    keys = gpg.list_keys()
    # Idempotent: never generate a second key into the same keyring.
    if keys:
        print('Existing GnuPG key found')
        return
    print('Generating GnuPG key')
    input_data = gpg.gen_key_input(name_real='OSF Generated Key')
    gpg.gen_key(input_data)
    # Hand the keyring over to the web-server user when requested.
    if owner:
        run('sudo chown -R {0} {1}'.format(owner, settings.GNUPG_HOME))
@task
def travis_addon_settings():
    """Activate each addon's Travis settings (local-travis.py -> local.py)."""
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory, 'settings')
        if os.path.isdir(path):
            # Bug fix: existence was probed with a bare open() whose handle
            # leaked; check the path directly instead.
            if os.path.isfile(os.path.join(path, 'local-travis.py')):
                run('cp {path}/local-travis.py {path}/local.py'.format(path=path))


@task
def copy_addon_settings():
    """Create each addon's local settings from its dist template.

    Addons that already have a local.py, or that ship no local-dist.py,
    are skipped.
    """
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory, 'settings')
        if os.path.isdir(path) and not os.path.isfile(os.path.join(path, 'local.py')):
            # Bug fix: same leaked-file-handle existence probe as above.
            if os.path.isfile(os.path.join(path, 'local-dist.py')):
                run('cp {path}/local-dist.py {path}/local.py'.format(path=path))
@task
def copy_settings(addons=False):
    """Create website (and optionally addon) local settings from dist templates."""
    # Website settings: only created if missing, never overwritten.
    if not os.path.isfile('website/settings/local.py'):
        print('Creating local.py file')
        run('cp website/settings/local-dist.py website/settings/local.py')
    # Addon settings
    if addons:
        copy_addon_settings()
@task
def packages():
    """Install system-level dependencies (OS X via Homebrew; Linux is TODO)."""
    brew_commands = [
        'update',
        'upgrade',
        'install libxml2',
        'install libxslt',
        'install elasticsearch',
        'install gpg',
        'install node',
        'tap tokutek/tokumx',
        'install tokumx-bin',
    ]
    if platform.system() == 'Darwin':
        print('Running brew commands')
        for item in brew_commands:
            command = 'brew {cmd}'.format(cmd=item)
            run(command)
    elif platform.system() == 'Linux':
        # TODO: Write a script similar to brew bundle for Ubuntu
        # e.g., run('sudo apt-get install [list of packages]')
        pass
@task
def npm_bower():
    """Install the bower package manager globally via npm."""
    print('Installing bower')
    run('npm install -g bower', echo=True)


@task(aliases=['bower'])
def bower_install():
    """Prune stale bower components, then install the declared ones."""
    print('Installing bower-managed packages')
    bower_bin = os.path.join(HERE, 'node_modules', 'bower', 'bin', 'bower')
    # Same two invocations as before, expressed as a loop.
    for subcommand in ('prune', 'install'):
        run('{} {}'.format(bower_bin, subcommand), echo=True)
@task
def setup():
    """Creates local settings, installs requirements, and generates encryption key"""
    copy_settings(addons=True)
    packages()
    requirements(addons=True, dev=True)
    encryption()
    # Imported here so this task module can load before the app is configured.
    from website.app import build_js_config_files
    from website import settings
    # Build nodeCategories.json before building assets
    build_js_config_files(settings)
    assets(dev=True, watch=False)


@task
def analytics():
    """Run every analytics/metrics script against the initialized app."""
    from website.app import init_app
    import matplotlib
    # Non-interactive backend: this may run headless (cron / servers).
    matplotlib.use('Agg')
    init_app()
    from scripts import metrics
    from scripts.analytics import (
        logs, addons, comments, folders, links, watch, email_invites,
        permissions, profile, benchmarks
    )
    modules = (
        metrics, logs, addons, comments, folders, links, watch, email_invites,
        permissions, profile, benchmarks
    )
    for module in modules:
        module.main()


@task
def clear_sessions(months=1, dry_run=False):
    """Clear sessions relative to ``months`` (delegates to scripts.clear_sessions)."""
    from website.app import init_app
    init_app(routes=False, set_backends=True)
    from scripts import clear_sessions
    clear_sessions.clear_sessions_relative(months=months, dry_run=dry_run)
# Release tasks
@task
def hotfix(name, finish=False, push=False):
    """Rename hotfix branch to hotfix/<next-patch-version> and optionally
    finish hotfix.

    Args:
        name: existing local branch to rename.
        finish: also run ``git flow hotfix finish`` on the renamed branch.
        push: push master, tags, and develop to origin afterwards.
    """
    # Fix: typo "curent" -> "current" in the status message.
    print('Checking out master to calculate current version')
    run('git checkout master')
    latest_version = latest_tag_info()['current_version']
    print('Current version is: {}'.format(latest_version))
    major, minor, patch = latest_version.split('.')
    next_patch_version = '.'.join([major, minor, str(int(patch) + 1)])
    print('Bumping to next patch version: {}'.format(next_patch_version))
    print('Renaming branch...')
    new_branch_name = 'hotfix/{}'.format(next_patch_version)
    run('git checkout {}'.format(name), echo=True)
    run('git branch -m {}'.format(new_branch_name), echo=True)
    if finish:
        run('git flow hotfix finish {}'.format(next_patch_version), echo=True, pty=True)
    if push:
        run('git push origin master', echo=True)
        run('git push --tags', echo=True)
        run('git push origin develop', echo=True)
@task
def feature(name, finish=False, push=False):
    """Rename the current branch to a feature branch and optionally finish it."""
    print('Renaming branch...')
    branch = 'feature/{}'.format(name)
    run('git branch -m {}'.format(branch), echo=True)
    if finish:
        run('git flow feature finish {}'.format(name), echo=True)
    if push:
        run('git push origin develop', echo=True)
# Adapted from bumpversion
def latest_tag_info():
    """Return info about the latest git tag of the current repository.

    Parses ``git describe --dirty --tags --long --abbrev=40`` into a dict
    with keys: ``dirty`` (only present when the tree is dirty),
    ``commit_sha``, ``distance_to_latest_tag``, and ``current_version``.

    Raises:
        subprocess.CalledProcessError: if git describe fails (e.g. no tags).
    """
    # git-describe doesn't update the git-index, so we do that
    # subprocess.check_output(["git", "update-index", "--refresh"])
    # get info about the latest tag in git
    #
    # Fix: the original wrapped this call in an ``except CalledProcessError
    # as err: raise err`` handler, which made both the logging call and the
    # ``return {}`` after it unreachable dead code.  The handler is removed;
    # the exception propagates exactly as before.
    describe_out = subprocess.check_output([
        "git",
        "describe",
        "--dirty",
        "--tags",
        "--long",
        "--abbrev=40"
    ], stderr=subprocess.STDOUT
    ).decode().split("-")
    info = {}
    # Output shape: <tag>-<distance>-g<sha>[-dirty]; consume from the right.
    if describe_out[-1].strip() == "dirty":
        info["dirty"] = True
        describe_out.pop()
    info["commit_sha"] = describe_out.pop().lstrip("g")
    info["distance_to_latest_tag"] = int(describe_out.pop())
    info["current_version"] = describe_out.pop().lstrip("v")
    # assert type(info["current_version"]) == str
    # NOTE: a tag name containing '-' would trip this assert (pre-existing).
    assert 0 == len(describe_out)
    return info
# Tasks for generating and bundling SSL certificates
# See http://cosdev.readthedocs.org/en/latest/osf/ops.html for details
@task
def generate_key(domain, bits=2048):
    """Generate a DES3-encrypted RSA private key as <domain>.key."""
    run('openssl genrsa -des3 -out {0}.key {1}'.format(domain, bits))


@task
def generate_key_nopass(domain):
    """Strip the passphrase from <domain>.key, writing <domain>.key.nopass."""
    run('openssl rsa -in {domain}.key -out {domain}.key.nopass'.format(
        domain=domain
    ))


@task
def generate_csr(domain):
    """Create a certificate signing request from the passphrase-less key."""
    run('openssl req -new -key {domain}.key.nopass -out {domain}.csr'.format(
        domain=domain
    ))


@task
def request_ssl_cert(domain):
    """Generate a key, a key with password removed, and a signing request for
    the specified domain.

    Usage:
    > invoke request_ssl_cert pizza.osf.io
    """
    generate_key(domain)
    generate_key_nopass(domain)
    generate_csr(domain)
@task
def bundle_certs(domain, cert_path):
    """Concatenate certificates from NameCheap in the correct order. Certificate
    files must be in the same directory.
    """
    # Leaf certificate first, then the intermediates, then the root.
    chain = [
        '{0}.crt'.format(domain),
        'COMODORSADomainValidationSecureServerCA.crt',
        'COMODORSAAddTrustCA.crt',
        'AddTrustExternalCARoot.crt',
    ]
    full_paths = [os.path.join(cert_path, name) for name in chain]
    run('cat {certs} > {domain}.bundle.crt'.format(
        certs=' '.join(full_paths),
        domain=domain,
    ))
@task
def clean_assets():
    """Remove built JS files."""
    public_path = os.path.join(HERE, 'website', 'static', 'public')
    js_path = os.path.join(public_path, 'js')
    run('rm -rf {0}'.format(js_path), echo=True)


@task(aliases=['pack'])
def webpack(clean=False, watch=False, dev=False):
    """Build static assets with webpack."""
    if clean:
        clean_assets()
    webpack_bin = os.path.join(HERE, 'node_modules', 'webpack', 'bin', 'webpack.js')
    args = [webpack_bin]
    # Colored output for local dev builds, progress meter otherwise.
    if settings.DEBUG_MODE and dev:
        args += ['--colors']
    else:
        args += ['--progress']
    if watch:
        args += ['--watch']
    # Dev and prod builds use separate webpack config files.
    config_file = 'webpack.dev.config.js' if dev else 'webpack.prod.config.js'
    args += ['--config {0}'.format(config_file)]
    command = ' '.join(args)
    run(command, echo=True)
@task()
def assets(dev=False, watch=False):
    """Install and build static assets."""
    # Dev installs include devDependencies; production installs do not.
    npm_cmd = 'npm install' if dev else 'npm install --production'
    run(npm_cmd, echo=True)
    bower_install()
    # Always set clean=False to prevent possible mistakes
    # on prod
    webpack(clean=False, watch=watch, dev=dev)
@task
def generate_self_signed(domain):
    """Generate self-signed SSL key and certificate."""
    run(
        'openssl req -x509 -nodes -days 365 -newkey rsa:2048'
        ' -keyout {0}.key -out {0}.crt'.format(domain)
    )
@task
def update_citation_styles():
    """Re-parse citation style files and report how many were processed."""
    # Imported lazily: the scripts package needs the app environment.
    from scripts import parse_citation_styles
    total = parse_citation_styles.main()
    print("Parsed {} styles".format(total))
| apache-2.0 |
jesseditson/rethinkdb | external/v8_3.30.33.16/build/gyp/test/generator-output/gyptest-subdir2-deep.py | 216 | 1034 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target from a .gyp file a few subdirectories
deep when the --generator-output= option is used to put the build
configuration files in a separate directory tree.
"""
import TestGyp
# Android doesn't support --generator-output.
test = TestGyp.TestGyp(formats=['!android'])
# The source tree must stay pristine; only the deep build dir is writable,
# proving that all generated files land under --generator-output.
test.writable(test.workpath('src'), False)
test.writable(test.workpath('src/subdir2/deeper/build'), True)
test.run_gyp('deeper.gyp',
             '-Dset_symroot=1',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='src/subdir2/deeper')
test.build('deeper.gyp', test.ALL, chdir='gypfiles')
chdir = 'gypfiles'
# The xcode generator places built products relative to the source .gyp
# rather than the generator-output tree.
if test.format == 'xcode':
    chdir = 'src/subdir2/deeper'
test.run_built_executable('deeper',
                          chdir=chdir,
                          stdout="Hello from deeper.c\n")
test.pass_test()
| agpl-3.0 |
omniacreator/qtcreator | tests/system/suite_qtquick/tst_qtquick_creation/test.py | 3 | 4077 | #############################################################################
##
## Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")


def main():
    """Create, build, and run a Qt Quick app for several kit/version combos."""
    startApplication("qtcreator" + SettingsPath)
    if not startedWithoutPluginError():
        return
    # Each pair is (target kit, Qt Quick version string) to exercise.
    for targ, qVer in [[Targets.DESKTOP_480_GCC, "1.1"], [Targets.DESKTOP_521_DEFAULT, "2.1"],
                       [Targets.DESKTOP_521_DEFAULT, "2.2"]]:
        # using a temporary directory won't mess up a potentially existing
        workingDir = tempDir()
        checkedTargets, projectName = createNewQtQuickApplication(workingDir, targets=targ,
                                                                  qtQuickVersion=qVer)
        test.log("Building project Qt Quick %s Application (%s)"
                 % (qVer, Targets.getStringForTarget(targ)))
        # Try to hook Squish into the built application on a fixed port.
        result = modifyRunSettingsForHookInto(projectName, len(checkedTargets), 11223)
        invokeMenuItem("Build", "Build All")
        waitForCompile()
        if not checkCompile():
            test.fatal("Compile failed")
        else:
            checkLastBuild()
            test.log("Running project (includes build)")
            if result:
                # Hook-into available: run with an attachable AUT and
                # temporarily open the Windows firewall for it.
                result = addExecutableAsAttachableAUT(projectName, 11223)
                allowAppThroughWinFW(workingDir, projectName)
                if result:
                    result = runAndCloseApp(True, projectName, 11223,
                                            "subprocessFunctionQuick%s" % qVer[0],
                                            SubprocessType.QT_QUICK_APPLICATION, quickVersion=qVer)
                else:
                    result = runAndCloseApp(sType=SubprocessType.QT_QUICK_APPLICATION)
                removeExecutableAsAttachableAUT(projectName, 11223)
                deleteAppFromWinFW(workingDir, projectName)
            else:
                result = runAndCloseApp()
            # None signals the run itself failed; re-check the build output.
            if result == None:
                checkCompile()
            else:
                logApplicationOutput()
    invokeMenuItem("File", "Close All Projects and Editors")
    invokeMenuItem("File", "Exit")
def subprocessFunctionGenericQuick(quickVersion):
    """Close the launched QtQuick viewer by clicking its 'Hello World' text."""
    helloWorldText = waitForObject("{container={type='QtQuick%dApplicationViewer' visible='1' "
                                   "unnamed='1'} enabled='true' text='Hello World' type='Text' "
                                   "unnamed='1' visible='true'}" % quickVersion)
    test.log("Clicking 'Hello World' Text to close QtQuick%dApplicationViewer" % quickVersion)
    mouseClick(helloWorldText, 5, 5, 0, Qt.LeftButton)


def subprocessFunctionQuick1():
    # Entry point looked up by name ("subprocessFunctionQuick1") in main().
    subprocessFunctionGenericQuick(1)


def subprocessFunctionQuick2():
    # Entry point looked up by name ("subprocessFunctionQuick2") in main().
    subprocessFunctionGenericQuick(2)
| lgpl-2.1 |
geopython/QGIS | tests/src/python/test_qgsserver_accesscontrol_wcs.py | 10 | 5400 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Stephane Brunner'
__date__ = '28/08/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
print('CTEST_FULL_OUTPUT')
from qgis.testing import unittest
import urllib.request
import urllib.parse
import urllib.error
from test_qgsserver_accesscontrol import TestQgsServerAccessControl
class TestQgsServerAccessControlWCS(TestQgsServerAccessControl):
    """WCS access-control tests.

    Each test issues the same request twice: once with full access and once
    restricted.  Passing TEST=dem activates the restriction on the 'dem'
    coverage, which must then disappear from (or fail in) the response.
    """

    def test_wcs_getcapabilities(self):
        # Without the restriction both access levels advertise 'dem'.
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "GetCapabilities",
        }.items())])
        response, headers = self._get_fullaccess(query_string)
        self.assertTrue(
            str(response).find("<name>dem</name>") != -1,
            "No dem layer in WCS/GetCapabilities\n%s" % response)
        response, headers = self._get_restricted(query_string)
        self.assertTrue(
            str(response).find("<name>dem</name>") != -1,
            "No dem layer in WCS/GetCapabilities\n%s" % response)
        # TEST=dem activates the restriction: the layer must be hidden.
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "GetCapabilities",
            "TEST": "dem",
        }.items())])
        response, headers = self._get_restricted(query_string)
        self.assertFalse(
            str(response).find("<name>dem</name>") != -1,
            "Unexpected dem layer in WCS/GetCapabilities\n%s" % response)

    def test_wcs_describecoverage(self):
        # Same pattern as GetCapabilities, for DescribeCoverage.
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "DescribeCoverage",
            "COVERAGE": "dem",
        }.items())])
        response, headers = self._get_fullaccess(query_string)
        self.assertTrue(
            str(response).find("<name>dem</name>") != -1,
            "No dem layer in DescribeCoverage\n%s" % response)
        response, headers = self._get_restricted(query_string)
        self.assertTrue(
            str(response).find("<name>dem</name>") != -1,
            "No dem layer in DescribeCoverage\n%s" % response)
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "DescribeCoverage",
            "COVERAGE": "dem",
            "TEST": "dem",
        }.items())])
        response, headers = self._get_restricted(query_string)
        self.assertFalse(
            str(response).find("<name>dem</name>") != -1,
            "Unexpected dem layer in DescribeCoverage\n%s" % response)

    def test_wcs_getcoverage(self):
        # Unrestricted GetCoverage returns a GeoTIFF matching the golden file.
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "GetCoverage",
            "COVERAGE": "dem",
            "CRS": "EPSG:3857",
            "BBOX": "-1387454,4252256,431091,5458375",
            "HEIGHT": "100",
            "WIDTH": "100",
            "FORMAT": "GTiff",
        }.items())])
        response, headers = self._get_fullaccess(query_string)
        self.assertEqual(
            headers.get("Content-Type"), "image/tiff",
            "Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
        self.assertTrue(
            self._geo_img_diff(response, "WCS_GetCoverage.geotiff") == 0,
            "Image for GetCoverage is wrong")
        response, headers = self._get_restricted(query_string)
        self.assertEqual(
            headers.get("Content-Type"), "image/tiff",
            "Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
        self.assertTrue(
            self._geo_img_diff(response, "WCS_GetCoverage.geotiff") == 0,
            "Image for GetCoverage is wrong")
        # Restricted GetCoverage must yield a ServiceException, not an image.
        query_string = "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WCS",
            "VERSION": "1.0.0",
            "REQUEST": "GetCoverage",
            "COVERAGE": "dem",
            "CRS": "EPSG:3857",
            "BBOX": "-1387454,4252256,431091,5458375",
            "HEIGHT": "100",
            "WIDTH": "100",
            "FORMAT": "GTiff",
            "TEST": "dem",
        }.items())])
        response, headers = self._get_restricted(query_string)
        self.assertEqual(
            headers.get("Content-Type"), "text/xml; charset=utf-8",
            "Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
        self.assertTrue(
            str(response).find('<ServiceException code="RequestNotWellFormed">') != -1,
            "The layer for the COVERAGE 'dem' is not found")


if __name__ == "__main__":
    unittest.main()
| gpl-2.0 |
zhongliliu/muse | muse/Crystaloper/How2rep.py | 1 | 1119 | """
MUSE -- A Multi-algorithm-collaborative Universal Structure-prediction Environment
Copyright (C) 2010-2017 by Zhong-Li Liu
This program is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software Foundation
version 2 of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
E-mail: zl.liu@163.com
"""
from muse.Readwrite.ReadInput import indict
def How2rep(nu, nulist):
    """Decide whether and how to replicate a primitive cell.

    Args:
        nu: atom count of the current cell; replication is only considered
            when it exceeds the MinNum_PrimitiveCell threshold from indict.
        nulist: candidate atom counts; the largest entry is the target size.

    Returns:
        Tuple (IfRep, Maxnu, RepTimes): whether replication is possible,
        the largest proper divisor of max(nulist) present in nulist, and
        the integer replication factor max(nulist) // Maxnu
        (False, 0, 0 when not applicable).
    """
    Maxnu = 0
    RepTimes = 0
    IfRep = False
    if nu > int(indict['MinNum_PrimitiveCell'][0]):
        # Hoist max(nulist) out of the loop: the original recomputed it for
        # every element (quadratic for long lists).
        biggest = max(nulist)
        # Proper divisors of the largest size that appear in nulist.
        divisors = [i for i in nulist if biggest % i == 0 and i != biggest]
        if divisors:
            IfRep = True
            Maxnu = max(divisors)
            # Exact by construction (biggest % Maxnu == 0); use floor division.
            RepTimes = biggest // Maxnu
    return IfRep, Maxnu, RepTimes
| gpl-2.0 |
caesar2164/edx-platform | common/djangoapps/student/management/tests/test_change_enrollment.py | 44 | 4210 | """ Test the change_enrollment command line script."""
import ddt
from mock import patch
from django.core.management import call_command
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from student.tests.factories import UserFactory, CourseModeFactory
from student.models import CourseEnrollment
@ddt.ddt
class ChangeEnrollmentTests(SharedModuleStoreTestCase):
    """ Test the enrollment change functionality of the change_enrollment script."""

    def setUp(self):
        super(ChangeEnrollmentTests, self).setUp()
        self.course = CourseFactory.create()
        # Two course modes so enrollments can be moved audit -> honor.
        self.audit_mode = CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug='audit',
            mode_display_name='Audit',
        )
        self.honor_mode = CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug='honor',
            mode_display_name='Honor',
        )
        self.user_info = [
            ('amy', 'amy@pond.com', 'password'),
            ('rory', 'rory@theroman.com', 'password'),
            ('river', 'river@song.com', 'password')
        ]
        self.enrollments = []
        self.users = []
        # Every user starts enrolled in audit mode.
        for username, email, password in self.user_info:
            user = UserFactory.create(username=username, email=email, password=password)
            self.users.append(user)
            self.enrollments.append(CourseEnrollment.enroll(user, self.course.id, mode='audit'))

    @patch('student.management.commands.change_enrollment.logger')
    @ddt.data(
        ('email', False, 3),
        ('username', False, 3),
        ('email', True, 0),
        ('username', True, 0),
    )
    @ddt.unpack
    def test_convert_users(self, method, noop, expected_conversions, mock_logger):
        """ The command should update the user's enrollment. """
        # Users may be identified by email or username; with noop=True the
        # command must report success but convert nobody.
        user_str = ','.join([getattr(user, method) for user in self.users])
        user_ids = [u.id for u in self.users]
        command_args = {
            'course_id': unicode(self.course.id),
            'to_mode': 'honor',
            'from_mode': 'audit',
            'noop': noop,
            method: user_str,
        }
        # Verify users are not in honor mode yet
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=user_ids)),
            0
        )
        call_command(
            'change_enrollment',
            **command_args
        )
        # Verify correct number of users are now in honor mode
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=user_ids)),
            expected_conversions
        )
        mock_logger.info.assert_called_with(
            'Successfully updated %i out of %i users',
            len(self.users),
            len(self.users)
        )

    @patch('student.management.commands.change_enrollment.logger')
    @ddt.data(
        ('email', 'dtennant@thedoctor.com', 3),
        ('username', 'dtennant', 3),
    )
    @ddt.unpack
    def test_user_not_found(self, method, fake_user, expected_success, mock_logger):
        # An unknown identifier is logged and skipped; the real users are
        # still converted.
        all_users = [getattr(user, method) for user in self.users]
        all_users.append(fake_user)
        user_str = ','.join(all_users)
        real_user_ids = [u.id for u in self.users]
        command_args = {
            'course_id': unicode(self.course.id),
            'to_mode': 'honor',
            'from_mode': 'audit',
            method: user_str,
        }
        # Verify users are not in honor mode yet
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=real_user_ids)),
            0
        )
        call_command(
            'change_enrollment',
            **command_args
        )
        # Verify correct number of users are now in honor mode
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=real_user_ids)),
            expected_success
        )
        mock_logger.info.assert_called_with(
            'user: [%s] reason: [%s] %s', fake_user, 'DoesNotExist', 'User matching query does not exist.'
        )
| agpl-3.0 |
Senseg/Py4A | python3-alpha/extra_modules/gdata/analytics/__init__.py | 261 | 6995 | #!/usr/bin/python
#
# Original Copyright (C) 2006 Google Inc.
# Refactored in 2009 to work for Google Analytics by Sal Uryasev at Juice Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Note that this module will not function without specifically adding
# 'analytics': [ #Google Analytics
# 'https://www.google.com/analytics/feeds/'],
# to CLIENT_LOGIN_SCOPES in the gdata/service.py file
"""Contains extensions to Atom objects used with Google Analytics."""
__author__ = 'api.suryasev (Sal Uryasev)'
import atom
import gdata
GAN_NAMESPACE = 'http://schemas.google.com/analytics/2009'
class TableId(gdata.GDataEntry):
    """tableId element."""
    _tag = 'tableId'
    _namespace = GAN_NAMESPACE


class Property(gdata.GDataEntry):
    """A gan:property element: a name/value pair attached to an entry."""
    _tag = 'property'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'

    def __init__(self, name=None, value=None, *args, **kwargs):
        self.name = name
        self.value = value
        super(Property, self).__init__(*args, **kwargs)

    def __str__(self):
        # Fix: returning self.value directly raised TypeError whenever
        # value was None (str/repr must return a string).
        return str(self.value)

    def __repr__(self):
        return str(self.value)
class AccountListEntry(gdata.GDataEntry):
    """The Google Documents version of an Atom Entry"""
    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # Parse gan:tableId and gan:property child elements into lists.
    _children['{%s}tableId' % GAN_NAMESPACE] = ('tableId',
                                                [TableId])
    _children['{%s}property' % GAN_NAMESPACE] = ('property',
                                                 [Property])

    def __init__(self, tableId=None, property=None,
                 *args, **kwargs):
        self.tableId = tableId
        self.property = property
        super(AccountListEntry, self).__init__(*args, **kwargs)


def AccountListEntryFromString(xml_string):
    """Converts an XML string into an AccountListEntry object.

    Args:
        xml_string: string The XML describing a Document List feed entry.

    Returns:
        A AccountListEntry object corresponding to the given XML.
    """
    return atom.CreateClassFromXMLString(AccountListEntry, xml_string)


class AccountListFeed(gdata.GDataFeed):
    """A feed containing a list of Google Documents Items"""
    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [AccountListEntry])


def AccountListFeedFromString(xml_string):
    """Converts an XML string into an AccountListFeed object.

    Args:
        xml_string: string The XML describing an AccountList feed.

    Returns:
        An AccountListFeed object corresponding to the given XML.
        All properties are also linked to with a direct reference
        from each entry object for convenience. (e.g. entry.AccountName)
    """
    feed = atom.CreateClassFromXMLString(AccountListFeed, xml_string)
    # Flatten each entry's gan:property elements onto the entry itself
    # (stripping the 'ga:' prefix from the attribute name), and expose each
    # tableId's text content as its .value attribute.
    for entry in feed.entry:
        for pro in entry.property:
            entry.__dict__[pro.name.replace('ga:','')] = pro
        for td in entry.tableId:
            td.__dict__['value'] = td.text
    return feed
class Dimension(gdata.GDataEntry):
    """A ga:dimension element: name/value plus type and confidence interval."""
    _tag = 'dimension'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'
    _attributes['type'] = 'type'
    _attributes['confidenceInterval'] = 'confidence_interval'

    def __init__(self, name=None, value=None, type=None,
                 confidence_interval = None, *args, **kwargs):
        self.name = name
        self.value = value
        self.type = type
        self.confidence_interval = confidence_interval
        super(Dimension, self).__init__(*args, **kwargs)

    def __str__(self):
        # NOTE(review): returns self.value as-is; this raises TypeError if
        # value is None — presumably values are always strings in practice.
        return self.value

    def __repr__(self):
        return self.value


class Metric(gdata.GDataEntry):
    """A ga:metric element; structurally identical to Dimension above."""
    _tag = 'metric'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'
    _attributes['type'] = 'type'
    _attributes['confidenceInterval'] = 'confidence_interval'

    def __init__(self, name=None, value=None, type=None,
                 confidence_interval = None, *args, **kwargs):
        self.name = name
        self.value = value
        self.type = type
        self.confidence_interval = confidence_interval
        super(Metric, self).__init__(*args, **kwargs)

    def __str__(self):
        # NOTE(review): same None-unsafe return as Dimension.__str__.
        return self.value

    def __repr__(self):
        return self.value
class AnalyticsDataEntry(gdata.GDataEntry):
    """The Google Analytics version of an Atom Entry"""
    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # Parse gan:dimension and gan:metric child elements into lists.
    _children['{%s}dimension' % GAN_NAMESPACE] = ('dimension',
                                                  [Dimension])
    _children['{%s}metric' % GAN_NAMESPACE] = ('metric',
                                               [Metric])

    def __init__(self, dimension=None, metric=None, *args, **kwargs):
        self.dimension = dimension
        self.metric = metric
        super(AnalyticsDataEntry, self).__init__(*args, **kwargs)


class AnalyticsDataFeed(gdata.GDataFeed):
    """A feed containing a list of Google Analytics Data Feed"""
    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [AnalyticsDataEntry])


"""
Data Feed
"""


def AnalyticsDataFeedFromString(xml_string):
    """Converts an XML string into an AccountListFeed object.

    Args:
        xml_string: string The XML describing an AccountList feed.

    Returns:
        An AccountListFeed object corresponding to the given XML.
        Each metric and dimension is also referenced directly from
        the entry for easier access. (e.g. entry.keyword.value)
    """
    feed = atom.CreateClassFromXMLString(AnalyticsDataFeed, xml_string)
    # Flatten metrics (and, when present, dimensions) onto each entry,
    # stripping the 'ga:' prefix so e.g. 'ga:keyword' becomes entry.keyword.
    if feed.entry:
        for entry in feed.entry:
            for met in entry.metric:
                entry.__dict__[met.name.replace('ga:','')] = met
            if entry.dimension is not None:
                for dim in entry.dimension:
                    entry.__dict__[dim.name.replace('ga:','')] = dim
    return feed
| apache-2.0 |
aledbf/contrib | service-loadbalancer/Godeps/_workspace/src/github.com/ugorji/go/codec/test.py | 1516 | 4019 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# sudo apt-get install python-dev
# sudo apt-get install python-pip
# pip install --user msgpack-python msgpack-rpc-python cbor
# Ensure all "string" keys are utf strings (else encoded as bytes)
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
    """Return the codec test corpus: all primitive values plus combo types.

    Layout: the primitives themselves, then the primitive list as one nested
    element, then the int 1, then a few composite dicts/lists.
    """
    primitives = [
        -8,
        -1616,
        -32323232,
        -6464646464646464,
        192,
        1616,
        32323232,
        6464646464646464,
        192,
        -3232.0,
        -6464646464.0,
        3232.0,
        6464.0,
        6464646464.0,
        False,
        True,
        u"null",
        None,
        u"someday",
        1328176922000002000,
        u"",
        -2206187877999998000,
        u"bytestring",
        270,
        u"none",
        -2013855847999995777,
        #-6795364578871345152,
    ]
    composites = [
        {"true": True,
         "false": False},
        {"true": u"True",
         "false": False,
         "uint16(1616)": 1616},
        {"list": [1616, 32323232, True, -3232.0, {"TRUE": True, "FALSE": False}, [True, False]],
         "int32": 32323232, "bool": True,
         "LONG STRING": u"123456789012345678901234567890123456789012345678901234567890",
         "SHORT STRING": u"1234567890"},
        {True: "true", 138: False, "false": 200},
    ]
    # Note: the primitives list itself is appended as a single element.
    return primitives + [primitives] + [1] + composites
def build_test_data(destdir):
    """Serialize each test value to <destdir>/<i>.{msgpack,cbor}.golden."""
    l = get_test_data_list()
    for i in range(len(l)):
        # packer = msgpack.Packer()
        serialized = msgpack.dumps(l[i])
        f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
        f.write(serialized)
        f.close()
        serialized = cbor.dumps(l[i])
        f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
        f.write(serialized)
        f.close()
def doRpcServer(port, stopTimeSec):
    """Serve Echo123/EchoStruct over msgpack-rpc on localhost:port.

    If stopTimeSec > 0 a timer thread stops the server after that delay;
    otherwise the server runs until interrupted.
    """
    class EchoHandler(object):
        def Echo123(self, msg1, msg2, msg3):
            return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
        def EchoStruct(self, msg):
            return ("%s" % msg)

    addr = msgpackrpc.Address('localhost', port)
    server = msgpackrpc.Server(EchoHandler())
    server.listen(addr)
    # run thread to stop it after stopTimeSec seconds if > 0
    if stopTimeSec > 0:
        def myStopRpcServer():
            server.stop()
        t = threading.Timer(stopTimeSec, myStopRpcServer)
        t.start()
    server.start()
def doRpcClientToPythonSvc(port):
    # NOTE: this file is Python 2 (bare print statements below).
    # Exercises the Python echo service started by doRpcServer.
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("Echo123", "A1", "B2", "C3")
    print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})


def doRpcClientToGoSvc(port):
    # Exercises the Go-side service; Go exposes methods as Type.Method and
    # (with this codec's rpc) takes the arguments as a single list.
    # print ">>>> port: ", port, " <<<<<"
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
    print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
    """Dispatch the sub-command named by args[0] to the matching helper."""
    n = len(args)
    cmd = args[0] if n else None
    if cmd == "testdata" and n == 2:
        build_test_data(args[1])
    elif cmd == "rpc-server" and n == 3:
        doRpcServer(int(args[1]), int(args[2]))
    elif cmd == "rpc-client-python-service" and n == 2:
        doRpcClientToPythonSvc(int(args[1]))
    elif cmd == "rpc-client-go-service" and n == 2:
        doRpcClientToGoSvc(int(args[1]))
    else:
        # Unknown command or wrong argument count: show usage.
        print("Usage: test.py "
              "[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")

if __name__ == "__main__":
    doMain(sys.argv[1:])
| apache-2.0 |
joachimmetz/plaso | plaso/output/shared_json.py | 3 | 3773 | # -*- coding: utf-8 -*-
"""Shared functionality for JSON based output modules."""
import json
from plaso.lib import errors
from plaso.output import dynamic
from plaso.output import formatting_helper
from plaso.serializer import json_serializer
class JSONEventFormattingHelper(formatting_helper.EventFormattingHelper):
    """JSON output module event formatting helper."""

    _JSON_SERIALIZER = json_serializer.JSONAttributeContainerSerializer

    def __init__(self, output_mediator):
        """Initializes a JSON output module event formatting helper.

        Args:
          output_mediator (OutputMediator): output mediator.
        """
        super(JSONEventFormattingHelper, self).__init__(output_mediator)
        # Used to derive display_name, filename, inode and message values
        # that are not already present in the serialized event data.
        self._field_formatting_helper = dynamic.DynamicFieldFormattingHelper(
            output_mediator)

    def _WriteSerializedDict(
            self, event, event_data, event_data_stream, event_tag):
        """Writes an event, event data and event tag to serialized form.

        Args:
          event (EventObject): event.
          event_data (EventData): event data.
          event_data_stream (EventDataStream): event data stream.
          event_tag (EventTag): event tag.

        Returns:
          dict[str, object]: JSON serialized objects.
        """
        event_data_json_dict = self._JSON_SERIALIZER.WriteSerializedDict(event_data)
        # Drop the serializer bookkeeping keys; the combined dict is tagged
        # as an 'event' container further below.
        del event_data_json_dict['__container_type__']
        del event_data_json_dict['__type__']

        display_name = event_data_json_dict.get('display_name', None)
        if display_name is None:
            # Fall back to the dynamically formatted value when not present.
            display_name = self._field_formatting_helper.GetFormattedField(
                'display_name', event, event_data, event_data_stream, event_tag)
            event_data_json_dict['display_name'] = display_name

        filename = event_data_json_dict.get('filename', None)
        if filename is None:
            filename = self._field_formatting_helper.GetFormattedField(
                'filename', event, event_data, event_data_stream, event_tag)
            event_data_json_dict['filename'] = filename

        inode = event_data_json_dict.get('inode', None)
        if inode is None:
            inode = self._field_formatting_helper.GetFormattedField(
                'inode', event, event_data, event_data_stream, event_tag)
            event_data_json_dict['inode'] = inode

        try:
            message = self._field_formatting_helper.GetFormattedField(
                'message', event, event_data, event_data_stream, event_tag)
            event_data_json_dict['message'] = message
        except (errors.NoFormatterFound, errors.WrongFormatter):
            # Events without a message formatter are emitted without 'message'.
            pass

        event_json_dict = self._JSON_SERIALIZER.WriteSerializedDict(event)
        event_json_dict['__container_type__'] = 'event'

        # Event data keys overwrite clashing event keys.
        event_json_dict.update(event_data_json_dict)

        if event_data_stream:
            event_data_stream_json_dict = self._JSON_SERIALIZER.WriteSerializedDict(
                event_data_stream)
            del event_data_stream_json_dict['__container_type__']

            # Rename path_spec to 'pathspec', presumably the legacy key name
            # of the JSON output format — confirm against consumers.
            path_spec = event_data_stream_json_dict.pop('path_spec', None)
            if path_spec:
                event_data_stream_json_dict['pathspec'] = path_spec

            event_json_dict.update(event_data_stream_json_dict)

        if event_tag:
            event_tag_json_dict = self._JSON_SERIALIZER.WriteSerializedDict(event_tag)
            event_json_dict['tag'] = event_tag_json_dict

        return event_json_dict

    def GetFormattedEvent(self, event, event_data, event_data_stream, event_tag):
        """Retrieves a string representation of the event.

        Args:
          event (EventObject): event.
          event_data (EventData): event data.
          event_data_stream (EventDataStream): event data stream.
          event_tag (EventTag): event tag.

        Returns:
          str: string representation of the event.
        """
        json_dict = self._WriteSerializedDict(
            event, event_data, event_data_stream, event_tag)
        # sort_keys makes the serialized output deterministic.
        return json.dumps(json_dict, sort_keys=True)
| apache-2.0 |
sunminghong/redis-py | benchmarks/command_packer_benchmark.py | 49 | 3338 | import socket
import sys
from redis.connection import (Connection, SYM_STAR, SYM_DOLLAR, SYM_EMPTY,
SYM_CRLF, b)
from redis._compat import imap
from base import Benchmark
class StringJoiningConnection(Connection):
    """Connection variant that packs a command into one joined string."""

    def send_packed_command(self, command):
        "Send an already packed command to the Redis server"
        if not self._sock:
            self.connect()
        try:
            self._sock.sendall(command)
        except socket.error:
            # Python 2 compatible way to get the active exception instance.
            e = sys.exc_info()[1]
            self.disconnect()
            if len(e.args) == 1:
                _errno, errmsg = 'UNKNOWN', e.args[0]
            else:
                _errno, errmsg = e.args
            raise ConnectionError("Error %s while writing to socket. %s." %
                                  (_errno, errmsg))
        except:
            # Any other error (including BaseException): drop the
            # connection, then re-raise unchanged.
            self.disconnect()
            raise

    def pack_command(self, *args):
        "Pack a series of arguments into a value Redis command"
        # RESP payload: "$<len>\r\n<arg>\r\n" per argument, joined into a
        # single string prefixed with "*<argc>\r\n".
        args_output = SYM_EMPTY.join([
            SYM_EMPTY.join((SYM_DOLLAR, b(str(len(k))), SYM_CRLF, k, SYM_CRLF))
            for k in imap(self.encode, args)])
        output = SYM_EMPTY.join(
            (SYM_STAR, b(str(len(args))), SYM_CRLF, args_output))
        return output
class ListJoiningConnection(Connection):
    """Connection variant that packs a command into a list of chunks."""

    def send_packed_command(self, command):
        if not self._sock:
            self.connect()
        try:
            if isinstance(command, str):
                # Accept a single pre-joined string for compatibility.
                command = [command]
            for item in command:
                self._sock.sendall(item)
        except socket.error:
            # Python 2 compatible way to get the active exception instance.
            e = sys.exc_info()[1]
            self.disconnect()
            if len(e.args) == 1:
                _errno, errmsg = 'UNKNOWN', e.args[0]
            else:
                _errno, errmsg = e.args
            raise ConnectionError("Error %s while writing to socket. %s." %
                                  (_errno, errmsg))
        except:
            # Any other error: drop the connection, then re-raise unchanged.
            self.disconnect()
            raise

    def pack_command(self, *args):
        # Accumulate into `buff`; whenever the buffer or the next argument
        # exceeds 6000 bytes, flush the header and the argument as separate
        # list entries so huge values are never joined into one big string.
        output = []
        buff = SYM_EMPTY.join(
            (SYM_STAR, b(str(len(args))), SYM_CRLF))
        for k in imap(self.encode, args):
            if len(buff) > 6000 or len(k) > 6000:
                buff = SYM_EMPTY.join(
                    (buff, SYM_DOLLAR, b(str(len(k))), SYM_CRLF))
                output.append(buff)
                output.append(k)
                # Terminator of the flushed argument starts the next buffer.
                buff = SYM_CRLF
            else:
                buff = SYM_EMPTY.join((buff, SYM_DOLLAR, b(str(len(k))),
                                       SYM_CRLF, k, SYM_CRLF))
        output.append(buff)
        return output
class CommandPackerBenchmark(Benchmark):
    """Benchmark the two pack_command strategies across payload sizes."""

    # Cartesian product of these values defines the benchmark matrix.
    ARGUMENTS = (
        {
            'name': 'connection_class',
            'values': [StringJoiningConnection, ListJoiningConnection]
        },
        {
            'name': 'value_size',
            'values': [10, 100, 1000, 10000, 100000, 1000000, 10000000,
                       100000000]
        },
    )

    def setup(self, connection_class, value_size):
        # Instantiate the client for this connection class ahead of run().
        self.get_client(connection_class=connection_class)

    def run(self, connection_class, value_size):
        # A single SET with a value_size-byte payload exercises pack_command.
        r = self.get_client()
        x = 'a' * value_size
        r.set('benchmark', x)

if __name__ == '__main__':
    CommandPackerBenchmark().run_benchmark()
| mit |
nwjs/chromium.src | third_party/blink/web_tests/external/wpt/tools/third_party/more-itertools/setup.py | 39 | 2130 | # Hack to prevent stupid error on exit of `python setup.py test`. (See
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html.)
try:
import multiprocessing # noqa
except ImportError:
pass
from re import sub
from setuptools import setup, find_packages
def get_long_description():
    """Build the long description shown on PyPI.

    Combines README.rst with the version history from docs/versions.rst,
    stripping Sphinx markup that PyPI cannot render.

    Returns:
        str: the README followed by the cleaned version history.
    """
    # Fix display issues on PyPI caused by RST markup.
    # "with" closes the README handle (the original leaked it).
    with open('README.rst') as readme_file:
        readme = readme_file.read()

    version_lines = []
    with open('docs/versions.rst') as infile:
        next(infile)  # skip the page title line
        for line in infile:
            # Drop the Sphinx automodule directive; keep other text verbatim.
            line = line.rstrip().replace('.. automodule:: more_itertools', '')
            version_lines.append(line)
    version_history = '\n'.join(version_lines)
    # Replace :func:`name` roles with plain "name".
    version_history = sub(r':func:`([a-zA-Z0-9._]+)`', r'\1', version_history)

    return readme + '\n\n' + version_history
# Distribution metadata.  long_description combines the README with the
# version history via get_long_description() defined above.
setup(
    name='more-itertools',
    version='4.2.0',
    description='More routines for operating on iterables, beyond itertools',
    long_description=get_long_description(),
    author='Erik Rose',
    author_email='erikrose@grinchcentral.com',
    license='MIT',
    packages=find_packages(exclude=['ez_setup']),
    install_requires=['six>=1.0.0,<2.0.0'],
    test_suite='more_itertools.tests',
    url='https://github.com/erikrose/more-itertools',
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development :: Libraries'],
    keywords=['itertools', 'iterator', 'iteration', 'filter', 'peek',
              'peekable', 'collate', 'chunk', 'chunked'],
)
| bsd-3-clause |
draugiskisprendimai/odoo | addons/website_forum_doc/__openerp__.py | 322 | 1508 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Documentation',
'category': 'Website',
'summary': 'Forum, Documentation',
'version': '1.0',
'description': """
Documentation based on question and pertinent answers of Forum
""",
'author': 'OpenERP SA',
'depends': [
'website_forum'
],
'data': [
'data/doc_data.xml',
'security/ir.model.access.csv',
'views/doc.xml',
'views/website_doc.xml',
],
'demo': [
'data/doc_demo.xml',
],
'installable': True,
}
| agpl-3.0 |
jamielennox/keystone | keystone/common/sql/migrate_repo/versions/065_add_domain_config.py | 11 | 1694 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
from keystone.common import sql as ks_sql
WHITELIST_TABLE = 'whitelisted_config'
SENSITIVE_TABLE = 'sensitive_config'
def upgrade(migrate_engine):
    """Create the whitelisted and sensitive domain-config tables.

    Args:
        migrate_engine: SQLAlchemy engine the migration runs against.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # Both tables share the same schema; only the name differs, so build
    # them in a loop instead of duplicating the definition.  The order
    # (whitelist first, then sensitive) matches the original migration.
    for table_name in (WHITELIST_TABLE, SENSITIVE_TABLE):
        table = sql.Table(
            table_name,
            meta,
            sql.Column('domain_id', sql.String(64), primary_key=True),
            sql.Column('group', sql.String(255), primary_key=True),
            sql.Column('option', sql.String(255), primary_key=True),
            sql.Column('value', ks_sql.JsonBlob.impl, nullable=False),
            mysql_engine='InnoDB',
            mysql_charset='utf8')
        # checkfirst makes the migration idempotent if the table exists.
        table.create(migrate_engine, checkfirst=True)
| apache-2.0 |
HolgerPeters/scikit-learn | examples/ensemble/plot_forest_importances_faces.py | 403 | 1519 | """
=================================================
Pixel importances with a parallel forest of trees
=================================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
The code below also illustrates how the construction and the computation
of the predictions can be parallelized within multiple jobs.
"""
print(__doc__)
from time import time
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs = 1

# Load the faces dataset
data = fetch_olivetti_faces()
# Flatten each 2-D face image into a 1-D pixel feature vector.
X = data.images.reshape((len(data.images), -1))
y = data.target

mask = y < 5  # Limit to 5 classes
X = X[mask]
y = y[mask]

# Build a forest and compute the pixel importances
print("Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs)
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
                              max_features=128,
                              n_jobs=n_jobs,
                              random_state=0)  # fixed seed for reproducibility

forest.fit(X, y)
print("done in %0.3fs" % (time() - t0))
importances = forest.feature_importances_
# Reshape the flat importance vector back to the image shape for display.
importances = importances.reshape(data.images[0].shape)

# Plot pixel importances
plt.matshow(importances, cmap=plt.cm.hot)
plt.title("Pixel importances with forests of trees")
plt.show()
| bsd-3-clause |
michael-dev2rights/ansible | lib/ansible/modules/web_infrastructure/jboss.py | 72 | 4641 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Jeroen Hoekx <jeroen.hoekx@dsquare.be>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: jboss
version_added: "1.4"
short_description: deploy applications to JBoss
description:
- Deploy applications to JBoss standalone using the filesystem
options:
deployment:
required: true
description:
- The name of the deployment
src:
required: false
description:
- The remote path of the application ear or war to deploy
deploy_path:
required: false
default: /var/lib/jbossas/standalone/deployments
description:
- The location in the filesystem where the deployment scanner listens
state:
required: false
choices: [ present, absent ]
default: "present"
description:
- Whether the application should be deployed or undeployed
notes:
- "The JBoss standalone deployment-scanner has to be enabled in standalone.xml"
- "Ensure no identically named application is deployed through the JBoss CLI"
author: "Jeroen Hoekx (@jhoekx)"
"""
EXAMPLES = """
# Deploy a hello world application
- jboss:
src: /tmp/hello-1.0-SNAPSHOT.war
deployment: hello.war
state: present
# Update the hello world application
- jboss:
src: /tmp/hello-1.1-SNAPSHOT.war
deployment: hello.war
state: present
# Undeploy the hello world application
- jboss:
deployment: hello.war
state: absent
"""
import os
import shutil
import time
from ansible.module_utils.basic import AnsibleModule
def is_deployed(deploy_path, deployment):
    """Return True if the scanner created the <deployment>.deployed marker."""
    marker = os.path.join(deploy_path, "%s.deployed" % deployment)
    return os.path.exists(marker)
def is_undeployed(deploy_path, deployment):
    """Return True if the scanner created the <deployment>.undeployed marker."""
    marker = os.path.join(deploy_path, "%s.undeployed" % deployment)
    return os.path.exists(marker)
def is_failed(deploy_path, deployment):
    """Return True if the scanner created the <deployment>.failed marker."""
    marker = os.path.join(deploy_path, "%s.failed" % deployment)
    return os.path.exists(marker)
def main():
    """Ansible entry point: deploy/undeploy an artifact via the JBoss
    filesystem deployment scanner and wait for its marker files."""
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='path'),
            deployment=dict(required=True),
            deploy_path=dict(type='path', default='/var/lib/jbossas/standalone/deployments'),
            state=dict(choices=['absent', 'present'], default='present'),
        ),
        # src is only needed when deploying.
        required_if=[('state', 'present', ('src',))]
    )

    result = dict(changed=False)

    src = module.params['src']
    deployment = module.params['deployment']
    deploy_path = module.params['deploy_path']
    state = module.params['state']

    if not os.path.exists(deploy_path):
        module.fail_json(msg="deploy_path does not exist.")

    deployed = is_deployed(deploy_path, deployment)

    # Initial deployment: copy the artifact in, then poll once a second
    # until the scanner flags it as .deployed (success) or .failed.
    if state == 'present' and not deployed:
        if not os.path.exists(src):
            module.fail_json(msg='Source file %s does not exist.' % src)

        if is_failed(deploy_path, deployment):
            # Clean up old failed deployment
            os.remove(os.path.join(deploy_path, "%s.failed" % deployment))

        shutil.copyfile(src, os.path.join(deploy_path, deployment))
        while not deployed:
            deployed = is_deployed(deploy_path, deployment)
            if is_failed(deploy_path, deployment):
                module.fail_json(msg='Deploying %s failed.' % deployment)
            time.sleep(1)
        result['changed'] = True

    # Redeployment: only when the artifact content changed (sha1 mismatch).
    if state == 'present' and deployed:
        if module.sha1(src) != module.sha1(os.path.join(deploy_path, deployment)):
            os.remove(os.path.join(deploy_path, "%s.deployed" % deployment))
            shutil.copyfile(src, os.path.join(deploy_path, deployment))
            deployed = False
            while not deployed:
                deployed = is_deployed(deploy_path, deployment)
                if is_failed(deploy_path, deployment):
                    module.fail_json(msg='Deploying %s failed.' % deployment)
                time.sleep(1)
            result['changed'] = True

    # Undeployment: removing the .deployed marker triggers the scanner to
    # undeploy; poll until the .undeployed marker appears.
    if state == 'absent' and deployed:
        os.remove(os.path.join(deploy_path, "%s.deployed" % deployment))
        while deployed:
            deployed = not is_undeployed(deploy_path, deployment)
            if is_failed(deploy_path, deployment):
                module.fail_json(msg='Undeploying %s failed.' % deployment)
            time.sleep(1)
        result['changed'] = True

    module.exit_json(**result)

if __name__ == '__main__':
    main()
| gpl-3.0 |
orbweb/ParsePy | parse_rest/query.py | 7 | 6184 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import copy
import collections
class QueryError(Exception):
    '''Query error base class'''

    def __init__(self, message, status_code=None):
        super(QueryError, self).__init__(message)
        # The attribute is only set when a truthy status code is supplied;
        # readers must use getattr()/hasattr() to access it safely.
        if status_code:
            self.status_code = status_code


class QueryResourceDoesNotExist(QueryError):
    '''Query returned no results'''
    pass


class QueryResourceMultipleResultsReturned(QueryError):
    '''Query was supposed to return unique result, returned more than one'''
    pass
class QueryManager(object):
    """Entry point for building and running queries for one model class."""

    def __init__(self, model_class):
        self.model_class = model_class

    def _fetch(self, **kw):
        """Run the REST query and wrap each raw result in the model class."""
        model = self.model_class
        response = model.GET(model.ENDPOINT_ROOT, **kw)
        return [model(**attrs) for attrs in response.get('results')]

    def _count(self, **kw):
        """Run the query in count mode and return the number of matches."""
        kw.update({"count": 1})
        response = self.model_class.GET(self.model_class.ENDPOINT_ROOT, **kw)
        return response.get('count')

    def all(self):
        """Return a fresh, unfiltered Queryset over the model class."""
        return Queryset(self)

    def filter(self, **kw):
        return self.all().filter(**kw)

    def fetch(self):
        return self.all().fetch()

    def get(self, **kw):
        """Return the unique object matching the given constraints."""
        return self.filter(**kw).get()
class Queryset(object):
    """Lazily evaluated, chainable query against a Parse object class."""

    # Operators understood as "field__op" suffixes in filter() kwargs.
    OPERATORS = [
        'lt', 'lte', 'gt', 'gte', 'ne', 'in', 'nin', 'exists', 'select',
        'dontSelect', 'all', 'regex', 'relatedTo', 'nearSphere'
    ]

    @staticmethod
    def convert_to_parse(value):
        """Convert a Python value to its Parse REST representation."""
        # Local import, presumably to avoid a circular dependency with
        # parse_rest.datatypes — confirm before moving to module level.
        from parse_rest.datatypes import ParseType
        return ParseType.convert_to_parse(value, as_pointer=True)

    @classmethod
    def extract_filter_operator(cls, parameter):
        """Split 'field__op' into (field, op); op is None without a suffix."""
        for op in cls.OPERATORS:
            underscored = '__%s' % op
            if parameter.endswith(underscored):
                return parameter[:-len(underscored)], op
        return parameter, None

    def __init__(self, manager):
        self._manager = manager
        # Field name -> constraint; JSON-serialized as "where" on fetch.
        self._where = collections.defaultdict(dict)
        self._select_related = []
        self._options = {}
        # Cache of fetched results; None until the query is evaluated.
        self._result_cache = None

    def __deepcopy__(self, memo):
        # Chaining methods deep-copy the queryset so each step yields an
        # independent query; the result cache is intentionally not copied.
        q = self.__class__(self._manager)
        q._where = copy.deepcopy(self._where, memo)
        q._options = copy.deepcopy(self._options, memo)
        q._select_related.extend(self._select_related)
        return q

    def __iter__(self):
        return iter(self._fetch())

    def __len__(self):
        # don't use count query for len operator
        # count doesn't return real size of result in all cases (eg if query
        # contains skip option)
        return len(self._fetch())

    def __getitem__(self, key):
        if isinstance(key, slice):
            raise AttributeError("Slice is not supported for now.")
        return self._fetch()[key]

    def _fetch(self, count=False):
        """Return a list of objects matching query, or if count == True return
        only the number of objects matching.
        """
        if self._result_cache is not None:
            return len(self._result_cache) if count else self._result_cache
        options = dict(self._options)  # make a local copy
        if self._where:
            # JSON encode WHERE values
            options['where'] = json.dumps(self._where)
        if self._select_related:
            options['include'] = ','.join(self._select_related)
        if count:
            # Counts are never cached.
            return self._manager._count(**options)

        self._result_cache = self._manager._fetch(**options)
        return self._result_cache

    def filter(self, **kw):
        """Return a new queryset with the given constraints added."""
        q = copy.deepcopy(self)
        for name, value in kw.items():
            parse_value = Queryset.convert_to_parse(value)
            attr, operator = Queryset.extract_filter_operator(name)
            if operator is None:
                # Plain equality constraint.
                q._where[attr] = parse_value
            elif operator == 'relatedTo':
                q._where['$' + operator] = {'object': parse_value, 'key': attr}
            else:
                if not isinstance(q._where[attr], dict):
                    # An equality constraint was set earlier; replace it so
                    # operator constraints can be merged in.
                    q._where[attr] = {}
                q._where[attr]['$' + operator] = parse_value
        return q

    def limit(self, value):
        q = copy.deepcopy(self)
        q._options['limit'] = int(value)
        return q

    def skip(self, value):
        q = copy.deepcopy(self)
        q._options['skip'] = int(value)
        return q

    def order_by(self, order, descending=False):
        q = copy.deepcopy(self)
        # add a minus sign before the order value if descending == True
        q._options['order'] = descending and ('-' + order) or order
        return q

    def select_related(self, *fields):
        """Return a new queryset that also fetches the given related fields."""
        q = copy.deepcopy(self)
        q._select_related.extend(fields)
        return q

    def count(self):
        return self._fetch(count=True)

    def exists(self):
        return bool(self)

    def get(self):
        """Return the single matching object; raise if zero or many match."""
        results = self._fetch()
        if len(results) == 0:
            error_message = 'Query against %s returned no results' % (
                self._manager.model_class.ENDPOINT_ROOT)
            raise QueryResourceDoesNotExist(error_message,
                                            status_code=404)
        if len(results) >= 2:
            error_message = 'Query against %s returned multiple results' % (
                self._manager.model_class.ENDPOINT_ROOT)
            raise QueryResourceMultipleResultsReturned(error_message,
                                                       status_code=404)
        return results[0]

    def __repr__(self):
        return repr(self._fetch())
| gpl-3.0 |
jxs/servo | tests/wpt/css-tests/css-text-decor-3_dev/xhtml1print/reference/support/generate-text-emphasis-style-property-tests.py | 841 | 3434 | #!/usr/bin/env python
# - * - coding: UTF-8 - * -
"""
This script generates tests text-emphasis-style-property-011 ~ 020 which
cover all possible values of text-emphasis-style property, except none
and <string>, with horizontal writing mode. It outputs a list of all
tests it generated in the format of Mozilla reftest.list to the stdout.
"""
from __future__ import unicode_literals
TEST_FILE = 'text-emphasis-style-property-{:03}{}.html'
TEST_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Test: text-emphasis-style: {title}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<link rel="help" href="https://drafts.csswg.org/css-text-decor-3/#text-emphasis-style-property">
<meta name="assert" content="'text-emphasis-style: {value}' produces {code} as emphasis marks.">
<link rel="match" href="text-emphasis-style-property-{index:03}-ref.html">
<p>Pass if there is a '{char}' above every character below:</p>
<div style="line-height: 5; text-emphasis-style: {value}">試験テスト</div>
'''
REF_FILE = 'text-emphasis-style-property-{:03}-ref.html'
REF_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Reference: text-emphasis-style: {0}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<style> rt {{ font-variant-east-asian: inherit; }} </style>
<p>Pass if there is a '{1}' above every character below:</p>
<div style="line-height: 5;"><ruby>試<rt>{1}</rt>験<rt>{1}</rt>テ<rt>{1}</rt>ス<rt>{1}</rt>ト<rt>{1}</rt></ruby></div>
'''
DATA_SET = [
('dot', 0x2022, 0x25e6),
('circle', 0x25cf, 0x25cb),
('double-circle', 0x25c9, 0x25ce),
('triangle', 0x25b2, 0x25b3),
('sesame', 0xfe45, 0xfe46),
]
SUFFIXES = ['', 'a', 'b', 'c', 'd', 'e']
def get_html_entity(code):
    """Return the numeric HTML entity for a code point, e.g. '&#x25CF;'."""
    return '&#x%04X;' % code
def write_file(filename, content):
    """Write *content* to *filename*, encoded as UTF-8."""
    data = content.encode('UTF-8')
    with open(filename, 'wb') as output:
        output.write(data)
def write_test_file(idx, suffix, style, code, name=None):
    """Emit one test file and print its reftest manifest line.

    idx/suffix form the file name, style is the text-emphasis-style value
    under test, code is the expected mark's code point, and name (defaults
    to style) is used in the test title.
    """
    if not name:
        name = style
    filename = TEST_FILE.format(idx, suffix)
    write_file(filename, TEST_TEMPLATE.format(index=idx, value=style,
                                              char=get_html_entity(code),
                                              code='U+{:04X}'.format(code),
                                              title=name))
    # Mozilla reftest.list format: test must render identically to its ref.
    print("== {} {}".format(filename, REF_FILE.format(idx)))
# Module-level counter: test files are numbered 011 upward.
idx = 10

def write_files(style, code):
    """Write the reference file plus every equivalent test variant for one
    (fill, shape) pair, bumping the global index first."""
    global idx
    idx += 1
    fill, shape = style
    basic_style = "{} {}".format(fill, shape)
    write_file(REF_FILE.format(idx),
               REF_TEMPLATE.format(basic_style, get_html_entity(code)))
    suffix = iter(SUFFIXES)
    write_test_file(idx, next(suffix), basic_style, code)
    # The reversed keyword order must render the same as "<fill> <shape>".
    write_test_file(idx, next(suffix), "{} {}".format(shape, fill), code)
    if fill == 'filled':
        # Shape alone: 'filled' is presumably the default fill — confirm
        # against the css-text-decor-3 spec.
        write_test_file(idx, next(suffix), shape, code)
    if shape == 'circle':
        # Fill alone, horizontal writing mode (per the generated title).
        write_test_file(idx, next(suffix), fill, code, fill + ', horizontal')
print("# START tests from {}".format(__file__))
# DATA_SET rows are (name, filled_code, open_code): generate all filled
# variants first (second tuple element), then all open variants (third).
for name, code, _ in DATA_SET:
    write_files(('filled', name), code)
for name, _, code in DATA_SET:
    write_files(('open', name), code)
print("# END tests from {}".format(__file__))
| mpl-2.0 |
PeterWangPo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/layout_tests/views/buildbot_results.py | 120 | 8022 | #!/usr/bin/env python
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.layout_tests.models import test_expectations
from webkitpy.common.net import resultsjsonparser
TestExpectations = test_expectations.TestExpectations
TestExpectationParser = test_expectations.TestExpectationParser
class BuildBotPrinter(object):
    # This output is parsed by buildbots and must only be changed in coordination with buildbot scripts (see webkit.org's
    # Tools/BuildSlaveSupport/build.webkit.org-config/master.cfg: RunWebKitTests._parseNewRunWebKitTestsOutput
    # and chromium.org's buildbot/master.chromium/scripts/master/log_parser/webkit_test_command.py).

    def __init__(self, stream, debug_logging):
        # stream: file-like object the report is written to.
        # debug_logging: when true, print_results() also emits run summaries.
        self.stream = stream
        self.debug_logging = debug_logging

    def print_results(self, run_details):
        """Print the full report for one test run."""
        if self.debug_logging:
            self.print_run_results(run_details.initial_results)
        self.print_unexpected_results(run_details.summarized_results, run_details.enabled_pixel_tests_in_retry)

    def _print(self, msg):
        self.stream.write(msg + '\n')

    def print_run_results(self, run_results):
        """Print overall pass counts plus per-expectation breakdowns."""
        failed = run_results.total_failures
        total = run_results.total
        passed = total - failed - run_results.remaining
        percent_passed = 0.0
        if total > 0:
            percent_passed = float(passed) * 100 / total

        self._print("=> Results: %d/%d tests passed (%.1f%%)" % (passed, total, percent_passed))
        self._print("")
        self._print_run_results_entry(run_results, test_expectations.NOW, "Tests to be fixed")

        self._print("")
        # FIXME: We should be skipping anything marked WONTFIX, so we shouldn't bother logging these stats.
        self._print_run_results_entry(run_results, test_expectations.WONTFIX,
            "Tests that will only be fixed if they crash (WONTFIX)")
        self._print("")

    def _print_run_results_entry(self, run_results, timeline, heading):
        """Print counts per expectation type for one timeline (NOW/WONTFIX)."""
        total = len(run_results.tests_by_timeline[timeline])
        not_passing = (total -
            len(run_results.tests_by_expectation[test_expectations.PASS] &
                run_results.tests_by_timeline[timeline]))
        self._print("=> %s (%d):" % (heading, not_passing))

        for result in TestExpectations.EXPECTATION_ORDER:
            if result in (test_expectations.PASS, test_expectations.SKIP):
                continue
            results = (run_results.tests_by_expectation[result] & run_results.tests_by_timeline[timeline])
            desc = TestExpectations.EXPECTATION_DESCRIPTIONS[result]
            if not_passing and len(results):
                pct = len(results) * 100.0 / not_passing
                self._print(" %5d %-24s (%4.1f%%)" % (len(results), desc, pct))

    def print_unexpected_results(self, summarized_results, enabled_pixel_tests_in_retry=False):
        """Bucket each unexpected result as pass/flaky/regression and print."""
        passes = {}
        flaky = {}
        regressions = {}

        def add_to_dict_of_lists(dict, key, value):
            # NOTE: parameter shadows the dict builtin (kept for fidelity).
            dict.setdefault(key, []).append(value)

        def add_result(test, results, passes=passes, flaky=flaky, regressions=regressions):
            actual = results['actual'].split(" ")
            expected = results['expected'].split(" ")

            def is_expected(result):
                # A generic FAIL expectation also covers the specific
                # AUDIO/TEXT/IMAGE+TEXT failure types.
                return (result in expected) or (result in ('AUDIO', 'TEXT', 'IMAGE+TEXT') and 'FAIL' in expected)

            if all(is_expected(actual_result) for actual_result in actual):
                # Don't print anything for tests that ran as expected.
                return

            if actual == ['PASS']:
                if 'CRASH' in expected:
                    add_to_dict_of_lists(passes, 'Expected to crash, but passed', test)
                elif 'TIMEOUT' in expected:
                    add_to_dict_of_lists(passes, 'Expected to timeout, but passed', test)
                else:
                    add_to_dict_of_lists(passes, 'Expected to fail, but passed', test)
            elif enabled_pixel_tests_in_retry and actual == ['TEXT', 'IMAGE+TEXT']:
                add_to_dict_of_lists(regressions, actual[0], test)
            elif len(actual) > 1:
                # We group flaky tests by the first actual result we got.
                add_to_dict_of_lists(flaky, actual[0], test)
            else:
                add_to_dict_of_lists(regressions, results['actual'], test)

        resultsjsonparser.for_each_test(summarized_results['tests'], add_result)

        if len(passes) or len(flaky) or len(regressions):
            self._print("")
        if len(passes):
            for key, tests in passes.iteritems():
                self._print("%s: (%d)" % (key, len(tests)))
                tests.sort()
                for test in tests:
                    self._print(" %s" % test)
                self._print("")
            self._print("")

        if len(flaky):
            descriptions = TestExpectations.EXPECTATION_DESCRIPTIONS
            for key, tests in flaky.iteritems():
                result = TestExpectations.EXPECTATIONS[key.lower()]
                self._print("Unexpected flakiness: %s (%d)" % (descriptions[result], len(tests)))
                tests.sort()

                for test in tests:
                    result = resultsjsonparser.result_for_test(summarized_results['tests'], test)
                    actual = result['actual'].split(" ")
                    expected = result['expected'].split(" ")
                    result = TestExpectations.EXPECTATIONS[key.lower()]
                    # FIXME: clean this up once the old syntax is gone
                    new_expectations_list = [TestExpectationParser._inverted_expectation_tokens[exp] for exp in list(set(actual) | set(expected))]
                    self._print(" %s [ %s ]" % (test, " ".join(new_expectations_list)))
                self._print("")
            self._print("")

        if len(regressions):
            descriptions = TestExpectations.EXPECTATION_DESCRIPTIONS
            for key, tests in regressions.iteritems():
                result = TestExpectations.EXPECTATIONS[key.lower()]
                self._print("Regressions: Unexpected %s (%d)" % (descriptions[result], len(tests)))
                tests.sort()
                for test in tests:
                    self._print(" %s [ %s ]" % (test, TestExpectationParser._inverted_expectation_tokens[key]))
                self._print("")

        if len(summarized_results['tests']) and self.debug_logging:
            # Trailing separator only in debug mode.
            self._print("%s" % ("-" * 78))
| bsd-3-clause |
taroplus/spark | python/pyspark/sql/streaming.py | 15 | 40034 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import json
if sys.version >= '3':
intlike = int
basestring = unicode = str
else:
intlike = (int, long)
from abc import ABCMeta, abstractmethod
from pyspark import since, keyword_only
from pyspark.rdd import ignore_unicode_prefix
from pyspark.sql.column import _to_seq
from pyspark.sql.readwriter import OptionUtils, to_str
from pyspark.sql.types import *
from pyspark.sql.utils import StreamingQueryException
__all__ = ["StreamingQuery", "StreamingQueryManager", "DataStreamReader", "DataStreamWriter"]
class StreamingQuery(object):
    """
    A handle to a query that is executing continuously in the background as new data arrives.
    All these methods are thread-safe.
    .. note:: Evolving
    .. versionadded:: 2.0
    """
    def __init__(self, jsq):
        # ``jsq`` is the py4j proxy for the JVM-side StreamingQuery; every
        # method below simply delegates to it.
        self._jsq = jsq
    @property
    @since(2.0)
    def id(self):
        """Returns the unique id of this query that persists across restarts from checkpoint data.
        That is, this id is generated when a query is started for the first time, and
        will be the same every time it is restarted from checkpoint data.
        There can only be one query with the same id active in a Spark cluster.
        Also see, `runId`.
        """
        # The JVM returns a UUID object; convert it to its string form.
        return self._jsq.id().toString()
    @property
    @since(2.1)
    def runId(self):
        """Returns the unique id of this query that does not persist across restarts. That is, every
        query that is started (or restarted from checkpoint) will have a different runId.
        """
        return self._jsq.runId().toString()
    @property
    @since(2.0)
    def name(self):
        """Returns the user-specified name of the query, or null if not specified.
        This name can be specified in the `org.apache.spark.sql.streaming.DataStreamWriter`
        as `dataframe.writeStream.queryName("query").start()`.
        This name, if set, must be unique across all active queries.
        """
        return self._jsq.name()
    @property
    @since(2.0)
    def isActive(self):
        """Whether this streaming query is currently active or not.
        """
        return self._jsq.isActive()
    @since(2.0)
    def awaitTermination(self, timeout=None):
        """Waits for the termination of `this` query, either by :func:`query.stop()` or by an
        exception. If the query has terminated with an exception, then the exception will be thrown.
        If `timeout` is set, it returns whether the query has terminated or not within the
        `timeout` seconds.
        If the query has terminated, then all subsequent calls to this method will either return
        immediately (if the query was terminated by :func:`stop()`), or throw the exception
        immediately (if the query has terminated with exception).
        throws :class:`StreamingQueryException`, if `this` query has terminated with an exception
        """
        if timeout is not None:
            if not isinstance(timeout, (int, float)) or timeout < 0:
                raise ValueError("timeout must be a positive integer or float. Got %s" % timeout)
            # The Python API takes seconds; the JVM API expects milliseconds.
            return self._jsq.awaitTermination(int(timeout * 1000))
        else:
            return self._jsq.awaitTermination()
    @property
    @since(2.1)
    def status(self):
        """
        Returns the current status of the query.
        """
        # Round-trip through JSON to turn the JVM status object into plain
        # Python dicts/lists.
        return json.loads(self._jsq.status().json())
    @property
    @since(2.1)
    def recentProgress(self):
        """Returns an array of the most recent [[StreamingQueryProgress]] updates for this query.
        The number of progress updates retained for each stream is configured by Spark session
        configuration `spark.sql.streaming.numRecentProgressUpdates`.
        """
        return [json.loads(p.json()) for p in self._jsq.recentProgress()]
    @property
    @since(2.1)
    def lastProgress(self):
        """
        Returns the most recent :class:`StreamingQueryProgress` update of this streaming query or
        None if there were no progress updates
        :return: a map
        """
        lastProgress = self._jsq.lastProgress()
        if lastProgress:
            return json.loads(lastProgress.json())
        else:
            # No batch has completed yet for this query.
            return None
    @since(2.0)
    def processAllAvailable(self):
        """Blocks until all available data in the source has been processed and committed to the
        sink. This method is intended for testing.
        .. note:: In the case of continually arriving data, this method may block forever.
            Additionally, this method is only guaranteed to block until data that has been
            synchronously appended data to a stream source prior to invocation.
            (i.e. `getOffset` must immediately reflect the addition).
        """
        return self._jsq.processAllAvailable()
    @since(2.0)
    def stop(self):
        """Stop this streaming query.
        """
        self._jsq.stop()
    @since(2.1)
    def explain(self, extended=False):
        """Prints the (logical and physical) plans to the console for debugging purpose.
        :param extended: boolean, default ``False``. If ``False``, prints only the physical plan.
        >>> sq = sdf.writeStream.format('memory').queryName('query_explain').start()
        >>> sq.processAllAvailable() # Wait a bit to generate the runtime plans.
        >>> sq.explain()
        == Physical Plan ==
        ...
        >>> sq.explain(True)
        == Parsed Logical Plan ==
        ...
        == Analyzed Logical Plan ==
        ...
        == Optimized Logical Plan ==
        ...
        == Physical Plan ==
        ...
        >>> sq.stop()
        """
        # Cannot call `_jsq.explain(...)` because it will print in the JVM process.
        # We should print it in the Python process.
        print(self._jsq.explainInternal(extended))
    @since(2.1)
    def exception(self):
        """
        :return: the StreamingQueryException if the query was terminated by an exception, or None.
        """
        if self._jsq.exception().isDefined():
            je = self._jsq.exception().get()
            msg = je.toString().split(': ', 1)[1] # Drop the Java StreamingQueryException type info
            # Re-format the Java stack trace so it reads like a JVM trace.
            stackTrace = '\n\t at '.join(map(lambda x: x.toString(), je.getStackTrace()))
            return StreamingQueryException(msg, stackTrace)
        else:
            return None
class StreamingQueryManager(object):
    """A class to manage all the :class:`StreamingQuery` StreamingQueries active.
    .. note:: Evolving
    .. versionadded:: 2.0
    """
    def __init__(self, jsqm):
        # ``jsqm`` is the py4j proxy for the JVM-side StreamingQueryManager.
        self._jsqm = jsqm
    @property
    @ignore_unicode_prefix
    @since(2.0)
    def active(self):
        """Returns a list of active queries associated with this SQLContext
        >>> sq = sdf.writeStream.format('memory').queryName('this_query').start()
        >>> sqm = spark.streams
        >>> # get the list of active streaming queries
        >>> [q.name for q in sqm.active]
        [u'this_query']
        >>> sq.stop()
        """
        # Wrap each JVM query handle in the Python StreamingQuery class.
        return [StreamingQuery(jsq) for jsq in self._jsqm.active()]
    @ignore_unicode_prefix
    @since(2.0)
    def get(self, id):
        """Returns an active query from this SQLContext or throws exception if an active query
        with this name doesn't exist.
        >>> sq = sdf.writeStream.format('memory').queryName('this_query').start()
        >>> sq.name
        u'this_query'
        >>> sq = spark.streams.get(sq.id)
        >>> sq.isActive
        True
        >>> sq = sqlContext.streams.get(sq.id)
        >>> sq.isActive
        True
        >>> sq.stop()
        """
        return StreamingQuery(self._jsqm.get(id))
    @since(2.0)
    def awaitAnyTermination(self, timeout=None):
        """Wait until any of the queries on the associated SQLContext has terminated since the
        creation of the context, or since :func:`resetTerminated()` was called. If any query was
        terminated with an exception, then the exception will be thrown.
        If `timeout` is set, it returns whether the query has terminated or not within the
        `timeout` seconds.
        If a query has terminated, then subsequent calls to :func:`awaitAnyTermination()` will
        either return immediately (if the query was terminated by :func:`query.stop()`),
        or throw the exception immediately (if the query was terminated with exception). Use
        :func:`resetTerminated()` to clear past terminations and wait for new terminations.
        In the case where multiple queries have terminated since :func:`resetTermination()`
        was called, if any query has terminated with exception, then :func:`awaitAnyTermination()`
        will throw any of the exception. For correctly documenting exceptions across multiple
        queries, users need to stop all of them after any of them terminates with exception, and
        then check the `query.exception()` for each query.
        throws :class:`StreamingQueryException`, if `this` query has terminated with an exception
        """
        if timeout is not None:
            if not isinstance(timeout, (int, float)) or timeout < 0:
                raise ValueError("timeout must be a positive integer or float. Got %s" % timeout)
            # The Python API takes seconds; the JVM API expects milliseconds.
            return self._jsqm.awaitAnyTermination(int(timeout * 1000))
        else:
            return self._jsqm.awaitAnyTermination()
    @since(2.0)
    def resetTerminated(self):
        """Forget about past terminated queries so that :func:`awaitAnyTermination()` can be used
        again to wait for new terminations.
        >>> spark.streams.resetTerminated()
        """
        self._jsqm.resetTerminated()
class DataStreamReader(OptionUtils):
    """
    Interface used to load a streaming :class:`DataFrame` from external storage systems
    (e.g. file systems, key-value stores, etc). Use :func:`spark.readStream`
    to access this.
    .. note:: Evolving.
    .. versionadded:: 2.0
    """
    def __init__(self, spark):
        # Keep both the JVM-side reader handle and the Python session; the
        # session is needed later to wrap returned JVM DataFrames.
        self._jreader = spark._ssql_ctx.readStream()
        self._spark = spark
    def _df(self, jdf):
        # Wrap a JVM DataFrame handle in the Python DataFrame class.
        # Imported lazily to avoid a circular import with pyspark.sql.dataframe.
        from pyspark.sql.dataframe import DataFrame
        return DataFrame(jdf, self._spark)
    @since(2.0)
    def format(self, source):
        """Specifies the input data source format.
        .. note:: Evolving.
        :param source: string, name of the data source, e.g. 'json', 'parquet'.
        >>> s = spark.readStream.format("text")
        """
        self._jreader = self._jreader.format(source)
        return self
    @since(2.0)
    def schema(self, schema):
        """Specifies the input schema.
        Some data sources (e.g. JSON) can infer the input schema automatically from data.
        By specifying the schema here, the underlying data source can skip the schema
        inference step, and thus speed up data loading.
        .. note:: Evolving.
        :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string
                       (For example ``col0 INT, col1 DOUBLE``).
        >>> s = spark.readStream.schema(sdf_schema)
        >>> s = spark.readStream.schema("col0 INT, col1 DOUBLE")
        """
        from pyspark.sql import SparkSession
        spark = SparkSession.builder.getOrCreate()
        if isinstance(schema, StructType):
            # Serialize the StructType to JSON and have the JVM parse it back
            # into a Scala DataType.
            jschema = spark._jsparkSession.parseDataType(schema.json())
            self._jreader = self._jreader.schema(jschema)
        elif isinstance(schema, basestring):
            # DDL-formatted string, parsed on the JVM side.
            self._jreader = self._jreader.schema(schema)
        else:
            raise TypeError("schema should be StructType or string")
        return self
    @since(2.0)
    def option(self, key, value):
        """Adds an input option for the underlying data source.
        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                            in the JSON/CSV datasources or partition values.
                            If it isn't set, it uses the default value, session local timezone.
        .. note:: Evolving.
        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
    @since(2.0)
    def options(self, **options):
        """Adds input options for the underlying data source.
        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                            in the JSON/CSV datasources or partition values.
                            If it isn't set, it uses the default value, session local timezone.
        .. note:: Evolving.
        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
    @since(2.0)
    def load(self, path=None, format=None, schema=None, **options):
        """Loads a data stream from a data source and returns it as a :class`DataFrame`.
        .. note:: Evolving.
        :param path: optional string for file-system backed data sources.
        :param format: optional string for format of the data source. Default to 'parquet'.
        :param schema: optional :class:`pyspark.sql.types.StructType` for the input schema
                       or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
        :param options: all other string options
        >>> json_sdf = spark.readStream.format("json") \\
        ...     .schema(sdf_schema) \\
        ...     .load(tempfile.mkdtemp())
        >>> json_sdf.isStreaming
        True
        >>> json_sdf.schema == sdf_schema
        True
        """
        # Apply format/schema/options to the builder before triggering load().
        if format is not None:
            self.format(format)
        if schema is not None:
            self.schema(schema)
        self.options(**options)
        if path is not None:
            # Unlike the batch reader, the streaming reader accepts exactly
            # one path, and it must be a non-empty string.
            if type(path) != str or len(path.strip()) == 0:
                raise ValueError("If the path is provided for stream, it needs to be a " +
                                 "non-empty string. List of paths are not supported.")
            return self._df(self._jreader.load(path))
        else:
            return self._df(self._jreader.load())
    @since(2.0)
    def json(self, path, schema=None, primitivesAsString=None, prefersDecimal=None,
             allowComments=None, allowUnquotedFieldNames=None, allowSingleQuotes=None,
             allowNumericLeadingZero=None, allowBackslashEscapingAnyCharacter=None,
             mode=None, columnNameOfCorruptRecord=None, dateFormat=None, timestampFormat=None,
             multiLine=None, allowUnquotedControlChars=None):
        """
        Loads a JSON file stream and returns the results as a :class:`DataFrame`.
        `JSON Lines <http://jsonlines.org/>`_ (newline-delimited JSON) is supported by default.
        For JSON (one record per file), set the ``multiLine`` parameter to ``true``.
        If the ``schema`` parameter is not specified, this function goes
        through the input once to determine the input schema.
        .. note:: Evolving.
        :param path: string represents path to the JSON dataset,
                     or RDD of Strings storing JSON objects.
        :param schema: an optional :class:`pyspark.sql.types.StructType` for the input schema
                       or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
        :param primitivesAsString: infers all primitive values as a string type. If None is set,
                                   it uses the default value, ``false``.
        :param prefersDecimal: infers all floating-point values as a decimal type. If the values
                               do not fit in decimal, then it infers them as doubles. If None is
                               set, it uses the default value, ``false``.
        :param allowComments: ignores Java/C++ style comment in JSON records. If None is set,
                              it uses the default value, ``false``.
        :param allowUnquotedFieldNames: allows unquoted JSON field names. If None is set,
                                        it uses the default value, ``false``.
        :param allowSingleQuotes: allows single quotes in addition to double quotes. If None is
                                  set, it uses the default value, ``true``.
        :param allowNumericLeadingZero: allows leading zeros in numbers (e.g. 00012). If None is
                                        set, it uses the default value, ``false``.
        :param allowBackslashEscapingAnyCharacter: allows accepting quoting of all character
                                                   using backslash quoting mechanism. If None is
                                                   set, it uses the default value, ``false``.
        :param mode: allows a mode for dealing with corrupt records during parsing. If None is
                     set, it uses the default value, ``PERMISSIVE``.
                * ``PERMISSIVE`` : sets other fields to ``null`` when it meets a corrupted \
                  record, and puts the malformed string into a field configured by \
                  ``columnNameOfCorruptRecord``. To keep corrupt records, an user can set \
                  a string type field named ``columnNameOfCorruptRecord`` in an user-defined \
                  schema. If a schema does not have the field, it drops corrupt records during \
                  parsing. When inferring a schema, it implicitly adds a \
                  ``columnNameOfCorruptRecord`` field in an output schema.
                * ``DROPMALFORMED`` : ignores the whole corrupted records.
                * ``FAILFAST`` : throws an exception when it meets corrupted records.
        :param columnNameOfCorruptRecord: allows renaming the new field having malformed string
                                          created by ``PERMISSIVE`` mode. This overrides
                                          ``spark.sql.columnNameOfCorruptRecord``. If None is set,
                                          it uses the value specified in
                                          ``spark.sql.columnNameOfCorruptRecord``.
        :param dateFormat: sets the string that indicates a date format. Custom date formats
                           follow the formats at ``java.text.SimpleDateFormat``. This
                           applies to date type. If None is set, it uses the
                           default value, ``yyyy-MM-dd``.
        :param timestampFormat: sets the string that indicates a timestamp format. Custom date
                                formats follow the formats at ``java.text.SimpleDateFormat``.
                                This applies to timestamp type. If None is set, it uses the
                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
        :param multiLine: parse one record, which may span multiple lines, per file. If None is
                          set, it uses the default value, ``false``.
        :param allowUnquotedControlChars: allows JSON Strings to contain unquoted control
                                          characters (ASCII characters with value less than 32,
                                          including tab and line feed characters) or not.
        >>> json_sdf = spark.readStream.json(tempfile.mkdtemp(), schema = sdf_schema)
        >>> json_sdf.isStreaming
        True
        >>> json_sdf.schema == sdf_schema
        True
        """
        # _set_opts (from OptionUtils) forwards every non-None keyword to the
        # underlying JVM reader as an option.
        self._set_opts(
            schema=schema, primitivesAsString=primitivesAsString, prefersDecimal=prefersDecimal,
            allowComments=allowComments, allowUnquotedFieldNames=allowUnquotedFieldNames,
            allowSingleQuotes=allowSingleQuotes, allowNumericLeadingZero=allowNumericLeadingZero,
            allowBackslashEscapingAnyCharacter=allowBackslashEscapingAnyCharacter,
            mode=mode, columnNameOfCorruptRecord=columnNameOfCorruptRecord, dateFormat=dateFormat,
            timestampFormat=timestampFormat, multiLine=multiLine,
            allowUnquotedControlChars=allowUnquotedControlChars)
        if isinstance(path, basestring):
            return self._df(self._jreader.json(path))
        else:
            raise TypeError("path can be only a single string")
    @since(2.0)
    def parquet(self, path):
        """Loads a Parquet file stream, returning the result as a :class:`DataFrame`.
        You can set the following Parquet-specific option(s) for reading Parquet files:
            * ``mergeSchema``: sets whether we should merge schemas collected from all \
                Parquet part-files. This will override ``spark.sql.parquet.mergeSchema``. \
                The default value is specified in ``spark.sql.parquet.mergeSchema``.
        .. note:: Evolving.
        >>> parquet_sdf = spark.readStream.schema(sdf_schema).parquet(tempfile.mkdtemp())
        >>> parquet_sdf.isStreaming
        True
        >>> parquet_sdf.schema == sdf_schema
        True
        """
        if isinstance(path, basestring):
            return self._df(self._jreader.parquet(path))
        else:
            raise TypeError("path can be only a single string")
    @ignore_unicode_prefix
    @since(2.0)
    def text(self, path):
        """
        Loads a text file stream and returns a :class:`DataFrame` whose schema starts with a
        string column named "value", and followed by partitioned columns if there
        are any.
        Each line in the text file is a new row in the resulting DataFrame.
        .. note:: Evolving.
        :param paths: string, or list of strings, for input path(s).
        >>> text_sdf = spark.readStream.text(tempfile.mkdtemp())
        >>> text_sdf.isStreaming
        True
        >>> "value" in str(text_sdf.schema)
        True
        """
        if isinstance(path, basestring):
            return self._df(self._jreader.text(path))
        else:
            raise TypeError("path can be only a single string")
    @since(2.0)
    def csv(self, path, schema=None, sep=None, encoding=None, quote=None, escape=None,
            comment=None, header=None, inferSchema=None, ignoreLeadingWhiteSpace=None,
            ignoreTrailingWhiteSpace=None, nullValue=None, nanValue=None, positiveInf=None,
            negativeInf=None, dateFormat=None, timestampFormat=None, maxColumns=None,
            maxCharsPerColumn=None, maxMalformedLogPerPartition=None, mode=None,
            columnNameOfCorruptRecord=None, multiLine=None):
        """Loads a CSV file stream and returns the result as a :class:`DataFrame`.
        This function will go through the input once to determine the input schema if
        ``inferSchema`` is enabled. To avoid going through the entire data once, disable
        ``inferSchema`` option or specify the schema explicitly using ``schema``.
        .. note:: Evolving.
        :param path: string, or list of strings, for input path(s).
        :param schema: an optional :class:`pyspark.sql.types.StructType` for the input schema
                       or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
        :param sep: sets the single character as a separator for each field and value.
                    If None is set, it uses the default value, ``,``.
        :param encoding: decodes the CSV files by the given encoding type. If None is set,
                         it uses the default value, ``UTF-8``.
        :param quote: sets the single character used for escaping quoted values where the
                      separator can be part of the value. If None is set, it uses the default
                      value, ``"``. If you would like to turn off quotations, you need to set an
                      empty string.
        :param escape: sets the single character used for escaping quotes inside an already
                       quoted value. If None is set, it uses the default value, ``\``.
        :param comment: sets the single character used for skipping lines beginning with this
                        character. By default (None), it is disabled.
        :param header: uses the first line as names of columns. If None is set, it uses the
                       default value, ``false``.
        :param inferSchema: infers the input schema automatically from data. It requires one extra
                            pass over the data. If None is set, it uses the default value, ``false``.
        :param ignoreLeadingWhiteSpace: a flag indicating whether or not leading whitespaces from
                                        values being read should be skipped. If None is set, it
                                        uses the default value, ``false``.
        :param ignoreTrailingWhiteSpace: a flag indicating whether or not trailing whitespaces from
                                         values being read should be skipped. If None is set, it
                                         uses the default value, ``false``.
        :param nullValue: sets the string representation of a null value. If None is set, it uses
                          the default value, empty string. Since 2.0.1, this ``nullValue`` param
                          applies to all supported types including the string type.
        :param nanValue: sets the string representation of a non-number value. If None is set, it
                         uses the default value, ``NaN``.
        :param positiveInf: sets the string representation of a positive infinity value. If None
                            is set, it uses the default value, ``Inf``.
        :param negativeInf: sets the string representation of a negative infinity value. If None
                            is set, it uses the default value, ``Inf``.
        :param dateFormat: sets the string that indicates a date format. Custom date formats
                           follow the formats at ``java.text.SimpleDateFormat``. This
                           applies to date type. If None is set, it uses the
                           default value, ``yyyy-MM-dd``.
        :param timestampFormat: sets the string that indicates a timestamp format. Custom date
                                formats follow the formats at ``java.text.SimpleDateFormat``.
                                This applies to timestamp type. If None is set, it uses the
                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
        :param maxColumns: defines a hard limit of how many columns a record can have. If None is
                           set, it uses the default value, ``20480``.
        :param maxCharsPerColumn: defines the maximum number of characters allowed for any given
                                  value being read. If None is set, it uses the default value,
                                  ``-1`` meaning unlimited length.
        :param maxMalformedLogPerPartition: this parameter is no longer used since Spark 2.2.0.
                                            If specified, it is ignored.
        :param mode: allows a mode for dealing with corrupt records during parsing. If None is
                     set, it uses the default value, ``PERMISSIVE``.
                * ``PERMISSIVE`` : sets other fields to ``null`` when it meets a corrupted \
                  record, and puts the malformed string into a field configured by \
                  ``columnNameOfCorruptRecord``. To keep corrupt records, an user can set \
                  a string type field named ``columnNameOfCorruptRecord`` in an \
                  user-defined schema. If a schema does not have the field, it drops corrupt \
                  records during parsing. When a length of parsed CSV tokens is shorter than \
                  an expected length of a schema, it sets `null` for extra fields.
                * ``DROPMALFORMED`` : ignores the whole corrupted records.
                * ``FAILFAST`` : throws an exception when it meets corrupted records.
        :param columnNameOfCorruptRecord: allows renaming the new field having malformed string
                                          created by ``PERMISSIVE`` mode. This overrides
                                          ``spark.sql.columnNameOfCorruptRecord``. If None is set,
                                          it uses the value specified in
                                          ``spark.sql.columnNameOfCorruptRecord``.
        :param multiLine: parse one record, which may span multiple lines. If None is
                          set, it uses the default value, ``false``.
        >>> csv_sdf = spark.readStream.csv(tempfile.mkdtemp(), schema = sdf_schema)
        >>> csv_sdf.isStreaming
        True
        >>> csv_sdf.schema == sdf_schema
        True
        """
        # _set_opts (from OptionUtils) forwards every non-None keyword to the
        # underlying JVM reader as an option.
        self._set_opts(
            schema=schema, sep=sep, encoding=encoding, quote=quote, escape=escape, comment=comment,
            header=header, inferSchema=inferSchema, ignoreLeadingWhiteSpace=ignoreLeadingWhiteSpace,
            ignoreTrailingWhiteSpace=ignoreTrailingWhiteSpace, nullValue=nullValue,
            nanValue=nanValue, positiveInf=positiveInf, negativeInf=negativeInf,
            dateFormat=dateFormat, timestampFormat=timestampFormat, maxColumns=maxColumns,
            maxCharsPerColumn=maxCharsPerColumn,
            maxMalformedLogPerPartition=maxMalformedLogPerPartition, mode=mode,
            columnNameOfCorruptRecord=columnNameOfCorruptRecord, multiLine=multiLine)
        if isinstance(path, basestring):
            return self._df(self._jreader.csv(path))
        else:
            raise TypeError("path can be only a single string")
class DataStreamWriter(object):
    """
    Interface used to write a streaming :class:`DataFrame` to external storage systems
    (e.g. file systems, key-value stores, etc). Use :func:`DataFrame.writeStream`
    to access this.
    .. note:: Evolving.
    .. versionadded:: 2.0
    """
    def __init__(self, df):
        # Keep the Python DataFrame, its SQLContext (for _to_seq/_sc access)
        # and the JVM-side DataStreamWriter handle.
        self._df = df
        self._spark = df.sql_ctx
        self._jwrite = df._jdf.writeStream()
    def _sq(self, jsq):
        # Wrap a JVM StreamingQuery handle in the Python StreamingQuery class.
        from pyspark.sql.streaming import StreamingQuery
        return StreamingQuery(jsq)
    @since(2.0)
    def outputMode(self, outputMode):
        """Specifies how data of a streaming DataFrame/Dataset is written to a streaming sink.
        Options include:
        * `append`:Only the new rows in the streaming DataFrame/Dataset will be written to
           the sink
        * `complete`:All the rows in the streaming DataFrame/Dataset will be written to the sink
           every time these is some updates
        * `update`:only the rows that were updated in the streaming DataFrame/Dataset will be
           written to the sink every time there are some updates. If the query doesn't contain
           aggregations, it will be equivalent to `append` mode.
        .. note:: Evolving.
        >>> writer = sdf.writeStream.outputMode('append')
        """
        if not outputMode or type(outputMode) != str or len(outputMode.strip()) == 0:
            raise ValueError('The output mode must be a non-empty string. Got: %s' % outputMode)
        self._jwrite = self._jwrite.outputMode(outputMode)
        return self
    @since(2.0)
    def format(self, source):
        """Specifies the underlying output data source.
        .. note:: Evolving.
        :param source: string, name of the data source, which for now can be 'parquet'.
        >>> writer = sdf.writeStream.format('json')
        """
        self._jwrite = self._jwrite.format(source)
        return self
    @since(2.0)
    def option(self, key, value):
        """Adds an output option for the underlying data source.
        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                            timestamps in the JSON/CSV datasources or partition values.
                            If it isn't set, it uses the default value, session local timezone.
        .. note:: Evolving.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
    @since(2.0)
    def options(self, **options):
        """Adds output options for the underlying data source.
        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                            timestamps in the JSON/CSV datasources or partition values.
                            If it isn't set, it uses the default value, session local timezone.
        .. note:: Evolving.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
    @since(2.0)
    def partitionBy(self, *cols):
        """Partitions the output by the given columns on the file system.
        If specified, the output is laid out on the file system similar
        to Hive's partitioning scheme.
        .. note:: Evolving.
        :param cols: name of columns
        """
        # Allow both partitionBy('a', 'b') and partitionBy(['a', 'b']).
        if len(cols) == 1 and isinstance(cols[0], (list, tuple)):
            cols = cols[0]
        self._jwrite = self._jwrite.partitionBy(_to_seq(self._spark._sc, cols))
        return self
    @since(2.0)
    def queryName(self, queryName):
        """Specifies the name of the :class:`StreamingQuery` that can be started with
        :func:`start`. This name must be unique among all the currently active queries
        in the associated SparkSession.
        .. note:: Evolving.
        :param queryName: unique name for the query
        >>> writer = sdf.writeStream.queryName('streaming_query')
        """
        if not queryName or type(queryName) != str or len(queryName.strip()) == 0:
            raise ValueError('The queryName must be a non-empty string. Got: %s' % queryName)
        self._jwrite = self._jwrite.queryName(queryName)
        return self
    @keyword_only
    @since(2.0)
    def trigger(self, processingTime=None, once=None):
        """Set the trigger for the stream query. If this is not set it will run the query as fast
        as possible, which is equivalent to setting the trigger to ``processingTime='0 seconds'``.
        .. note:: Evolving.
        :param processingTime: a processing time interval as a string, e.g. '5 seconds', '1 minute'.
        >>> # trigger the query for execution every 5 seconds
        >>> writer = sdf.writeStream.trigger(processingTime='5 seconds')
        >>> # trigger the query for just once batch of data
        >>> writer = sdf.writeStream.trigger(once=True)
        """
        # Exactly one of ``processingTime`` or ``once`` must be supplied.
        jTrigger = None
        if processingTime is not None:
            if once is not None:
                raise ValueError('Multiple triggers not allowed.')
            if type(processingTime) != str or len(processingTime.strip()) == 0:
                raise ValueError('Value for processingTime must be a non empty string. Got: %s' %
                                 processingTime)
            interval = processingTime.strip()
            jTrigger = self._spark._sc._jvm.org.apache.spark.sql.streaming.Trigger.ProcessingTime(
                interval)
        elif once is not None:
            if once is not True:
                raise ValueError('Value for once must be True. Got: %s' % once)
            jTrigger = self._spark._sc._jvm.org.apache.spark.sql.streaming.Trigger.Once()
        else:
            raise ValueError('No trigger provided')
        self._jwrite = self._jwrite.trigger(jTrigger)
        return self
    @ignore_unicode_prefix
    @since(2.0)
    def start(self, path=None, format=None, outputMode=None, partitionBy=None, queryName=None,
              **options):
        """Streams the contents of the :class:`DataFrame` to a data source.
        The data source is specified by the ``format`` and a set of ``options``.
        If ``format`` is not specified, the default data source configured by
        ``spark.sql.sources.default`` will be used.
        .. note:: Evolving.
        :param path: the path in a Hadoop supported file system
        :param format: the format used to save
        :param outputMode: specifies how data of a streaming DataFrame/Dataset is written to a
                           streaming sink.
            * `append`:Only the new rows in the streaming DataFrame/Dataset will be written to the
              sink
            * `complete`:All the rows in the streaming DataFrame/Dataset will be written to the sink
              every time these is some updates
            * `update`:only the rows that were updated in the streaming DataFrame/Dataset will be
              written to the sink every time there are some updates. If the query doesn't contain
              aggregations, it will be equivalent to `append` mode.
        :param partitionBy: names of partitioning columns
        :param queryName: unique name for the query
        :param options: All other string options. You may want to provide a `checkpointLocation`
                        for most streams, however it is not required for a `memory` stream.
        >>> sq = sdf.writeStream.format('memory').queryName('this_query').start()
        >>> sq.isActive
        True
        >>> sq.name
        u'this_query'
        >>> sq.stop()
        >>> sq.isActive
        False
        >>> sq = sdf.writeStream.trigger(processingTime='5 seconds').start(
        ...     queryName='that_query', outputMode="append", format='memory')
        >>> sq.name
        u'that_query'
        >>> sq.isActive
        True
        >>> sq.stop()
        """
        # Apply any keyword configuration to the builder before starting.
        self.options(**options)
        if outputMode is not None:
            self.outputMode(outputMode)
        if partitionBy is not None:
            self.partitionBy(partitionBy)
        if format is not None:
            self.format(format)
        if queryName is not None:
            self.queryName(queryName)
        if path is None:
            return self._sq(self._jwrite.start())
        else:
            return self._sq(self._jwrite.start(path))
def _test():
    """Run this module's doctests against a local SparkSession.

    Requires ``SPARK_HOME`` to be set. Starts (and stops) a SparkSession,
    registers streaming test fixtures in the doctest globals, and exits the
    process with a non-zero status if any doctest fails.
    """
    import doctest
    import os
    import sys
    import tempfile
    from py4j.protocol import Py4JError
    from pyspark.sql import SparkSession, SQLContext
    import pyspark.sql.streaming

    os.chdir(os.environ["SPARK_HOME"])

    globs = pyspark.sql.streaming.__dict__.copy()
    try:
        spark = SparkSession.builder.getOrCreate()
    except Py4JError:
        # The builder can fail if a JVM gateway already exists in a bad
        # state; fall back to wrapping the already-running SparkContext.
        from pyspark import SparkContext
        spark = SparkSession(SparkContext.getOrCreate())
    globs['tempfile'] = tempfile
    globs['os'] = os
    globs['spark'] = spark
    globs['sqlContext'] = SQLContext.getOrCreate(spark.sparkContext)
    globs['sdf'] = \
        spark.readStream.format('text').load('python/test_support/sql/streaming')
    globs['sdf_schema'] = StructType([StructField("data", StringType(), False)])
    globs['df'] = \
        globs['spark'].readStream.format('text').load('python/test_support/sql/streaming')
    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.streaming, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE | doctest.REPORT_NDIFF)
    globs['spark'].stop()
    if failure_count:
        # sys.exit works even when the `site` module (which provides the
        # bare `exit` builtin) is not loaded, e.g. under `python -S`.
        sys.exit(-1)


if __name__ == "__main__":
    _test()
| apache-2.0 |
haikuginger/urllib3 | dummyserver/testcase.py | 3 | 6096 | import unittest
import socket
import threading
from nose.plugins.skip import SkipTest
from tornado import ioloop, web
from dummyserver.server import (
SocketServerThread,
run_tornado_app,
run_loop_in_thread,
DEFAULT_CERTS,
)
from dummyserver.handlers import TestingApp
from dummyserver.proxy import ProxyHandler
def consume_socket(sock, chunks=65536):
    """Read from *sock* until a chunk arrives that ends with the HTTP
    header terminator (a blank line, i.e. CRLF CRLF)."""
    while True:
        data = sock.recv(chunks)
        if data.endswith(b'\r\n\r\n'):
            break
class SocketDummyServerTestCase(unittest.TestCase):
    """
    A simple socket-based server is created for this class that is good for
    exactly one request.
    """
    scheme = 'http'
    host = 'localhost'

    @classmethod
    def _start_server(cls, socket_handler):
        # Launch the handler in a background server thread and wait (up to
        # 5 seconds) for it to signal readiness before reading the ephemeral
        # port it bound to.
        ready_event = threading.Event()
        cls.server_thread = SocketServerThread(socket_handler=socket_handler,
                                               ready_event=ready_event,
                                               host=cls.host)
        cls.server_thread.start()
        ready_event.wait(5)
        if not ready_event.is_set():
            raise Exception("most likely failed to start server")
        cls.port = cls.server_thread.port

    @classmethod
    def start_response_handler(cls, response, num=1, block_send=None):
        """Serve ``response`` verbatim for ``num`` consecutive connections.

        Returns a :class:`threading.Event` that is set each time the handler
        is about to accept the next connection.  If ``block_send`` is given,
        sending of the response is delayed until the test sets that event.
        """
        ready_event = threading.Event()

        def socket_handler(listener):
            for _ in range(num):
                ready_event.set()
                sock = listener.accept()[0]
                # Drain the incoming request headers before answering.
                consume_socket(sock)
                if block_send:
                    block_send.wait()
                    block_send.clear()
                sock.send(response)
                sock.close()

        cls._start_server(socket_handler)
        return ready_event

    @classmethod
    def start_basic_handler(cls, **kw):
        # Canned minimal 200 response with an empty body.
        return cls.start_response_handler(
            b'HTTP/1.1 200 OK\r\n'
            b'Content-Length: 0\r\n'
            b'\r\n', **kw)

    @classmethod
    def tearDownClass(cls):
        if hasattr(cls, 'server_thread'):
            # Short grace period only; the thread may still be blocked in
            # accept() and we do not want teardown to hang.
            cls.server_thread.join(0.1)
class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase):
    # Same as the parent, but forces the server socket onto IPv4.
    @classmethod
    def _start_server(cls, socket_handler):
        # Duplicates the parent's _start_server because USE_IPV6 must be
        # flipped on the thread object *before* start() is called.
        ready_event = threading.Event()
        cls.server_thread = SocketServerThread(socket_handler=socket_handler,
                                               ready_event=ready_event,
                                               host=cls.host)
        cls.server_thread.USE_IPV6 = False
        cls.server_thread.start()
        ready_event.wait(5)
        if not ready_event.is_set():
            raise Exception("most likely failed to start server")
        cls.port = cls.server_thread.port
class HTTPDummyServerTestCase(unittest.TestCase):
    """ A simple HTTP server that runs when your test class runs
    Have your unittest class inherit from this one, and then a simple server
    will start when your tests run, and automatically shut down when they
    complete. For examples of what test requests you can send to the server,
    see the TestingApp in dummyserver/handlers.py.
    """
    scheme = 'http'
    host = 'localhost'
    host_alt = '127.0.0.1'  # Some tests need two hosts
    certs = DEFAULT_CERTS

    @classmethod
    def _start_server(cls):
        # Run a Tornado app on a dedicated IOLoop in a background thread;
        # run_tornado_app binds to a free port and returns it.
        cls.io_loop = ioloop.IOLoop()
        app = web.Application([(r".*", TestingApp)])
        cls.server, cls.port = run_tornado_app(app, cls.io_loop, cls.certs,
                                               cls.scheme, cls.host)
        cls.server_thread = run_loop_in_thread(cls.io_loop)

    @classmethod
    def _stop_server(cls):
        # Schedule the shutdown callbacks on the loop's own thread, then
        # wait for the loop thread to exit.
        cls.io_loop.add_callback(cls.server.stop)
        cls.io_loop.add_callback(cls.io_loop.stop)
        cls.server_thread.join()

    @classmethod
    def setUpClass(cls):
        cls._start_server()

    @classmethod
    def tearDownClass(cls):
        cls._stop_server()
class HTTPSDummyServerTestCase(HTTPDummyServerTestCase):
    # Same dummy server, but served over TLS with the default test certs.
    scheme = 'https'
    host = 'localhost'
    certs = DEFAULT_CERTS
class IPV6HTTPSDummyServerTestCase(HTTPSDummyServerTestCase):
    """HTTPS dummy server bound to the IPv6 loopback address."""
    host = '::1'

    @classmethod
    def setUpClass(cls):
        # Early-exit guard instead of an if/else split: skip the whole
        # class on hosts without IPv6 support.
        if not socket.has_ipv6:
            raise SkipTest('IPv6 not available')
        super(IPV6HTTPSDummyServerTestCase, cls).setUpClass()
class HTTPDummyProxyTestCase(unittest.TestCase):
    """Starts one plain-HTTP server, one HTTPS server and one HTTP proxy."""

    http_host = 'localhost'
    http_host_alt = '127.0.0.1'

    https_host = 'localhost'
    https_host_alt = '127.0.0.1'
    https_certs = DEFAULT_CERTS

    proxy_host = 'localhost'
    proxy_host_alt = '127.0.0.1'

    @classmethod
    def setUpClass(cls):
        # All three servers share one IOLoop, run in a single thread.
        cls.io_loop = ioloop.IOLoop()

        app = web.Application([(r'.*', TestingApp)])
        cls.http_server, cls.http_port = run_tornado_app(
            app, cls.io_loop, None, 'http', cls.http_host)

        app = web.Application([(r'.*', TestingApp)])
        cls.https_server, cls.https_port = run_tornado_app(
            # NOTE(review): bound to http_host rather than https_host; both
            # are 'localhost' so this works — confirm it is intentional.
            app, cls.io_loop, cls.https_certs, 'https', cls.http_host)

        app = web.Application([(r'.*', ProxyHandler)])
        cls.proxy_server, cls.proxy_port = run_tornado_app(
            app, cls.io_loop, None, 'http', cls.proxy_host)

        cls.server_thread = run_loop_in_thread(cls.io_loop)

    @classmethod
    def tearDownClass(cls):
        # Stop every server plus the loop from the loop's own thread, then
        # wait for the thread to finish.
        cls.io_loop.add_callback(cls.http_server.stop)
        cls.io_loop.add_callback(cls.https_server.stop)
        cls.io_loop.add_callback(cls.proxy_server.stop)
        cls.io_loop.add_callback(cls.io_loop.stop)
        cls.server_thread.join()
class IPv6HTTPDummyServerTestCase(HTTPDummyServerTestCase):
    """Plain-HTTP dummy server bound to the IPv6 loopback address."""
    host = '::1'

    @classmethod
    def setUpClass(cls):
        # Guard clause: skip on IPv6-less hosts, otherwise defer to the
        # parent's normal startup.
        if not socket.has_ipv6:
            raise SkipTest('IPv6 not available')
        super(IPv6HTTPDummyServerTestCase, cls).setUpClass()
class IPv6HTTPDummyProxyTestCase(HTTPDummyProxyTestCase):
    # Same trio of servers, but the proxy itself listens on IPv6 loopback.
    # NOTE(review): unlike the IPv6 server test cases, this class does not
    # skip when socket.has_ipv6 is False — confirm that is intentional.
    http_host = 'localhost'
    http_host_alt = '127.0.0.1'

    https_host = 'localhost'
    https_host_alt = '127.0.0.1'
    https_certs = DEFAULT_CERTS

    proxy_host = '::1'
    proxy_host_alt = '127.0.0.1'
| mit |
mfmarlonferrari/django-bootstrap-calendar | django_bootstrap_calendar/templatetags/bootstrap_calendar.py | 5 | 2245 | # -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django import template
from django.template.loader import render_to_string
from django_bootstrap_calendar.utils import MinifyJs
register = template.Library()
@register.simple_tag
def bootstrap_calendar(css_classes):
    """
    Render the calendar <div>; pass "" for no extra CSS classes.
    """
    context = {'css_classes': css_classes}
    return render_to_string(
        'django_bootstrap_calendar/partial/calendar.html', context)
@register.simple_tag
def bootstrap_controls(css_classes):
    """
    Render the calendar controls <div>; pass "" for no extra CSS classes.
    """
    context = {'css_classes': css_classes}
    return render_to_string(
        'django_bootstrap_calendar/partial/calendar_controls.html', context)
@register.simple_tag
def bootstrap_calendar_js(*args, **kwargs):
    """
    Render the <script> includes for the bootstrap calendar.

    Keyword Args:
        language: optional language code forwarded to the template; the key
            is left out of the template context entirely when not supplied.
    """
    # Membership test instead of try/except KeyError: clearer, and it keeps
    # the original semantics (the key appears in the context whenever the
    # caller passed it, even with a falsy value).
    options = {}
    if "language" in kwargs:
        options["language"] = kwargs["language"]
    return render_to_string(
        'django_bootstrap_calendar/partial/calendar_js.html',
        options
    )
@register.simple_tag
def bootstrap_calendar_css(*args):
    """Render the stylesheet includes for the bootstrap calendar."""
    template = 'django_bootstrap_calendar/partial/calendar_css.html'
    return render_to_string(template)
@register.simple_tag
def bootstrap_calendar_init(*args, **kwargs):
    """
    Render the calendar initialization script.

    Keyword Args:
        events_url: JSON feed URL (default '/calendar/json/').
        view: initial view name (default 'month').
        language: calendar language code (default 'en').
        first_day: first day of the week (default 1).
        width: calendar widget width (default '100%').
    """
    # One defaults table plus dict.get replaces five identical
    # try/except KeyError blocks.  Semantics are unchanged: a key present
    # in kwargs always wins, even if its value is falsy.
    defaults = {
        "events_url": '/calendar/json/',
        "view": 'month',
        "language": 'en',
        "first_day": 1,
        "width": '100%',
    }
    options = {name: kwargs.get(name, default)
               for name, default in defaults.items()}
    return render_to_string('django_bootstrap_calendar/partial/calendar_init.html', options)
@register.tag
def minifyjs(parser, token):
    # Collect everything up to the matching {% endminifyjs %} tag and wrap
    # it in a MinifyJs node (presumably minifies the rendered output at
    # render time — see django_bootstrap_calendar.utils.MinifyJs).
    nodelist = parser.parse(('endminifyjs',))
    parser.delete_first_token()
    return MinifyJs(nodelist)
| bsd-3-clause |
donSchoe/p2pool-cache | wstools/MIMEAttachment.py | 294 | 3379 | #TODO add the license
#I had to rewrite this class because the python MIME email.mime (version 2.5)
#are buggy, they use \n instead \r\n for new line which is not compliant
#to standard!
# http://bugs.python.org/issue5525
#TODO do not load all the message in memory stream it from the disk
import re
import random
import sys
#new line
NL='\r\n'
_width = len(repr(sys.maxint-1))
_fmt = '%%0%dd' % _width
class MIMEMessage:
    """Hand-rolled builder for a MIME multipart message (XML + attachments).

    Python 2.5's email.mime classes emitted bare '\n' newlines instead of
    the CRLF required by the MIME standard (http://bugs.python.org/issue5525),
    so this class assembles the multipart body itself using NL ('\r\n').
    """

    def __init__(self):
        # Attached file-like objects, the XML payload, the Content-Id of the
        # XML root part, and the multipart boundary string (set lazily).
        self._files = []
        self._xmlMessage = ""
        self._startCID = ""
        self._boundary = ""

    def makeBoundary(self):
        # Pick a boundary string that does not occur anywhere in the
        # payload (XML plus the contents of every attachment).
        msgparts = []
        msgparts.append(self._xmlMessage)
        for i in self._files:
            msgparts.append(i.read())
        # NOTE: the whole message is concatenated in memory here; see the
        # module-level TODO about streaming from disk instead.
        alltext = NL.join(msgparts)
        self._boundary = _make_boundary(alltext)
        # Drop the large temporaries promptly to reduce peak memory.
        del alltext
        del msgparts
        self._startCID = "<" + (_fmt % random.randrange(sys.maxint)) + (_fmt % random.randrange(sys.maxint)) + ">"

    def toString(self):
        '''Return the complete MIME multipart message as a string.'''
        if len(self._boundary) == 0:
            # makeBoundary() has not been called yet; do it now.
            self.makeBoundary()
        # First part: the XML message, identified by the start CID.
        returnstr = NL + "--" + self._boundary + NL
        returnstr += "Content-Type: text/xml; charset=\"us-ascii\"" + NL
        returnstr += "Content-Transfer-Encoding: 7bit" + NL
        returnstr += "Content-Id: " + self._startCID + NL + NL
        returnstr += self._xmlMessage + NL
        # Then one binary part per attached file; each part's Content-Id is
        # derived from the file object's id().
        for file in self._files:
            returnstr += "--" + self._boundary + NL
            returnstr += "Content-Type: application/octet-stream" + NL
            returnstr += "Content-Transfer-Encoding: binary" + NL
            returnstr += "Content-Id: <" + str(id(file)) + ">" + NL + NL
            file.seek(0)
            returnstr += file.read() + NL
        # Closing boundary delimiter.
        returnstr += "--" + self._boundary + "--" + NL
        return returnstr

    def attachFile(self, file):
        '''
        Add a file-like object as an attachment of this message.
        '''
        self._files.append(file)

    def addXMLMessage(self, xmlMessage):
        '''
        Set the XML message.  Only one XML SOAP message is supported.
        '''
        self._xmlMessage = xmlMessage

    def getBoundary(self):
        '''
        Return the boundary string used in the MIME message.  toString()
        (or makeBoundary()) must have been called first.
        '''
        return self._boundary

    def getStartCID(self):
        '''
        Return the Content-Id of the XML message part.
        '''
        return self._startCID
def _make_boundary(text=None):
    # Adapted from the Python stdlib email package.
    # Craft a random boundary.  If text is given, ensure that the chosen
    # boundary doesn't appear anywhere in the text.
    # NOTE: sys.maxint is Python 2-only; this module targets Python 2.
    token = random.randrange(sys.maxint)
    boundary = ('=' * 10) + (_fmt % token) + '=='
    if text is None:
        return boundary
    b = boundary
    counter = 0
    while True:
        # A candidate is unusable if it appears (optionally followed by the
        # closing "--") at the start of any line in the text.
        cre = re.compile('^--' + re.escape(b) + '(--)?$', re.MULTILINE)
        if not cre.search(text):
            break
        # Disambiguate by appending a counter suffix and retrying.
        b = boundary + '.' + str(counter)
        counter += 1
    return b
| gpl-3.0 |
google-code-export/pyglet | tests/window/WINDOW_SET_MOUSE_SYSTEM_CURSOR.py | 30 | 2382 | #!/usr/bin/env python
'''Test that mouse cursor can be set to a platform-dependent image.
Expected behaviour:
One window will be opened. Press the left and right arrow keys to cycle
through the system mouse cursors. The current cursor selected will
be printed to the terminal.
Note that not all cursors are unique on each platform; for example,
if a platform doesn't define a cursor for a given name, a suitable
replacement (e.g., a plain arrow) will be used instead.
Close the window or press ESC to end the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: WINDOW_SET_MOUSE_VISIBLE.py 703 2007-02-28 14:18:00Z Alex.Holkner $'
import unittest
from pyglet import window
from pyglet.window import key
from pyglet.gl import *
class WINDOW_SET_MOUSE_PLATFORM_CURSOR(unittest.TestCase):
    # Index into the cursor-name list below; cycled by the arrow keys.
    # NOTE: this file is Python 2 (print statement below).
    i = 0
    def on_key_press(self, symbol, modifiers):
        # All system cursor names exposed by the pyglet window.
        names = [
            self.w.CURSOR_DEFAULT,
            self.w.CURSOR_CROSSHAIR,
            self.w.CURSOR_HAND,
            self.w.CURSOR_HELP,
            self.w.CURSOR_NO,
            self.w.CURSOR_SIZE,
            self.w.CURSOR_SIZE_UP,
            self.w.CURSOR_SIZE_UP_RIGHT,
            self.w.CURSOR_SIZE_RIGHT,
            self.w.CURSOR_SIZE_DOWN_RIGHT,
            self.w.CURSOR_SIZE_DOWN,
            self.w.CURSOR_SIZE_DOWN_LEFT,
            self.w.CURSOR_SIZE_LEFT,
            self.w.CURSOR_SIZE_UP_LEFT,
            self.w.CURSOR_SIZE_UP_DOWN,
            self.w.CURSOR_SIZE_LEFT_RIGHT,
            self.w.CURSOR_TEXT,
            self.w.CURSOR_WAIT,
            self.w.CURSOR_WAIT_ARROW,
        ]
        # ESC closes the window; left/right arrows cycle the selection
        # (modulo wrap-around in both directions).
        if symbol == key.ESCAPE:
            self.w.on_close()
        if symbol == key.RIGHT:
            self.i = (self.i + 1) % len(names)
        elif symbol == key.LEFT:
            self.i = (self.i - 1) % len(names)
        cursor = self.w.get_system_mouse_cursor(names[self.i])
        self.w.set_mouse_cursor(cursor)
        print 'Set cursor to "%s"' % names[self.i]
        # Returning True marks the event as handled.
        return True
    def test_set_visible(self):
        # Manual event loop: redraw and pump events until the tester closes
        # the window (interactive test, see the module docstring).
        self.width, self.height = 200, 200
        self.w = w = window.Window(self.width, self.height)
        w.push_handlers(self)
        while not w.has_exit:
            glClear(GL_COLOR_BUFFER_BIT)
            w.dispatch_events()
            w.flip()
        w.close()

if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
locustio/locust | locust/test/test_runners.py | 1 | 112600 | import json
import os
import random
import time
import unittest
from collections import defaultdict
from contextlib import contextmanager
from operator import itemgetter
import gevent
import mock
import requests
from gevent import sleep
from gevent.pool import Group
from gevent.queue import Queue
import locust
from locust import (
LoadTestShape,
constant,
runners,
__version__,
)
from locust.env import Environment
from locust.exception import (
RPCError,
StopUser,
)
from locust.main import create_environment
from locust.rpc import Message
from locust.runners import (
LocalRunner,
STATE_INIT,
STATE_SPAWNING,
STATE_RUNNING,
STATE_MISSING,
STATE_STOPPING,
STATE_STOPPED,
WorkerNode,
WorkerRunner,
)
from locust.stats import RequestStats
from .testcases import LocustTestCase
from locust.user import (
TaskSet,
User,
task,
)
NETWORK_BROKEN = "network broken"
def mocked_rpc():
    """Return a fake RPC client/server class backed by a gevent Queue.

    The class-level ``queue`` plays the role of the wire: ``mocked_send``
    feeds serialized messages in, and the ``recv*`` methods read them back
    out.  A message whose data equals NETWORK_BROKEN raises RPCError so
    tests can exercise the runners' broken-connection paths.
    """
    class MockedRpcServerClient:
        # Shared by all instances: one "network" per mocked_rpc() call.
        queue = Queue()
        outbox = []

        def __init__(self, *args, **kwargs):
            pass

        @classmethod
        def mocked_send(cls, message):
            # Inject a message as if it arrived from the peer; sleep(0)
            # yields so a greenlet blocked in recv() can pick it up.
            cls.queue.put(message.serialize())
            sleep(0)

        def recv(self):
            results = self.queue.get()
            msg = Message.unserialize(results)
            if msg.data == NETWORK_BROKEN:
                raise RPCError()
            return msg

        def send(self, message):
            # Outgoing messages are recorded for assertions, not delivered.
            self.outbox.append(message)

        def send_to_client(self, message):
            self.outbox.append((message.node_id, message))

        def recv_from_client(self):
            results = self.queue.get()
            msg = Message.unserialize(results)
            if msg.data == NETWORK_BROKEN:
                raise RPCError()
            return msg.node_id, msg

        def close(self):
            # Closing the mocked transport always fails, mimicking a broken
            # connection during shutdown.
            raise RPCError()
    return MockedRpcServerClient
class mocked_options:
    """Minimal stand-in for locust's parsed command-line options.

    Provides hard-coded values for every option attribute the runner code
    reads, plus a no-op ``reset_stats`` hook.
    """

    def __init__(self):
        # Populate the option attributes from a single table instead of a
        # long run of individual assignments.
        option_values = {
            "spawn_rate": 5,
            "num_users": 5,
            "host": "/",
            "tags": None,
            "exclude_tags": None,
            "master_host": "localhost",
            "master_port": 5557,
            "master_bind_host": "*",
            "master_bind_port": 5557,
            "heartbeat_liveness": 3,
            "heartbeat_interval": 1,
            "stop_timeout": None,
            "connection_broken": False,
        }
        for option_name, option_value in option_values.items():
            setattr(self, option_name, option_value)

    def reset_stats(self):
        pass
class HeyAnException(Exception):
    """Distinct exception type local to this test module."""
    pass
class TestLocustRunner(LocustTestCase):
    def test_cpu_warning(self):
        # Shorten the CPU monitor interval so the warning can trigger
        # within this test's ~2.5 second busy window.
        _monitor_interval = runners.CPU_MONITOR_INTERVAL
        runners.CPU_MONITOR_INTERVAL = 2.0
        try:
            class CpuUser(User):
                wait_time = constant(0.001)

                @task
                def cpu_task(self):
                    # Burn CPU so the runner's monitor observes high usage.
                    for i in range(1000000):
                        _ = 3 / 2

            environment = Environment(user_classes=[CpuUser])
            runner = LocalRunner(environment)
            self.assertFalse(runner.cpu_warning_emitted)
            runner.spawn_users({CpuUser.__name__: 1}, wait=False)
            sleep(2.5)
            runner.quit()
            self.assertTrue(runner.cpu_warning_emitted)
        finally:
            # Always restore the module-level interval for later tests.
            runners.CPU_MONITOR_INTERVAL = _monitor_interval
    def test_kill_locusts(self):
        # Flag mutated from inside the TaskSet so we can tell the spawned
        # users actually ran before being stopped.
        triggered = [False]

        class BaseUser(User):
            wait_time = constant(1)

            @task
            class task_set(TaskSet):
                @task
                def trigger(self):
                    triggered[0] = True

        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.spawn_users({BaseUser.__name__: 2}, wait=False)
        self.assertEqual(2, len(runner.user_greenlets))
        g1 = list(runner.user_greenlets)[0]
        g2 = list(runner.user_greenlets)[1]
        runner.stop_users({BaseUser.__name__: 2})
        # Stopping all users must empty the greenlet group and leave both
        # greenlets dead, after each user ran its task at least once.
        self.assertEqual(0, len(runner.user_greenlets))
        self.assertTrue(g1.dead)
        self.assertTrue(g2.dead)
        self.assertTrue(triggered[0])
def test_start_event(self):
class MyUser(User):
wait_time = constant(2)
task_run_count = 0
@task
def my_task(self):
MyUser.task_run_count += 1
test_start_run = [0]
environment = Environment(user_classes=[MyUser])
def on_test_start(*args, **kwargs):
test_start_run[0] += 1
environment.events.test_start.add_listener(on_test_start)
runner = LocalRunner(environment)
runner.start(user_count=3, spawn_rate=3, wait=False)
runner.spawning_greenlet.get(timeout=3)
self.assertEqual(1, test_start_run[0])
self.assertEqual(3, MyUser.task_run_count)
    def test_stop_event(self):
        class MyUser(User):
            wait_time = constant(1)

            @task
            def my_task(self):
                pass

        test_stop_run = [0]
        # NOTE(review): the environment is built with the base User class,
        # not the MyUser defined above — only the test_stop event count is
        # asserted here, so it may be deliberate; confirm.
        environment = Environment(user_classes=[User])

        def on_test_stop(*args, **kwargs):
            test_stop_run[0] += 1

        environment.events.test_stop.add_listener(on_test_stop)
        runner = LocalRunner(environment)
        runner.start(user_count=3, spawn_rate=3, wait=False)
        self.assertEqual(0, test_stop_run[0])
        runner.stop()
        # stop() must fire the test_stop event exactly once.
        self.assertEqual(1, test_stop_run[0])
def test_stop_event_quit(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
test_stop_run = [0]
environment = Environment(user_classes=[User])
def on_test_stop(*args, **kwargs):
test_stop_run[0] += 1
environment.events.test_stop.add_listener(on_test_stop)
runner = LocalRunner(environment)
runner.start(user_count=3, spawn_rate=3, wait=False)
self.assertEqual(0, test_stop_run[0])
runner.quit()
self.assertEqual(1, test_stop_run[0])
def test_stop_event_stop_and_quit(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
test_stop_run = [0]
environment = Environment(user_classes=[MyUser])
def on_test_stop(*args, **kwargs):
test_stop_run[0] += 1
environment.events.test_stop.add_listener(on_test_stop)
runner = LocalRunner(environment)
runner.start(user_count=3, spawn_rate=3, wait=False)
self.assertEqual(0, test_stop_run[0])
runner.stop()
runner.quit()
self.assertEqual(1, test_stop_run[0])
def test_change_user_count_during_spawning(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
environment = Environment(user_classes=[MyUser])
runner = LocalRunner(environment)
runner.start(user_count=10, spawn_rate=5, wait=False)
sleep(0.6)
runner.start(user_count=5, spawn_rate=5, wait=False)
runner.spawning_greenlet.join()
self.assertEqual(5, len(runner.user_greenlets))
runner.quit()
def test_reset_stats(self):
class MyUser(User):
@task
class task_set(TaskSet):
@task
def my_task(self):
self.user.environment.events.request.fire(
request_type="GET",
name="/test",
response_time=666,
response_length=1337,
exception=None,
context={},
)
# Make sure each user only run this task once during the test
sleep(30)
environment = Environment(user_classes=[MyUser], reset_stats=True)
runner = LocalRunner(environment)
runner.start(user_count=6, spawn_rate=1, wait=False)
sleep(3)
self.assertGreaterEqual(runner.stats.get("/test", "GET").num_requests, 3)
sleep(3.25)
self.assertLessEqual(runner.stats.get("/test", "GET").num_requests, 1)
runner.quit()
def test_no_reset_stats(self):
class MyUser(User):
@task
class task_set(TaskSet):
@task
def my_task(self):
self.user.environment.events.request.fire(
request_type="GET",
name="/test",
response_time=666,
response_length=1337,
exception=None,
context={},
)
sleep(2)
environment = Environment(reset_stats=False, user_classes=[MyUser])
runner = LocalRunner(environment)
runner.start(user_count=6, spawn_rate=12, wait=False)
sleep(0.25)
self.assertGreaterEqual(runner.stats.get("/test", "GET").num_requests, 3)
sleep(0.3)
self.assertEqual(6, runner.stats.get("/test", "GET").num_requests)
runner.quit()
def test_runner_reference_on_environment(self):
env = Environment()
runner = env.create_local_runner()
self.assertEqual(env, runner.environment)
self.assertEqual(runner, env.runner)
def test_users_can_call_runner_quit_without_deadlocking(self):
class BaseUser(User):
stop_triggered = False
@task
def trigger(self):
self.environment.runner.quit()
def on_stop(self):
BaseUser.stop_triggered = True
runner = Environment(user_classes=[BaseUser]).create_local_runner()
runner.spawn_users({BaseUser.__name__: 1}, wait=False)
timeout = gevent.Timeout(0.5)
timeout.start()
try:
runner.greenlet.join()
except gevent.Timeout:
self.fail("Got Timeout exception, runner must have hung somehow.")
finally:
timeout.cancel()
self.assertTrue(BaseUser.stop_triggered)
def test_runner_quit_can_run_on_stop_for_multiple_users_concurrently(self):
class BaseUser(User):
stop_count = 0
@task
def trigger(self):
pass
def on_stop(self):
gevent.sleep(0.1)
BaseUser.stop_count += 1
runner = Environment(user_classes=[BaseUser]).create_local_runner()
runner.spawn_users({BaseUser.__name__: 10}, wait=False)
timeout = gevent.Timeout(0.3)
timeout.start()
try:
runner.quit()
except gevent.Timeout:
self.fail("Got Timeout exception, runner must have hung somehow.")
finally:
timeout.cancel()
self.assertEqual(10, BaseUser.stop_count) # verify that all users executed on_stop
def test_stop_users_with_spawn_rate(self):
"""
The spawn rate does not have an effect on the rate at which the users are stopped.
It is expected that the excess users will be stopped as soon as possible in parallel
(while respecting the stop_timeout).
"""
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
environment = Environment(user_classes=[MyUser])
runner = LocalRunner(environment)
# Start load test, wait for users to start, then trigger ramp down
ts = time.time()
runner.start(10, 10, wait=False)
runner.spawning_greenlet.join()
delta = time.time() - ts
self.assertTrue(
0 <= delta <= 0.05, "Expected user count to increase to 10 instantaneously, instead it took %f" % delta
)
self.assertTrue(
runner.user_count == 10, "User count has not decreased correctly to 2, it is : %i" % runner.user_count
)
ts = time.time()
runner.start(2, 4, wait=False)
runner.spawning_greenlet.join()
delta = time.time() - ts
self.assertTrue(
0 <= delta <= 0.05, "Expected user count to decrease to 2 instantaneously, instead it took %f" % delta
)
self.assertTrue(
runner.user_count == 2, "User count has not decreased correctly to 2, it is : %i" % runner.user_count
)
def test_attributes_populated_when_calling_start(self):
class MyUser1(User):
wait_time = constant(0)
@task
def my_task(self):
pass
class MyUser2(User):
wait_time = constant(0)
@task
def my_task(self):
pass
environment = Environment(user_classes=[MyUser1, MyUser2])
runner = LocalRunner(environment)
runner.start(user_count=10, spawn_rate=5, wait=False)
runner.spawning_greenlet.join()
self.assertDictEqual({"MyUser1": 5, "MyUser2": 5}, runner.user_classes_count)
runner.start(user_count=5, spawn_rate=5, wait=False)
runner.spawning_greenlet.join()
self.assertDictEqual({"MyUser1": 3, "MyUser2": 2}, runner.user_classes_count)
runner.quit()
def test_user_classes_count(self):
class MyUser1(User):
wait_time = constant(0)
@task
def my_task(self):
pass
class MyUser2(User):
wait_time = constant(0)
@task
def my_task(self):
pass
environment = Environment(user_classes=[MyUser1, MyUser2])
runner = LocalRunner(environment)
runner.start(user_count=10, spawn_rate=5, wait=False)
runner.spawning_greenlet.join()
self.assertDictEqual({"MyUser1": 5, "MyUser2": 5}, runner.user_classes_count)
runner.start(user_count=5, spawn_rate=5, wait=False)
runner.spawning_greenlet.join()
self.assertDictEqual({"MyUser1": 3, "MyUser2": 2}, runner.user_classes_count)
runner.quit()
def test_custom_message(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
test_custom_msg = [False]
test_custom_msg_data = [{}]
def on_custom_msg(msg, **kw):
test_custom_msg[0] = True
test_custom_msg_data[0] = msg.data
environment = Environment(user_classes=[MyUser])
runner = LocalRunner(environment)
runner.register_message("test_custom_msg", on_custom_msg)
runner.send_message("test_custom_msg", {"test_data": 123})
self.assertTrue(test_custom_msg[0])
self.assertEqual(123, test_custom_msg_data[0]["test_data"])
    def test_undefined_custom_message(self):
        class MyUser(User):
            wait_time = constant(1)

            @task
            def my_task(self):
                pass

        test_custom_msg = [False]

        def on_custom_msg(msg, **kw):
            test_custom_msg[0] = True

        environment = Environment(user_classes=[MyUser])
        runner = LocalRunner(environment)
        runner.register_message("test_custom_msg", on_custom_msg)
        # Sending a message type nobody registered must not invoke the
        # handler; it should only produce a single warning log entry.
        runner.send_message("test_different_custom_msg")
        self.assertFalse(test_custom_msg[0])
        self.assertEqual(1, len(self.mocked_log.warning))
        msg = self.mocked_log.warning[0]
        # "recieved" [sic] matches the exact spelling of the runner's log
        # message — do not "fix" it here without changing the source too.
        self.assertIn("Unknown message type recieved", msg)
def test_swarm_endpoint_is_non_blocking(self):
class TestUser1(User):
@task
def my_task(self):
gevent.sleep(600)
class TestUser2(User):
@task
def my_task(self):
gevent.sleep(600)
stop_timeout = 0
env = Environment(user_classes=[TestUser1, TestUser2], stop_timeout=stop_timeout)
local_runner = env.create_local_runner()
web_ui = env.create_web_ui("127.0.0.1", 0)
gevent.sleep(0.1)
ts = time.perf_counter()
response = requests.post(
"http://127.0.0.1:{}/swarm".format(web_ui.server.server_port),
data={"user_count": 20, "spawn_rate": 1, "host": "https://localhost"},
)
self.assertEqual(200, response.status_code)
self.assertTrue(0 <= time.perf_counter() - ts <= 1, "swarm endpoint is blocking")
ts = time.perf_counter()
while local_runner.state != STATE_RUNNING:
self.assertTrue(time.perf_counter() - ts <= 20, local_runner.state)
gevent.sleep(0.1)
self.assertTrue(19 <= time.perf_counter() - ts <= 21)
self.assertEqual(local_runner.user_count, 20)
local_runner.stop()
web_ui.stop()
def test_can_call_stop_endpoint_if_currently_swarming(self):
class TestUser1(User):
@task
def my_task(self):
gevent.sleep(600)
class TestUser2(User):
@task
def my_task(self):
gevent.sleep(600)
stop_timeout = 5
env = Environment(user_classes=[TestUser1, TestUser2], stop_timeout=stop_timeout)
local_runner = env.create_local_runner()
web_ui = env.create_web_ui("127.0.0.1", 0)
gevent.sleep(0.1)
ts = time.perf_counter()
response = requests.post(
"http://127.0.0.1:{}/swarm".format(web_ui.server.server_port),
data={"user_count": 20, "spawn_rate": 1, "host": "https://localhost"},
)
self.assertEqual(200, response.status_code)
self.assertTrue(0 <= time.perf_counter() - ts <= 1, "swarm endpoint is blocking")
gevent.sleep(5)
self.assertEqual(local_runner.state, STATE_SPAWNING)
self.assertLessEqual(local_runner.user_count, 10)
ts = time.perf_counter()
response = requests.get(
"http://127.0.0.1:{}/stop".format(web_ui.server.server_port),
)
self.assertEqual(200, response.status_code)
self.assertTrue(stop_timeout <= time.perf_counter() - ts <= stop_timeout + 5, "stop endpoint took too long")
ts = time.perf_counter()
while local_runner.state != STATE_STOPPED:
self.assertTrue(time.perf_counter() - ts <= 2)
gevent.sleep(0.1)
self.assertLessEqual(local_runner.user_count, 0)
local_runner.stop()
web_ui.stop()
class TestMasterWorkerRunners(LocustTestCase):
def test_distributed_integration_run(self):
"""
Full integration test that starts both a MasterRunner and three WorkerRunner instances
and makes sure that their stats is sent to the Master.
"""
class TestUser(User):
wait_time = constant(0.1)
@task
def incr_stats(l):
l.environment.events.request.fire(
request_type="GET",
name="/",
response_time=1337,
response_length=666,
exception=None,
context={},
)
with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
# start a Master runner
master_env = Environment(user_classes=[TestUser])
master = master_env.create_master_runner("*", 0)
sleep(0)
# start 3 Worker runners
workers = []
for i in range(3):
worker_env = Environment(user_classes=[TestUser])
worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
workers.append(worker)
# give workers time to connect
sleep(0.1)
# issue start command that should trigger TestUsers to be spawned in the Workers
master.start(6, spawn_rate=1000)
sleep(0.1)
# check that worker nodes have started locusts
for worker in workers:
self.assertEqual(2, worker.user_count)
# give time for users to generate stats, and stats to be sent to master
sleep(1)
master.quit()
# make sure users are killed
for worker in workers:
self.assertEqual(0, worker.user_count)
# check that stats are present in master
self.assertGreater(
master_env.runner.stats.total.num_requests,
20,
"For some reason the master node's stats has not come in",
)
def test_test_stop_event(self):
class TestUser(User):
wait_time = constant(0.1)
@task
def my_task(l):
pass
with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
# start a Master runner
master_env = Environment(user_classes=[TestUser])
test_stop_count = {"master": 0, "worker": 0}
@master_env.events.test_stop.add_listener
def _(*args, **kwargs):
test_stop_count["master"] += 1
master = master_env.create_master_runner("*", 0)
sleep(0)
# start a Worker runner
worker_env = Environment(user_classes=[TestUser])
@worker_env.events.test_stop.add_listener
def _(*args, **kwargs):
test_stop_count["worker"] += 1
worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
# give worker time to connect
sleep(0.1)
# issue start command that should trigger TestUsers to be spawned in the Workers
master.start(2, spawn_rate=1000)
sleep(0.1)
# check that worker nodes have started locusts
self.assertEqual(2, worker.user_count)
# give time for users to generate stats, and stats to be sent to master
sleep(0.1)
master_env.events.quitting.fire(environment=master_env, reverse=True)
master.quit()
sleep(0.1)
# make sure users are killed
self.assertEqual(0, worker.user_count)
# check the test_stop event was called one time in master and zero times in workder
self.assertEqual(
1,
test_stop_count["master"],
"The test_stop event was not called exactly one time in the master node",
)
self.assertEqual(
0,
test_stop_count["worker"],
"The test_stop event was called in the worker node",
)
def test_distributed_shape(self):
"""
Full integration test that starts both a MasterRunner and three WorkerRunner instances
and tests a basic LoadTestShape with scaling up and down users
"""
class TestUser(User):
@task
def my_task(self):
pass
class TestShape(LoadTestShape):
def tick(self):
run_time = self.get_run_time()
if run_time < 2:
return 9, 9
elif run_time < 4:
return 21, 21
elif run_time < 6:
return 3, 21
else:
return None
with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
test_shape = TestShape()
master_env = Environment(user_classes=[TestUser], shape_class=test_shape)
master_env.shape_class.reset_time()
master = master_env.create_master_runner("*", 0)
workers = []
for i in range(3):
worker_env = Environment(user_classes=[TestUser])
worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
workers.append(worker)
# Give workers time to connect
sleep(0.1)
# Start a shape test
master.start_shape()
sleep(1)
# Ensure workers have connected and started the correct amount of users
for worker in workers:
self.assertEqual(3, worker.user_count, "Shape test has not reached stage 1")
self.assertEqual(
9, test_shape.get_current_user_count(), "Shape is not seeing stage 1 runner user count correctly"
)
self.assertDictEqual(master.reported_user_classes_count, {"TestUser": 9})
# Ensure new stage with more users has been reached
sleep(2)
for worker in workers:
self.assertEqual(7, worker.user_count, "Shape test has not reached stage 2")
self.assertEqual(
21, test_shape.get_current_user_count(), "Shape is not seeing stage 2 runner user count correctly"
)
self.assertDictEqual(master.reported_user_classes_count, {"TestUser": 21})
# Ensure new stage with less users has been reached
sleep(2)
for worker in workers:
self.assertEqual(1, worker.user_count, "Shape test has not reached stage 3")
self.assertEqual(
3, test_shape.get_current_user_count(), "Shape is not seeing stage 3 runner user count correctly"
)
self.assertDictEqual(master.reported_user_classes_count, {"TestUser": 3})
# Ensure test stops at the end
sleep(2)
for worker in workers:
self.assertEqual(0, worker.user_count, "Shape test has not stopped")
self.assertEqual(
0, test_shape.get_current_user_count(), "Shape is not seeing stopped runner user count correctly"
)
self.assertDictEqual(master.reported_user_classes_count, {"TestUser": 0})
self.assertEqual("stopped", master.state)
    def test_distributed_shape_with_stop_timeout(self):
        """
        Full integration test that starts both a MasterRunner and five WorkerRunner instances
        and tests a basic LoadTestShape with scaling up and down users, with a non-zero
        stop_timeout configured so that de-spawning is delayed for busy users.

        NOTE(review): this test is wall-clock sensitive by design — the shape stages are
        10-20s apart, workers report every 0.3s (patched below), and each user takes 0.5s
        to start — so it runs for over a minute and can be flaky on loaded machines.
        """

        class TestUser1(User):
            # Slow start (0.5s) so spawning is observable; task yields immediately.
            def start(self, group: Group):
                gevent.sleep(0.5)
                return super().start(group)

            @task
            def my_task(self):
                gevent.sleep(0)

        class TestUser2(User):
            # Slow start; task blocks for 600s so stopping hits the stop_timeout path.
            def start(self, group: Group):
                gevent.sleep(0.5)
                return super().start(group)

            @task
            def my_task(self):
                gevent.sleep(600)

        class TestUser3(User):
            # Same behavior as TestUser2.
            def start(self, group: Group):
                gevent.sleep(0.5)
                return super().start(group)

            @task
            def my_task(self):
                gevent.sleep(600)

        class TestShape(LoadTestShape):
            # Stages (spawn rate 3 throughout): 5 users until t=10s, 10 until t=20s,
            # 15 until t=40s, back down to 5 until t=60s, then stop.
            def tick(self):
                run_time = self.get_run_time()
                if run_time < 10:
                    return 5, 3
                elif run_time < 20:
                    return 10, 3
                elif run_time < 40:
                    return 15, 3
                elif run_time < 60:
                    return 5, 3
                else:
                    return None

        locust_worker_additional_wait_before_ready_after_stop = 5
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3), _patch_env(
            "LOCUST_WORKER_ADDITIONAL_WAIT_BEFORE_READY_AFTER_STOP",
            str(locust_worker_additional_wait_before_ready_after_stop),
        ):
            stop_timeout = 5
            master_env = Environment(
                user_classes=[TestUser1, TestUser2, TestUser3], shape_class=TestShape(), stop_timeout=stop_timeout
            )
            master_env.shape_class.reset_time()
            master = master_env.create_master_runner("*", 0)

            workers = []
            for i in range(5):
                worker_env = Environment(user_classes=[TestUser1, TestUser2, TestUser3])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)

            # Give workers time to connect
            sleep(0.1)
            self.assertEqual(STATE_INIT, master.state)
            self.assertEqual(5, len(master.clients.ready))

            # Re-order `workers` so that it is sorted by `id`.
            # This is required because the dispatch is done
            # on the sorted workers.
            workers = sorted(workers, key=lambda w: w.client_id)

            # Start a shape test
            master.start_shape()

            # First stage: 5 users spread over 5 workers (1 each).
            ts = time.time()
            while master.state != STATE_SPAWNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            sleep(5 - (time.time() - ts))  # runtime = 5s
            self.assertEqual(STATE_RUNNING, master.state)
            w1 = {"TestUser1": 1, "TestUser2": 0, "TestUser3": 0}
            w2 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 0}
            w3 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 0}
            w4 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 1}
            w5 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 1}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)
            sleep(5)  # runtime = 10s

            # Second stage: 10 users over 5 workers (2 each).
            ts = time.time()
            while master.state != STATE_SPAWNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            sleep(5 - (time.time() - ts))  # runtime = 15s
            self.assertEqual(STATE_RUNNING, master.state)
            w1 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 0}
            w2 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 0}
            w3 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 0}
            w4 = {"TestUser1": 1, "TestUser2": 0, "TestUser3": 1}
            w5 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 2}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)
            sleep(5)  # runtime = 20s

            # Third stage: 15 users over 5 workers (one of each class per worker).
            ts = time.time()
            while master.state != STATE_SPAWNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            sleep(10 - (time.time() - ts))  # runtime = 30s
            ts = time.time()
            while master.state != STATE_RUNNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            self.assertEqual(STATE_RUNNING, master.state)
            w1 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            w2 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            w3 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            w4 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            w5 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)
            sleep(10 - (time.time() - ts))  # runtime = 40s

            # Fourth stage: scale back down to 5 users.
            ts = time.time()
            while master.state != STATE_SPAWNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            sleep(5 - (time.time() - ts))  # runtime = 45s

            # Fourth stage - Excess TestUser1 have been stopped but
            # TestUser2/TestUser3 have not reached stop timeout yet, so
            # their number are unchanged
            ts = time.time()
            while master.state != STATE_RUNNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            delta = time.time() - ts
            w1 = {"TestUser1": 1, "TestUser2": 1, "TestUser3": 1}
            w2 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 1}
            w3 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 1}
            w4 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 1}
            w5 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 1}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)
            sleep(1 - delta)  # runtime = 46s

            # Fourth stage - All users are now at the desired number
            ts = time.time()
            while master.state != STATE_RUNNING:
                self.assertTrue(time.time() - ts <= 1, master.state)
                sleep()
            delta = time.time() - ts
            w1 = {"TestUser1": 1, "TestUser2": 0, "TestUser3": 0}
            w2 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 0}
            w3 = {"TestUser1": 0, "TestUser2": 1, "TestUser3": 0}
            w4 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 1}
            w5 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 1}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)
            sleep(10 - delta)  # runtime = 56s

            # Sleep stop_timeout and make sure the test has stopped
            sleep(5)  # runtime = 61s
            self.assertEqual(STATE_STOPPING, master.state)
            sleep(stop_timeout)  # runtime = 66s

            # We wait for "stop_timeout" seconds to let the workers reconnect as "ready" with the master.
            # The reason for waiting an additional "stop_timeout" when we already waited for "stop_timeout"
            # above is that when a worker receives the stop message, it can take up to "stop_timeout"
            # for the worker to send the "client_stopped" message then an additional "stop_timeout" seconds
            # to send the "client_ready" message.
            ts = time.time()
            while len(master.clients.ready) != len(workers):
                self.assertTrue(
                    time.time() - ts <= stop_timeout + locust_worker_additional_wait_before_ready_after_stop,
                    f"expected {len(workers)} workers to be ready but only {len(master.clients.ready)} workers are",
                )
                sleep()
            sleep(1)

            # Check that no users are running
            w1 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 0}
            w2 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 0}
            w3 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 0}
            w4 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 0}
            w5 = {"TestUser1": 0, "TestUser2": 0, "TestUser3": 0}
            self.assertDictEqual(w1, workers[0].user_classes_count)
            self.assertDictEqual(w2, workers[1].user_classes_count)
            self.assertDictEqual(w3, workers[2].user_classes_count)
            self.assertDictEqual(w4, workers[3].user_classes_count)
            self.assertDictEqual(w5, workers[4].user_classes_count)
            self.assertDictEqual(w1, master.clients[workers[0].client_id].user_classes_count)
            self.assertDictEqual(w2, master.clients[workers[1].client_id].user_classes_count)
            self.assertDictEqual(w3, master.clients[workers[2].client_id].user_classes_count)
            self.assertDictEqual(w4, master.clients[workers[3].client_id].user_classes_count)
            self.assertDictEqual(w5, master.clients[workers[4].client_id].user_classes_count)

            ts = time.time()
            while master.state != STATE_STOPPED:
                self.assertTrue(time.time() - ts <= 5, master.state)
                sleep()

            master.stop()
@unittest.skip
def test_distributed_shape_fuzzy_test(self):
"""
Incredibility useful test to find issues with dispatch logic. This test allowed to find
multiple small corner cases with the new dispatch logic of locust v2.
The test is disabled by default because it takes a lot of time to run and has randomness to it.
However, it is advised to run it a few times (you can run it in parallel) when modifying the dispatch logic.
"""
class BaseUser(User):
@task
def my_task(self):
gevent.sleep(600)
class TestUser01(BaseUser):
pass
class TestUser02(BaseUser):
pass
class TestUser03(BaseUser):
pass
class TestUser04(BaseUser):
pass
class TestUser05(BaseUser):
pass
class TestUser06(BaseUser):
pass
class TestUser07(BaseUser):
pass
class TestUser08(BaseUser):
pass
class TestUser09(BaseUser):
pass
class TestUser10(BaseUser):
pass
class TestUser11(BaseUser):
pass
class TestUser12(BaseUser):
pass
class TestUser13(BaseUser):
pass
class TestUser14(BaseUser):
pass
class TestUser15(BaseUser):
pass
class TestShape(LoadTestShape):
def __init__(self):
super().__init__()
self.stages = []
runtime = 0
for _ in range(100):
runtime += random.uniform(3, 15)
self.stages.append((runtime, random.randint(1, 100), random.uniform(0.1, 10)))
def tick(self):
run_time = self.get_run_time()
for stage in self.stages:
if run_time < stage[0]:
return stage[1], stage[2]
user_classes = [
TestUser01,
TestUser02,
TestUser03,
TestUser04,
TestUser05,
TestUser06,
TestUser07,
TestUser08,
TestUser09,
TestUser10,
TestUser11,
TestUser12,
TestUser13,
TestUser14,
TestUser15,
]
chosen_user_classes = random.sample(user_classes, k=random.randint(1, len(user_classes)))
for user_class in chosen_user_classes:
user_class.weight = random.uniform(1, 20)
locust_worker_additional_wait_before_ready_after_stop = 5
with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3), _patch_env(
"LOCUST_WORKER_ADDITIONAL_WAIT_BEFORE_READY_AFTER_STOP",
str(locust_worker_additional_wait_before_ready_after_stop),
):
stop_timeout = 5
master_env = Environment(
user_classes=chosen_user_classes, shape_class=TestShape(), stop_timeout=stop_timeout
)
master_env.shape_class.reset_time()
master = master_env.create_master_runner("*", 0)
workers = []
for i in range(random.randint(1, 30)):
worker_env = Environment(user_classes=chosen_user_classes)
worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
workers.append(worker)
# Give workers time to connect
sleep(0.1)
self.assertEqual(STATE_INIT, master.state)
self.assertEqual(len(workers), len(master.clients.ready))
# Start a shape test
master.start_shape()
ts = time.time()
while master.state != STATE_STOPPED:
self.assertTrue(time.time() - ts <= master_env.shape_class.stages[-1][0] + 60, master.state)
print(
"{:.2f}/{:.2f} | {} | {:.0f} | ".format(
time.time() - ts,
master_env.shape_class.stages[-1][0],
master.state,
sum(master.reported_user_classes_count.values()),
)
+ json.dumps(dict(sorted(master.reported_user_classes_count.items(), key=itemgetter(0))))
)
sleep(1)
master.stop()
    def test_distributed_shape_stop_and_restart(self):
        """
        Test stopping and then restarting a LoadTestShape.

        The shape runs 4 users for 10s across 2 workers (2 users each); after a
        manual stop, starting the shape again must re-dispatch the same counts.
        """

        class TestUser(User):
            @task
            def my_task(self):
                pass

        class TestShape(LoadTestShape):
            # Single stage: 4 users at spawn rate 4 for the first 10s, then stop.
            def tick(self):
                run_time = self.get_run_time()
                if run_time < 10:
                    return 4, 4
                else:
                    return None

        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            master_env = Environment(user_classes=[TestUser], shape_class=TestShape())
            master_env.shape_class.reset_time()
            master = master_env.create_master_runner("*", 0)

            workers = []
            for i in range(2):
                worker_env = Environment(user_classes=[TestUser])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)

            # Give workers time to connect
            sleep(0.1)

            # Start a shape test and ensure workers have connected and started the correct amount of users
            master.start_shape()
            sleep(1)
            for worker in workers:
                self.assertEqual(2, worker.user_count, "Shape test has not started correctly")

            # Stop the test and ensure all user count is 0
            master.stop()
            sleep(1)
            for worker in workers:
                self.assertEqual(0, worker.user_count, "Shape test has not stopped")

            # Then restart the test again and ensure workers have connected and started the correct amount of users
            master.start_shape()
            sleep(1)
            for worker in workers:
                self.assertEqual(2, worker.user_count, "Shape test has not started again correctly")

            master.stop()
    def test_distributed_shape_statuses_transition(self):
        """
        Full integration test that starts both a MasterRunner and five WorkerRunner instances.
        The goal of this test is to validate the status on the master is correctly transitioned for each of the
        test phases (SPAWNING/RUNNING alternating per stage, then STOPPED), by sampling
        (elapsed time, state, user count) every 0.1s and checking the transition timestamps
        against the stage boundaries within a 1s tolerance.
        """

        class TestUser1(User):
            @task
            def my_task(self):
                gevent.sleep(600)

        class TestShape(LoadTestShape):
            # Stages (spawn rate 1): 5 users until t=10s, 10 until t=20s, 15 until t=30s, then stop.
            def tick(self):
                run_time = self.get_run_time()
                if run_time < 10:
                    return 5, 1
                elif run_time < 20:
                    return 10, 1
                elif run_time < 30:
                    return 15, 1
                else:
                    return None

        locust_worker_additional_wait_before_ready_after_stop = 2
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3), _patch_env(
            "LOCUST_WORKER_ADDITIONAL_WAIT_BEFORE_READY_AFTER_STOP",
            str(locust_worker_additional_wait_before_ready_after_stop),
        ):
            stop_timeout = 0
            master_env = Environment(user_classes=[TestUser1], shape_class=TestShape(), stop_timeout=stop_timeout)
            master_env.shape_class.reset_time()
            master = master_env.create_master_runner("*", 0)

            workers = []
            for i in range(5):
                worker_env = Environment(user_classes=[TestUser1])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)

            # Give workers time to connect
            sleep(0.1)
            self.assertEqual(STATE_INIT, master.state)
            self.assertEqual(5, len(master.clients.ready))

            # Sampled (elapsed, state, user_count) tuples collected while the shape runs.
            statuses = []

            ts = time.perf_counter()

            master.start_shape()

            while master.state != STATE_STOPPED:
                # +5s buffer to let master stop
                self.assertTrue(
                    time.perf_counter() - ts <= 30 + locust_worker_additional_wait_before_ready_after_stop + 5,
                    master.state,
                )
                statuses.append((time.perf_counter() - ts, master.state, master.user_count))
                sleep(0.1)

            self.assertEqual(statuses[0][1], STATE_INIT)

            # Walk consecutive samples and verify each state transition happened at the
            # expected wall-clock offset for its stage.
            stage = 1
            tolerance = 1  # in s
            for (t1, state1, user_count1), (t2, state2, user_count2) in zip(statuses[:-1], statuses[1:]):
                if state1 == STATE_SPAWNING and state2 == STATE_RUNNING and stage == 1:
                    self.assertTrue(5 - tolerance <= t2 <= 5 + tolerance)
                elif state1 == STATE_RUNNING and state2 == STATE_SPAWNING and stage == 1:
                    self.assertTrue(10 - tolerance <= t2 <= 10 + tolerance)
                    stage += 1
                elif state1 == STATE_SPAWNING and state2 == STATE_RUNNING and stage == 2:
                    self.assertTrue(15 - tolerance <= t2 <= 15 + tolerance)
                elif state1 == STATE_RUNNING and state2 == STATE_SPAWNING and stage == 2:
                    self.assertTrue(20 - tolerance <= t2 <= 20 + tolerance)
                    stage += 1
                elif state1 == STATE_SPAWNING and state2 == STATE_RUNNING and stage == 3:
                    self.assertTrue(25 - tolerance <= t2 <= 25 + tolerance)
                elif state1 == STATE_RUNNING and state2 == STATE_SPAWNING and stage == 3:
                    self.assertTrue(30 - tolerance <= t2 <= 30 + tolerance)
                    stage += 1
                elif state1 == STATE_RUNNING and state2 == STATE_STOPPED and stage == 3:
                    self.assertTrue(30 - tolerance <= t2 <= 30 + tolerance)
    def test_swarm_endpoint_is_non_blocking(self):
        """The web UI's /swarm endpoint must return immediately rather than blocking
        until the ramp-up finishes (20 users at 1/s takes ~20s here)."""

        class TestUser1(User):
            @task
            def my_task(self):
                gevent.sleep(600)

        class TestUser2(User):
            @task
            def my_task(self):
                gevent.sleep(600)

        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            stop_timeout = 0
            master_env = Environment(user_classes=[TestUser1, TestUser2], stop_timeout=stop_timeout)
            master = master_env.create_master_runner("*", 0)
            web_ui = master_env.create_web_ui("127.0.0.1", 0)

            workers = []
            for i in range(2):
                worker_env = Environment(user_classes=[TestUser1, TestUser2])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)

            # Give workers time to connect
            sleep(0.1)
            self.assertEqual(STATE_INIT, master.state)
            self.assertEqual(len(master.clients.ready), len(workers))

            # The POST must come back within 1s even though spawning takes ~20s.
            ts = time.perf_counter()
            response = requests.post(
                "http://127.0.0.1:{}/swarm".format(web_ui.server.server_port),
                data={"user_count": 20, "spawn_rate": 1, "host": "https://localhost"},
            )
            self.assertEqual(200, response.status_code)
            self.assertTrue(0 <= time.perf_counter() - ts <= 1, "swarm endpoint is blocking")

            # Ramp-up continues in the background and completes in roughly 20s.
            ts = time.perf_counter()
            while master.state != STATE_RUNNING:
                self.assertTrue(time.perf_counter() - ts <= 20, master.state)
                gevent.sleep(0.1)

            self.assertTrue(19 <= time.perf_counter() - ts <= 21)
            self.assertEqual(master.user_count, 20)

            master.stop()
            web_ui.stop()
    def test_can_call_stop_endpoint_if_currently_swarming(self):
        """The web UI's /stop endpoint must work while a ramp-up triggered via /swarm is
        still in progress, honouring the configured stop_timeout."""

        class TestUser1(User):
            @task
            def my_task(self):
                gevent.sleep(600)

        class TestUser2(User):
            @task
            def my_task(self):
                gevent.sleep(600)

        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            stop_timeout = 5
            master_env = Environment(user_classes=[TestUser1, TestUser2], stop_timeout=stop_timeout)
            master = master_env.create_master_runner("*", 0)
            web_ui = master_env.create_web_ui("127.0.0.1", 0)

            workers = []
            for i in range(2):
                worker_env = Environment(user_classes=[TestUser1, TestUser2])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)

            # Give workers time to connect
            sleep(0.1)
            self.assertEqual(STATE_INIT, master.state)
            self.assertEqual(len(master.clients.ready), len(workers))

            # Kick off a slow ramp-up (20 users at 1/s); the POST itself must not block.
            ts = time.perf_counter()
            response = requests.post(
                "http://127.0.0.1:{}/swarm".format(web_ui.server.server_port),
                data={"user_count": 20, "spawn_rate": 1, "host": "https://localhost"},
            )
            self.assertEqual(200, response.status_code)
            self.assertTrue(0 <= time.perf_counter() - ts <= 1, "swarm endpoint is blocking")

            # After 5s we are mid-spawn: still SPAWNING with at most ~10 users started.
            gevent.sleep(5)
            self.assertEqual(master.state, STATE_SPAWNING)
            self.assertLessEqual(master.user_count, 10)

            # Stopping mid-spawn should succeed, taking roughly stop_timeout seconds.
            ts = time.perf_counter()
            response = requests.get(
                "http://127.0.0.1:{}/stop".format(web_ui.server.server_port),
            )
            self.assertEqual(200, response.status_code)
            self.assertTrue(stop_timeout <= time.perf_counter() - ts <= stop_timeout + 5, "stop endpoint took too long")

            ts = time.perf_counter()
            while master.state != STATE_STOPPED:
                self.assertTrue(time.perf_counter() - ts <= 2)
                gevent.sleep(0.1)

            self.assertLessEqual(master.user_count, 0)

            master.stop()
            web_ui.stop()
class TestMasterRunner(LocustTestCase):
    def setUp(self):
        # Fresh Environment per test, wired to the global `locust.events` so listeners
        # registered inside tests fire. catch_exceptions=False: presumably lets runner
        # exceptions propagate instead of being swallowed — confirm against Environment.
        super().setUp()
        self.environment = Environment(events=locust.events, catch_exceptions=False)
    def tearDown(self):
        # Currently a pure pass-through to LocustTestCase.tearDown; no extra cleanup needed.
        super().tearDown()
def get_runner(self, user_classes=None):
if user_classes is not None:
self.environment.user_classes = user_classes
return self.environment.create_master_runner("*", 5557)
def test_worker_connect(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "zeh_fake_client1"))
self.assertEqual(1, len(master.clients))
self.assertTrue(
"zeh_fake_client1" in master.clients, "Could not find fake client in master instance's clients dict"
)
server.mocked_send(Message("client_ready", __version__, "zeh_fake_client2"))
server.mocked_send(Message("client_ready", __version__, "zeh_fake_client3"))
server.mocked_send(Message("client_ready", __version__, "zeh_fake_client4"))
self.assertEqual(4, len(master.clients))
server.mocked_send(Message("quit", None, "zeh_fake_client3"))
self.assertEqual(3, len(master.clients))
def test_worker_stats_report_median(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client"))
master.stats.get("/", "GET").log(100, 23455)
master.stats.get("/", "GET").log(800, 23455)
master.stats.get("/", "GET").log(700, 23455)
data = {"user_count": 1}
self.environment.events.report_to_master.fire(client_id="fake_client", data=data)
master.stats.clear_all()
server.mocked_send(Message("stats", data, "fake_client"))
s = master.stats.get("/", "GET")
self.assertEqual(700, s.median_response_time)
def test_worker_stats_report_with_none_response_times(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client"))
master.stats.get("/mixed", "GET").log(0, 23455)
master.stats.get("/mixed", "GET").log(800, 23455)
master.stats.get("/mixed", "GET").log(700, 23455)
master.stats.get("/mixed", "GET").log(None, 23455)
master.stats.get("/mixed", "GET").log(None, 23455)
master.stats.get("/mixed", "GET").log(None, 23455)
master.stats.get("/mixed", "GET").log(None, 23455)
master.stats.get("/onlyNone", "GET").log(None, 23455)
data = {"user_count": 1}
self.environment.events.report_to_master.fire(client_id="fake_client", data=data)
master.stats.clear_all()
server.mocked_send(Message("stats", data, "fake_client"))
s1 = master.stats.get("/mixed", "GET")
self.assertEqual(700, s1.median_response_time)
self.assertEqual(500, s1.avg_response_time)
s2 = master.stats.get("/onlyNone", "GET")
self.assertEqual(0, s2.median_response_time)
self.assertEqual(0, s2.avg_response_time)
def test_master_marks_downed_workers_as_missing(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client"))
sleep(6)
# print(master.clients['fake_client'].__dict__)
assert master.clients["fake_client"].state == STATE_MISSING
def test_last_worker_quitting_stops_test(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client1"))
server.mocked_send(Message("client_ready", __version__, "fake_client2"))
master.start(1, 2)
server.mocked_send(Message("spawning", None, "fake_client1"))
server.mocked_send(Message("spawning", None, "fake_client2"))
server.mocked_send(Message("quit", None, "fake_client1"))
sleep(0)
self.assertEqual(1, len(master.clients.all))
self.assertNotEqual(STATE_STOPPED, master.state, "Not all workers quit but test stopped anyway.")
server.mocked_send(Message("quit", None, "fake_client2"))
sleep(0)
self.assertEqual(0, len(master.clients.all))
self.assertEqual(STATE_STOPPED, master.state, "All workers quit but test didn't stop.")
    @mock.patch("locust.runners.HEARTBEAT_INTERVAL", new=0.1)
    def test_last_worker_missing_stops_test(self):
        """Workers that miss heartbeats are marked missing one by one; the test only
        stops once ALL workers have gone missing. HEARTBEAT_INTERVAL is patched to
        0.1s so the sleeps below cross the liveness window quickly."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", __version__, "fake_client1"))
            server.mocked_send(Message("client_ready", __version__, "fake_client2"))
            server.mocked_send(Message("client_ready", __version__, "fake_client3"))

            master.start(3, 3)
            server.mocked_send(Message("spawning", None, "fake_client1"))
            server.mocked_send(Message("spawning", None, "fake_client2"))
            server.mocked_send(Message("spawning", None, "fake_client3"))

            # All three workers heartbeat: none should be considered missing.
            sleep(0.2)
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client1")
            )
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client2")
            )
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client3")
            )

            sleep(0.2)
            self.assertEqual(0, len(master.clients.missing))
            self.assertEqual(3, master.worker_count)
            self.assertNotIn(
                master.state, [STATE_STOPPED, STATE_STOPPING], "Not all workers went missing but test stopped anyway."
            )

            # Only fake_client1 keeps heartbeating; the other two go silent.
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client1")
            )

            sleep(0.4)
            self.assertEqual(2, len(master.clients.missing))
            self.assertEqual(1, master.worker_count)
            self.assertNotIn(
                master.state, [STATE_STOPPED, STATE_STOPPING], "Not all workers went missing but test stopped anyway."
            )

            # Now fake_client1 also stops heartbeating: the whole test must stop.
            sleep(0.2)
            self.assertEqual(3, len(master.clients.missing))
            self.assertEqual(0, master.worker_count)
            self.assertEqual(STATE_STOPPED, master.state, "All workers went missing but test didn't stop.")
def test_master_total_stats(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client"))
stats = RequestStats()
stats.log_request("GET", "/1", 100, 3546)
stats.log_request("GET", "/1", 800, 56743)
stats2 = RequestStats()
stats2.log_request("GET", "/2", 700, 2201)
server.mocked_send(
Message(
"stats",
{
"stats": stats.serialize_stats(),
"stats_total": stats.total.serialize(),
"errors": stats.serialize_errors(),
"user_count": 1,
},
"fake_client",
)
)
server.mocked_send(
Message(
"stats",
{
"stats": stats2.serialize_stats(),
"stats_total": stats2.total.serialize(),
"errors": stats2.serialize_errors(),
"user_count": 2,
},
"fake_client",
)
)
self.assertEqual(700, master.stats.total.median_response_time)
def test_master_total_stats_with_none_response_times(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", __version__, "fake_client"))
stats = RequestStats()
stats.log_request("GET", "/1", 100, 3546)
stats.log_request("GET", "/1", 800, 56743)
stats.log_request("GET", "/1", None, 56743)
stats2 = RequestStats()
stats2.log_request("GET", "/2", 700, 2201)
stats2.log_request("GET", "/2", None, 2201)
stats3 = RequestStats()
stats3.log_request("GET", "/3", None, 2201)
server.mocked_send(
Message(
"stats",
{
"stats": stats.serialize_stats(),
"stats_total": stats.total.serialize(),
"errors": stats.serialize_errors(),
"user_count": 1,
},
"fake_client",
)
)
server.mocked_send(
Message(
"stats",
{
"stats": stats2.serialize_stats(),
"stats_total": stats2.total.serialize(),
"errors": stats2.serialize_errors(),
"user_count": 2,
},
"fake_client",
)
)
server.mocked_send(
Message(
"stats",
{
"stats": stats3.serialize_stats(),
"stats_total": stats3.total.serialize(),
"errors": stats3.serialize_errors(),
"user_count": 2,
},
"fake_client",
)
)
self.assertEqual(700, master.stats.total.median_response_time)
    def test_master_current_response_times(self):
        """Current (sliding-window) response time percentiles only account for recent
        requests. `time.time` is mocked so the window boundaries are deterministic."""
        start_time = 1
        with mock.patch("time.time") as mocked_time:
            mocked_time.return_value = start_time
            with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
                master = self.get_runner()
                self.environment.stats.reset_all()
                mocked_time.return_value += 1.0234
                server.mocked_send(Message("client_ready", __version__, "fake_client"))
                stats = RequestStats()
                stats.log_request("GET", "/1", 100, 3546)
                stats.log_request("GET", "/1", 800, 56743)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats.serialize_stats(),
                            "stats_total": stats.total.get_stripped_report(),
                            "errors": stats.serialize_errors(),
                            "user_count": 1,
                        },
                        "fake_client",
                    )
                )
                mocked_time.return_value += 1
                stats2 = RequestStats()
                stats2.log_request("GET", "/2", 400, 2201)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats2.serialize_stats(),
                            "stats_total": stats2.total.get_stripped_report(),
                            "errors": stats2.serialize_errors(),
                            "user_count": 2,
                        },
                        "fake_client",
                    )
                )
                mocked_time.return_value += 4
                # Percentiles over the requests reported so far: 100, 800, 400.
                self.assertEqual(400, master.stats.total.get_current_response_time_percentile(0.5))
                self.assertEqual(800, master.stats.total.get_current_response_time_percentile(0.95))

                # let 10 second pass, do some more requests, send it to the master and make
                # sure the current response time percentiles only accounts for these new requests
                mocked_time.return_value += 10.10023
                stats.log_request("GET", "/1", 20, 1)
                stats.log_request("GET", "/1", 30, 1)
                stats.log_request("GET", "/1", 3000, 1)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats.serialize_stats(),
                            "stats_total": stats.total.get_stripped_report(),
                            "errors": stats.serialize_errors(),
                            "user_count": 2,
                        },
                        "fake_client",
                    )
                )
                self.assertEqual(30, master.stats.total.get_current_response_time_percentile(0.5))
                self.assertEqual(3000, master.stats.total.get_current_response_time_percentile(0.95))
    @mock.patch("locust.runners.HEARTBEAT_INTERVAL", new=600)
    def test_rebalance_locust_users_on_worker_connect(self):
        """When a new worker connects mid-run, the master rebalances the running users
        across all workers. HEARTBEAT_INTERVAL is patched very high so no worker is
        flagged missing during the test."""

        class TestUser(User):
            @task
            def my_task(self):
                pass

        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[TestUser])
            server.mocked_send(Message("client_ready", __version__, "zeh_fake_client1"))
            self.assertEqual(1, len(master.clients))
            self.assertTrue(
                "zeh_fake_client1" in master.clients, "Could not find fake client in master instance's clients dict"
            )

            # 100 users at spawn rate 20 => 5 incremental spawn messages of 20/40/60/80/100.
            master.start(100, 20)
            self.assertEqual(5, len(server.outbox))
            for i, (_, msg) in enumerate(server.outbox.copy()):
                self.assertDictEqual({"TestUser": int((i + 1) * 20)}, msg.data["user_classes_count"])
                server.outbox.pop()

            # Normally, this attribute would be updated when the
            # master receives the report from the worker.
            master.clients["zeh_fake_client1"].user_classes_count = {"TestUser": 100}

            # let another worker connect
            server.mocked_send(Message("client_ready", __version__, "zeh_fake_client2"))
            self.assertEqual(2, len(master.clients))
            sleep(0.1)  # give time for messages to be sent to clients
            self.assertEqual(2, len(server.outbox))
            # Both workers receive a rebalanced share of 50 users each.
            client_id, msg = server.outbox.pop()
            self.assertEqual({"TestUser": 50}, msg.data["user_classes_count"])
            client_id, msg = server.outbox.pop()
            self.assertEqual({"TestUser": 50}, msg.data["user_classes_count"])
def test_sends_spawn_data_to_ready_running_spawning_workers(self):
"""Sends spawn job to running, ready, or spawning workers"""
class TestUser(User):
@task
def my_task(self):
pass
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner(user_classes=[TestUser])
master.clients[1] = WorkerNode("1")
master.clients[2] = WorkerNode("2")
master.clients[3] = WorkerNode("3")
master.clients[1].state = STATE_INIT
master.clients[2].state = STATE_SPAWNING
master.clients[3].state = STATE_RUNNING
master.start(user_count=5, spawn_rate=5)
self.assertEqual(3, len(server.outbox))
def test_start_event(self):
"""
Tests that test_start event is fired
"""
class TestUser(User):
@task
def my_task(self):
pass
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner(user_classes=[TestUser])
run_count = [0]
@self.environment.events.test_start.add_listener
def on_test_start(*a, **kw):
run_count[0] += 1
for i in range(5):
server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
master.start(7, 7)
self.assertEqual(5, len(server.outbox))
self.assertEqual(1, run_count[0])
# change number of users and check that test_start isn't fired again
master.start(7, 7)
self.assertEqual(1, run_count[0])
# stop and start to make sure test_start is fired again
master.stop()
master.start(3, 3)
self.assertEqual(2, run_count[0])
master.quit()
    def test_stop_event(self):
        """
        Tests that test_stop event is fired
        """
        class TestUser(User):
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[TestUser])
            run_count = [0]
            @self.environment.events.test_stop.add_listener
            def on_test_stop(*a, **kw):
                run_count[0] += 1
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            master.stop()
            self.assertEqual(1, run_count[0])
            # Second run: quit() after stop() must not fire test_stop a second time.
            run_count[0] = 0
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            master.start(7, 7)
            master.stop()
            master.quit()
            self.assertEqual(1, run_count[0])
    def test_stop_event_quit(self):
        """
        Tests that test_stop event is fired when quit() is called directly
        """
        class TestUser(User):
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[TestUser])
            run_count = [0]
            @self.environment.events.test_stop.add_listener
            def on_test_stop(*a, **kw):
                run_count[0] += 1
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            # quit() without a preceding stop() must still fire test_stop exactly once
            master.quit()
            self.assertEqual(1, run_count[0])
    def test_spawn_zero_locusts(self):
        """Starting a run with user_count=0 must not spawn any user."""
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                pass
        class MyTestUser(User):
            tasks = [MyTaskSet]
            wait_time = constant(0.1)
        environment = Environment(user_classes=[MyTestUser])
        runner = LocalRunner(environment)
        # If any user is (incorrectly) spawned, start(wait=True) would block and
        # this gevent timeout fires, failing the test.
        timeout = gevent.Timeout(2.0)
        timeout.start()
        try:
            runner.start(0, 1, wait=True)
            runner.spawning_greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. A locust seems to have been spawned, even though 0 was specified.")
        finally:
            timeout.cancel()
    def test_spawn_uneven_locusts(self):
        """
        Tests that we can accurately spawn a certain number of locusts, even if it's not an
        even number of the connected workers
        """
        class TestUser(User):
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[TestUser])
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            # 7 users across 5 workers: the per-worker counts differ but must sum to 7.
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            num_users = sum(sum(msg.data["user_classes_count"].values()) for _, msg in server.outbox if msg.data)
            self.assertEqual(7, num_users, "Total number of locusts that would have been spawned is not 7")
    def test_spawn_fewer_locusts_than_workers(self):
        """Spawning fewer users than connected workers must still total correctly."""
        class TestUser(User):
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[TestUser])
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            # 2 users on 5 workers: some workers get zero users, but all get a message.
            master.start(2, 2)
            self.assertEqual(5, len(server.outbox))
            num_users = sum(sum(msg.data["user_classes_count"].values()) for _, msg in server.outbox if msg.data)
            self.assertEqual(2, num_users, "Total number of locusts that would have been spawned is not 2")
    def test_custom_shape_scale_up(self):
        """A LoadTestShape that ramps 1 -> 2 users drives the master accordingly."""
        class MyUser(User):
            @task
            def my_task(self):
                pass
        class TestShape(LoadTestShape):
            def tick(self):
                run_time = self.get_run_time()
                if run_time < 2:
                    return 1, 1
                elif run_time < 4:
                    return 2, 2
                else:
                    # Returning None ends the test.
                    return None
        self.environment.shape_class = TestShape()
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[MyUser])
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            # Start the shape_worker
            self.environment.shape_class.reset_time()
            master.start_shape()
            sleep(0.5)
            # Wait for shape_worker to update user_count
            num_users = sum(sum(msg.data["user_classes_count"].values()) for _, msg in server.outbox if msg.data)
            self.assertEqual(
                1, num_users, "Total number of users in first stage of shape test is not 1: %i" % num_users
            )
            # Wait for shape_worker to update user_count again
            # (outbox accumulates, so stage two expects 1 + 2 = 3 users in total)
            sleep(2)
            num_users = sum(sum(msg.data["user_classes_count"].values()) for _, msg in server.outbox if msg.data)
            self.assertEqual(
                3, num_users, "Total number of users in second stage of shape test is not 3: %i" % num_users
            )
            # Wait to ensure shape_worker has stopped the test
            sleep(3)
            self.assertEqual("stopped", master.state, "The test has not been stopped by the shape class")
    def test_custom_shape_scale_down(self):
        """A LoadTestShape that ramps 5 -> 1 users drives the master accordingly."""
        class MyUser(User):
            @task
            def my_task(self):
                pass
        class TestShape(LoadTestShape):
            def tick(self):
                run_time = self.get_run_time()
                if run_time < 2:
                    return 5, 5
                elif run_time < 4:
                    return 1, 5
                else:
                    # Returning None ends the test.
                    return None
        self.environment.shape_class = TestShape()
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[MyUser])
            for i in range(5):
                server.mocked_send(Message("client_ready", __version__, "fake_client%i" % i))
            # Start the shape_worker
            self.environment.shape_class.reset_time()
            master.start_shape()
            sleep(0.5)
            # Wait for shape_worker to update user_count
            num_users = sum(sum(msg.data["user_classes_count"].values()) for _, msg in server.outbox if msg.data)
            self.assertEqual(
                5, num_users, "Total number of users in first stage of shape test is not 5: %i" % num_users
            )
            # Wait for shape_worker to update user_count again
            sleep(2)
            # Group messages per worker keyed by timestamp, so we can look at
            # only the most recent spawn message for each worker.
            msgs = defaultdict(dict)
            for _, msg in server.outbox:
                if not msg.data:
                    continue
                msgs[msg.node_id][msg.data["timestamp"]] = sum(msg.data["user_classes_count"].values())
            # Count users for the last received messages
            num_users = sum(v[max(v.keys())] for v in msgs.values())
            self.assertEqual(
                1, num_users, "Total number of users in second stage of shape test is not 1: %i" % num_users
            )
            # Wait to ensure shape_worker has stopped the test
            sleep(3)
            self.assertEqual("stopped", master.state, "The test has not been stopped by the shape class")
    def test_exception_in_task(self):
        """Exceptions raised by a user task are recorded and de-duplicated by the runner."""
        class MyUser(User):
            @task
            def will_error(self):
                raise HeyAnException(":(")
        self.environment.user_classes = [MyUser]
        runner = self.environment.create_local_runner()
        l = MyUser(self.environment)
        # Run twice: identical tracebacks should be aggregated into one entry.
        self.assertRaises(HeyAnException, l.run)
        self.assertRaises(HeyAnException, l.run)
        self.assertEqual(1, len(runner.exceptions))
        hash_key, exception = runner.exceptions.popitem()
        self.assertTrue("traceback" in exception)
        self.assertTrue("HeyAnException" in exception["traceback"])
        self.assertEqual(2, exception["count"])
    def test_exception_is_caught(self):
        """Test that exceptions are stored, and execution continues"""
        class MyTaskSet(TaskSet):
            def __init__(self, *a, **kw):
                super().__init__(*a, **kw)
                # Force a deterministic task order: error first, then StopUser.
                self._task_queue = [self.will_error, self.will_stop]
            @task(1)
            def will_error(self):
                raise HeyAnException(":(")
            @task(1)
            def will_stop(self):
                raise StopUser()
        class MyUser(User):
            wait_time = constant(0.01)
            tasks = [MyTaskSet]
        # set config to catch exceptions in locust users
        self.environment.catch_exceptions = True
        self.environment.user_classes = [MyUser]
        runner = LocalRunner(self.environment)
        l = MyUser(self.environment)
        # make sure HeyAnException isn't raised
        l.run()
        l.run()
        # make sure we got two entries in the error log
        self.assertEqual(2, len(self.mocked_log.error))
        # make sure exception was stored
        self.assertEqual(1, len(runner.exceptions))
        hash_key, exception = runner.exceptions.popitem()
        self.assertTrue("traceback" in exception)
        self.assertTrue("HeyAnException" in exception["traceback"])
        self.assertEqual(2, exception["count"])
    def test_master_reset_connection(self):
        """Test that connection will be reset when network issues found"""
        with mock.patch("locust.runners.FALLBACK_INTERVAL", new=0.1):
            with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
                master = self.get_runner()
                self.assertEqual(0, len(master.clients))
                # NETWORK_BROKEN in the version field makes the mocked RPC simulate a failure.
                server.mocked_send(Message("client_ready", NETWORK_BROKEN, "fake_client"))
                self.assertTrue(master.connection_broken)
                server.mocked_send(Message("client_ready", __version__, "fake_client"))
                sleep(0.2)
                # After a good message the connection flag clears and the worker registers.
                self.assertFalse(master.connection_broken)
                self.assertEqual(1, len(master.clients))
                master.quit()
    def test_attributes_populated_when_calling_start(self):
        """start() keeps target_user_classes_count/target_user_count/spawn_rate up to date."""
        class MyUser1(User):
            @task
            def my_task(self):
                pass
        class MyUser2(User):
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner(user_classes=[MyUser1, MyUser2])
            server.mocked_send(Message("client_ready", __version__, "fake_client1"))
            master.start(7, 7)
            # 7 users over two classes: distribution is 3 + 4.
            self.assertEqual({"MyUser1": 3, "MyUser2": 4}, master.target_user_classes_count)
            self.assertEqual(7, master.target_user_count)
            self.assertEqual(7, master.spawn_rate)
            master.start(10, 10)
            self.assertEqual({"MyUser1": 5, "MyUser2": 5}, master.target_user_classes_count)
            self.assertEqual(10, master.target_user_count)
            self.assertEqual(10, master.spawn_rate)
            master.start(1, 3)
            self.assertEqual({"MyUser1": 1, "MyUser2": 0}, master.target_user_classes_count)
            self.assertEqual(1, master.target_user_count)
            self.assertEqual(3, master.spawn_rate)
    def test_custom_message_send(self):
        """Master broadcasts a custom message to every connected worker."""
        # NOTE(review): MyUser appears unused by this test — confirm before removing.
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            for i in range(5):
                master.clients[i] = WorkerNode(str(i))
            master.send_message("test_custom_msg", {"test_data": 123})
            # One copy of the message per worker, with type and payload intact.
            self.assertEqual(5, len(server.outbox))
            for _, msg in server.outbox:
                self.assertEqual("test_custom_msg", msg.type)
                self.assertEqual(123, msg.data["test_data"])
    def test_custom_message_receive(self):
        """A registered custom-message listener on the master is invoked with the payload."""
        # NOTE(review): MyUser appears unused by this test — confirm before removing.
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            test_custom_msg = [False]
            test_custom_msg_data = [{}]
            def on_custom_msg(msg, **kw):
                test_custom_msg[0] = True
                test_custom_msg_data[0] = msg.data
            master = self.get_runner()
            master.register_message("test_custom_msg", on_custom_msg)
            server.mocked_send(Message("test_custom_msg", {"test_data": 123}, "dummy_id"))
            self.assertTrue(test_custom_msg[0])
            self.assertEqual(123, test_custom_msg_data[0]["test_data"])
    def test_undefined_custom_message_receive(self):
        """An unregistered message type is ignored but logged as a warning."""
        # NOTE(review): MyUser appears unused by this test — confirm before removing.
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            test_custom_msg = [False]
            def on_custom_msg(msg, **kw):
                test_custom_msg[0] = True
            master = self.get_runner()
            master.register_message("test_custom_msg", on_custom_msg)
            server.mocked_send(Message("unregistered_custom_msg", {}, "dummy_id"))
            self.assertFalse(test_custom_msg[0])
            self.assertEqual(1, len(self.mocked_log.warning))
            msg = self.mocked_log.warning[0]
            # NOTE: "recieved" is intentionally misspelled — it matches the
            # runner's actual log message text.
            self.assertIn("Unknown message type recieved from worker", msg)
    def test_wait_for_workers_report_after_ramp_up(self):
        """The cached ramp-up wait time honours LOCUST_WAIT_FOR_WORKERS_REPORT_AFTER_RAMP_UP,
        including the "<factor> * WORKER_REPORT_INTERVAL" expression form."""
        def assert_cache_hits():
            # First call after cache_clear populated the cache; the next one must hit it.
            self.assertEqual(master._wait_for_workers_report_after_ramp_up.cache_info().hits, 0)
            master._wait_for_workers_report_after_ramp_up()
            self.assertEqual(master._wait_for_workers_report_after_ramp_up.cache_info().hits, 1)
        master = self.get_runner()
        master._wait_for_workers_report_after_ramp_up.cache_clear()
        self.assertEqual(master._wait_for_workers_report_after_ramp_up(), 0.1)
        assert_cache_hits()
        master._wait_for_workers_report_after_ramp_up.cache_clear()
        with _patch_env("LOCUST_WAIT_FOR_WORKERS_REPORT_AFTER_RAMP_UP", "5.7"):
            self.assertEqual(master._wait_for_workers_report_after_ramp_up(), 5.7)
            assert_cache_hits()
        master._wait_for_workers_report_after_ramp_up.cache_clear()
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=1.5), _patch_env(
            "LOCUST_WAIT_FOR_WORKERS_REPORT_AFTER_RAMP_UP", "5.7 * WORKER_REPORT_INTERVAL"
        ):
            self.assertEqual(master._wait_for_workers_report_after_ramp_up(), 5.7 * 1.5)
            assert_cache_hits()
        master._wait_for_workers_report_after_ramp_up.cache_clear()
@contextmanager
def _patch_env(name: str, value: str):
    """Temporarily set environment variable *name* to *value*.

    On exit the previous value is restored; if the variable did not exist
    before, it is removed again.
    """
    saved = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        # Restore the pre-existing state exactly.
        if saved is None:
            del os.environ[name]
        else:
            os.environ[name] = saved
class TestWorkerRunner(LocustTestCase):
    """Tests for WorkerRunner behaviour, driven through a mocked RPC client."""
    def setUp(self):
        super().setUp()
        # self._report_to_master_event_handlers = [h for h in events.report_to_master._handlers]
    def tearDown(self):
        # events.report_to_master._handlers = self._report_to_master_event_handlers
        super().tearDown()
    def get_runner(self, environment=None, user_classes=None):
        """Build a WorkerRunner pointed at a (mocked) master on localhost:5557."""
        if environment is None:
            environment = self.environment
        user_classes = user_classes or []
        environment.user_classes = user_classes
        return WorkerRunner(environment, master_host="localhost", master_port=5557)
    def test_worker_stop_timeout(self):
        """With a stop_timeout the worker lets running tasks finish before stopping."""
        class MyTestUser(User):
            # 0 = not started, 1 = task started, 2 = task ran to completion
            _test_state = 0
            @task
            def the_task(self):
                MyTestUser._test_state = 1
                gevent.sleep(0.2)
                MyTestUser._test_state = 2
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            test_start_run = [False]
            @environment.events.test_start.add_listener
            def on_test_start(_environment, **kw):
                test_start_run[0] = True
            worker = self.get_runner(environment=environment, user_classes=[MyTestUser])
            self.assertEqual(1, len(client.outbox))
            self.assertEqual("client_ready", client.outbox[0].type)
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyTestUser": 1},
                        "host": "",
                        "stop_timeout": 1,
                    },
                    "dummy_client_id",
                )
            )
            # wait for worker to spawn locusts
            self.assertIn("spawning", [m.type for m in client.outbox])
            worker.spawning_greenlet.join()
            self.assertEqual(1, len(worker.user_greenlets))
            # check that locust has started running
            gevent.sleep(0.01)
            self.assertEqual(1, MyTestUser._test_state)
            # send stop message
            client.mocked_send(Message("stop", None, "dummy_client_id"))
            worker.user_greenlets.join()
            # check that locust user got to finish
            self.assertEqual(2, MyTestUser._test_state)
            # make sure the test_start was never fired on the worker
            self.assertFalse(test_start_run[0])
    def test_worker_without_stop_timeout(self):
        """Without a stop_timeout the worker kills running tasks immediately on stop."""
        class MyTestUser(User):
            # 0 = not started, 1 = task started, 2 = task ran to completion
            _test_state = 0
            @task
            def the_task(self):
                MyTestUser._test_state = 1
                gevent.sleep(0.2)
                MyTestUser._test_state = 2
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment(stop_timeout=None)
            worker = self.get_runner(environment=environment, user_classes=[MyTestUser])
            self.assertEqual(1, len(client.outbox))
            self.assertEqual("client_ready", client.outbox[0].type)
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyTestUser": 1},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            # wait for worker to spawn locusts
            self.assertIn("spawning", [m.type for m in client.outbox])
            worker.spawning_greenlet.join()
            self.assertEqual(1, len(worker.user_greenlets))
            # check that locust has started running
            gevent.sleep(0.01)
            self.assertEqual(1, MyTestUser._test_state)
            # send stop message
            client.mocked_send(Message("stop", None, "dummy_client_id"))
            worker.user_greenlets.join()
            # check that locust user did not get to finish
            self.assertEqual(1, MyTestUser._test_state)
    def test_spawn_message_with_older_timestamp_is_rejected(self):
        """Spawn messages whose timestamp is not newer than the last applied one are ignored."""
        class MyUser(User):
            wait_time = constant(1)
            def start(self, group: Group):
                # We do this so that the spawning does not finish
                # too quickly
                gevent.sleep(0.1)
                return super().start(group)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyUser": 10},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0.6)
            self.assertEqual(STATE_SPAWNING, worker.state)
            worker.spawning_greenlet.join()
            self.assertEqual(10, worker.user_count)
            # Send same timestamp as the first message
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyUser": 9},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            worker.spawning_greenlet.join()
            # Still 10 users
            self.assertEqual(10, worker.user_count)
            # Send older timestamp than the first message
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538583,
                        "user_classes_count": {"MyUser": 2},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            worker.spawning_greenlet.join()
            # Still 10 users
            self.assertEqual(10, worker.user_count)
            # Send newer timestamp than the first message
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538585,
                        "user_classes_count": {"MyUser": 2},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            worker.spawning_greenlet.join()
            self.assertEqual(2, worker.user_count)
            worker.quit()
    def test_worker_messages_sent_to_master(self):
        """
        Ensure that worker includes both "user_count" and "user_classes_count"
        when reporting to the master.
        """
        class MyUser(User):
            wait_time = constant(1)
            def start(self, group: Group):
                # We do this so that the spawning does not finish
                # too quickly
                gevent.sleep(0.1)
                return super().start(group)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyUser": 10},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0.6)
            self.assertEqual(STATE_SPAWNING, worker.state)
            worker.spawning_greenlet.join()
            self.assertEqual(10, worker.user_count)
            # Give the worker time to emit at least one periodic "stats" report.
            sleep(2)
            message = next((m for m in reversed(client.outbox) if m.type == "stats"), None)
            self.assertIsNotNone(message)
            self.assertIn("user_count", message.data)
            self.assertIn("user_classes_count", message.data)
            self.assertEqual(message.data["user_count"], 10)
            self.assertEqual(message.data["user_classes_count"]["MyUser"], 10)
            # The one-shot "spawning_complete" message must carry the same fields.
            message = next((m for m in client.outbox if m.type == "spawning_complete"), None)
            self.assertIsNotNone(message)
            self.assertIn("user_count", message.data)
            self.assertIn("user_classes_count", message.data)
            self.assertEqual(message.data["user_count"], 10)
            self.assertEqual(message.data["user_classes_count"]["MyUser"], 10)
            worker.quit()
    def test_change_user_count_during_spawning(self):
        """A newer spawn message arriving mid-spawn supersedes the in-progress one."""
        class MyUser(User):
            wait_time = constant(1)
            def start(self, group: Group):
                # We do this so that the spawning does not finish
                # too quickly
                gevent.sleep(0.1)
                return super().start(group)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyUser": 10},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0.6)
            self.assertEqual(STATE_SPAWNING, worker.state)
            # Second message (newer timestamp) arrives while still spawning.
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538585,
                        "user_classes_count": {"MyUser": 9},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0)
            worker.spawning_greenlet.join()
            self.assertEqual(9, len(worker.user_greenlets))
            worker.quit()
    def test_computed_properties(self):
        """user_classes_count / target_user_classes_count / target_user_count track spawn messages."""
        class MyUser1(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        class MyUser2(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser1, MyUser2])
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538584,
                        "user_classes_count": {"MyUser1": 10, "MyUser2": 10},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            worker.spawning_greenlet.join()
            self.assertDictEqual(worker.user_classes_count, {"MyUser1": 10, "MyUser2": 10})
            self.assertDictEqual(worker.target_user_classes_count, {"MyUser1": 10, "MyUser2": 10})
            self.assertEqual(worker.target_user_count, 20)
            # Scale down via a newer spawn message and re-check the computed values.
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "timestamp": 1605538585,
                        "user_classes_count": {"MyUser1": 1, "MyUser2": 2},
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            worker.spawning_greenlet.join()
            self.assertDictEqual(worker.user_classes_count, {"MyUser1": 1, "MyUser2": 2})
            self.assertDictEqual(worker.target_user_classes_count, {"MyUser1": 1, "MyUser2": 2})
            self.assertEqual(worker.target_user_count, 3)
            worker.quit()
    def test_custom_message_send(self):
        """Worker forwards a custom message (type + payload) to the master."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            # Drop the initial "client_ready" so the custom message is outbox[0].
            client.outbox.clear()
            worker.send_message("test_custom_msg", {"test_data": 123})
            self.assertEqual("test_custom_msg", client.outbox[0].type)
            self.assertEqual(123, client.outbox[0].data["test_data"])
            worker.quit()
    def test_custom_message_receive(self):
        """A registered custom-message listener on the worker is invoked with the payload."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            test_custom_msg = [False]
            test_custom_msg_data = [{}]
            def on_custom_msg(msg, **kw):
                test_custom_msg[0] = True
                test_custom_msg_data[0] = msg.data
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            worker.register_message("test_custom_msg", on_custom_msg)
            client.mocked_send(Message("test_custom_msg", {"test_data": 123}, "dummy_client_id"))
            self.assertTrue(test_custom_msg[0])
            self.assertEqual(123, test_custom_msg_data[0]["test_data"])
            worker.quit()
    def test_undefined_custom_message_receive(self):
        """An unregistered message type is ignored by the worker but logged as a warning."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            test_custom_msg = [False]
            def on_custom_msg(msg, **kw):
                test_custom_msg[0] = True
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            worker.register_message("test_custom_msg", on_custom_msg)
            client.mocked_send(Message("unregistered_custom_msg", {}, "dummy_id"))
            self.assertFalse(test_custom_msg[0])
            self.assertEqual(1, len(self.mocked_log.warning))
            msg = self.mocked_log.warning[0]
            # NOTE: "recieved" is intentionally misspelled — it matches the
            # runner's actual log message text.
            self.assertIn("Unknown message type recieved", msg)
class TestMessageSerializing(unittest.TestCase):
    """Round-trip check for the RPC Message wire format."""
    def test_message_serialize(self):
        original = Message("client_ready", __version__, "my_id")
        decoded = Message.unserialize(original.serialize())
        # serialize/unserialize must preserve every field of the message
        for attr in ("type", "data", "node_id"):
            self.assertEqual(getattr(original, attr), getattr(decoded, attr))
class TestStopTimeout(LocustTestCase):
    """Tests for the stop_timeout grace period applied when stopping users."""
    def test_stop_timeout(self):
        """Runner quit honours stop_timeout: longer timeouts let tasks progress further."""
        short_time = 0.05
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                MyTaskSet.state = "first"
                gevent.sleep(short_time)
                MyTaskSet.state = "second"  # should only run when run time + stop_timeout is > short_time
                gevent.sleep(short_time)
                MyTaskSet.state = "third"  # should only run when run time + stop_timeout is > short_time * 2
        class MyTestUser(User):
            tasks = [MyTaskSet]
        # no stop_timeout: user is killed mid-task
        environment = Environment(user_classes=[MyTestUser])
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time / 2)
        runner.quit()
        self.assertEqual("first", MyTaskSet.state)
        # exit with timeout
        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time / 2)
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time)
        runner.quit()
        self.assertEqual("second", MyTaskSet.state)
        # allow task iteration to complete, with some margin
        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time * 3)
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time)
        timeout = gevent.Timeout(short_time * 2)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Some locusts must have kept running after iteration finish")
        finally:
            timeout.cancel()
        self.assertEqual("third", MyTaskSet.state)
    def test_stop_timeout_during_on_start(self):
        """on_start is allowed to finish within stop_timeout, but no task runs afterwards."""
        short_time = 0.05
        class MyTaskSet(TaskSet):
            finished_on_start = False
            my_task_run = False
            def on_start(self):
                gevent.sleep(short_time)
                MyTaskSet.finished_on_start = True
            @task
            def my_task(self):
                MyTaskSet.my_task_run = True
        class MyTestUser(User):
            tasks = [MyTaskSet]
        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = short_time
        runner = environment.create_local_runner()
        runner.start(1, 1)
        # Quit while still inside on_start.
        gevent.sleep(short_time / 2)
        runner.quit()
        self.assertTrue(MyTaskSet.finished_on_start)
        self.assertFalse(MyTaskSet.my_task_run)
    def test_stop_timeout_exit_during_wait(self):
        """A user sleeping in wait_time stops immediately, not after stop_timeout."""
        short_time = 0.05
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                pass
        class MyTestUser(User):
            tasks = [MyTaskSet]
            wait_time = constant(1)
        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time)
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)  # sleep to make sure locust has had time to start waiting
        timeout = gevent.Timeout(short_time)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Waiting locusts should stop immediately, even when using stop_timeout.")
        finally:
            timeout.cancel()
    def test_stop_timeout_with_interrupt(self):
        """A user that interrupts its TaskSet (reschedule=True) exits promptly on quit."""
        short_time = 0.05
        class MySubTaskSet(TaskSet):
            @task
            def a_task(self):
                gevent.sleep(0)
                self.interrupt(reschedule=True)
        class MyTaskSet(TaskSet):
            tasks = [MySubTaskSet]
        class MyTestUser(User):
            tasks = [MyTaskSet]
        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = short_time
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=True)
        gevent.sleep(0)
        timeout = gevent.Timeout(short_time)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Interrupted locusts should exit immediately during stop_timeout.")
        finally:
            timeout.cancel()
    def test_stop_timeout_with_interrupt_no_reschedule(self):
        """interrupt(reschedule=False) lets the current task finish, then exits without waiting out wait_time."""
        state = [0]
        class MySubTaskSet(TaskSet):
            @task
            def a_task(self):
                gevent.sleep(0.1)
                state[0] = 1
                self.interrupt(reschedule=False)
        class MyTestUser(User):
            tasks = [MySubTaskSet]
            wait_time = constant(3)
        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = 0.3
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=True)
        gevent.sleep(0)
        # 0.11s: just over the task's 0.1s sleep, far below wait_time/stop_timeout.
        timeout = gevent.Timeout(0.11)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Interrupted locusts should exit immediately during stop_timeout.")
        finally:
            timeout.cancel()
        self.assertEqual(1, state[0])
    def test_kill_locusts_with_stop_timeout(self):
        """stop_users() honours stop_timeout the same way quit() does."""
        short_time = 0.05
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                MyTaskSet.state = "first"
                gevent.sleep(short_time)
                MyTaskSet.state = "second"  # should only run when run time + stop_timeout is > short_time
                gevent.sleep(short_time)
                MyTaskSet.state = "third"  # should only run when run time + stop_timeout is > short_time * 2
        class MyTestUser(User):
            tasks = [MyTaskSet]
        environment = create_environment([MyTestUser], mocked_options())
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time / 2)
        runner.stop_users({MyTestUser.__name__: 1})
        self.assertEqual("first", MyTaskSet.state)
        runner.quit()
        environment.runner = None
        environment.stop_timeout = short_time / 2  # exit with timeout
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)
        runner.stop_users({MyTestUser.__name__: 1})
        self.assertEqual("second", MyTaskSet.state)
        runner.quit()
        environment.runner = None
        environment.stop_timeout = short_time * 3  # allow task iteration to complete, with some margin
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)
        timeout = gevent.Timeout(short_time * 2)
        timeout.start()
        try:
            runner.stop_users({MyTestUser.__name__: 1})
            runner.user_greenlets.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Some locusts must have kept running after iteration finish")
        finally:
            timeout.cancel()
        self.assertEqual("third", MyTaskSet.state)
    def test_users_can_call_runner_quit_with_stop_timeout(self):
        """A user task calling runner.quit() must not deadlock when stop_timeout is set."""
        class BaseUser(User):
            wait_time = constant(1)
            @task
            def trigger(self):
                self.environment.runner.quit()
        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.environment.stop_timeout = 1
        runner.spawn_users({BaseUser.__name__: 1}, wait=False)
        timeout = gevent.Timeout(0.5)
        timeout.start()
        try:
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception, runner must have hung somehow.")
        finally:
            timeout.cancel()
    def test_gracefully_handle_exceptions_in_listener(self):
        """A raising test_stop listener must not prevent the remaining listeners from running."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        test_stop_run = [0]
        environment = Environment(user_classes=[MyUser])
        def on_test_stop_ok(*args, **kwargs):
            test_stop_run[0] += 1
        def on_test_stop_fail(*args, **kwargs):
            assert False
        # The failing listener is registered between the two good ones.
        environment.events.test_stop.add_listener(on_test_stop_ok)
        environment.events.test_stop.add_listener(on_test_stop_fail)
        environment.events.test_stop.add_listener(on_test_stop_ok)
        runner = LocalRunner(environment)
        runner.start(user_count=3, spawn_rate=3, wait=False)
        self.assertEqual(0, test_stop_run[0])
        runner.stop()
        # Both good listeners ran despite the failing one in the middle.
        self.assertEqual(2, test_stop_run[0])
def test_stop_timeout_with_ramp_down(self):
"""
The spawn rate does not have an effect on the rate at which the users are stopped.
It is expected that the excess users will be stopped as soon as possible in parallel
(while respecting the stop_timeout).
"""
class MyTaskSet(TaskSet):
@task
def my_task(self):
gevent.sleep(1)
class MyTestUser(User):
tasks = [MyTaskSet]
environment = Environment(user_classes=[MyTestUser], stop_timeout=2)
runner = environment.create_local_runner()
# Start load test, wait for users to start, then trigger ramp down
ts = time.time()
runner.start(10, 10, wait=False)
runner.spawning_greenlet.join()
delta = time.time() - ts
self.assertTrue(
0 <= delta <= 0.05, "Expected user count to increase to 10 instantaneously, instead it took %f" % delta
)
self.assertTrue(
runner.user_count == 10, "User count has not decreased correctly to 2, it is : %i" % runner.user_count
)
ts = time.time()
runner.start(2, 4, wait=False)
runner.spawning_greenlet.join()
delta = time.time() - ts
self.assertTrue(1 <= delta <= 1.05, "Expected user count to decrease to 2 in 1s, instead it took %f" % delta)
self.assertTrue(
runner.user_count == 2, "User count has not decreased correctly to 2, it is : %i" % runner.user_count
)
| mit |
dnlm92/chokoretto | temp/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/codingstatemachine.py | 2931 | 2318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart
from .compat import wrap_ord
class CodingStateMachine:
    """Table-driven state machine that consumes input one byte at a time.

    The model dict (``sm``) supplies:
      - 'classTable':   maps each byte value to a byte class
      - 'charLenTable': expected character length, indexed by byte class
      - 'stateTable':   flattened (state x class) transition matrix
      - 'classFactor':  row width of the transition matrix
      - 'name':         human-readable name of the encoding
    """
    def __init__(self, sm):
        self._mModel = sm
        self._mCurrentBytePos = 0
        self._mCurrentCharLen = 0
        self.reset()
    def reset(self):
        self._mCurrentState = eStart
    def next_state(self, c):
        """Feed one byte and return the machine's new state."""
        # PY3K: aBuf is a byte stream, so c is an int, not a byte
        byte_class = self._mModel['classTable'][wrap_ord(c)]
        if self._mCurrentState == eStart:
            # First byte of a character: record its declared length.
            self._mCurrentBytePos = 0
            self._mCurrentCharLen = self._mModel['charLenTable'][byte_class]
        # Transition lookup for (current state, byte class) in the flat table.
        row_offset = self._mCurrentState * self._mModel['classFactor']
        self._mCurrentState = self._mModel['stateTable'][row_offset + byte_class]
        self._mCurrentBytePos += 1
        return self._mCurrentState
    def get_current_charlen(self):
        return self._mCurrentCharLen
    def get_coding_state_machine(self):
        return self._mModel['name']
| mit |
anthgur/servo | etc/ci/chaos_monkey_test.py | 140 | 1676 | # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# Run Servo's WPT content-security-policy tests with random pipeline
# closure enabled ("chaos monkey" mode) and exit non-zero if any test
# CRASHes.  Ordinary test failures are expected and ignored.
from __future__ import absolute_import, print_function
import json
import sys
from subprocess import Popen, PIPE
# mach invocation; the --binary-arg flags inject random pipeline closures
# with a fixed seed so a crash is reproducible.
TEST_CMD = [
    "./mach",
    "test-wpt",
    "--release",
    "--processes=24",
    "--binary-arg=--random-pipeline-closure-probability=0.1",
    "--binary-arg=--random-pipeline-closure-seed=123",
    "--binary-arg=--multiprocess",
    "--binary-arg=--soft-fail",
    "--log-raw=-",
    # We run the content-security-policy test because it creates
    # cross-origin iframes, which are good for stress-testing pipelines
    "content-security-policy"
]
# Note that there will probably be test failures caused
# by random pipeline closure, so we ignore the status code
# returned by the test command (which is why we can't use check_output).
test_results = Popen(TEST_CMD, stdout=PIPE)
any_crashes = False
# Each line of --log-raw output is one JSON report record.
for line in test_results.stdout:
    report = json.loads(line.decode('utf-8'))
    if report.get("action") == "process_output":
        print("{} - {}".format(report.get("thread"), report.get("data")))
    status = report.get("status")
    if status:
        print("{} - {} - {}".format(report.get("thread"), status, report.get("test")))
        if status == "CRASH":
            any_crashes = True
if any_crashes:
    sys.exit(1)
| mpl-2.0 |
stephane-martin/salt-debian-packaging | salt-2016.3.2/salt/states/boto_route53.py | 3 | 10304 | # -*- coding: utf-8 -*-
'''
Manage Route53 records
.. versionadded:: 2014.7.0
Create and delete Route53 records. Be aware that this interacts with Amazon's
services, and so may incur charges.
This module uses ``boto``, which can be installed via package, or pip.
This module accepts explicit route53 credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available `here
<http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_.
If IAM roles are not used you need to specify them either in a pillar file or
in the minion's config file:
.. code-block:: yaml
route53.keyid: GKTADJGHEIQSXMKKRBJ08H
route53.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile, either
passed in as a dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
.. code-block:: yaml
mycnamerecord:
boto_route53.present:
- name: test.example.com.
- value: my-elb.us-east-1.elb.amazonaws.com.
- zone: example.com.
- ttl: 60
- record_type: CNAME
- region: us-east-1
- keyid: GKTADJGHEIQSXMKKRBJ08H
- key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
# Using a profile from pillars
myarecord:
boto_route53.present:
- name: test.example.com.
- value: 1.1.1.1
- zone: example.com.
- ttl: 60
- record_type: A
- region: us-east-1
- profile: myprofile
# Passing in a profile
myarecord:
boto_route53.present:
- name: test.example.com.
- value: 1.1.1.1
- zone: example.com.
- ttl: 60
- record_type: A
- region: us-east-1
- profile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Libs
from salt.utils import SaltInvocationError
def __virtual__():
    '''
    Only load this state module when the boto_route53 execution module
    has loaded successfully (i.e. boto is installed).
    '''
    if 'boto_route53.get_record' in __salt__:
        return 'boto_route53'
    return False
def present(
        name,
        value,
        zone,
        record_type,
        ttl=None,
        identifier=None,
        region=None,
        key=None,
        keyid=None,
        profile=None,
        wait_for_sync=True,
        split_dns=False,
        private_zone=False):
    '''
    Ensure the Route53 record is present.
    name
        Name of the record.
    value
        Value of the record.
    zone
        The zone to create the record in.
    record_type
        The record type (A, NS, MX, TXT, etc.)
    ttl
        The time to live for the record.
    identifier
        The unique identifier to use for this record.
    region
        The region to connect to.
    key
        Secret key to be used.
    keyid
        Access key to be used.
    profile
        A dict with region, key and keyid, or a pillar key (string)
        that contains a dict with region, key and keyid.
    wait_for_sync
        Wait for an INSYNC change status from Route53.
    split_dns
        Route53 supports a public and private DNS zone with the same
        names.
    private_zone
        If using split_dns, specify if this is the private zone.
    '''
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    # If a list is passed in for value, change it to a comma-separated string
    # So it will work with subsequent boto module calls and string functions
    if isinstance(value, list):
        value = ','.join(value)
    try:
        record = __salt__['boto_route53.get_record'](name, zone, record_type,
                                                     False, region, key, keyid,
                                                     profile, split_dns,
                                                     private_zone)
    except SaltInvocationError as err:
        ret['comment'] = 'Error: {0}'.format(err)
        ret['result'] = False
        return ret
    # An empty dict means the record does not exist yet: create it.
    if isinstance(record, dict) and not record:
        if __opts__['test']:
            # test=True: report the pending change without applying it.
            ret['comment'] = 'Route53 record {0} set to be added.'.format(name)
            ret['result'] = None
            return ret
        added = __salt__['boto_route53.add_record'](name, value, zone,
                                                    record_type, identifier,
                                                    ttl, region, key, keyid,
                                                    profile, wait_for_sync,
                                                    split_dns, private_zone)
        if added:
            ret['changes']['old'] = None
            ret['changes']['new'] = {'name': name,
                                     'value': value,
                                     'record_type': record_type,
                                     'ttl': ttl}
            ret['comment'] = 'Added {0} Route53 record.'.format(name)
        else:
            ret['result'] = False
            ret['comment'] = 'Failed to add {0} Route53 record.'.format(name)
        return ret
    elif record:
        # Record exists: decide whether it differs from the desired state.
        need_to_update = False
        # Values can be a comma separated list and some values will end with a
        # period (even if we set it without one). To easily check this we need
        # to split and check with the period stripped from the input and what's
        # in route53.
        # TODO: figure out if this will cause us problems with some records.
        _values = [x.rstrip('.') for x in value.split(',')]
        _r_values = [x.rstrip('.') for x in record['value'].split(',')]
        _values.sort()
        _r_values.sort()
        if _values != _r_values:
            need_to_update = True
        if identifier and identifier != record['identifier']:
            need_to_update = True
        # str() both sides: boto may return the TTL as a string.
        if ttl and str(ttl) != str(record['ttl']):
            need_to_update = True
        if need_to_update:
            if __opts__['test']:
                msg = 'Route53 record {0} set to be updated.'.format(name)
                ret['comment'] = msg
                ret['result'] = None
                return ret
            updated = __salt__['boto_route53.update_record'](name, value, zone,
                                                             record_type,
                                                             identifier, ttl,
                                                             region, key,
                                                             keyid, profile,
                                                             wait_for_sync,
                                                             split_dns,
                                                             private_zone)
            if updated:
                ret['changes']['old'] = record
                ret['changes']['new'] = {'name': name,
                                         'value': value,
                                         'record_type': record_type,
                                         'ttl': ttl}
                ret['comment'] = 'Updated {0} Route53 record.'.format(name)
            else:
                ret['result'] = False
                msg = 'Failed to update {0} Route53 record.'.format(name)
                ret['comment'] = msg
    else:
        # Record exists and already matches the desired state.
        ret['comment'] = '{0} exists.'.format(name)
    return ret
def absent(
        name,
        zone,
        record_type,
        identifier=None,
        region=None,
        key=None,
        keyid=None,
        profile=None,
        wait_for_sync=True,
        split_dns=False,
        private_zone=False):
    '''
    Ensure the Route53 record is deleted.
    name
        Name of the record.
    zone
        The zone to delete the record from.
    record_type
        The record type (A, NS, MX, TXT, etc.)
    identifier
        An identifier to match for deletion.
    region
        The region to connect to.
    key
        Secret key to be used.
    keyid
        Access key to be used.
    profile
        A dict with region, key and keyid, or a pillar key (string)
        that contains a dict with region, key and keyid.
    wait_for_sync
        Wait for an INSYNC change status from Route53.
    split_dns
        Route53 supports a public and private DNS zone with the same
        names.
    private_zone
        If using split_dns, specify if this is the private zone.
    '''
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    record = __salt__['boto_route53.get_record'](name, zone, record_type,
                                                 False, region, key, keyid,
                                                 profile, split_dns,
                                                 private_zone)
    if record:
        if __opts__['test']:
            # test=True: report the pending deletion without applying it.
            msg = 'Route53 record {0} set to be deleted.'.format(name)
            ret['comment'] = msg
            ret['result'] = None
            return ret
        deleted = __salt__['boto_route53.delete_record'](name, zone,
                                                         record_type,
                                                         identifier, False,
                                                         region, key, keyid,
                                                         profile,
                                                         wait_for_sync,
                                                         split_dns,
                                                         private_zone)
        if deleted:
            ret['changes']['old'] = record
            ret['changes']['new'] = None
            ret['comment'] = 'Deleted {0} Route53 record.'.format(name)
        else:
            ret['result'] = False
            msg = 'Failed to delete {0} Route53 record.'.format(name)
            ret['comment'] = msg
    else:
        # Nothing to do: the record is already absent.
        ret['comment'] = '{0} does not exist.'.format(name)
    return ret
| apache-2.0 |
andresguisado/andresguisado.github.io | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/console.py | 365 | 1850 | # -*- coding: utf-8 -*-
"""
pygments.console
~~~~~~~~~~~~~~~~
Format colored console output.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# ANSI SGR escape-sequence table.  ``codes`` maps a symbolic name
# ("red", "bold", "reset", ...) to the escape string that activates it.
esc = "\x1b["
codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"
codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
               "purple", "teal", "lightgray"]
light_colors = ["darkgray", "red", "green", "yellow", "blue",
                "fuchsia", "turquoise", "white"]
# Foreground colors start at SGR parameter 30; each light variant is the
# dark color with the "bold/bright" attribute (";01") appended.
x = 30
for d, l in zip(dark_colors, light_colors):
    codes[d] = esc + "%im" % x
    codes[l] = esc + "%i;01m" % x
    x += 1
del d, l, x
# Aliases (including the historical "fuscia" misspelling).
codes["darkteal"] = codes["turquoise"]
codes["darkyellow"] = codes["brown"]
codes["fuscia"] = codes["fuchsia"]
codes["white"] = codes["bold"]
def reset_color():
    """Return the ANSI sequence that resets colors and attributes."""
    return codes["reset"]
def colorize(color_key, text):
    """Wrap *text* in the escape code named *color_key*, then reset."""
    return codes[color_key] + text + codes["reset"]
def ansiformat(attr, text):
    """
    Format ``text`` with a color and/or some attributes::
        color       normal color
        *color*     bold color
        _color_     underlined color
        +color+     blinking color
    """
    prefix = []
    # Peel surrounding markers in the same order as before: '+', '*', '_'.
    for marker, style in (('+', 'blink'), ('*', 'bold'), ('_', 'underline')):
        if attr[:1] == attr[-1:] == marker:
            prefix.append(codes[style])
            attr = attr[1:-1]
    prefix.append(codes[attr])
    return ''.join(prefix) + text + codes['reset']
| mit |
maniteja123/scipy | scipy/sparse/csr.py | 25 | 15482 | """Compressed Sparse Row matrix format"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['csr_matrix', 'isspmatrix_csr']
import numpy as np
from scipy._lib.six import xrange
from .base import spmatrix
from ._sparsetools import csr_tocsc, csr_tobsr, csr_count_blocks, \
get_csr_submatrix, csr_sample_values
from .sputils import (upcast, isintlike, IndexMixin, issequence,
get_index_dtype, ismatrix)
from .compressed import _cs_matrix
class csr_matrix(_cs_matrix, IndexMixin):
"""
Compressed Sparse Row matrix
This can be instantiated in several ways:
csr_matrix(D)
with a dense matrix or rank-2 ndarray D
csr_matrix(S)
with another sparse matrix S (equivalent to S.tocsr())
csr_matrix((M, N), [dtype])
to construct an empty matrix with shape (M, N)
dtype is optional, defaulting to dtype='d'.
csr_matrix((data, (row_ind, col_ind)), [shape=(M, N)])
where ``data``, ``row_ind`` and ``col_ind`` satisfy the
relationship ``a[row_ind[k], col_ind[k]] = data[k]``.
csr_matrix((data, indices, indptr), [shape=(M, N)])
is the standard CSR representation where the column indices for
row i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their
corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``.
If the shape parameter is not supplied, the matrix dimensions
are inferred from the index arrays.
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of nonzero elements
data
CSR format data array of the matrix
indices
CSR format index array of the matrix
indptr
CSR format index pointer array of the matrix
has_sorted_indices
Whether indices are sorted
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
Advantages of the CSR format
- efficient arithmetic operations CSR + CSR, CSR * CSR, etc.
- efficient row slicing
- fast matrix vector products
Disadvantages of the CSR format
- slow column slicing operations (consider CSC)
- changes to the sparsity structure are expensive (consider LIL or DOK)
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import csr_matrix
>>> csr_matrix((3, 4), dtype=np.int8).toarray()
array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=int8)
>>> row = np.array([0, 0, 1, 2, 2, 2])
>>> col = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csr_matrix((data, (row, col)), shape=(3, 3)).toarray()
array([[1, 0, 2],
[0, 0, 3],
[4, 5, 6]])
>>> indptr = np.array([0, 2, 3, 6])
>>> indices = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csr_matrix((data, indices, indptr), shape=(3, 3)).toarray()
array([[1, 0, 2],
[0, 0, 3],
[4, 5, 6]])
As an example of how to construct a CSR matrix incrementally,
the following snippet builds a term-document matrix from texts:
>>> docs = [["hello", "world", "hello"], ["goodbye", "cruel", "world"]]
>>> indptr = [0]
>>> indices = []
>>> data = []
>>> vocabulary = {}
>>> for d in docs:
... for term in d:
... index = vocabulary.setdefault(term, len(vocabulary))
... indices.append(index)
... data.append(1)
... indptr.append(len(indices))
...
>>> csr_matrix((data, indices, indptr), dtype=int).toarray()
array([[2, 1, 0, 0],
[0, 1, 1, 1]])
"""
format = 'csr'
    def transpose(self, axes=None, copy=False):
        if axes is not None:
            raise ValueError(("Sparse matrices do not support "
                              "an 'axes' parameter because swapping "
                              "dimensions is the only logical permutation."))
        M, N = self.shape
        # The transpose of a CSR matrix is the CSC matrix with the same
        # data/indices/indptr arrays and swapped dimensions -- no copy
        # of the arrays is needed unless copy=True.
        from .csc import csc_matrix
        return csc_matrix((self.data, self.indices,
                           self.indptr), shape=(N, M), copy=copy)
    transpose.__doc__ = spmatrix.transpose.__doc__
    def tolil(self, copy=False):
        from .lil import lil_matrix
        lil = lil_matrix(self.shape,dtype=self.dtype)
        # LIL cannot represent duplicate entries, so merge them first.
        self.sum_duplicates()
        ptr,ind,dat = self.indptr,self.indices,self.data
        rows, data = lil.rows, lil.data
        # Each CSR row [indptr[n], indptr[n+1]) becomes one LIL row list.
        for n in xrange(self.shape[0]):
            start = ptr[n]
            end = ptr[n+1]
            rows[n] = ind[start:end].tolist()
            data[n] = dat[start:end].tolist()
        return lil
    tolil.__doc__ = spmatrix.tolil.__doc__
def tocsr(self, copy=False):
if copy:
return self.copy()
else:
return self
tocsr.__doc__ = spmatrix.tocsr.__doc__
def tocsc(self, copy=False):
idx_dtype = get_index_dtype((self.indptr, self.indices),
maxval=max(self.nnz, self.shape[0]))
indptr = np.empty(self.shape[1] + 1, dtype=idx_dtype)
indices = np.empty(self.nnz, dtype=idx_dtype)
data = np.empty(self.nnz, dtype=upcast(self.dtype))
csr_tocsc(self.shape[0], self.shape[1],
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
self.data,
indptr,
indices,
data)
from .csc import csc_matrix
A = csc_matrix((data, indices, indptr), shape=self.shape)
A.has_sorted_indices = True
return A
tocsr.__doc__ = spmatrix.tocsr.__doc__
    def tobsr(self, blocksize=None, copy=True):
        from .bsr import bsr_matrix
        if blocksize is None:
            # No blocksize given: pick one heuristically and recurse.
            from .spfuncs import estimate_blocksize
            return self.tobsr(blocksize=estimate_blocksize(self))
        elif blocksize == (1,1):
            # Degenerate 1x1 blocks: reuse the CSR arrays, just reshape data.
            arg1 = (self.data.reshape(-1,1,1),self.indices,self.indptr)
            return bsr_matrix(arg1, shape=self.shape, copy=copy)
        else:
            R,C = blocksize
            M,N = self.shape
            # The block grid must tile the matrix exactly.
            if R < 1 or C < 1 or M % R != 0 or N % C != 0:
                raise ValueError('invalid blocksize %s' % blocksize)
            # Count nonzero blocks up front to size the output arrays.
            blks = csr_count_blocks(M,N,R,C,self.indptr,self.indices)
            idx_dtype = get_index_dtype((self.indptr, self.indices),
                                        maxval=max(N//C, blks))
            indptr = np.empty(M//R+1, dtype=idx_dtype)
            indices = np.empty(blks, dtype=idx_dtype)
            data = np.zeros((blks,R,C), dtype=self.dtype)
            csr_tobsr(M, N, R, C,
                      self.indptr.astype(idx_dtype),
                      self.indices.astype(idx_dtype),
                      self.data,
                      indptr, indices, data.ravel())
            return bsr_matrix((data,indices,indptr), shape=self.shape)
    tobsr.__doc__ = spmatrix.tobsr.__doc__
    # these functions are used by the parent class (_cs_matrix)
    # to remove redundancy between csc_matrix and csr_matrix
    def _swap(self,x):
        """swap the members of x if this is a column-oriented matrix;
        CSR is row-oriented, so this is the identity mapping
        """
        return (x[0],x[1])
def __getitem__(self, key):
def asindices(x):
try:
x = np.asarray(x)
# Check index contents, to avoid creating 64-bit arrays needlessly
idx_dtype = get_index_dtype((x,), check_contents=True)
if idx_dtype != x.dtype:
x = x.astype(idx_dtype)
except:
raise IndexError('invalid index')
else:
return x
def check_bounds(indices, N):
if indices.size == 0:
return (0, 0)
max_indx = indices.max()
if max_indx >= N:
raise IndexError('index (%d) out of range' % max_indx)
min_indx = indices.min()
if min_indx < -N:
raise IndexError('index (%d) out of range' % (N + min_indx))
return (min_indx,max_indx)
def extractor(indices,N):
"""Return a sparse matrix P so that P*self implements
slicing of the form self[[1,2,3],:]
"""
indices = asindices(indices)
(min_indx,max_indx) = check_bounds(indices,N)
if min_indx < 0:
indices = indices.copy()
indices[indices < 0] += N
indptr = np.arange(len(indices)+1, dtype=indices.dtype)
data = np.ones(len(indices), dtype=self.dtype)
shape = (len(indices),N)
return csr_matrix((data,indices,indptr), shape=shape)
row, col = self._unpack_index(key)
# First attempt to use original row optimized methods
# [1, ?]
if isintlike(row):
# [i, j]
if isintlike(col):
return self._get_single_element(row, col)
# [i, 1:2]
elif isinstance(col, slice):
return self._get_row_slice(row, col)
# [i, [1, 2]]
elif issequence(col):
P = extractor(col,self.shape[1]).T
return self[row, :] * P
elif isinstance(row, slice):
# [1:2,??]
if ((isintlike(col) and row.step in (1, None)) or
(isinstance(col, slice) and
col.step in (1, None) and
row.step in (1, None))):
# col is int or slice with step 1, row is slice with step 1.
return self._get_submatrix(row, col)
elif issequence(col):
# row is slice, col is sequence.
P = extractor(col,self.shape[1]).T # [1:2,[1,2]]
sliced = self
if row != slice(None, None, None):
sliced = sliced[row,:]
return sliced * P
elif issequence(row):
# [[1,2],??]
if isintlike(col) or isinstance(col,slice):
P = extractor(row, self.shape[0]) # [[1,2],j] or [[1,2],1:2]
extracted = P * self
if col == slice(None, None, None):
return extracted
else:
return extracted[:,col]
elif ismatrix(row) and issequence(col):
if len(row[0]) == 1 and isintlike(row[0][0]):
# [[[1],[2]], [1,2]], outer indexing
row = asindices(row)
P_row = extractor(row[:,0], self.shape[0])
P_col = extractor(col, self.shape[1]).T
return P_row * self * P_col
if not (issequence(col) and issequence(row)):
# Sample elementwise
row, col = self._index_to_arrays(row, col)
row = asindices(row)
col = asindices(col)
if row.shape != col.shape:
raise IndexError('number of row and column indices differ')
assert row.ndim <= 2
num_samples = np.size(row)
if num_samples == 0:
return csr_matrix(np.atleast_2d(row).shape, dtype=self.dtype)
check_bounds(row, self.shape[0])
check_bounds(col, self.shape[1])
val = np.empty(num_samples, dtype=self.dtype)
csr_sample_values(self.shape[0], self.shape[1],
self.indptr, self.indices, self.data,
num_samples, row.ravel(), col.ravel(), val)
if row.ndim == 1:
# row and col are 1d
return np.asmatrix(val)
return self.__class__(val.reshape(row.shape))
    def getrow(self, i):
        """Returns a copy of row i of the matrix, as a (1 x n)
        CSR matrix (row vector).
        """
        # Delegate to the submatrix extractor with a full column slice.
        return self._get_submatrix(i, slice(None))
    def getcol(self, i):
        """Returns a copy of column i of the matrix, as a (m x 1)
        CSR matrix (column vector).
        """
        # Delegate to the submatrix extractor with a full row slice.
        return self._get_submatrix(slice(None), i)
    def _get_row_slice(self, i, cslice):
        """Returns a copy of row self[i, cslice]
        """
        if i < 0:
            i += self.shape[0]
        if i < 0 or i >= self.shape[0]:
            raise IndexError('index (%d) out of range' % i)
        start, stop, stride = cslice.indices(self.shape[1])
        if stride == 1:
            # for stride == 1, _get_submatrix is ~30% faster than below
            row_slice = self._get_submatrix(i, cslice)
        else:
            # other strides need new code
            row_indices = self.indices[self.indptr[i]:self.indptr[i + 1]]
            row_data = self.data[self.indptr[i]:self.indptr[i + 1]]
            # Keep only the columns that the strided slice actually hits.
            if stride > 0:
                ind = (row_indices >= start) & (row_indices < stop)
            elif stride < 0:
                ind = (row_indices <= start) & (row_indices > stop)
            if abs(stride) > 1:
                ind = ind & ((row_indices - start) % stride == 0)
            # Remap surviving column indices into the sliced coordinate system.
            row_indices = (row_indices[ind] - start) // stride
            row_data = row_data[ind]
            row_indptr = np.array([0, len(row_indices)])
            if stride < 0:
                # Negative stride reverses the order of the kept entries.
                row_data = row_data[::-1]
                row_indices = abs(row_indices[::-1])
            shape = (1, int(np.ceil(float(stop - start) / stride)))
            row_slice = csr_matrix((row_data, row_indices, row_indptr),
                                   shape=shape)
        return row_slice
    def _get_submatrix(self, row_slice, col_slice):
        """Return a submatrix of this matrix (new matrix is created)."""
        M,N = self.shape
        def process_slice(sl, num):
            # Normalize a slice (step 1 only) or integer into a half-open
            # [i0, i1) range within an axis of length ``num``.
            if isinstance(sl, slice):
                if sl.step not in (1, None):
                    raise ValueError('slicing with step != 1 not supported')
                i0, i1 = sl.start, sl.stop
                if i0 is None:
                    i0 = 0
                elif i0 < 0:
                    i0 = num + i0
                if i1 is None:
                    i1 = num
                elif i1 < 0:
                    i1 = num + i1
                return i0, i1
            elif isintlike(sl):
                if sl < 0:
                    sl += num
                return sl, sl + 1
            else:
                raise TypeError('expected slice or scalar')
        def check_bounds(i0, i1, num):
            # Ranges must be in-bounds and non-reversed.
            if not (0 <= i0 <= num) or not (0 <= i1 <= num) or not (i0 <= i1):
                raise IndexError(
                    "index out of bounds: 0 <= %d <= %d, 0 <= %d <= %d,"
                    " %d <= %d" % (i0, num, i1, num, i0, i1))
        i0, i1 = process_slice(row_slice, M)
        j0, j1 = process_slice(col_slice, N)
        check_bounds(i0, i1, M)
        check_bounds(j0, j1, N)
        # C routine builds the submatrix arrays in one pass.
        indptr, indices, data = get_csr_submatrix(M, N,
                self.indptr, self.indices, self.data,
                int(i0), int(i1), int(j0), int(j1))
        shape = (i1 - i0, j1 - j0)
        return self.__class__((data,indices,indptr), shape=shape)
def isspmatrix_csr(x):
    """Return True if *x* is a csr_matrix (or subclass) instance."""
    return isinstance(x, csr_matrix)
| bsd-3-clause |
telwertowski/Books-Mac-OS-X | Versions/Books_3.0b5/Amazon (DE).plugin/Contents/Resources/amazon.py | 126 | 16017 | """Python wrapper
for Amazon web APIs
This module allows you to access Amazon's web APIs,
to do things like search Amazon and get the results programmatically.
Described here:
http://www.amazon.com/webservices
You need a Amazon-provided license key to use these services.
Follow the link above to get one. These functions will look in
several places (in this order) for the license key:
- the "license_key" argument of each function
- the module-level LICENSE_KEY variable (call setLicense once to set it)
- an environment variable called AMAZON_LICENSE_KEY
- a file called ".amazonkey" in the current directory
- a file called "amazonkey.txt" in the current directory
- a file called ".amazonkey" in your home directory
- a file called "amazonkey.txt" in your home directory
- a file called ".amazonkey" in the same directory as amazon.py
- a file called "amazonkey.txt" in the same directory as amazon.py
Sample usage:
>>> import amazon
>>> amazon.setLicense('...') # must get your own key!
>>> pythonBooks = amazon.searchByKeyword('Python')
>>> pythonBooks[0].ProductName
u'Learning Python (Help for Programmers)'
>>> pythonBooks[0].URL
...
>>> pythonBooks[0].OurPrice
...
Other available functions:
- browseBestSellers
- searchByASIN
- searchByUPC
- searchByAuthor
- searchByArtist
- searchByActor
- searchByDirector
- searchByManufacturer
- searchByListMania
- searchSimilar
- searchByWishlist
Other usage notes:
- Most functions can take product_line as well, see source for possible values
- All functions can take type="lite" to get less detail in results
- All functions can take page=N to get second, third, fourth page of results
- All functions can take license_key="XYZ", instead of setting it globally
- All functions can take http_proxy="http://x/y/z" which overrides your system setting
"""
__author__ = "Mark Pilgrim (f8dy@diveintomark.org)"
__version__ = "0.64.1"
__cvsversion__ = "$Revision: 1.12 $"[11:-2]
__date__ = "$Date: 2004/07/02 13:24:09 $"[7:-2]
__copyright__ = "Copyright (c) 2002 Mark Pilgrim"
__license__ = "Python"
# Powersearch and return object type fix by Joseph Reagle <geek@goatee.net>
# Locale support by Michael Josephson <mike@josephson.org>
# Modification to _contentsOf to strip trailing whitespace when loading Amazon key
# from a file submitted by Patrick Phalen.
# Support for specifying locale and associates ID as search parameters and
# internationalisation fix for the SalesRank integer conversion by
# Christian Theune <ct@gocept.com>, gocept gmbh & co. kg
# Support for BlendedSearch contributed by Alex Choo
from xml.dom import minidom
import os, sys, getopt, cgi, urllib, string
try:
import timeoutsocket # http://www.timo-tasi.org/python/timeoutsocket.py
timeoutsocket.setDefaultSocketTimeout(10)
except ImportError:
pass
# Module-level defaults; overridable via setLicense/setAssociate/etc.
# NOTE(review): a license key and associates ID are hardcoded here --
# shipping credentials in source is a bad practice; consider requiring
# configuration instead.
LICENSE_KEY = "1M21AJ49MF6Y0DJ4D1G2"
ASSOCIATE = "aetherialnu0a-20"
HTTP_PROXY = None
LOCALE = "us"
# don't touch the rest of these constants
class AmazonError(Exception): pass  # generic API-level failure
class NoLicenseKey(Exception): pass  # no license key found anywhere
_amazonfile1 = ".amazonkey"
_amazonfile2 = "amazonkey.txt"
# Ordered search locations for the license key; each entry is a
# (lookup-callable, human-readable description) pair.
_licenseLocations = (
    (lambda key: key, 'passed to the function in license_key variable'),
    (lambda key: LICENSE_KEY, 'module-level LICENSE_KEY variable (call setLicense to set it)'),
    (lambda key: os.environ.get('AMAZON_LICENSE_KEY', None), 'an environment variable called AMAZON_LICENSE_KEY'),
    (lambda key: _contentsOf(os.getcwd(), _amazonfile1), '%s in the current directory' % _amazonfile1),
    (lambda key: _contentsOf(os.getcwd(), _amazonfile2), '%s in the current directory' % _amazonfile2),
    (lambda key: _contentsOf(os.environ.get('HOME', ''), _amazonfile1), '%s in your home directory' % _amazonfile1),
    (lambda key: _contentsOf(os.environ.get('HOME', ''), _amazonfile2), '%s in your home directory' % _amazonfile2),
    (lambda key: _contentsOf(_getScriptDir(), _amazonfile1), '%s in the amazon.py directory' % _amazonfile1),
    (lambda key: _contentsOf(_getScriptDir(), _amazonfile2), '%s in the amazon.py directory' % _amazonfile2)
    )
# locale -> (locale query parameter or None, API hostname)
_supportedLocales = {
    "us" : (None, "xml.amazon.com"),
    "uk" : ("uk", "xml-eu.amazon.com"),
    "de" : ("de", "xml-eu.amazon.com"),
    "jp" : ("jp", "xml.amazon.co.jp"),
    "ca" : ("ca", "xml.amazon.ca"),
    "fr" : ("fr", "xml.amazon.fr")
    }
## administrative functions
def version():
    """Print this module's version, copyright and release date."""
    # Python 2 print statement; %(name)s keys fill from module globals().
    print """PyAmazon %(__version__)s
%(__copyright__)s
released %(__date__)s
""" % globals()
def setAssociate(associate):
    """Set the module-level Amazon associates ID used in request URLs."""
    global ASSOCIATE
    ASSOCIATE=associate
def getAssociate(override=None):
    """Return *override* if truthy, else the module-level associates ID."""
    return override or ASSOCIATE
## utility functions
def _checkLocaleSupported(locale):
    """Raise AmazonError if *locale* is not in _supportedLocales."""
    if not _supportedLocales.has_key(locale):
        raise AmazonError, ("Unsupported locale. Locale must be one of: %s" %
            string.join(_supportedLocales, ", "))
def setLocale(locale):
    """Set the module-level default locale (must be supported)."""
    global LOCALE
    _checkLocaleSupported(locale)
    LOCALE = locale
def getLocale(locale=None):
    """Return *locale* if truthy, else the module-level default."""
    return locale or LOCALE
def setLicense(license_key):
    """Set the module-level Amazon Web Services license key."""
    global LICENSE_KEY
    LICENSE_KEY = license_key
def getLicense(license_key = None):
    """Return the first license key found.
    The key can come from any number of locations (see module docs for
    the search order); raises NoLicenseKey if none is found."""
    for get, location in _licenseLocations:
        rc = get(license_key)
        if rc: return rc
    raise NoLicenseKey, 'get a license key at http://www.amazon.com/webservices'
def setProxy(http_proxy):
    """Set the module-level HTTP proxy URL."""
    global HTTP_PROXY
    HTTP_PROXY = http_proxy
def getProxy(http_proxy = None):
    """Return *http_proxy* if truthy, else the module-level proxy."""
    return http_proxy or HTTP_PROXY
def getProxies(http_proxy = None):
    """Return a urllib-style proxies dict for the effective HTTP proxy,
    or None when no proxy is configured."""
    effective = getProxy(http_proxy)
    if not effective:
        return None
    return {"http": effective}
def _contentsOf(dirname, filename):
filename = os.path.join(dirname, filename)
if not os.path.exists(filename): return None
fsock = open(filename)
contents = fsock.read().strip()
fsock.close()
return contents
def _getScriptDir():
    """Return the absolute directory containing this module (or the
    running script when executed directly)."""
    if __name__ == '__main__':
        return os.path.abspath(os.path.dirname(sys.argv[0]))
    else:
        return os.path.abspath(os.path.dirname(sys.modules[__name__].__file__))
class Bag: pass  # generic attribute container for unmarshalled XML elements
def unmarshal(element):
    """Recursively convert a minidom element tree into Bag objects.
    Child elements become attributes (repeated tags collapse into
    lists); leaf elements are returned as their text content, with
    'SalesRank' leaves converted to int."""
    rc = Bag()
    if isinstance(element, minidom.Element) and (element.tagName == 'Details'):
        rc.URL = element.attributes["url"].value
    childElements = [e for e in element.childNodes if isinstance(e, minidom.Element)]
    if childElements:
        for child in childElements:
            key = child.tagName
            if hasattr(rc, key):
                # Second+ occurrence of this tag: promote to / extend a list.
                if type(getattr(rc, key)) <> type([]):
                    setattr(rc, key, [getattr(rc, key)])
                setattr(rc, key, getattr(rc, key) + [unmarshal(child)])
            elif isinstance(child, minidom.Element) and (child.tagName == 'Details'):
                # make the first Details element a key
                setattr(rc,key,[unmarshal(child)])
                #dbg: because otherwise 'hasattr' only tests
                #dbg: on the second occurence: if there's a
                #dbg: single return to a query, it's not a
                #dbg: list. This module should always
                #dbg: return a list of Details objects.
            else:
                setattr(rc, key, unmarshal(child))
    else:
        # Leaf node: concatenate its text children.
        rc = "".join([e.data for e in element.childNodes if isinstance(e, minidom.Text)])
        if element.tagName == 'SalesRank':
            # Strip locale-specific thousands separators before int().
            rc = rc.replace('.', '')
            rc = rc.replace(',', '')
            rc = rc.replace(' ', '')
            rc = int(rc)
    return rc
def buildURL(search_type, keyword, product_line, type, page, license_key, locale, associate, mode):
    """Assemble the XML API request URL for the given search parameters.
    ``search_type`` is the query-parameter name (e.g. 'KeywordSearch');
    ``keyword`` is URL-quoted; optional parts (locale, page, mode) are
    appended only when set."""
    _checkLocaleSupported(locale)
    url = "http://" + _supportedLocales[locale][1] + "/onca/xml3?f=xml"
    url += "&t=%s" % associate
    url += "&dev-t=%s" % license_key.strip()
    url += "&type=%s" % type
    if _supportedLocales[locale][0]:
        url += "&locale=%s" % _supportedLocales[locale][0]
    if page:
        url += "&page=%s" % page
    if product_line:
        url += "&mode=%s" % mode
    url += "&%s=%s" % (search_type, urllib.quote(keyword))
    return url
## main functions
def search(search_type, keyword, product_line, type = "heavy", page = None,
           license_key=None, http_proxy = None, locale = None, associate = None, mode = None):
    """search Amazon
    You need a license key to call this function; see
    http://www.amazon.com/webservices
    to get one. Then you can either pass it to
    this function every time, or set it globally; see the module docs for details.
    Parameters:
    keyword - keyword to search
    search_type - in (KeywordSearch, BrowseNodeSearch, AsinSearch, UpcSearch, AuthorSearch, ArtistSearch, ActorSearch, DirectorSearch, ManufacturerSearch, ListManiaSearch, SimilaritySearch)
    product_line - type of product to search for.  restrictions based on search_type
        UpcSearch - in (music, classical)
        AuthorSearch - must be "books"
        ArtistSearch - in (music, classical)
        ActorSearch - in (dvd, vhs, video)
        DirectorSearch - in (dvd, vhs, video)
        ManufacturerSearch - in (electronics, kitchen, videogames, software, photo, pc-hardware)
    http_proxy (optional) - address of HTTP proxy to use for sending and receiving SOAP messages
    Returns: list of Bags, each Bag may contain the following attributes:
      Asin - Amazon ID ("ASIN" number) of this item
      Authors - list of authors
      Availability - "available", etc.
      BrowseList - list of related categories
      Catalog - catalog type ("Book", etc)
      CollectiblePrice - ?, format "$34.95"
      ImageUrlLarge - URL of large image of this item
      ImageUrlMedium - URL of medium image of this item
      ImageUrlSmall - URL of small image of this item
      Isbn - ISBN number
      ListPrice - list price, format "$34.95"
      Lists - list of ListMania lists that include this item
      Manufacturer - manufacturer
      Media - media ("Paperback", "Audio CD", etc)
      NumMedia - number of different media types in which this item is available
      OurPrice - Amazon price, format "$24.47"
      ProductName - name of this item
      ReleaseDate - release date, format "09 April, 1999"
      Reviews - reviews (AvgCustomerRating, plus list of CustomerReview with Rating, Summary, Content)
      SalesRank - sales rank (integer)
      SimilarProducts - list of Product, which is ASIN number
      ThirdPartyNewPrice - ?, format "$34.95"
      URL - URL of this item
    """
    ## Resolve per-call overrides against the module-level defaults.
    license_key = getLicense(license_key)
    locale = getLocale(locale)
    associate = getAssociate(associate)
    url = buildURL(search_type, keyword, product_line, type, page,
                   license_key, locale, associate, mode)
    ## Fetch the response (honouring any configured HTTP proxy) and parse
    ## the returned XML document.
    proxies = getProxies(http_proxy)
    u = urllib.FancyURLopener(proxies)
    usock = u.open(url)
    xmldoc = minidom.parse(usock)
    # from xml.dom.ext import PrettyPrint
    # PrettyPrint(xmldoc)
    usock.close()
    ## Blended searches nest results differently from all other types.
    if search_type == "BlendedSearch":
        data = unmarshal(xmldoc).BlendedSearch
    else:
        data = unmarshal(xmldoc).ProductInfo
    if hasattr(data, 'ErrorMsg'):
        ## The service reported a failure; surface it to the caller.
        raise AmazonError, data.ErrorMsg
    else:
        if search_type == "BlendedSearch":
            # a list of ProductLine containing a list of ProductInfo
            # containing a list of Details.
            return data
        else:
            return data.Details
# --- Convenience wrappers ---------------------------------------------------
# Each wrapper below fixes the search_type argument of search() and forwards
# the remaining arguments unchanged.

# Full-text keyword search within a product line.
def searchByKeyword(keyword, product_line="books", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("KeywordSearch", keyword, product_line, type, page, license_key, http_proxy, locale, associate, mode)

# Best-seller listing for an Amazon browse-node (category) ID.
def browseBestSellers(browse_node, product_line="books", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("BrowseNodeSearch", browse_node, product_line, type, page, license_key, http_proxy, locale, associate, mode)

# Single-item lookup by Amazon ASIN (no product line, no paging).
def searchByASIN(ASIN, type="heavy", license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("AsinSearch", ASIN, None, type, None, license_key, http_proxy, locale, associate, mode)

# Single-item lookup by UPC barcode.
def searchByUPC(UPC, type="heavy", license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("UpcSearch", UPC, None, type, None, license_key, http_proxy, locale, associate, mode)

# Author search; the product line is hard-wired to "books".
def searchByAuthor(author, type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("AuthorSearch", author, "books", type, page, license_key, http_proxy, locale, associate, mode)
# Artist search; only valid for the music catalogues.
def searchByArtist(artist, product_line="music", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    if product_line not in ("music", "classical"):
        raise AmazonError, "product_line must be in ('music', 'classical')"
    return search("ArtistSearch", artist, product_line, type, page, license_key, http_proxy, locale, associate, mode)

# Actor search; only valid for the video catalogues.
def searchByActor(actor, product_line="dvd", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    if product_line not in ("dvd", "vhs", "video"):
        raise AmazonError, "product_line must be in ('dvd', 'vhs', 'video')"
    return search("ActorSearch", actor, product_line, type, page, license_key, http_proxy, locale, associate, mode)

# Director search; only valid for the video catalogues.
def searchByDirector(director, product_line="dvd", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    if product_line not in ("dvd", "vhs", "video"):
        raise AmazonError, "product_line must be in ('dvd', 'vhs', 'video')"
    return search("DirectorSearch", director, product_line, type, page, license_key, http_proxy, locale, associate, mode)

# Manufacturer search; restricted to the hardware/consumer catalogues.
def searchByManufacturer(manufacturer, product_line="pc-hardware", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    if product_line not in ("electronics", "kitchen", "videogames", "software", "photo", "pc-hardware"):
        raise AmazonError, "product_line must be in ('electronics', 'kitchen', 'videogames', 'software', 'photo', 'pc-hardware')"
    return search("ManufacturerSearch", manufacturer, product_line, type, page, license_key, http_proxy, locale, associate, mode)
# Items belonging to a ListMania list, by list ID.
def searchByListMania(listManiaID, type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("ListManiaSearch", listManiaID, None, type, page, license_key, http_proxy, locale, associate, mode)

# Items similar to the given ASIN.
def searchSimilar(ASIN, type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("SimilaritySearch", ASIN, None, type, page, license_key, http_proxy, locale, associate, mode)

# Contents of a wishlist, by wishlist ID.
def searchByWishlist(wishlistID, type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("WishlistSearch", wishlistID, None, type, page, license_key, http_proxy, locale, associate,mode)

# Amazon "power search" query strings (see the examples below).
def searchByPower(keyword, product_line="books", type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("PowerSearch", keyword, product_line, type, page, license_key, http_proxy, locale, associate, mode)
    # >>> RecentKing = amazon.searchByPower('author:Stephen King and pubdate:2003')
    # >>> SnowCrash = amazon.searchByPower('title:Snow Crash')

# Blended search across all product lines at once.
def searchByBlended(keyword, type="heavy", page=1, license_key=None, http_proxy=None, locale=None, associate=None, mode="books"):
    return search("BlendedSearch", keyword, None, type, page, license_key, http_proxy, locale, associate, mode)
| mit |
CERNDocumentServer/invenio | modules/websubmit/lib/functions/Stamp_Replace_Single_File_Approval.py | 3 | 25131 | # This file is part of Invenio.
# Copyright (C) 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Stamp_Replace_Single_File_Approval: A function to allow a single file
that is already attached to a record to be stamped at approval time.
"""
__revision__ = "$Id$"
from invenio.bibdocfile import BibRecDocs, InvenioBibDocFileError
from invenio.errorlib import register_exception
from invenio import websubmit_file_stamper
from invenio.websubmit_config import InvenioWebSubmitFunctionWarning, \
InvenioWebSubmitFunctionError, InvenioWebSubmitFileStamperError
import os.path
import re
import cgi
import time
def Stamp_Replace_Single_File_Approval(parameters, \
                                       curdir, \
                                       form, \
                                       user_info=None):
    """
    This function is intended to be called when a document has been
    approved and needs to be stamped.
    The function should be used when there is ONLY ONE file to be
    stamped after approval (for example, the "main file").
    The name of the file to be stamped should be known and should be stored
    in a file in the submission's working directory (without the extension).
    Generally, this will work out fine as the main file is named after the
    report number of the document, this will be stored in the report number
    file.
    @param parameters: (dictionary) - must contain:
          + latex_template: (string) - the name of the LaTeX template that
            should be used for the creation of the stamp.
          + latex_template_vars: (string) - a string-ified dictionary
            of variables to be replaced in the LaTeX template and the
            values (or names of files in curdir containing the values)
            with which to replace them. Use prefix 'FILE:' to specify
            that the stamped value must be read from a file in
            submission directory instead of being a fixed value to
            stamp.
            E.G.:
               { 'TITLE' : 'FILE:DEMOTHESIS_TITLE',
                 'DATE'  : 'FILE:DEMOTHESIS_DATE'
               }
          + file_to_be_stamped: (string) - this is the name of a file in the
            submission's working directory that contains the name of the
            bibdocfile that is to be stamped.
          + new_file_name: (string) - this is the name of a file in the
            submission's working directory that contains the name that is to
            be given to the file after it has been stamped. If empty, or if
            that file doesn't exist, the file will not be renamed after
            stamping.
          + switch_file: (string) - when this value is set, specifies
            the name of a file that will switch on/off the
            stamping. The stamp will be applied if the file exists in
            the submission directory and is not empty. If the file
            cannot be found or is empty, the stamp is not applied.
            Useful for eg. if you want to let your users control the
            stamping with a checkbox on your submission page.
            Leave this parameter empty to always stamp by default.
          + stamp: (string) - the type of stamp to be applied to the file.
            should be one of:
              + first (only the first page is stamped);
              + all (all pages are stamped);
              + coverpage (a separate cover-page is added to the file as a
                 first page);
          + layer: (string) - the position of the stamp. Should be one of:
              + background (invisible if original file has a white
                -not transparent- background layer)
              + foreground (on top of the stamped file. If the stamp
                does not have a transparent background, will hide all
                of the document layers)
            The default value is 'background'.
    """
    ############
    ## Definition of important variables:
    ############
    ## The file stamper needs to be called with a dictionary of options of
    ## the following format:
    ##  { 'latex-template'      : "",  ## TEMPLATE_NAME
    ##    'latex-template-var'  : {},  ## TEMPLATE VARIABLES
    ##    'input-file'          : "",  ## INPUT FILE
    ##    'output-file'         : "",  ## OUTPUT FILE
    ##    'stamp'               : "",  ## STAMP TYPE
    ##    'layer'               : "",  ## LAYER TO STAMP
    ##    'verbosity'           : 0,   ## VERBOSITY (we don't care about it)
    ##  }
    file_stamper_options = { 'latex-template'      : "",
                             'latex-template-var'  : { },
                             'input-file'          : "",
                             'output-file'         : "",
                             'stamp'               : "",
                             'layer'               : "",
                             'verbosity'           : 0,
                           }
    ## Check if stamping is enabled
    switch_file = parameters.get('switch_file', '')
    if switch_file:
        # Good, a "switch file" was specified. Check if it exists, and
        # if its value is not empty.
        if not _read_in_file(os.path.join(curdir, switch_file)):
            # File does not exist, or is empty. Silently abort
            # stamping.
            return ""
    ## Submission access number:
    access = _read_in_file("%s/access" % curdir)
    ## record ID for the current submission. It is found in the special file
    ## "SN" (sysno) in curdir:
    recid = _read_in_file("%s/SN" % curdir)
    try:
        recid = int(recid)
    except ValueError:
        ## No record ID. Cannot continue.
        err_msg = "Error in Stamp_Replace_Single_File_Approval: " \
                  "Cannot recover record ID from the submission's working " \
                  "directory. Stamping cannot be carried out. The " \
                  "submission ID is [%s]." % cgi.escape(access)
        register_exception(prefix=err_msg)
        raise InvenioWebSubmitFunctionError(err_msg)
    ############
    ## Resolution of function parameters:
    ############
    ## The name of the LaTeX template to be used for stamp creation:
    latex_template = "%s" % ((type(parameters['latex_template']) is str \
                              and parameters['latex_template']) or "")
    ## A string containing the variables/values that should be substituted
    ## in the final (working) LaTeX template:
    latex_template_vars_string = "%s" % \
                       ((type(parameters['latex_template_vars']) is str \
                         and parameters['latex_template_vars']) or "")
    ## The type of stamp to be applied to the file(s):
    stamp = "%s" % ((type(parameters['stamp']) is str and \
                     parameters['stamp'].lower()) or "")
    ## The layer to use for stamping:
    try:
        layer = parameters['layer']
    except KeyError:
        layer = "background"
    if not layer in ('background', 'foreground'):
        layer = "background"
    ## Get the name of the file to be stamped from the file indicated in
    ## the file_to_be_stamped parameter:
    try:
        file_to_stamp_file = parameters['file_to_be_stamped']
    except KeyError:
        file_to_stamp_file = ""
    else:
        if file_to_stamp_file is None:
            file_to_stamp_file = ""
    ## Get the "basename" for the file to be stamped (it's mandatory that it
    ## be in curdir):
    file_to_stamp_file = os.path.basename(file_to_stamp_file).strip()
    name_file_to_stamp = _read_in_file("%s/%s" % (curdir, file_to_stamp_file))
    ## Strip any newline characters from the recovered name.
    ## (FIX: str.replace returns a new string - the original code discarded
    ## the result of this call, so the newlines were never removed.)
    name_file_to_stamp = name_file_to_stamp.replace("\n", "").replace("\r", "")
    ##
    ## Get the name to be given to the file after it has been stamped (if there
    ## is one.) Once more, it will be found in a file in curdir:
    try:
        new_file_name_file = parameters['new_file_name']
    except KeyError:
        new_file_name_file = ""
    else:
        if new_file_name_file is None:
            new_file_name_file = ""
    ## Get the "basename" for the file containing the new file name. (It's
    ## mandatory that it be in curdir):
    new_file_name_file = os.path.basename(new_file_name_file).strip()
    ## NOTE(review): unlike name_file_to_stamp above, new_file_name is not
    ## stripped of trailing newlines here - confirm whether that is intended.
    new_file_name = _read_in_file("%s/%s" % (curdir, new_file_name_file))
    ############
    ## Begin:
    ############
    ##
    ## If no name for the file to stamp, warning.
    if name_file_to_stamp == "":
        wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                  "It was not possible to recover a valid name for the " \
                  "file to be stamped. Stamping could not, therefore, be " \
                  "carried out. The submission ID is [%s]." \
                  % access
        raise InvenioWebSubmitFunctionWarning(wrn_msg)
    ##
    ## The file to be stamped is a bibdoc. We will only stamp it (a) if it
    ## exists; and (b) if it is a PDF file. So, get the path (in the bibdocs
    ## tree) to the file to be stamped:
    ##
    ## First get the object representing the bibdocs belonging to this record:
    bibrecdocs = BibRecDocs(recid)
    try:
        bibdoc_file_to_stamp = bibrecdocs.get_bibdoc("%s" % name_file_to_stamp)
    except InvenioBibDocFileError:
        ## Couldn't get a bibdoc object for this filename. Probably the file
        ## that we wanted to stamp wasn't attached to this record.
        wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                  "It was not possible to recover a bibdoc object for the " \
                  "filename [%s] when trying to stamp the main file. " \
                  "Stamping could not be carried out. The submission ID is " \
                  "[%s] and the record ID is [%s]." \
                  % (name_file_to_stamp, access, recid)
        register_exception(prefix=wrn_msg)
        raise InvenioWebSubmitFunctionWarning(wrn_msg)
    ## Get the BibDocFile object for the PDF version of the bibdoc to be
    ## stamped:
    try:
        bibdocfile_file_to_stamp = bibdoc_file_to_stamp.get_file("pdf")
    except InvenioBibDocFileError:
        ## This bibdoc doesn't have a physical file with the extension ".pdf"
        ## (take note of the lower-case extension - the bibdocfile library
        ## is case-sensitive with respect to filenames.  Log that there was
        ## no "pdf" and check for a file with extension "PDF":
        wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                  "It wasn't possible to recover a PDF BibDocFile object " \
                  "for the file with the name [%s], using the extension " \
                  "[pdf] - note the lower case - the bibdocfile library " \
                  "relies upon the case of an extension. The submission ID " \
                  "is [%s] and the record ID is [%s]. Going to try " \
                  "looking for a file with a [PDF] extension before giving " \
                  "up . . . " \
                  % (name_file_to_stamp, access, recid)
        register_exception(prefix=wrn_msg)
        try:
            bibdocfile_file_to_stamp = bibdoc_file_to_stamp.get_file("PDF")
        except InvenioBibDocFileError:
            wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                      "It wasn't possible to recover a PDF " \
                      "BibDocFile object for the file with the name [%s], " \
                      "using the extension [PDF] - note the upper case. " \
                      "Had previously tried searching for [pdf] - now " \
                      "giving up. Stamping could not be carried out. " \
                      "The submission ID is [%s] and the record ID is [%s]." \
                      % (name_file_to_stamp, access, recid)
            register_exception(prefix=wrn_msg)
            raise InvenioWebSubmitFunctionWarning(wrn_msg)
    ############
    ## Go ahead and prepare the details for the LaTeX stamp template and its
    ## variables:
    ############
    ## Strip the LaTeX filename into the basename (All templates should be
    ## in the template repository):
    latex_template = os.path.basename(latex_template)
    ## Convert the string of latex template variables into a dictionary
    ## of search-term/replacement-term pairs:
    latex_template_vars = get_dictionary_from_string(latex_template_vars_string)
    ## For each of the latex variables, check in `CURDIR' for a file with that
    ## name. If found, use it's contents as the template-variable's value.
    ## If not, just use the raw value string already held by the template
    ## variable:
    latex_template_varnames = latex_template_vars.keys()
    for varname in latex_template_varnames:
        ## Get this variable's value:
        varvalue = latex_template_vars[varname].strip()
        if not ((varvalue.find("date(") == 0 and varvalue[-1] == ")") or \
                (varvalue.find("include(") == 0 and varvalue[-1] == ")")) \
                and varvalue != "":
            ## We don't want to interfere with date() or include() directives,
            ## so we only do this if the variable value didn't contain them:
            ##
            ## Is this variable value the name of a file in the current
            ## submission's working directory, from which a literal value for
            ## use in the template should be extracted? If yes, it will
            ## begin with "FILE:". If no, we leave the value exactly as it is.
            if varvalue.upper().find("FILE:") == 0:
                ## The value to be used is to be taken from a file. Clean the
                ## file name and if it's OK, extract that value from the file.
                ##
                seekvalue_fname = varvalue[5:].strip()
                seekvalue_fname = os.path.basename(seekvalue_fname).strip()
                if seekvalue_fname != "":
                    ## Attempt to extract the value from the file:
                    if os.access("%s/%s" % (curdir, seekvalue_fname), \
                                 os.R_OK|os.F_OK):
                        ## The file exists. Extract its value:
                        try:
                            repl_file_val = \
                                open("%s/%s" \
                                     % (curdir, seekvalue_fname), "r").readlines()
                        except IOError:
                            ## The file was unreadable.
                            err_msg = "Error in Stamp_Replace_Single_File_" \
                                      "Approval: The function attempted to " \
                                      "read a LaTex template variable " \
                                      "value from the following file in the " \
                                      "current submission's working " \
                                      "directory: [%s]. However, an " \
                                      "unexpected error was encountered " \
                                      "when doing so. Please inform the " \
                                      "administrator." \
                                      % seekvalue_fname
                            register_exception(req=user_info['req'])
                            raise InvenioWebSubmitFunctionError(err_msg)
                        else:
                            final_varval = ""
                            for line in repl_file_val:
                                final_varval += line
                            final_varval = final_varval.rstrip()
                            ## Replace the variable value with that which has
                            ## been read from the file:
                            latex_template_vars[varname] = final_varval
                    else:
                        ## The file didn't actually exist in the current
                        ## submission's working directory. Use an empty
                        ## value:
                        latex_template_vars[varname] = ""
                else:
                    ## The filename was not valid.
                    err_msg = "Error in Stamp_Replace_Single_File_Approval: " \
                              "The function was configured to read a LaTeX " \
                              "template variable from a file with the " \
                              "following instruction: [%s --> %s]. The " \
                              "filename, however, was not considered valid. " \
                              "Please report this to the administrator." \
                              % (varname, varvalue)
                    raise InvenioWebSubmitFunctionError(err_msg)
    ## Put the 'fixed' values into the file_stamper_options dictionary:
    file_stamper_options['latex-template'] = latex_template
    file_stamper_options['latex-template-var'] = latex_template_vars
    file_stamper_options['stamp'] = stamp
    file_stamper_options['layer'] = layer
    ## Put the input file and output file into the file_stamper_options
    ## dictionary:
    file_stamper_options['input-file'] = bibdocfile_file_to_stamp.fullpath
    file_stamper_options['output-file'] = bibdocfile_file_to_stamp.get_full_name()
    ##
    ## Before attempting to stamp the file, log the dictionary of arguments
    ## that will be passed to websubmit_file_stamper:
    try:
        fh_log = open("%s/websubmit_file_stamper-calls-options.log" \
                      % curdir, "a+")
        fh_log.write("%s\n" % file_stamper_options)
        fh_log.flush()
        fh_log.close()
    except IOError:
        ## Unable to log the file stamper options.
        exception_prefix = "Unable to write websubmit_file_stamper " \
                           "options to log file " \
                           "%s/websubmit_file_stamper-calls-options.log" \
                           % curdir
        register_exception(prefix=exception_prefix)
    try:
        ## Try to stamp the file:
        (stamped_file_path_only, stamped_file_name) = \
                websubmit_file_stamper.stamp_file(file_stamper_options)
    except InvenioWebSubmitFileStamperError:
        ## It wasn't possible to stamp this file.
        ## Register the exception along with an informational message:
        wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                  "There was a problem stamping the file with the name [%s] " \
                  "and the fullpath [%s]. The file has not been stamped. " \
                  "The submission ID is [%s] and the record ID is [%s]." \
                  % (name_file_to_stamp, \
                     file_stamper_options['input-file'], \
                     access, \
                     recid)
        register_exception(prefix=wrn_msg)
        raise InvenioWebSubmitFunctionWarning(wrn_msg)
    else:
        ## Stamping was successful. The BibDocFile must now be revised with
        ## the latest (stamped) version of the file:
        file_comment = "Stamped by WebSubmit: %s" \
                       % time.strftime("%d/%m/%Y", time.localtime())
        try:
            dummy = \
                  bibrecdocs.add_new_version("%s/%s" \
                                             % (stamped_file_path_only, \
                                                stamped_file_name), \
                                             name_file_to_stamp, \
                                             comment=file_comment, \
                                             flags=('STAMPED', ))
        except InvenioBibDocFileError:
            ## Unable to revise the file with the newly stamped version.
            wrn_msg = "Warning in Stamp_Replace_Single_File_Approval: " \
                      "After having stamped the file with the name [%s] " \
                      "and the fullpath [%s], it wasn't possible to revise " \
                      "that file with the newly stamped version. Stamping " \
                      "was unsuccessful. The submission ID is [%s] and the " \
                      "record ID is [%s]." \
                      % (name_file_to_stamp, \
                         file_stamper_options['input-file'], \
                         access, \
                         recid)
            register_exception(prefix=wrn_msg)
            raise InvenioWebSubmitFunctionWarning(wrn_msg)
        else:
            ## File revised. If the file should be renamed after stamping,
            ## do so.
            if new_file_name != "":
                try:
                    bibrecdocs.change_name(newname = new_file_name, docid = bibdoc_file_to_stamp.id)
                except (IOError, InvenioBibDocFileError):
                    ## Unable to change the name
                    wrn_msg = "Warning in Stamp_Replace_Single_File_Approval" \
                              ": After having stamped and revised the file " \
                              "with the name [%s] and the fullpath [%s], it " \
                              "wasn't possible to rename it to [%s]. The " \
                              "submission ID is [%s] and the record ID is " \
                              "[%s]." \
                              % (name_file_to_stamp, \
                                 file_stamper_options['input-file'], \
                                 new_file_name, \
                                 access, \
                                 recid)
                    ## FIX: the warning message above was previously built
                    ## but never used. Log it; the rename is cosmetic (the
                    ## stamped file was already attached), so don't abort.
                    register_exception(prefix=wrn_msg)
    ## Finished.
    return ""
def get_dictionary_from_string(dict_string):
    """Parse a string-serialized dictionary into a real python dictionary.

    For example, given the following string:
       {'TITLE' : 'EX_TITLE', 'AUTHOR' : 'EX_AUTHOR'}
    the following dictionary is returned:
       { 'TITLE' : 'EX_TITLE', 'AUTHOR' : 'EX_AUTHOR' }

    Pairs without a colon, or with an empty key, are silently skipped.
    Commas escaped as "\\," do not act as pair separators.

    @param dict_string: (string) - the string version of the dictionary.
    @return: (dictionary) - the dictionary built from the string.
    """
    ## Drop surrounding whitespace and braces:
    body = dict_string.strip(" {}")
    ## Split the body into "key : value" chunks on commas that have not
    ## been escaped (an escaped comma looks like "\,"):
    parsed = {}
    for chunk in re.split(r'(?<!\\),', body):
        ## Each chunk should be "key : value"; split on the FIRST colon only
        ## so that colons inside the value survive:
        pieces = chunk.split(":", 1)
        if len(pieces) != 2:
            ## Malformed pair (no colon at all) - skip it:
            continue
        ## Remove surrounding whitespace and single/double quotes:
        key = pieces[0].strip(" '\"")
        if not key:
            ## An empty key is not usable - skip the pair:
            continue
        parsed[key] = pieces[1].strip(" '\"")
    return parsed
def _read_in_file(filepath):
"""Read the contents of a file into a string in memory.
@param filepath: (string) - the path to the file to be read in.
@return: (string) - the contents of the file.
"""
if filepath != "" and \
os.path.exists("%s" % filepath):
try:
fh_filepath = open("%s" % filepath, "r")
file_contents = fh_filepath.read()
fh_filepath.close()
except IOError:
register_exception()
file_contents = ""
else:
file_contents = ""
return file_contents
| gpl-2.0 |
chris-chris/tensorflow | tensorflow/python/kernel_tests/one_hot_op_test.py | 139 | 12639 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.one_hot_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class OneHotTest(test.TestCase):
def _testOneHot(self,
truth,
use_gpu=False,
expected_err_re=None,
raises=None,
**inputs):
with self.test_session(use_gpu=use_gpu):
if raises is not None:
with self.assertRaises(raises):
array_ops.one_hot(**inputs)
else:
ans = array_ops.one_hot(**inputs)
if expected_err_re is None:
tf_ans = ans.eval()
self.assertAllEqual(tf_ans, truth)
self.assertEqual(tf_ans.shape, ans.get_shape())
else:
with self.assertRaisesOpError(expected_err_re):
ans.eval()
def _testBothOneHot(self, truth, expected_err_re=None, raises=None, **inputs):
self._testOneHot(truth, True, expected_err_re, raises, **inputs)
self._testOneHot(truth, False, expected_err_re, raises, **inputs)
def _testBasic(self, dtype):
indices = np.asarray([0, 2, -1, 1], dtype=np.int64)
depth = 3
on_value = np.asarray(1.0, dtype=dtype)
off_value = np.asarray(-1.0, dtype=dtype)
truth = np.asarray(
[[1.0, -1.0, -1.0], [-1.0, -1.0, 1.0], [-1.0, -1.0, -1.0],
[-1.0, 1.0, -1.0]],
dtype=dtype)
# axis == -1
self._testBothOneHot(
indices=indices,
depth=depth,
on_value=on_value,
off_value=off_value,
dtype=dtype,
truth=truth)
# axis == 0
self._testBothOneHot(
indices=indices,
depth=depth,
on_value=on_value,
off_value=off_value,
axis=0,
dtype=dtype,
truth=truth.T) # Output is transpose version in this case
def _testDefaultBasic(self, dtype):
indices = np.asarray([0, 2, -1, 1], dtype=np.int64)
depth = 3
truth = np.asarray(
[[1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
dtype=dtype)
# axis == -1
self._testBothOneHot(indices=indices, depth=depth, truth=truth)
# axis == 0
self._testBothOneHot(
indices=indices, depth=depth, axis=0,
truth=truth.T) # Output is transpose version in this case
  # Per-dtype drivers for the rank-1 cases above: each runs both the
  # explicit on/off-value variant and the default-value variant.
  def testFloatBasic(self):
    self._testBasic(np.float32)
    self._testDefaultBasic(np.float32)

  def testDoubleBasic(self):
    self._testBasic(np.float64)
    self._testDefaultBasic(np.float64)

  def testInt32Basic(self):
    self._testBasic(np.int32)
    self._testDefaultBasic(np.int32)

  def testInt64Basic(self):
    self._testBasic(np.int64)
    self._testDefaultBasic(np.int64)

  def testComplex64Basic(self):
    self._testBasic(np.complex64)
    self._testDefaultBasic(np.complex64)

  def testComplex128Basic(self):
    self._testBasic(np.complex128)
    self._testDefaultBasic(np.complex128)
def _testBatch(self, dtype):
indices = np.asarray([[0, 2, -1, 1], [1, 0, 1, -1]], dtype=np.int64)
depth = 3
on_value = np.asarray(1.0, dtype=dtype)
off_value = np.asarray(-1.0, dtype=dtype)
truth = np.asarray(
[[[1.0, -1.0, -1.0], [-1.0, -1.0, 1.0], [-1.0, -1.0, -1.0],
[-1.0, 1.0, -1.0]], [[-1.0, 1.0, -1.0], [1.0, -1.0, -1.0],
[-1.0, 1.0, -1.0], [-1.0, -1.0, -1.0]]],
dtype=dtype)
# axis == -1
self._testBothOneHot(
indices=indices,
depth=depth,
on_value=on_value,
off_value=off_value,
dtype=dtype,
truth=truth)
# axis == 1
self._testBothOneHot(
indices=indices,
depth=depth,
on_value=on_value,
off_value=off_value,
axis=1,
dtype=dtype,
truth=[truth[0].T, truth[1].T]) # Do not transpose the batch
def _testDefaultValuesBatch(self, dtype):
indices = np.asarray([[0, 2, -1, 1], [1, 0, 1, -1]], dtype=np.int64)
depth = 3
truth = np.asarray(
[[[1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[0.0, 1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]]],
dtype=dtype)
# axis == -1
self._testBothOneHot(indices=indices, depth=depth, dtype=dtype, truth=truth)
# axis == 1
self._testBothOneHot(
indices=indices,
depth=depth,
axis=1,
dtype=dtype,
truth=[truth[0].T, truth[1].T]) # Do not transpose the batch
def _testValueTypeBatch(self, dtype):
indices = np.asarray([[0, 2, -1, 1], [1, 0, 1, -1]], dtype=np.int64)
depth = 3
on_value = np.asarray(1.0, dtype=dtype)
off_value = np.asarray(-1.0, dtype=dtype)
truth = np.asarray(
[[[1.0, -1.0, -1.0], [-1.0, -1.0, 1.0], [-1.0, -1.0, -1.0],
[-1.0, 1.0, -1.0]], [[-1.0, 1.0, -1.0], [1.0, -1.0, -1.0],
[-1.0, 1.0, -1.0], [-1.0, -1.0, -1.0]]],
dtype=dtype)
# axis == -1
self._testBothOneHot(
indices=indices,
on_value=on_value,
off_value=off_value,
depth=depth,
dtype=dtype,
truth=truth)
# axis == 1
self._testBothOneHot(
indices=indices,
on_value=on_value,
off_value=off_value,
depth=depth,
axis=1,
dtype=dtype,
truth=[truth[0].T, truth[1].T]) # Do not transpose the batch
def _testEmpty(self, dtype):
indices = np.zeros((0, 16), dtype=np.int64)
depth = 3
on_value = np.asarray(1.0, dtype=dtype)
off_value = np.asarray(-1.0, dtype=dtype)
truth = np.empty((0, 16, 3), dtype=dtype)
# axis == -1
self._testBothOneHot(
indices=indices,
depth=depth,
on_value=on_value,
off_value=off_value,
dtype=dtype,
truth=truth)
  def testHalfBatch(self):
    """Run the batched cases with float16."""
    self._testEmpty(np.float16)
    self._testBatch(np.float16)
    self._testDefaultValuesBatch(np.float16)
    self._testValueTypeBatch(np.float16)
  def testFloatBatch(self):
    """Run the batched cases with float32."""
    self._testEmpty(np.float32)
    self._testBatch(np.float32)
    self._testDefaultValuesBatch(np.float32)
    self._testValueTypeBatch(np.float32)
  def testDoubleBatch(self):
    """Run the batched cases with float64."""
    self._testEmpty(np.float64)
    self._testBatch(np.float64)
    self._testDefaultValuesBatch(np.float64)
    self._testValueTypeBatch(np.float64)
  def testInt32Batch(self):
    """Run the batched cases with int32."""
    self._testEmpty(np.int32)
    self._testBatch(np.int32)
    self._testDefaultValuesBatch(np.int32)
    self._testValueTypeBatch(np.int32)
  def testInt64Batch(self):
    """Run the batched cases with int64."""
    self._testEmpty(np.int64)
    self._testBatch(np.int64)
    self._testDefaultValuesBatch(np.int64)
    self._testValueTypeBatch(np.int64)
  def testComplexBatch(self):
    """Run the batched cases with complex64."""
    self._testEmpty(np.complex64)
    self._testBatch(np.complex64)
    # NOTE(review): the default-values case is disabled for complex64 —
    # presumably unsupported for this dtype; confirm before enabling.
    # self._testDefaultValuesBatch(np.complex64)
    self._testValueTypeBatch(np.complex64)
  def testSimpleCases(self):
    """Unbatched indices: default values, int32 dtype, and custom on/off."""
    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray(
        [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], dtype=np.float32)
    self._testBothOneHot(indices=indices, depth=depth, truth=truth)

    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.int32)
    self._testBothOneHot(
        indices=indices, depth=depth, dtype=np.int32, truth=truth)

    # Explicit scalar on/off values (1 / -1).
    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray([[1, -1, -1], [-1, 1, -1], [-1, -1, 1]], dtype=np.int32)
    self._testBothOneHot(
        indices=indices, depth=depth, on_value=1, off_value=-1, truth=truth)
  def testSingleValueGiven(self):
    """Supplying only one of on_value/off_value still works; the output
    dtype follows the value that was given."""
    # Only on_value provided
    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.int32)
    self._testBothOneHot(indices=indices, depth=depth, on_value=1, truth=truth)

    # Only off_value provided
    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float32)
    self._testBothOneHot(
        indices=indices, depth=depth, off_value=0.0, truth=truth)
  def testString(self):
    """String on/off values work as numpy scalars, tf constants, or raw bytes."""
    indices = [0, 1, 2]
    depth = 3
    truth = np.asarray([[b"1.0", b"0.0", b"0.0"], [b"0.0", b"1.0", b"0.0"],
                        [b"0.0", b"0.0", b"1.0"]])

    # numpy string scalars
    on_value = np.asarray(b"1.0")
    off_value = np.asarray(b"0.0")
    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=on_value,
        off_value=off_value,
        dtype=dtypes.string,
        truth=truth)

    # graph constants
    on_value = constant_op.constant(b"1.0")
    off_value = constant_op.constant(b"0.0")
    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=on_value,
        off_value=off_value,
        dtype=dtypes.string,
        truth=truth)

    # raw Python byte strings
    on_value = b"1.0"
    off_value = b"0.0"
    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=on_value,
        off_value=off_value,
        dtype=dtypes.string,
        truth=truth)
  def testIndicesTypes(self):
    """Indices may be tf tensors (uint8/int32/int64) or numpy arrays
    (int32/int64); all must yield the same one_hot result."""
    tf_types = [dtypes.uint8, dtypes.int32, dtypes.int64]
    np_types = [np.int32, np.int64]
    for itype in tf_types + np_types:
      # Note: to keep the tests simple in the case of uint8 the index -1 below
      # maps to 255 which is out of the depth range, just like -1.
      if itype in tf_types:
        indices = constant_op.constant(
            [[0, 2, -1, 1], [1, 0, 1, -1]], dtype=itype)
      elif itype in np_types:
        indices = np.asarray([[0, 2, -1, 1], [1, 0, 1, -1]], dtype=itype)
      depth = 3
      on_value = np.asarray(1.0, dtype=np.float32)
      off_value = np.asarray(-1.0, dtype=np.float32)
      truth = np.asarray(
          [[[1.0, -1.0, -1.0], [-1.0, -1.0, 1.0], [-1.0, -1.0, -1.0],
            [-1.0, 1.0, -1.0]], [[-1.0, 1.0, -1.0], [1.0, -1.0, -1.0],
                                 [-1.0, 1.0, -1.0], [-1.0, -1.0, -1.0]]],
          dtype=np.float32)
      # axis == -1
      self._testBothOneHot(
          indices=indices,
          on_value=on_value,
          off_value=off_value,
          depth=depth,
          truth=truth)
      # axis == 1
      self._testBothOneHot(
          indices=indices,
          on_value=on_value,
          off_value=off_value,
          depth=depth,
          axis=1,
          truth=[truth[0].T, truth[1].T])  # Do not transpose the batch
  def testPrefixDimOverflow(self):
    """A leading dimension of 65536 must not overflow internal indexing,
    regardless of the index dtype."""
    for itype in [dtypes.int32, dtypes.int64, dtypes.uint8]:
      prefix_dim_size = 65536
      depth = 2
      # Alternating 0/1 indices so every row of `truth` has one hot entry.
      x = [i % depth for i in range(prefix_dim_size)]
      indices = constant_op.constant(x, dtype=itype)
      truth = np.zeros((prefix_dim_size, depth), np.float32)
      for i in range(prefix_dim_size):
        truth[i, x[i]] = 1.0
      self._testBothOneHot(
          indices=indices,
          depth=depth,
          on_value=1.0,
          off_value=0.0,
          truth=truth)
  def testOnOffMismatchTypeError(self):
    """Mismatched on/off value dtypes (float64 vs float32) raise TypeError."""
    indices = [0, 1, 2]
    depth = 3
    on_value = np.asarray(1.0, np.float64)
    off_value = np.asarray(0.0, np.float32)
    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=on_value,
        off_value=off_value,
        truth=None,
        raises=TypeError)
  def testDtypeMismatchTypeError(self):
    """An on/off value whose dtype disagrees with `dtype` raises TypeError."""
    indices = [0, 1, 2]
    depth = 3
    on_value = np.asarray(1.0, np.float32)
    off_value = np.asarray(0.0, np.float32)
    dtype = np.int32

    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=on_value,
        dtype=dtype,
        truth=None,
        raises=TypeError)

    # NOTE(review): this second case passes `off_value` through the
    # `on_value` keyword — possibly meant to be `off_value=off_value` to
    # exercise the off-value mismatch path; confirm against the op's API.
    self._testBothOneHot(
        indices=indices,
        depth=depth,
        on_value=off_value,
        dtype=dtype,
        truth=None,
        raises=TypeError)
# Run the test suite when the file is executed directly.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
ahb0327/intellij-community | python/helpers/pydev/_pydev_imps/_pydev_pluginbase.py | 52 | 13955 | # -*- coding: utf-8 -*-
"""
pluginbase
~~~~~~~~~~
Pluginbase is a module for Python that provides a system for building
plugin based applications.
:copyright: (c) Copyright 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from pydevd_constants import IS_PY24, IS_PY3K, IS_JYTHON
if IS_PY24:
from _pydev_imps._pydev_uuid_old import uuid4
else:
from uuid import uuid4
if IS_PY3K:
import pkgutil
else:
from _pydev_imps import _pydev_pkgutil_old as pkgutil
import errno
try:
from hashlib import md5
except ImportError:
from md5 import md5
import threading
from types import ModuleType
from weakref import ref as weakref
# True when running under Python 2; drives the text/bytes compat aliases below.
PY2 = sys.version_info[0] == 2
if PY2:
    text_type = unicode
    string_types = (unicode, str)
    from cStringIO import StringIO as NativeBytesIO
else:
    text_type = str
    string_types = (str,)
    from io import BytesIO as NativeBytesIO

# Thread-local storage; holds the stack of currently activated plugin sources
# (`space_stack`) used by PluginSource.__enter__/__exit__ and load_plugin.
_local = threading.local()

# Dummy package that hosts one module per plugin source; plugins are imported
# as submodules of it (see _PluginSourceModule and PluginSource.spaceid).
_internalspace = ModuleType(__name__ + '._internalspace')
_internalspace.__path__ = []
sys.modules[_internalspace.__name__] = _internalspace
def get_plugin_source(module=None, stacklevel=None):
    """Returns the :class:`PluginSource` for the current module or the given
    module.  The module can be provided by name (in which case an import
    will be attempted) or as a module object.

    If no plugin source can be discovered, the return value from this method
    is `None`.

    This function can be very useful if additional data has been attached
    to the plugin source.  For instance this could allow plugins to get
    access to a back reference to the application that created them.

    :param module: optionally the module to locate the plugin source of.
    :param stacklevel: defines how many levels up the module should search
                       for before it discovers the plugin frame.  The
                       default is 0.  This can be useful for writing wrappers
                       around this function.
    """
    if module is None:
        # No module given: inspect the caller's frame.  The frame depth is
        # call-depth sensitive; wrappers must bump `stacklevel` accordingly.
        frm = sys._getframe((stacklevel or 0) + 1)
        name = frm.f_globals['__name__']
        glob = frm.f_globals
    elif isinstance(module, string_types):
        # Module given by name: import it using the caller's globals/locals.
        frm = sys._getframe(1)
        name = module
        glob = __import__(module, frm.f_globals,
                          frm.f_locals, ['__dict__']).__dict__
    else:
        # An actual module object was passed in.
        name = module.__name__
        glob = module.__dict__
    return _discover_space(name, glob)
def _discover_space(name, globals):
    """Best-effort lookup of the PluginSource that owns *name*/*globals*.

    Returns ``None`` when no source can be discovered.
    """
    try:
        # An explicitly activated source (``with plugin_source:``) wins.
        return _local.space_stack[-1]
    except (AttributeError, IndexError):
        pass
    if '__pluginbase_state__' in globals:
        return globals['__pluginbase_state__'].source
    mod_name = None
    if globals:
        # in unidecode package they pass [] as globals arg
        mod_name = globals.get('__name__')
    if mod_name is not None and \
       mod_name.startswith(_internalspace.__name__ + '.'):
        # The module lives inside the internal plugin namespace; map it back
        # to the space module that owns it and return that module's source.
        end = mod_name.find('.', len(_internalspace.__name__) + 1)
        space = sys.modules.get(mod_name[:end])
        if space is not None:
            return space.__pluginbase_state__.source
def _shutdown_module(mod):
members = list(mod.__dict__.items())
for key, value in members:
if key[:1] != '_':
setattr(mod, key, None)
for key, value in members:
setattr(mod, key, None)
def _to_bytes(s):
    """Return *s* encoded as UTF-8 when it is text; byte strings pass through."""
    if not isinstance(s, text_type):
        return s
    return s.encode('utf-8')
class _IntentionallyEmptyModule(ModuleType):
def __getattr__(self, name):
try:
return ModuleType.__getattr__(self, name)
except AttributeError:
if name[:2] == '__':
raise
raise RuntimeError(
'Attempted to import from a plugin base module (%s) without '
'having a plugin source activated. To solve this error '
'you have to move the import into a "with" block of the '
'associated plugin source.' % self.__name__)
class _PluginSourceModule(ModuleType):
    """Internal namespace module anchoring one PluginSource.

    It is registered at ``<_internalspace>.<spaceid>``; the plugins of the
    source are imported as submodules of this module.
    """

    def __init__(self, source):
        modname = '%s.%s' % (_internalspace.__name__, source.spaceid)
        ModuleType.__init__(self, modname)
        # Holds the (possibly weak) back reference to the owning source.
        self.__pluginbase_state__ = PluginBaseState(source)

    @property
    def __path__(self):
        # Consulted by the import machinery when resolving submodules:
        # search the source's paths plus the base's shared search path.
        try:
            ps = self.__pluginbase_state__.source
        except AttributeError:
            # The source was cleaned up or garbage collected.
            return []
        return ps.searchpath + ps.base.searchpath
def _setup_base_package(module_name):
    """Ensure *module_name* is importable as the plugin base package.

    If the real module cannot be imported, an _IntentionallyEmptyModule is
    registered in ``sys.modules`` instead (and attached to its parent
    package, when there is one).
    """
    try:
        mod = __import__(module_name, None, None, ['__name__'])
    except ImportError:
        mod = None
        if '.' in module_name:
            parent_mod = __import__(module_name.rsplit('.', 1)[0],
                                    None, None, ['__name__'])
        else:
            parent_mod = None

    if mod is None:
        mod = _IntentionallyEmptyModule(module_name)
        if parent_mod is not None:
            setattr(parent_mod, module_name.rsplit('.', 1)[-1], mod)
        sys.modules[module_name] = mod
class PluginBase(object):
    """The plugin base acts as a control object around a dummy Python
    package that acts as a container for plugins.  Usually each
    application creates exactly one base object for all plugins.

    :param package: the name of the package that acts as the plugin base.
                    Usually this module does not exist.  Unless you know
                    what you are doing you should not create this module
                    on the file system.
    :param searchpath: optionally a shared search path for modules that
                       will be used by all plugin sources registered.
    """

    def __init__(self, package, searchpath=None):
        #: the name of the dummy package.
        self.package = package
        if searchpath is None:
            searchpath = []
        #: the default search path shared by all plugins as list.
        self.searchpath = searchpath
        _setup_base_package(package)

    def make_plugin_source(self, *args, **kwargs):
        """Creates a plugin source for this plugin base and returns it.
        All parameters are forwarded to :class:`PluginSource`.
        """
        return PluginSource(self, *args, **kwargs)
class PluginSource(object):
    """The plugin source is what ultimately decides where plugins are
    loaded from.  Plugin bases can have multiple plugin sources which act
    as isolation layer.  While this is not a security system it generally
    is not possible for plugins from different sources to accidentally
    cross talk.

    Once a plugin source has been created it can be used in a ``with``
    statement to change the behavior of the ``import`` statement in the
    block to define which source to load the plugins from::

        plugin_source = plugin_base.make_plugin_source(
            searchpath=['./path/to/plugins', './path/to/more/plugins'])

        with plugin_source:
            from myapplication.plugins import my_plugin

    :param base: the base this plugin source belongs to.
    :param identifier: optionally a stable identifier.  If it's not defined
                       a random identifier is picked.  It's useful to set this
                       to a stable value to have consistent tracebacks
                       between restarts and to support pickle.
    :param searchpath: a list of paths where plugins are looked for.
    :param persist: optionally this can be set to `True` and the plugins
                    will not be cleaned up when the plugin source gets
                    garbage collected.
    """
    # Set these here to false by default so that a completely failing
    # constructor does not break the destructor.
    persist = False
    mod = None

    def __init__(self, base, identifier=None, searchpath=None,
                 persist=False):
        #: indicates if this plugin source persists or not.
        self.persist = persist
        if identifier is None:
            identifier = str(uuid4())
        #: the identifier for this source.
        self.identifier = identifier
        #: A reference to the plugin base that created this source.
        self.base = base
        #: a list of paths where plugins are searched in.
        self.searchpath = searchpath
        #: The internal module name of the plugin source as it appears
        #: in the :mod:`pluginsource._internalspace`.
        self.spaceid = '_sp' + md5(
            _to_bytes(self.base.package) + _to_bytes('|') +
            _to_bytes(self.identifier)
        ).hexdigest()
        #: a reference to the module on the internal
        #: :mod:`pluginsource._internalspace`.
        self.mod = _PluginSourceModule(self)

        if hasattr(_internalspace, self.spaceid):
            raise RuntimeError('This plugin source already exists.')
        sys.modules[self.mod.__name__] = self.mod
        setattr(_internalspace, self.spaceid, self.mod)

    def __del__(self):
        if not self.persist:
            self.cleanup()

    def list_plugins(self):
        """Returns a sorted list of all plugins that are available in this
        plugin source.  This can be useful to automatically discover plugins
        that are available and is usually used together with
        :meth:`load_plugin`.
        """
        rv = []
        for _, modname, ispkg in pkgutil.iter_modules(self.mod.__path__):
            rv.append(modname)
        return sorted(rv)

    def load_plugin(self, name):
        """This automatically loads a plugin by the given name from the
        current source and returns the module.  This is a convenient
        alternative to the import statement and saves you from invoking
        ``__import__`` or a similar function yourself.

        :param name: the name of the plugin to load.
        """
        if '.' in name:
            raise ImportError('Plugin names cannot contain dots.')
        # The context-manager protocol (__enter__/__exit__) is inlined here
        # instead of writing ``with self:`` so this also runs on very old
        # Python versions without the with statement (see IS_PY24 above).
        self.__assert_not_cleaned_up()
        _local.__dict__.setdefault('space_stack', []).append(self)
        try:
            res = __import__(self.base.package + '.' + name,
                             globals(), {}, ['__name__'])
            return res
        finally:
            try:
                _local.space_stack.pop()
            except (AttributeError, IndexError):
                pass

    def open_resource(self, plugin, filename):
        """This function locates a resource inside the plugin and returns
        a byte stream to the contents of it.  If the resource cannot be
        loaded an :exc:`IOError` will be raised.  Only plugins that are
        real Python packages can contain resources.  Plain old Python
        modules do not allow this for obvious reasons.

        .. versionadded:: 0.3

        :param plugin: the name of the plugin to open the resource of.
        :param filename: the name of the file within the plugin to open.
        """
        mod = self.load_plugin(plugin)
        fn = getattr(mod, '__file__', None)
        if fn is not None:
            if fn.endswith(('.pyc', '.pyo')):
                fn = fn[:-1]
            if os.path.isfile(fn):
                return open(os.path.join(os.path.dirname(fn), filename), 'rb')
        buf = pkgutil.get_data(self.mod.__name__ + '.' + plugin, filename)
        if buf is None:
            # BUGFIX: the original raised with errno.ENOEXITS, which does not
            # exist and itself raised AttributeError; the intended code for a
            # missing file is ENOENT.
            raise IOError(errno.ENOENT, 'Could not find resource')
        return NativeBytesIO(buf)

    def cleanup(self):
        """Cleans up all loaded plugins manually.  This is necessary to
        call only if :attr:`persist` is enabled.  Otherwise this happens
        automatically when the source gets garbage collected.
        """
        self.__cleanup()

    def __cleanup(self, _sys=sys, _shutdown_module=_shutdown_module):
        # The default parameters are necessary because this can be fired
        # from the destructor and so late when the interpreter shuts down
        # that these functions and modules might be gone.
        if self.mod is None:
            return
        modname = self.mod.__name__
        self.mod.__pluginbase_state__ = None
        self.mod = None
        try:
            delattr(_internalspace, self.spaceid)
        except AttributeError:
            pass
        prefix = modname + '.'
        # Drop the space module itself plus every plugin module that was
        # imported through it, neutralizing each one on the way out.
        _sys.modules.pop(modname)
        for key, value in list(_sys.modules.items()):
            if not key.startswith(prefix):
                continue
            mod = _sys.modules.pop(key, None)
            if mod is None:
                continue
            _shutdown_module(mod)

    def __assert_not_cleaned_up(self):
        if self.mod is None:
            raise RuntimeError('The plugin source was already cleaned up.')

    def __enter__(self):
        self.__assert_not_cleaned_up()
        _local.__dict__.setdefault('space_stack', []).append(self)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        try:
            _local.space_stack.pop()
        except (AttributeError, IndexError):
            pass

    def _rewrite_module_path(self, modname):
        self.__assert_not_cleaned_up()
        if modname == self.base.package:
            return self.mod.__name__
        elif modname.startswith(self.base.package + '.'):
            pieces = modname.split('.')
            return self.mod.__name__ + '.' + '.'.join(
                pieces[self.base.package.count('.') + 1:])
class PluginBaseState(object):
    """Back-reference holder attached to a plugin source module.

    Persistent sources are kept alive by a closure; transient sources are
    only weakly referenced so that they can be garbage collected.
    """
    __slots__ = ('_source',)

    def __init__(self, source):
        self._source = (lambda: source) if source.persist else weakref(source)

    @property
    def source(self):
        src = self._source()
        if src is None:
            raise AttributeError('Plugin source went away')
        return src
| apache-2.0 |
msingh172/pylearn2 | pylearn2/packaged_dependencies/theano_linear/unshared_conv/test_gpu_unshared_conv.py | 37 | 7950 | from __future__ import print_function
import unittest
from nose.plugins.skip import SkipTest
import numpy
import theano
# Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.cuda_available == False:
raise SkipTest('Optional package cuda disabled')
from theano.sandbox.cuda.var import float32_shared_constructor
from .unshared_conv import FilterActs
from .unshared_conv import WeightActs
from .unshared_conv import ImgActs
from .gpu_unshared_conv import (
GpuFilterActs,
GpuWeightActs,
GpuImgActs,
)
import test_unshared_conv
# Theano compilation mode shared by the tests below: the configured default
# (upgraded from FAST_COMPILE to FAST_RUN) with GPU optimizations enabled.
if theano.config.mode == 'FAST_COMPILE':
    mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
else:
    mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
class TestGpuFilterActs(test_unshared_conv.TestFilterActs):
    """
    This class tests GpuWeightActs via the gradient of GpuFilterAct

    The correctness of GpuFilterActs is tested in TestMatchFilterActs
    """
    ishape = (1, 1, 4, 4, 2)  # 2 4x4 greyscale images
    fshape = (2, 2, 1, 3, 3, 1, 16)  # 5 3x3 filters at each location in a 2x2 grid
    module_stride = 1
    dtype = 'float32'
    mode = mode_with_gpu

    def setUp(self):
        # Reuse the CPU fixtures, then rebuild the shared inputs on the GPU.
        test_unshared_conv.TestFilterActs.setUp(self)
        self.gpu_op = GpuFilterActs(
            module_stride=self.module_stride,
            partial_sum=1)
        self.s_images = float32_shared_constructor(
            self.s_images.get_value())
        self.s_filters = float32_shared_constructor(
            self.s_filters.get_value())

    def test_gpu_shape(self):
        import theano.sandbox.cuda as cuda_ndarray
        if cuda_ndarray.cuda_available == False:
            raise SkipTest('Optional package cuda disabled')
        gpuout = self.gpu_op(self.s_images, self.s_filters)
        assert 'Cuda' in str(self.s_filters.type)
        f = theano.function([], gpuout, mode=mode_with_gpu)
        outval = f()
        assert outval.shape == (
            self.fshape[-2], self.fshape[-1],
            self.fshape[0], self.fshape[1],
            self.ishape[-1])

    def test_insert_gpu_filter_acts(self):
        # The GPU optimizer should have replaced the op in the compiled graph.
        out = self.op(self.s_images, self.s_filters)
        f = self.function([], out)
        try:
            fgraph = f.maker.fgraph
        except AttributeError:
            # BUGFIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  Only AttributeError signals the
            # older Theano API, which exposed the graph as `maker.env`.
            fgraph = f.maker.env
        assert isinstance(
            fgraph.toposort()[0].op,
            GpuFilterActs)

    def test_gpu_op_eq(self):
        # Equality/hashing of the op is keyed on (module_stride, partial_sum);
        # `!= None` is intentional here — it exercises the __ne__ overload.
        assert GpuFilterActs(1, 1) == GpuFilterActs(1, 1)
        assert not (GpuFilterActs(1, 1) != GpuFilterActs(1, 1))
        assert (GpuFilterActs(1, 2) != GpuFilterActs(1, 1))
        assert (GpuFilterActs(2, 1) != GpuFilterActs(1, 1))
        assert GpuFilterActs(2, 1) != None
class TestGpuWeightActs(unittest.TestCase):
    """
    Shape-level check of GpuWeightActs: images + hidden activations in,
    filter-shaped gradient (dfilters) out.
    """
    ishape = (1, 1, 4, 4, 2)  # 2 4x4 greyscale images
    hshape = (1, 16, 2, 2, 2)
    fshape = (2, 2, 1, 3, 3, 1, 16)  # 5 3x3 filters at each location in a 2x2 grid
    frows = 3
    fcols = 3
    module_stride = 1
    partial_sum = 1
    dtype = 'float32'

    def setUp(self):
        self.gwa = GpuWeightActs(
            module_stride=self.module_stride,
            partial_sum=self.partial_sum)
        self.gpu_images = float32_shared_constructor(
            numpy.random.rand(*self.ishape).astype(self.dtype))
        self.gpu_hidact = float32_shared_constructor(
            numpy.random.rand(*self.hshape).astype(self.dtype))

    def test_shape(self):
        # The output must have the full filter shape `fshape`.
        dfilters = self.gwa(self.gpu_images, self.gpu_hidact,
                            self.frows, self.fcols)
        f = theano.function([], dfilters)
        outval = f()
        assert outval.shape == self.fshape
class TestGpuImgActs(unittest.TestCase):
    """
    Shape-level check of GpuImgActs: filters + hidden activations in,
    image-shaped gradient (dimages) out.
    """
    ishape = (1, 1, 4, 4, 2)  # 2 4x4 greyscale images
    hshape = (1, 16, 2, 2, 2)
    fshape = (2, 2, 1, 3, 3, 1, 16)  # 5 3x3 filters at each location in a 2x2 grid
    irows = 4
    icols = 4
    module_stride = 1
    partial_sum = 1
    dtype = 'float32'

    def setUp(self):
        self.gia = GpuImgActs(
            module_stride=self.module_stride,
            partial_sum=self.partial_sum)
        self.gpu_images = float32_shared_constructor(
            numpy.random.rand(*self.ishape).astype(self.dtype))
        self.gpu_hidact = float32_shared_constructor(
            numpy.random.rand(*self.hshape).astype(self.dtype))
        self.gpu_filters = float32_shared_constructor(
            numpy.random.rand(*self.fshape).astype(self.dtype))

    def test_shape(self):
        # The output must have the full image shape `ishape`.
        dimages = self.gia(self.gpu_filters, self.gpu_hidact,
                           self.irows, self.icols)
        f = theano.function([], dimages)
        outval = f()
        assert outval.shape == self.ishape
if 1:
    # NOTE(review): `if 1:` looks like a leftover enable/disable toggle for
    # this suite; kept as-is.
    class TestMatchFilterActs(unittest.TestCase):
        """Verify that GpuFilterActs matches the reference CPU FilterActs."""

        def setUp(self):
            # Fixed seed so failures are reproducible.
            numpy.random.seed(77)

        def run_match(self, images, filters, module_stride, retvals=False, partial_sum=1):
            """Evaluate CPU and GPU ops on identical data.

            Asserts shapes match and values are allclose, unless `retvals`
            is True, in which case (cpuval, gpuval) is returned instead.
            """
            gfa = GpuFilterActs(module_stride, partial_sum)
            fa = FilterActs(module_stride)

            gpu_images = float32_shared_constructor(images)
            gpu_filters = float32_shared_constructor(filters)
            cpu_images = theano.shared(images)
            cpu_filters = theano.shared(filters)

            gpu_out = gfa(gpu_images, gpu_filters)
            cpu_out = fa(cpu_images, cpu_filters)

            f = theano.function([], [cpu_out, gpu_out])
            cpuval, gpuval = f()
            gpuval = numpy.asarray(gpuval)

            if retvals:
                return cpuval, gpuval
            else:
                #print 'run_match: cpu shape', cpuval.shape
                #print 'run_match: gpu shape', gpuval.shape
                assert cpuval.shape == gpuval.shape
                assert numpy.allclose(cpuval, gpuval)

        def run_match_shape(self, ishape, fshape, module_stride, dtype='float32'):
            """run_match on freshly drawn uniform random data of these shapes."""
            return self.run_match(
                images=numpy.random.rand(*ishape).astype(dtype),
                filters=numpy.random.rand(*fshape).astype(dtype),
                module_stride=module_stride)

        def test_small_random(self):
            self.run_match_shape(
                ishape = (1, 1, 4, 4, 2),
                fshape = (2, 2, 1, 3, 3, 1, 16),
                module_stride = 1)

        def test_small_random_colors(self):
            self.run_match_shape(
                ishape = (1, 6, 4, 4, 2),
                fshape = (2, 2, 6, 3, 3, 1, 16),
                module_stride = 1)

        def test_small_random_groups(self):
            self.run_match_shape(
                ishape = (5, 6, 4, 4, 2),
                fshape = (2, 2, 6, 3, 3, 5, 16),
                module_stride = 1)

        def test_small_random_module_stride(self):
            self.run_match_shape(
                ishape = (4, 6, 5, 5, 1),
                fshape = (2, 2, 6, 3, 3, 4, 16),
                module_stride = 2)

        def test_med_random_module_stride(self):
            self.run_match_shape(
                ishape = (4, 6, 32, 32, 1),
                fshape = (12, 12, 6, 3, 3, 4, 16),
                module_stride = 2)

        def _blah_topcorner_filter1(self):
            # Disabled (leading underscore keeps unittest from collecting it):
            # zero out all filters except position (0, 0).
            ishape = (1, 1, 4, 4, 2)
            fshape = (2, 2, 1, 3, 3, 1, 16)
            images = numpy.random.rand(*ishape).astype('float32')
            filters = numpy.random.rand(*fshape).astype('float32')
            filters *= 0
            filters[0,0,0,0,0,0,0] = 1
            self.run_match(images, filters, 1)

        def _blah_botcorner_filter1(self):
            # Disabled debugging helper: isolate filter position (1, 1) and
            # print the CPU/GPU outputs side by side.
            ishape = (1, 1, 4, 4, 2)
            fshape = (2, 2, 1, 3, 3, 1, 16)
            images = numpy.random.rand(*ishape).astype('float32')
            filters = numpy.random.rand(*fshape).astype('float32')
            filters *= 0
            filters[1,1,0,0,0,0,0] = 1
            cpuval, gpuval = self.run_match(images, filters, 1, retvals=True)
            print(images)
            print(cpuval[:, :, 1, 1, :])
            print(gpuval[:, :, 1, 1, :])
| bsd-3-clause |
playm2mboy/edx-platform | lms/djangoapps/lti_provider/tests/test_signature_validator.py | 139 | 3804 | """
Tests for the SignatureValidator class.
"""
import ddt
from django.test import TestCase
from django.test.client import RequestFactory
from mock import patch
from lti_provider.models import LtiConsumer
from lti_provider.signature_validator import SignatureValidator
def get_lti_consumer():
    """
    Helper method for all Signature Validator tests to get an LtiConsumer object.
    """
    # Unsaved model instance — these tests never touch the database.
    return LtiConsumer(
        consumer_name='Consumer Name',
        consumer_key='Consumer Key',
        consumer_secret='Consumer Secret'
    )
@ddt.ddt
class ClientKeyValidatorTest(TestCase):
    """
    Tests for the check_client_key method in the SignatureValidator class.
    """

    def setUp(self):
        super(ClientKeyValidatorTest, self).setUp()
        self.lti_consumer = get_lti_consumer()

    def test_valid_client_key(self):
        """
        Verify that check_client_key succeeds with a valid key
        """
        key = self.lti_consumer.consumer_key
        self.assertTrue(SignatureValidator(self.lti_consumer).check_client_key(key))

    # Disallowed keys: a key other than the consumer's, an empty string,
    # and None.
    @ddt.data(
        ('0123456789012345678901234567890123456789',),
        ('',),
        (None,),
    )
    @ddt.unpack
    def test_invalid_client_key(self, key):
        """
        Verify that check_client_key fails with a disallowed key
        """
        self.assertFalse(SignatureValidator(self.lti_consumer).check_client_key(key))
@ddt.ddt
class NonceValidatorTest(TestCase):
    """
    Tests for the check_nonce method in the SignatureValidator class.
    """

    def setUp(self):
        super(NonceValidatorTest, self).setUp()
        self.lti_consumer = get_lti_consumer()

    def test_valid_nonce(self):
        """
        Verify that check_nonce succeeds with a key of maximum length
        """
        # 64 characters: the boundary accepted length.
        nonce = '0123456789012345678901234567890123456789012345678901234567890123'
        self.assertTrue(SignatureValidator(self.lti_consumer).check_nonce(nonce))

    # Rejected nonces: 65 characters (one over the limit), empty, and None.
    @ddt.data(
        ('01234567890123456789012345678901234567890123456789012345678901234',),
        ('',),
        (None,),
    )
    @ddt.unpack
    def test_invalid_nonce(self, nonce):
        """
        Verify that check_nonce fails with badly formatted nonce
        """
        self.assertFalse(SignatureValidator(self.lti_consumer).check_nonce(nonce))
class SignatureValidatorTest(TestCase):
    """
    Tests for the custom SignatureValidator class that uses the oauthlib library
    to check message signatures. Note that these tests mock out the library
    itself, since we assume it to be correct.
    """

    def setUp(self):
        super(SignatureValidatorTest, self).setUp()
        self.lti_consumer = get_lti_consumer()

    def test_get_existing_client_secret(self):
        """
        Verify that get_client_secret returns the right value for the correct
        key
        """
        key = self.lti_consumer.consumer_key
        secret = SignatureValidator(self.lti_consumer).get_client_secret(key, None)
        self.assertEqual(secret, self.lti_consumer.consumer_secret)

    @patch('oauthlib.oauth1.SignatureOnlyEndpoint.validate_request',
           return_value=(True, None))
    def test_verification_parameters(self, verify_mock):
        """
        Verify that the signature validaton library method is called using the
        correct parameters derived from the HttpRequest.
        """
        body = 'oauth_signature_method=HMAC-SHA1&oauth_version=1.0'
        content_type = 'application/x-www-form-urlencoded'
        request = RequestFactory().post('/url', body, content_type=content_type)
        headers = {'Content-Type': content_type}
        SignatureValidator(self.lti_consumer).verify(request)
        # The mock replaces the oauthlib endpoint, so we only check that the
        # request was translated into (uri, method, body, headers) correctly.
        verify_mock.assert_called_once_with(
            request.build_absolute_uri(), 'POST', body, headers)
| agpl-3.0 |
bgris/ODL_bgris | lib/python3.5/site-packages/pygments/lexers/_stan_builtins.py | 27 | 10121 | # -*- coding: utf-8 -*-
"""
pygments.lexers._stan_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer``. This is for Stan language version 2.8.0.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Stan language keywords (Stan 2.8.0), highlighted as keywords by StanLexer.
KEYWORDS = (
    'else',
    'for',
    'if',
    'in',
    'increment_log_prob',
    'integrate_ode',
    'lp__',
    'print',
    'reject',
    'return',
    'while'
)
# Stan variable/data types (Stan 2.8.0).
TYPES = (
    'cholesky_factor_corr',
    'cholesky_factor_cov',
    'corr_matrix',
    'cov_matrix',
    'int',
    'matrix',
    'ordered',
    'positive_ordered',
    'real',
    'row_vector',
    # NOTE(review): 'row_vectormatrix' looks like two names fused by the
    # generator ('row_vector' + 'matrix'); confirm against the Stan manual
    # before relying on it.
    'row_vectormatrix',
    'simplex',
    'unit_vector',
    'vector',
    'void')
FUNCTIONS = (
'Phi',
'Phi_approx',
'abs',
'acos',
'acosh',
'append_col',
'append_row',
'asin',
'asinh',
'atan',
'atan2',
'atanh',
'bernoulli_ccdf_log',
'bernoulli_cdf',
'bernoulli_cdf_log',
'bernoulli_log',
'bernoulli_logit_log',
'bernoulli_rng',
'bessel_first_kind',
'bessel_second_kind',
'beta_binomial_ccdf_log',
'beta_binomial_cdf',
'beta_binomial_cdf_log',
'beta_binomial_log',
'beta_binomial_rng',
'beta_ccdf_log',
'beta_cdf',
'beta_cdf_log',
'beta_log',
'beta_rng',
'binary_log_loss',
'binomial_ccdf_log',
'binomial_cdf',
'binomial_cdf_log',
'binomial_coefficient_log',
'binomial_log',
'binomial_logit_log',
'binomial_rng',
'block',
'categorical_log',
'categorical_logit_log',
'categorical_rng',
'cauchy_ccdf_log',
'cauchy_cdf',
'cauchy_cdf_log',
'cauchy_log',
'cauchy_rng',
'cbrt',
'ceil',
'chi_square_ccdf_log',
'chi_square_cdf',
'chi_square_cdf_log',
'chi_square_log',
'chi_square_rng',
'cholesky_decompose',
'col',
'cols',
'columns_dot_product',
'columns_dot_self',
'cos',
'cosh',
'crossprod',
'csr_extract_u',
'csr_extract_v',
'csr_extract_w',
'csr_matrix_times_vector',
'csr_to_dense_matrix',
'cumulative_sum',
'determinant',
'diag_matrix',
'diag_post_multiply',
'diag_pre_multiply',
'diagonal',
'digamma',
'dims',
'dirichlet_log',
'dirichlet_rng',
'distance',
'dot_product',
'dot_self',
'double_exponential_ccdf_log',
'double_exponential_cdf',
'double_exponential_cdf_log',
'double_exponential_log',
'double_exponential_rng',
'e',
'eigenvalues_sym',
'eigenvectors_sym',
'erf',
'erfc',
'exp',
'exp2',
'exp_mod_normal_ccdf_log',
'exp_mod_normal_cdf',
'exp_mod_normal_cdf_log',
'exp_mod_normal_log',
'exp_mod_normal_rng',
'expm1',
'exponential_ccdf_log',
'exponential_cdf',
'exponential_cdf_log',
'exponential_log',
'exponential_rng',
'fabs',
'falling_factorial',
'fdim',
'floor',
'fma',
'fmax',
'fmin',
'fmod',
'frechet_ccdf_log',
'frechet_cdf',
'frechet_cdf_log',
'frechet_log',
'frechet_rng',
'gamma_ccdf_log',
'gamma_cdf',
'gamma_cdf_log',
'gamma_log',
'gamma_p',
'gamma_q',
'gamma_rng',
'gaussian_dlm_obs_log',
'get_lp',
'gumbel_ccdf_log',
'gumbel_cdf',
'gumbel_cdf_log',
'gumbel_log',
'gumbel_rng',
'head',
'hypergeometric_log',
'hypergeometric_rng',
'hypot',
'if_else',
'int_step',
'inv',
'inv_chi_square_ccdf_log',
'inv_chi_square_cdf',
'inv_chi_square_cdf_log',
'inv_chi_square_log',
'inv_chi_square_rng',
'inv_cloglog',
'inv_gamma_ccdf_log',
'inv_gamma_cdf',
'inv_gamma_cdf_log',
'inv_gamma_log',
'inv_gamma_rng',
'inv_logit',
'inv_phi',
'inv_sqrt',
'inv_square',
'inv_wishart_log',
'inv_wishart_rng',
'inverse',
'inverse_spd',
'is_inf',
'is_nan',
'lbeta',
'lgamma',
'lkj_corr_cholesky_log',
'lkj_corr_cholesky_rng',
'lkj_corr_log',
'lkj_corr_rng',
'lmgamma',
'log',
'log10',
'log1m',
'log1m_exp',
'log1m_inv_logit',
'log1p',
'log1p_exp',
'log2',
'log_determinant',
'log_diff_exp',
'log_falling_factorial',
'log_inv_logit',
'log_mix',
'log_rising_factorial',
'log_softmax',
'log_sum_exp',
'logistic_ccdf_log',
'logistic_cdf',
'logistic_cdf_log',
'logistic_log',
'logistic_rng',
'logit',
'lognormal_ccdf_log',
'lognormal_cdf',
'lognormal_cdf_log',
'lognormal_log',
'lognormal_rng',
'machine_precision',
'max',
'mdivide_left_tri_low',
'mdivide_right_tri_low',
'mean',
'min',
'modified_bessel_first_kind',
'modified_bessel_second_kind',
'multi_gp_cholesky_log',
'multi_gp_log',
'multi_normal_cholesky_log',
'multi_normal_cholesky_rng',
'multi_normal_log',
'multi_normal_prec_log',
'multi_normal_rng',
'multi_student_t_log',
'multi_student_t_rng',
'multinomial_log',
'multinomial_rng',
'multiply_log',
'multiply_lower_tri_self_transpose',
'neg_binomial_2_ccdf_log',
'neg_binomial_2_cdf',
'neg_binomial_2_cdf_log',
'neg_binomial_2_log',
'neg_binomial_2_log_log',
'neg_binomial_2_log_rng',
'neg_binomial_2_rng',
'neg_binomial_ccdf_log',
'neg_binomial_cdf',
'neg_binomial_cdf_log',
'neg_binomial_log',
'neg_binomial_rng',
'negative_infinity',
'normal_ccdf_log',
'normal_cdf',
'normal_cdf_log',
'normal_log',
'normal_rng',
'not_a_number',
'num_elements',
'ordered_logistic_log',
'ordered_logistic_rng',
'owens_t',
'pareto_ccdf_log',
'pareto_cdf',
'pareto_cdf_log',
'pareto_log',
'pareto_rng',
'pareto_type_2_ccdf_log',
'pareto_type_2_cdf',
'pareto_type_2_cdf_log',
'pareto_type_2_log',
'pareto_type_2_rng',
'pi',
'poisson_ccdf_log',
'poisson_cdf',
'poisson_cdf_log',
'poisson_log',
'poisson_log_log',
'poisson_log_rng',
'poisson_rng',
'positive_infinity',
'pow',
'prod',
'qr_Q',
'qr_R',
'quad_form',
'quad_form_diag',
'quad_form_sym',
'rank',
'rayleigh_ccdf_log',
'rayleigh_cdf',
'rayleigh_cdf_log',
'rayleigh_log',
'rayleigh_rng',
'rep_array',
'rep_matrix',
'rep_row_vector',
'rep_vector',
'rising_factorial',
'round',
'row',
'rows',
'rows_dot_product',
'rows_dot_self',
'scaled_inv_chi_square_ccdf_log',
'scaled_inv_chi_square_cdf',
'scaled_inv_chi_square_cdf_log',
'scaled_inv_chi_square_log',
'scaled_inv_chi_square_rng',
'sd',
'segment',
'sin',
'singular_values',
'sinh',
'size',
'skew_normal_ccdf_log',
'skew_normal_cdf',
'skew_normal_cdf_log',
'skew_normal_log',
'skew_normal_rng',
'softmax',
'sort_asc',
'sort_desc',
'sort_indices_asc',
'sort_indices_desc',
'sqrt',
'sqrt2',
'square',
'squared_distance',
'step',
'student_t_ccdf_log',
'student_t_cdf',
'student_t_cdf_log',
'student_t_log',
'student_t_rng',
'sub_col',
'sub_row',
'sum',
'tail',
'tan',
'tanh',
'tcrossprod',
'tgamma',
'to_array_1d',
'to_array_2d',
'to_matrix',
'to_row_vector',
'to_vector',
'trace',
'trace_gen_quad_form',
'trace_quad_form',
'trigamma',
'trunc',
'uniform_ccdf_log',
'uniform_cdf',
'uniform_cdf_log',
'uniform_log',
'uniform_rng',
'variance',
'von_mises_log',
'von_mises_rng',
'weibull_ccdf_log',
'weibull_cdf',
'weibull_cdf_log',
'weibull_log',
'weibull_rng',
'wiener_log',
'wishart_log',
'wishart_rng'
)
# Names of the Stan sampling distributions (each also appears above in
# FUNCTIONS through its ``*_log``/``*_rng`` variants); kept sorted.
DISTRIBUTIONS = (
    'bernoulli',
    'bernoulli_logit',
    'beta',
    'beta_binomial',
    'binomial',
    'binomial_logit',
    'categorical',
    'categorical_logit',
    'cauchy',
    'chi_square',
    'dirichlet',
    'double_exponential',
    'exp_mod_normal',
    'exponential',
    'frechet',
    'gamma',
    'gaussian_dlm_obs',
    'gumbel',
    'hypergeometric',
    'inv_chi_square',
    'inv_gamma',
    'inv_wishart',
    'lkj_corr',
    'lkj_corr_cholesky',
    'logistic',
    'lognormal',
    'multi_gp',
    'multi_gp_cholesky',
    'multi_normal',
    'multi_normal_cholesky',
    'multi_normal_prec',
    'multi_student_t',
    'multinomial',
    'neg_binomial',
    'neg_binomial_2',
    'neg_binomial_2_log',
    'normal',
    'ordered_logistic',
    'pareto',
    'pareto_type_2',
    'poisson',
    'poisson_log',
    'rayleigh',
    'scaled_inv_chi_square',
    'skew_normal',
    'student_t',
    'uniform',
    'von_mises',
    'weibull',
    'wiener',
    'wishart'
)
# Identifiers that may not be used as Stan variable names: the C++
# keywords (presumably because Stan transpiles to C++) plus a few
# Stan-internal names such as ``fvar`` and ``var`` -- TODO confirm the
# internal ones against the Stan reference.
# Fix: the generated original listed 'false' and 'true' twice each;
# the duplicates are dropped here (membership tests are unaffected).
RESERVED = (
    'alignas',
    'alignof',
    'and',
    'and_eq',
    'asm',
    'auto',
    'bitand',
    'bitor',
    'bool',
    'break',
    'case',
    'catch',
    'char',
    'char16_t',
    'char32_t',
    'class',
    'compl',
    'const',
    'const_cast',
    'constexpr',
    'continue',
    'decltype',
    'default',
    'delete',
    'do',
    'double',
    'dynamic_cast',
    'enum',
    'explicit',
    'export',
    'extern',
    'false',
    'float',
    'friend',
    'fvar',
    'goto',
    'inline',
    'int',
    'long',
    'mutable',
    'namespace',
    'new',
    'noexcept',
    'not',
    'not_eq',
    'nullptr',
    'operator',
    'or',
    'or_eq',
    'private',
    'protected',
    'public',
    'register',
    'reinterpret_cast',
    'repeat',
    'short',
    'signed',
    'sizeof',
    'static',
    'static_assert',
    'static_cast',
    'struct',
    'switch',
    'template',
    'then',
    'this',
    'thread_local',
    'throw',
    'true',
    'try',
    'typedef',
    'typeid',
    'typename',
    'union',
    'unsigned',
    'until',
    'using',
    'var',
    'virtual',
    'void',
    'volatile',
    'wchar_t',
    'xor',
    'xor_eq'
)
| gpl-3.0 |
pchrista/AliPhysics | PWGLF/NUCLEX/Nuclei/NucleiPbPb/macros_pp13TeV/CorrelationFraction.py | 19 | 2196 | import uproot
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.cm as cm
# Cut variations explored for the systematic-uncertainty study.
# Each entry is [histogram-name suffix, cut value].
shift_list = [[1, -2], [2, -1], [3, 1], [4, 2]]
dcaxy_list = [[0, 1.0], [1, 1.4]]
dcaz_list = [[0, 0.5], [1, 0.75], [2, 1.25], [3, 1.50]]
pid_list = [[0, 3.25], [1, 3.5]]
tpc_list = [[0, 60], [1, 65], [2, 75], [3, 80]]
width_list = [[4, -2], [2, -1], [1, +1], [3, 2]]
# Map: cut key -> [list of variations, x-axis label for the plot].
cuts = {"shift": [shift_list, "Bin shift"], "dcaxy": [dcaxy_list, "$DCA_{xy}$ (mm)"], "dcaz": [dcaz_list, "$DCA_{z}$ (cm)"], "pid": [
    pid_list, r"$n\sigma_{TPC}$"], "tpc": [tpc_list, "TPC clusters"], "width": [width_list, "Bin width"]}
inFile = uproot.open("spectra.root")
# Reference spectrum (default cuts); the last 3 bins are dropped --
# presumably empty/unused bins, TODO confirm against the ROOT file.
normHist = inFile["nuclei_deuterons_/deuterons/9/Joined/JoinedSpectraM9"]
norm = normHist.values[:-3]
# Bin centres (pT) of the kept bins.
pt = [0.5 * (x + y) for x, y in zip(normHist.edges[:-4], normHist.edges[1:-3])]
# Colour points by their pT bin centre.
cMap = plt.get_cmap('jet')
cNorm = matplotlib.colors.Normalize(vmin=min(pt), vmax=max(pt))
scalarMap = cm.ScalarMappable(norm=cNorm, cmap=cMap)
for key, record in cuts.items():
    # Collect (cut value, varied/reference ratio) points over all
    # variations and pT bins to quantify their correlation.
    x = np.array([])
    y = np.array([])
    fig, ax = plt.subplots()
    for obj_list in record[0]:
        obj_label = obj_list[0]
        obj_val = obj_list[1]
        values = inFile["nuclei_deuterons_{}{}/deuterons/9/Joined/JoinedSpectraM9".format(
            key, obj_label)].values[:-3]
        # Ratio of the varied spectrum to the reference spectrum.
        values = values / norm
        x = np.append(x, np.array([obj_val for _ in range(0, len(values))]))
        y = np.append(y, values)
        plt.scatter(np.array([obj_val for _ in range(0, len(values))]),
                    values, color=scalarMap.to_rgba(pt), edgecolors='none')
    scalarMap.set_array(pt)
    fig.colorbar(scalarMap).set_label("$p_{T}$ (GeV/$c$)")
    plt.ylabel("$S_{var}$ / $S_{ref}$")
    plt.xlabel(record[1])
    ax.tick_params(axis="y", direction="in")
    ax.tick_params(axis="x", direction="in")
    ax.xaxis.set_label_coords(0.9, -0.07)
    ax.yaxis.set_label_coords(-0.115, 0.9)
    plt.text(0.5, 0.92, 'This work', ha='center', va='center', transform=ax.transAxes, fontweight='bold', fontsize=14)
    # Pearson correlation between cut value and ratio, printed for the log.
    print(np.corrcoef(x, y))
    plt.savefig("{}.pdf".format(key))
# for name, keys in cuts:
| bsd-3-clause |
laslabs/odoo-connector-carepoint | connector_carepoint/tests/test_backend_adapter.py | 1 | 11309 | # -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import mock
from odoo.addons.connector_carepoint.unit import backend_adapter
from .common import SetUpCarepointBase
model = 'odoo.addons.connector_carepoint.unit.backend_adapter'
class TestBackendAdapter(SetUpCarepointBase):
    """Unit tests for ``backend_adapter.CarepointCRUDAdapter``.

    The carepoint API client is mocked throughout (``self.mock_api``),
    so every test asserts only how the adapter drives the client.
    """

    def setUp(self):
        super(TestBackendAdapter, self).setUp()
        # Adapter class under test; instantiated per-test via ``_init_model``.
        self.Model = backend_adapter.CarepointCRUDAdapter

    def _init_model(self, model='carepoint.carepoint.store'):
        # Build an adapter bound to ``model`` and remember the CamelCase
        # name the carepoint client exposes its table under.
        self.model = self.env[model]
        self.api_camel = self.__to_camel_case(self.model._cp_lib)
        return self.Model(self.get_carepoint_helper(model))

    def __to_camel_case(self, snake_case):
        """ Convert the snake_case to CamelCase

        :param snake_case: To convert
        :type snake_case: str
        :rtype: str
        """
        parts = snake_case.split('_')
        return "".join(x.title() for x in parts)

    def test_init_new_connection(self):
        """ It should initialize a new connection when none for backend """
        with self.mock_api() as api:
            self._init_model()
            api.assert_called_once_with(
                server=self.backend.server,
                user=self.backend.username,
                passwd=self.backend.password,
                db_args={'drv': self.backend.db_driver},
            )

    def test_init_assigns_instance(self):
        """ It should assign carepoint instance variable during init """
        with self.mock_api() as api:
            expect = 'expect'
            api.return_value = expect
            res = self._init_model()
            self.assertEqual(expect, res.carepoint)

    def test_get_cp_model_resets(self):
        """ It should guard a reconnectable exception and clear globals """
        with self.mock_api():
            model = self._init_model()
            model.carepoint = mock.MagicMock()
            # First table lookup fails with a reconnectable error.
            model.carepoint.__getitem__.side_effect = [
                model.RECONNECT_EXCEPTIONS[0],
            ]
            model.carepoint._init_env.side_effect = self.EndTestException
            with self.assertRaises(self.EndTestException):
                model.search()

    def test_get_cp_model_recurse(self):
        """ It should recurse when reconnectable exception is identified """
        with self.mock_api():
            model = self._init_model()
            model.carepoint = mock.MagicMock()
            # Fails once, then the retry reaches the sentinel exception.
            model.carepoint.__getitem__.side_effect = [
                model.RECONNECT_EXCEPTIONS[0],
                self.EndTestException,
            ]
            with self.assertRaises(self.EndTestException):
                model.search()

    def test_get_cp_model_raise(self):
        """ It should re-raise when the reconnection attempt also fails """
        with self.mock_api():
            model = self._init_model()
            model.carepoint = mock.MagicMock()
            # Reconnectable error on both the first try and the retry.
            model.carepoint.__getitem__.side_effect = [
                model.RECONNECT_EXCEPTIONS[0],
                model.RECONNECT_EXCEPTIONS[0],
            ]
            with self.assertRaises(model.RECONNECT_EXCEPTIONS[0]):
                model.search()

    def test_search_gets_pks(self):
        """ It should get the primary keys of the db """
        with self.mock_api() as api:
            expect = {
                'col1': 'Test',
                'col2': 1234,
            }
            self._init_model().search(**expect)
            api().get_pks.assert_called_once_with(
                api()[self.api_camel]
            )

    def test_search_does_search(self):
        """ It should search w/ filters and PK """
        with self.mock_api() as api:
            expect = {
                'col1': 'Test',
                'col2': 1234,
            }
            self._init_model().search(**expect)
            api().search.assert_called_once_with(
                api()[self.api_camel],
                expect,
                [api().get_pks().__getitem__()],
            )

    def test_read_gets_pks(self):
        """ It should get the primary keys of the db """
        with self.mock_api() as api:
            expect = 5
            self._init_model().read(expect)
            api().get_pks.assert_called_once_with(
                api()[self.api_camel]
            )

    def test_read_searches(self):
        """ It should search for ID w/ attributes """
        with self.mock_api() as api:
            attr_expect = ['col1', 'col2']
            pk_expect = ['pk1', 'pk2']
            # Composite IDs are comma-joined strings, one part per PK.
            id_expect = '123,456'
            api().get_pks.return_value = pk_expect
            self._init_model().read(id_expect, attr_expect)
            api().search.assert_called_once_with(
                api()[self.api_camel],
                dict(zip(pk_expect, id_expect.split(','))),
                attr_expect,
            )

    def test_read_returns_first(self):
        """ It should return first record result """
        with self.mock_api() as api:
            res = self._init_model().read(123, ['expect', 'no_expect'])
            self.assertEqual(api().search()[0], res)

    def test_read_returns_all(self):
        """ It should return all record results when return_all is set """
        with self.mock_api() as api:
            res = self._init_model().read(123, ['expect', 'no_expect'], True)
            self.assertEqual(api().search(), res)

    def test_read_image_gets_file(self):
        """ It should get proper file path from server """
        with self.mock_api() as api:
            expect = '/path/to/obj'
            self._init_model().read_image(expect)
            api().get_file.assert_called_once_with(expect)

    def test_read_image_encodes_file_obj(self):
        """ It should base64 encode the resulting file obj """
        with self.mock_api() as api:
            self._init_model().read_image('/path/to/obj')
            api().get_file().read().encode.assert_called_once_with(
                'base64',
            )

    def test_read_image_returns_encoded_file(self):
        """ It should return the encoded file string """
        with self.mock_api() as api:
            res = self._init_model().read_image('/path/to/obj')
            self.assertEqual(
                api().get_file().read().encode(), res,
            )

    def test_write_image_sends_file(self):
        """ It should send file obj to proper path on server """
        with self.mock_api() as api:
            expect = ['path', 'file']
            self._init_model().write_image(*expect)
            api().send_file.assert_called_once_with(*expect)

    def test_write_image_returns_result(self):
        """ It should return the result of the file transfer """
        with self.mock_api() as api:
            res = self._init_model().write_image('path', 'file')
            self.assertEqual(api().send_file(), res)

    def test_search_read_searches(self):
        """ It should search for ID w/ attributes """
        with self.mock_api() as api:
            attr_expect = ['col1', 'col2']
            filter_expect = {'col4': 1234, 'col8': 'test'}
            self._init_model().search_read(attr_expect, **filter_expect)
            api().search.assert_called_once_with(
                api()[self.api_camel],
                filter_expect,
                attr_expect,
            )

    def test_search_read_returns_result(self):
        """ It should return result of search """
        with self.mock_api() as api:
            attr_expect = ['col1', 'col2']
            filter_expect = {'col4': 1234, 'col8': 'test'}
            res = self._init_model().search_read(attr_expect, **filter_expect)
            self.assertEqual(api().search(), res)

    def test_create_creates(self):
        """ It should create w/ proper vals """
        with self.mock_api() as api:
            expect = {'data': 'test', 'col': 12323423}
            self._init_model().create(expect)
            api().create.assert_called_once_with(
                api()[self.api_camel],
                expect,
            )

    def test_create_gets_pks(self):
        """ It should get primary keys of model """
        with self.mock_api() as api:
            model = self._init_model()
            model.create({'data': 'test', 'col': 12323423})
            api().get_pks.assert_called_once_with(
                api()[self.api_camel],
            )

    def test_create_gets_sequences(self):
        """ It should get next sequence for PK """
        expect = mock.MagicMock()
        with self.mock_api() as api:
            model = self._init_model()
            api().get_pks.return_value = [expect]
            model.create({'data': 'test', 'col': 12323423})
            api().get_next_sequence.assert_called_once_with(
                expect,
            )

    def test_create_returns_pks(self):
        """ It should return comma joined PKs of new record """
        expect = ['col', 'no_exist']
        with self.mock_api() as api:
            model = self._init_model()
            api().get_pks.return_value = expect
            # ``expect`` is deliberately rebound to the create values here.
            expect = {'data': 'test', 'col': 12323423}
            res = model.create(expect)
            self.assertEqual(
                '%s,%s' % (
                    str(expect['col']),
                    api().get_next_sequence(),
                ),
                res,
            )

    def test_delete_deletes(self):
        """ It should delete w/ proper vals """
        with self.mock_api() as api:
            expect = 123
            self._init_model().delete(expect)
            api().delete.assert_called_once_with(
                api()[self.api_camel],
                expect,
            )

    def test_delete_returns_result(self):
        """ It should return result of delete operation """
        with self.mock_api() as api:
            res = self._init_model().delete(123)
            self.assertEqual(api().delete(), res)

    def test_write_reads(self):
        """ It should get record for id """
        expect1, expect2 = 123, {'test': 'TEST'}
        with self.mock_api():
            model = self._init_model()
            with mock.patch.object(model, 'read') as read:
                model.write(expect1, expect2)
                read.assert_called_once_with(
                    expect1,
                    return_all=True,
                )

    def test_write_updates(self):
        """ It should update record w/ data """
        expect1, expect2 = 123, {'test': 'TEST'}
        with self.mock_api() as api:
            self._init_model().write(expect1, expect2)
            api().search().update.assert_called_once_with(
                expect2,
            )

    def test_write_commits(self):
        """ It should commit update to session """
        expect1, expect2 = 123, {'test': 'TEST'}
        with self.mock_api() as api:
            self._init_model().write(expect1, expect2)
            api().search().session.commit.assert_called_once_with()

    def test_write_returns(self):
        """ It should return record object """
        expect1, expect2 = 123, {'test': 'TEST'}
        with self.mock_api() as api:
            res = self._init_model().write(expect1, expect2)
            self.assertEqual(
                api().search(),
                res
            )
| agpl-3.0 |
KristianOellegaard/django-health-check | health_check/contrib/migrations/backends.py | 1 | 1061 | import logging
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections
from django.db.migrations.executor import MigrationExecutor
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import ServiceUnavailable
logger = logging.getLogger(__name__)
class MigrationsHealthCheck(BaseHealthCheckBackend):
    """Health check that fails while database migrations are pending."""

    def get_migration_plan(self, executor):
        """Return the migrations still required to reach the latest state."""
        return executor.migration_plan(executor.loader.graph.leaf_nodes())

    def check_status(self):
        """Flag an error for pending migrations or an unreachable database."""
        db_alias = getattr(settings, 'HEALTHCHECK_MIGRATIONS_DB', DEFAULT_DB_ALIAS)
        try:
            pending = self.get_migration_plan(MigrationExecutor(connections[db_alias]))
            if pending:
                self.add_error(ServiceUnavailable("There are migrations to apply"))
        except DatabaseError as exc:
            self.add_error(ServiceUnavailable("Database is not ready"), exc)
        except Exception as exc:
            self.add_error(ServiceUnavailable("Unexpected error"), exc)
| mit |
mrquim/repository.mrquim | repo/script.module.youtube.dl/lib/youtube_dl/extractor/cbssports.py | 48 | 1054 | from __future__ import unicode_literals
from .cbs import CBSBaseIE
class CBSSportsIE(CBSBaseIE):
    """Extractor for video player pages on cbssports.com.

    All heavy lifting is delegated to ``CBSBaseIE._extract_feed_info``;
    this class only supplies the feed query and account tokens.
    """
    _VALID_URL = r'https?://(?:www\.)?cbssports\.com/video/player/[^/]+/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://www.cbssports.com/video/player/videos/708337219968/0/ben-simmons-the-next-lebron?-not-so-fast',
        'info_dict': {
            'id': '708337219968',
            'ext': 'mp4',
            'title': 'Ben Simmons the next LeBron? Not so fast',
            'description': 'md5:854294f627921baba1f4b9a990d87197',
            'timestamp': 1466293740,
            'upload_date': '20160618',
            'uploader': 'CBSI-NEW',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        }
    }]

    def _extract_video_info(self, filter_query, video_id):
        # 'dJ5BDC' / 'VxxJg8Ymh8sE' look like CBS account/player tokens
        # passed through to the feed API -- NOTE(review): confirm their
        # meaning against CBSBaseIE._extract_feed_info.
        return self._extract_feed_info('dJ5BDC', 'VxxJg8Ymh8sE', filter_query, video_id)

    def _real_extract(self, url):
        # The numeric ID captured by _VALID_URL selects the feed item.
        video_id = self._match_id(url)
        return self._extract_video_info('byId=%s' % video_id, video_id)
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.