repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
fengxiaoiie/volatility | volatility/plugins/overlays/windows/vista_sp1_x64_vtypes.py | 58 | 460142 | ntkrnlmp_types = {
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x90, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'Data' : [ 0x40, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_CONFIGURATION_COMPONENT' : [ 0x28, {
'Class' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SystemClass', 1: 'ProcessorClass', 2: 'CacheClass', 3: 'AdapterClass', 4: 'ControllerClass', 5: 'PeripheralClass', 6: 'MemoryClass', 7: 'MaximumClass'})]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ArcSystem', 1: 'CentralProcessor', 2: 'FloatingPointProcessor', 3: 'PrimaryIcache', 4: 'PrimaryDcache', 5: 'SecondaryIcache', 6: 'SecondaryDcache', 7: 'SecondaryCache', 8: 'EisaAdapter', 9: 'TcAdapter', 10: 'ScsiAdapter', 11: 'DtiAdapter', 12: 'MultiFunctionAdapter', 13: 'DiskController', 14: 'TapeController', 15: 'CdromController', 16: 'WormController', 17: 'SerialController', 18: 'NetworkController', 19: 'DisplayController', 20: 'ParallelController', 21: 'PointerController', 22: 'KeyboardController', 23: 'AudioController', 24: 'OtherController', 25: 'DiskPeripheral', 26: 'FloppyDiskPeripheral', 27: 'TapePeripheral', 28: 'ModemPeripheral', 29: 'MonitorPeripheral', 30: 'PrinterPeripheral', 31: 'PointerPeripheral', 32: 'KeyboardPeripheral', 33: 'TerminalPeripheral', 34: 'OtherPeripheral', 35: 'LinePeripheral', 36: 'NetworkPeripheral', 37: 'SystemMemory', 38: 'DockingInformation', 39: 'RealModeIrqRoutingTable', 40: 'RealModePCIEnumeration', 41: 'MaximumType'})]],
'Flags' : [ 0x8, ['_DEVICE_FLAGS']],
'Version' : [ 0xc, ['unsigned short']],
'Revision' : [ 0xe, ['unsigned short']],
'Key' : [ 0x10, ['unsigned long']],
'AffinityMask' : [ 0x14, ['unsigned long']],
'ConfigurationDataLength' : [ 0x18, ['unsigned long']],
'IdentifierLength' : [ 0x1c, ['unsigned long']],
'Identifier' : [ 0x20, ['pointer64', ['unsigned char']]],
} ],
'_KTRANSACTION' : [ 0x2d8, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x20, ['_KMUTANT']],
'TreeTx' : [ 0x58, ['pointer64', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x88, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0xb0, ['_GUID']],
'State' : [ 0xc0, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0xc4, ['unsigned long']],
'EnlistmentHead' : [ 0xc8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xd8, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0xdc, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0xe0, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0xe4, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0xe8, ['unsigned long']],
'PendingResponses' : [ 0xec, ['unsigned long']],
'SuperiorEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'LastLsn' : [ 0xf8, ['_CLS_LSN']],
'PromotedEntry' : [ 0x100, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0x110, ['pointer64', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0x118, ['pointer64', ['void']]],
'IsolationLevel' : [ 0x120, ['unsigned long']],
'IsolationFlags' : [ 0x124, ['unsigned long']],
'Timeout' : [ 0x128, ['_LARGE_INTEGER']],
'Description' : [ 0x130, ['_UNICODE_STRING']],
'RollbackThread' : [ 0x140, ['pointer64', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0x148, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0x168, ['_KDPC']],
'RollbackTimer' : [ 0x1a8, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x1e8, ['_LIST_ENTRY']],
'Outcome' : [ 0x1f8, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x200, ['pointer64', ['_KTM']]],
'CommitReservation' : [ 0x208, ['long long']],
'TransactionHistory' : [ 0x210, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x260, ['unsigned long']],
'DTCPrivateInformation' : [ 0x268, ['pointer64', ['void']]],
'DTCPrivateInformationLength' : [ 0x270, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x278, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x2b0, ['pointer64', ['void']]],
'PendingPromotionCount' : [ 0x2b8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x2c0, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x60, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x10, ['pointer64', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0x18, ['pointer64', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x38, ['pointer64', ['_CM_TRANS']]],
'UoWState' : [ 0x40, ['unsigned long']],
'ActionType' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x50, ['unsigned long']],
'OldValueCell' : [ 0x50, ['unsigned long']],
'NewValueCell' : [ 0x54, ['unsigned long']],
'UserFlags' : [ 0x50, ['unsigned long']],
'LastWriteTime' : [ 0x50, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x50, ['unsigned long']],
'OldChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x58, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x58, ['unsigned long']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_202d' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_202f' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_202d']],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_202f']],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x38, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'RealRefCount' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['wchar']]],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 12, native_type='unsigned long')]],
} ],
'_PO_IRP_QUEUE' : [ 0x10, {
'CurrentIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'PendingIrpList' : [ 0x8, ['pointer64', ['_IRP']]],
} ],
'__unnamed_2041' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x48, ['__unnamed_2041']],
'ChildrenCount' : [ 0x4c, ['long']],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0xa8, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x68, ['_KMUTANT']],
'LinksOffset' : [ 0xa0, ['unsigned short']],
'GuidOffset' : [ 0xa2, ['unsigned short']],
'Expired' : [ 0xa4, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'LinkTargetRemaining' : [ 0x18, ['_UNICODE_STRING']],
'LinkTargetObject' : [ 0x28, ['pointer64', ['void']]],
'DosDeviceDriveIndex' : [ 0x30, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x28, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x10, ['pointer64', ['void']]],
'Key' : [ 0x18, ['unsigned long long']],
'BindingProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x40, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x30, ['array', 3, ['unsigned long']]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x50, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x40, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x20, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer64', ['_HEAP_SUBSEGMENT']]],
'Reserved' : [ 0x8, ['pointer64', ['void']]],
'SizeIndex' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long long']],
} ],
'_PPM_DIA_STATS' : [ 0xc, {
'PerfLevel' : [ 0x0, ['unsigned long']],
'IdleTime' : [ 0x4, ['unsigned long']],
'TimeInterval' : [ 0x8, ['unsigned long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_STACK_TABLE' : [ 0x8088, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x8, ['array', 16, ['pointer64', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x88, ['array', 16381, ['unsigned short']]],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_DEFERRED_WRITE' : [ 0x50, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
'LimitModifiedPages' : [ 0x48, ['unsigned char']],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'GpValue' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'ImageFlags' : [ 0x33, ['unsigned char']],
'ComPlusNativeReady' : [ 0x33, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x33, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x33, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x33, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x33, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'CheckSum' : [ 0x3c, ['unsigned long']],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1b, {
'PerUserPolicy' : [ 0x0, ['array', 27, ['unsigned char']]],
} ],
'__unnamed_2098' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_209a' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_209e' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_20a2' : [ 0x10, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x8, ['unsigned char']],
} ],
'__unnamed_20a4' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_2098']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_209a']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_209e']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_20a2']],
'Others' : [ 0x0, ['__unnamed_20a4']],
} ],
'_PROFILE_PARAMETER_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'DockingState' : [ 0x4, ['unsigned short']],
'Capabilities' : [ 0x6, ['unsigned short']],
'DockID' : [ 0x8, ['unsigned long']],
'SerialNumber' : [ 0xc, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0x178, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'VerifyOnWake' : [ 0x3, ['unsigned char']],
'Reset' : [ 0x4, ['unsigned char']],
'HiberFlags' : [ 0x5, ['unsigned char']],
'WroteHiberFile' : [ 0x6, ['unsigned char']],
'Lock' : [ 0x8, ['unsigned long long']],
'MapFrozen' : [ 0x10, ['unsigned char']],
'MemoryMap' : [ 0x18, ['_RTL_BITMAP']],
'DiscardedMemoryPages' : [ 0x28, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x38, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x48, ['unsigned long']],
'NextCloneRange' : [ 0x50, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x58, ['unsigned long long']],
'LoaderMdl' : [ 0x60, ['pointer64', ['_MDL']]],
'AllocatedMdl' : [ 0x68, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x70, ['unsigned long long']],
'IoPages' : [ 0x78, ['pointer64', ['void']]],
'IoPagesCount' : [ 0x80, ['unsigned long']],
'CurrentMcb' : [ 0x88, ['pointer64', ['void']]],
'DumpStack' : [ 0x90, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x98, ['pointer64', ['_KPROCESSOR_STATE']]],
'HiberVa' : [ 0xa0, ['unsigned long long']],
'HiberPte' : [ 0xa8, ['_LARGE_INTEGER']],
'Status' : [ 0xb0, ['long']],
'MemoryImage' : [ 0xb8, ['pointer64', ['PO_MEMORY_IMAGE']]],
'TableHead' : [ 0xc0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'CompressionWorkspace' : [ 0xc8, ['pointer64', ['unsigned char']]],
'CompressedWriteBuffer' : [ 0xd0, ['pointer64', ['unsigned char']]],
'PerformanceStats' : [ 0xd8, ['pointer64', ['unsigned long']]],
'CompressionBlock' : [ 0xe0, ['pointer64', ['void']]],
'DmaIO' : [ 0xe8, ['pointer64', ['void']]],
'TemporaryHeap' : [ 0xf0, ['pointer64', ['void']]],
'PerfInfo' : [ 0xf8, ['_PO_HIBER_PERF']],
'BootLoaderLogMdl' : [ 0x158, ['pointer64', ['_MDL']]],
'FirmwareRuntimeInformationMdl' : [ 0x160, ['pointer64', ['_MDL']]],
'ResumeContext' : [ 0x168, ['pointer64', ['void']]],
'ResumeContextPages' : [ 0x170, ['unsigned long']],
} ],
'_OBJECT_REF_TRACE' : [ 0x80, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer64', ['void']]]],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x110, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0xa0, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0xa8, ['pointer64', ['void']]],
'PointersLength' : [ 0xb0, ['unsigned long']],
'ModulePrefix' : [ 0xb8, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0xc0, ['_LIST_ENTRY']],
'InitMsg' : [ 0xd0, ['_STRING']],
'ProgMsg' : [ 0xe0, ['_STRING']],
'DoneMsg' : [ 0xf0, ['_STRING']],
'FileObject' : [ 0x100, ['pointer64', ['void']]],
'UsageType' : [ 0x108, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x40, {
'ThreadHandle' : [ 0x0, ['pointer64', ['void']]],
'ThreadId' : [ 0x8, ['pointer64', ['void']]],
'ProcessId' : [ 0x10, ['pointer64', ['void']]],
'Code' : [ 0x18, ['unsigned long']],
'Parameter1' : [ 0x20, ['unsigned long long']],
'Parameter2' : [ 0x28, ['unsigned long long']],
'Parameter3' : [ 0x30, ['unsigned long long']],
'Parameter4' : [ 0x38, ['unsigned long long']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x10, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'ImageMerge' : [ 0x8, ['pointer64', ['void']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_WHEA_GENERIC_PROCESSOR_ERROR_VALIDBITS' : [ 0x8, {
'ProcessorType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'InstructionSet' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Operation' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Flags' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Level' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'CPUVersion' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'CPUBrandString' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'ProcessorId' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'TargetAddress' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'InstructionPointer' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_20ce' : [ 0x20, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_20ce']],
} ],
'__unnamed_20d2' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_20d2']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0x140, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'HiberPte' : [ 0x48, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x50, ['unsigned long']],
'FreeMapCheck' : [ 0x54, ['unsigned long']],
'WakeCheck' : [ 0x58, ['unsigned long']],
'TotalPages' : [ 0x60, ['unsigned long long']],
'FirstTablePage' : [ 0x68, ['unsigned long long']],
'LastFilePage' : [ 0x70, ['unsigned long long']],
'PerfInfo' : [ 0x78, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0xd8, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0xe0, ['array', 1, ['unsigned long long']]],
'NoBootLoaderLogPages' : [ 0xe8, ['unsigned long']],
'BootLoaderLogPages' : [ 0xf0, ['array', 8, ['unsigned long long']]],
'NotUsed' : [ 0x130, ['unsigned long']],
'ResumeContextCheck' : [ 0x134, ['unsigned long']],
'ResumeContextPages' : [ 0x138, ['unsigned long']],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x60, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'StartCount' : [ 0x18, ['unsigned long long']],
'ElapsedTime' : [ 0x20, ['unsigned long']],
'IoTime' : [ 0x24, ['unsigned long']],
'CopyTime' : [ 0x28, ['unsigned long']],
'InitTime' : [ 0x2c, ['unsigned long']],
'PagesWritten' : [ 0x30, ['unsigned long']],
'PagesProcessed' : [ 0x34, ['unsigned long']],
'BytesCopied' : [ 0x38, ['unsigned long']],
'DumpCount' : [ 0x3c, ['unsigned long']],
'FileRuns' : [ 0x40, ['unsigned long']],
'ResumeAppStartTime' : [ 0x48, ['unsigned long long']],
'ResumeAppEndTime' : [ 0x50, ['unsigned long long']],
'HiberFileResumeTime' : [ 0x58, ['unsigned long long']],
} ],
'_DEVICE_FLAGS' : [ 0x4, {
'Failed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Removable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ConsoleIn' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConsoleOut' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Input' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Output' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
} ],
'_RTL_BALANCED_LINKS' : [ 0x20, {
'Parent' : [ 0x0, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x8, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x10, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0x18, ['unsigned char']],
'Reserved' : [ 0x19, ['array', 3, ['unsigned char']]],
} ],
'_MMVIEW' : [ 0x10, {
'Entry' : [ 0x0, ['unsigned long long']],
'Writable' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'ControlArea' : [ 0x8, ['pointer64', ['_CONTROL_AREA']]],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Flags' : [ 0x28, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x2c, ['unsigned short']],
'SpareUSHORT' : [ 0x2e, ['unsigned short']],
} ],
'__unnamed_20f1' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_20f3' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_20f5' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_20f7' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceIds' : [ 0x8, ['array', 1, ['wchar']]],
} ],
'__unnamed_20f9' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_20fb' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_20fd' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_20ff' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_2101' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_2103' : [ 0x1c, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'PowerSettingChanged' : [ 0x10, ['unsigned char']],
'DataLength' : [ 0x14, ['unsigned long']],
'Data' : [ 0x18, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_2105' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_20f1']],
'TargetDevice' : [ 0x0, ['__unnamed_20f3']],
'InstallDevice' : [ 0x0, ['__unnamed_20f5']],
'CustomNotification' : [ 0x0, ['__unnamed_20f7']],
'ProfileNotification' : [ 0x0, ['__unnamed_20f9']],
'PowerNotification' : [ 0x0, ['__unnamed_20fb']],
'VetoNotification' : [ 0x0, ['__unnamed_20fd']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_20ff']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_2101']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_2103']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x50, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'PowerEvent', 7: 'VetoEvent', 8: 'BlockedDriverEvent', 9: 'InvalidIDEvent', 10: 'PowerSettingChange', 11: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_2105']],
} ],
'_HEADLESS_LOADER_BLOCK' : [ 0x40, {
'UsedBiosSettings' : [ 0x0, ['unsigned char']],
'DataBits' : [ 0x1, ['unsigned char']],
'StopBits' : [ 0x2, ['unsigned char']],
'Parity' : [ 0x3, ['unsigned char']],
'BaudRate' : [ 0x4, ['unsigned long']],
'PortNumber' : [ 0x8, ['unsigned long']],
'PortAddress' : [ 0x10, ['pointer64', ['unsigned char']]],
'PciDeviceId' : [ 0x18, ['unsigned short']],
'PciVendorId' : [ 0x1a, ['unsigned short']],
'PciBusNumber' : [ 0x1c, ['unsigned char']],
'PciBusSegment' : [ 0x1e, ['unsigned short']],
'PciSlotNumber' : [ 0x20, ['unsigned char']],
'PciFunctionNumber' : [ 0x21, ['unsigned char']],
'PciFlags' : [ 0x24, ['unsigned long']],
'SystemGUID' : [ 0x28, ['_GUID']],
'IsMMIODevice' : [ 0x38, ['unsigned char']],
'TerminalType' : [ 0x39, ['unsigned char']],
} ],
'_POWER_CHANNEL_SUMMARY' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'TotalCount' : [ 0x4, ['unsigned long']],
'D0Count' : [ 0x8, ['unsigned long']],
'NotifyList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_PO_MEMORY_RANGE_ARRAY' : [ 0x20, {
'Range' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_RANGE']],
'Link' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_LINK']],
} ],
'__unnamed_211c' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_211e' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_2120' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_211c']],
'Gpt' : [ 0x0, ['__unnamed_211e']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0xa0, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_2120']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x48, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP']],
'Hint' : [ 0x10, ['unsigned long']],
'BasePte' : [ 0x18, ['pointer64', ['_MMPTE']]],
'FailureCount' : [ 0x20, ['pointer64', ['unsigned long']]],
'Vm' : [ 0x28, ['pointer64', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x30, ['long']],
'TotalFreeSystemPtes' : [ 0x34, ['long']],
'CachedPteCount' : [ 0x38, ['long']],
'PteFailures' : [ 0x3c, ['unsigned long']],
'GlobalMutex' : [ 0x40, ['pointer64', ['_KGUARDED_MUTEX']]],
} ],
'_NETWORK_LOADER_BLOCK' : [ 0x20, {
'DHCPServerACK' : [ 0x0, ['pointer64', ['unsigned char']]],
'DHCPServerACKLength' : [ 0x8, ['unsigned long']],
'BootServerReplyPacket' : [ 0x10, ['pointer64', ['unsigned char']]],
'BootServerReplyPacketLength' : [ 0x18, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x250, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 8, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_PO_MEMORY_RANGE_ARRAY_RANGE' : [ 0x20, {
'PageNo' : [ 0x0, ['unsigned long long']],
'StartPage' : [ 0x8, ['unsigned long long']],
'EndPage' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x48, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x18, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x28, ['_LIST_ENTRY']],
'WaitS0' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_VI_DEADLOCK_ADDRESS_RANGE' : [ 0x10, {
'Start' : [ 0x0, ['pointer64', ['unsigned char']]],
'End' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_PO_MEMORY_RANGE_ARRAY_LINK' : [ 0x18, {
'Next' : [ 0x0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'NextTable' : [ 0x8, ['unsigned long long']],
'CheckSum' : [ 0x10, ['unsigned long']],
'EntryCount' : [ 0x14, ['unsigned long']],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_ETW_REPLY_QUEUE' : [ 0x48, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x40, ['long']],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_2150' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_2154' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_2150']],
'Bits' : [ 0x4, ['__unnamed_2154']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x68, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x20, ['pointer64', ['void']]],
'WhichOrderedElement' : [ 0x28, ['unsigned long']],
'NumberGenericTableElements' : [ 0x2c, ['unsigned long']],
'DepthOfTree' : [ 0x30, ['unsigned long']],
'RestartKey' : [ 0x38, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x40, ['unsigned long']],
'CompareRoutine' : [ 0x48, ['pointer64', ['void']]],
'AllocateRoutine' : [ 0x50, ['pointer64', ['void']]],
'FreeRoutine' : [ 0x58, ['pointer64', ['void']]],
'TableContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_101f' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_101f']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1024' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1024']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'__unnamed_103d' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_103f' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_103d']],
} ],
'_TP_CALLBACK_ENVIRON' : [ 0x40, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x8, ['pointer64', ['_TP_POOL']]],
'CleanupGroup' : [ 0x10, ['pointer64', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0x18, ['pointer64', ['void']]],
'RaceDll' : [ 0x20, ['pointer64', ['void']]],
'ActivationContext' : [ 0x28, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x30, ['pointer64', ['void']]],
'u' : [ 0x38, ['__unnamed_103f']],
} ],
'_TP_TASK_CALLBACKS' : [ 0x10, {
'ExecuteCallback' : [ 0x0, ['pointer64', ['void']]],
'Unposted' : [ 0x8, ['pointer64', ['void']]],
} ],
'_TP_TASK' : [ 0x8, {
'Callbacks' : [ 0x0, ['pointer64', ['_TP_TASK_CALLBACKS']]],
} ],
'_TP_DIRECT' : [ 0x8, {
'Callback' : [ 0x0, ['pointer64', ['void']]],
} ],
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
'_KPRCB' : [ 0x3b20, {
'MxCsr' : [ 0x0, ['unsigned long']],
'Number' : [ 0x4, ['unsigned short']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'NestingLevel' : [ 0x20, ['unsigned char']],
'Group' : [ 0x21, ['unsigned char']],
'PrcbPad00' : [ 0x22, ['array', 6, ['unsigned char']]],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'SetMember' : [ 0x38, ['unsigned long long']],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'CpuStepping' : [ 0x5f2, ['unsigned char']],
'CpuModel' : [ 0x5f3, ['unsigned char']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'CoresPerPhysicalProcessor' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x63f, ['unsigned char']],
'ApicMask' : [ 0x640, ['unsigned long']],
'CFlushSize' : [ 0x644, ['unsigned long']],
'AcpiReserved' : [ 0x648, ['pointer64', ['void']]],
'InitialApicId' : [ 0x650, ['unsigned long']],
'Stride' : [ 0x654, ['unsigned long']],
'PrcbPad01' : [ 0x658, ['array', 3, ['unsigned long long']]],
'LockQueue' : [ 0x670, ['array', 49, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x980, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0xa80, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0x1680, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PacketBarrier' : [ 0x2280, ['unsigned long long']],
'DeferredReadyListHead' : [ 0x2288, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0x2290, ['long']],
'MmCopyOnWriteCount' : [ 0x2294, ['long']],
'MmTransitionCount' : [ 0x2298, ['long']],
'MmDemandZeroCount' : [ 0x229c, ['long']],
'MmPageReadCount' : [ 0x22a0, ['long']],
'MmPageReadIoCount' : [ 0x22a4, ['long']],
'MmDirtyPagesWriteCount' : [ 0x22a8, ['long']],
'MmDirtyWriteIoCount' : [ 0x22ac, ['long']],
'MmMappedPagesWriteCount' : [ 0x22b0, ['long']],
'MmMappedWriteIoCount' : [ 0x22b4, ['long']],
'KeSystemCalls' : [ 0x22b8, ['unsigned long']],
'KeContextSwitches' : [ 0x22bc, ['unsigned long']],
'CcFastReadNoWait' : [ 0x22c0, ['unsigned long']],
'CcFastReadWait' : [ 0x22c4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x22c8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x22cc, ['unsigned long']],
'CcCopyReadWait' : [ 0x22d0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x22d4, ['unsigned long']],
'LookasideIrpFloat' : [ 0x22d8, ['long']],
'IoReadOperationCount' : [ 0x22dc, ['long']],
'IoWriteOperationCount' : [ 0x22e0, ['long']],
'IoOtherOperationCount' : [ 0x22e4, ['long']],
'IoReadTransferCount' : [ 0x22e8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x22f0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x22f8, ['_LARGE_INTEGER']],
'TargetSet' : [ 0x2300, ['unsigned long long']],
'IpiFrozen' : [ 0x2308, ['unsigned long']],
'PrcbPad3' : [ 0x230c, ['array', 116, ['unsigned char']]],
'RequestMailbox' : [ 0x2380, ['array', 64, ['_REQUEST_MAILBOX']]],
'SenderSummary' : [ 0x3380, ['unsigned long long']],
'PrcbPad4' : [ 0x3388, ['array', 120, ['unsigned char']]],
'DpcData' : [ 0x3400, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x3440, ['pointer64', ['void']]],
'SparePtr0' : [ 0x3448, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x3450, ['long']],
'DpcRequestRate' : [ 0x3454, ['unsigned long']],
'MinimumDpcRate' : [ 0x3458, ['unsigned long']],
'DpcInterruptRequested' : [ 0x345c, ['unsigned char']],
'DpcThreadRequested' : [ 0x345d, ['unsigned char']],
'DpcRoutineActive' : [ 0x345e, ['unsigned char']],
'DpcThreadActive' : [ 0x345f, ['unsigned char']],
'TimerHand' : [ 0x3460, ['unsigned long long']],
'TimerRequest' : [ 0x3460, ['unsigned long long']],
'TickOffset' : [ 0x3468, ['long']],
'MasterOffset' : [ 0x346c, ['long']],
'DpcLastCount' : [ 0x3470, ['unsigned long']],
'ThreadDpcEnable' : [ 0x3474, ['unsigned char']],
'QuantumEnd' : [ 0x3475, ['unsigned char']],
'PrcbPad50' : [ 0x3476, ['unsigned char']],
'IdleSchedule' : [ 0x3477, ['unsigned char']],
'DpcSetEventRequest' : [ 0x3478, ['long']],
'KeExceptionDispatchCount' : [ 0x347c, ['unsigned long']],
'DpcEvent' : [ 0x3480, ['_KEVENT']],
'PrcbPad51' : [ 0x3498, ['pointer64', ['void']]],
'CallDpc' : [ 0x34a0, ['_KDPC']],
'ClockKeepAlive' : [ 0x34e0, ['long']],
'ClockCheckSlot' : [ 0x34e4, ['unsigned char']],
'ClockPollCycle' : [ 0x34e5, ['unsigned char']],
'PrcbPad6' : [ 0x34e6, ['array', 2, ['unsigned char']]],
'DpcWatchdogPeriod' : [ 0x34e8, ['long']],
'DpcWatchdogCount' : [ 0x34ec, ['long']],
'PrcbPad70' : [ 0x34f0, ['array', 2, ['unsigned long long']]],
'WaitListHead' : [ 0x3500, ['_LIST_ENTRY']],
'WaitLock' : [ 0x3510, ['unsigned long long']],
'ReadySummary' : [ 0x3518, ['unsigned long']],
'QueueIndex' : [ 0x351c, ['unsigned long']],
'PrcbPad71' : [ 0x3520, ['array', 12, ['unsigned long long']]],
'DispatcherReadyListHead' : [ 0x3580, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x3780, ['unsigned long']],
'KernelTime' : [ 0x3784, ['unsigned long']],
'UserTime' : [ 0x3788, ['unsigned long']],
'DpcTime' : [ 0x378c, ['unsigned long']],
'InterruptTime' : [ 0x3790, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x3794, ['unsigned long']],
'SkipTick' : [ 0x3798, ['unsigned char']],
'DebuggerSavedIRQL' : [ 0x3799, ['unsigned char']],
'PollSlot' : [ 0x379a, ['unsigned char']],
'PrcbPad80' : [ 0x379b, ['array', 5, ['unsigned char']]],
'DpcTimeCount' : [ 0x37a0, ['unsigned long']],
'DpcTimeLimit' : [ 0x37a4, ['unsigned long']],
'PeriodicCount' : [ 0x37a8, ['unsigned long']],
'PeriodicBias' : [ 0x37ac, ['unsigned long']],
'PrcbPad81' : [ 0x37b0, ['array', 2, ['unsigned long long']]],
'ParentNode' : [ 0x37c0, ['pointer64', ['_KNODE']]],
'MultiThreadProcessorSet' : [ 0x37c8, ['unsigned long long']],
'MultiThreadSetMaster' : [ 0x37d0, ['pointer64', ['_KPRCB']]],
'StartCycles' : [ 0x37d8, ['unsigned long long']],
'MmSpinLockOrdering' : [ 0x37e0, ['long']],
'PageColor' : [ 0x37e4, ['unsigned long']],
'NodeColor' : [ 0x37e8, ['unsigned long']],
'NodeShiftedColor' : [ 0x37ec, ['unsigned long']],
'SecondaryColorMask' : [ 0x37f0, ['unsigned long']],
'Sleeping' : [ 0x37f4, ['long']],
'CycleTime' : [ 0x37f8, ['unsigned long long']],
'CcFastMdlReadNoWait' : [ 0x3800, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x3804, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x3808, ['unsigned long']],
'CcMapDataNoWait' : [ 0x380c, ['unsigned long']],
'CcMapDataWait' : [ 0x3810, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x3814, ['unsigned long']],
'CcPinReadNoWait' : [ 0x3818, ['unsigned long']],
'CcPinReadWait' : [ 0x381c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x3820, ['unsigned long']],
'CcMdlReadWait' : [ 0x3824, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x3828, ['unsigned long']],
'CcLazyWriteIos' : [ 0x382c, ['unsigned long']],
'CcLazyWritePages' : [ 0x3830, ['unsigned long']],
'CcDataFlushes' : [ 0x3834, ['unsigned long']],
'CcDataPages' : [ 0x3838, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x383c, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x3840, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x3844, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x3848, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x384c, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x3850, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x3854, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x3858, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x385c, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x3860, ['unsigned long']],
'CcReadAheadIos' : [ 0x3864, ['unsigned long']],
'MmCacheTransitionCount' : [ 0x3868, ['long']],
'MmCacheReadCount' : [ 0x386c, ['long']],
'MmCacheIoCount' : [ 0x3870, ['long']],
'PrcbPad91' : [ 0x3874, ['array', 3, ['unsigned long']]],
'PowerState' : [ 0x3880, ['_PROCESSOR_POWER_STATE']],
'KeAlignmentFixupCount' : [ 0x3998, ['unsigned long']],
'VendorString' : [ 0x399c, ['array', 13, ['unsigned char']]],
'PrcbPad10' : [ 0x39a9, ['array', 3, ['unsigned char']]],
'FeatureBits' : [ 0x39ac, ['unsigned long']],
'UpdateSignature' : [ 0x39b0, ['_LARGE_INTEGER']],
'DpcWatchdogDpc' : [ 0x39b8, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x39f8, ['_KTIMER']],
'Cache' : [ 0x3a38, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x3a74, ['unsigned long']],
'CachedCommit' : [ 0x3a78, ['unsigned long']],
'CachedResidentAvailable' : [ 0x3a7c, ['unsigned long']],
'HyperPte' : [ 0x3a80, ['pointer64', ['void']]],
'WheaInfo' : [ 0x3a88, ['pointer64', ['void']]],
'EtwSupport' : [ 0x3a90, ['pointer64', ['void']]],
'InterruptObjectPool' : [ 0x3aa0, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x3ab0, ['_SLIST_HEADER']],
'HypercallPageVirtual' : [ 0x3ac0, ['pointer64', ['void']]],
'VirtualApicAssist' : [ 0x3ac8, ['pointer64', ['void']]],
'StatisticsPage' : [ 0x3ad0, ['pointer64', ['unsigned long long']]],
'RateControl' : [ 0x3ad8, ['pointer64', ['void']]],
'CacheProcessorMask' : [ 0x3ae0, ['array', 5, ['unsigned long long']]],
'PackageProcessorSet' : [ 0x3b08, ['unsigned long long']],
'CoreProcessorSet' : [ 0x3b10, ['unsigned long long']],
} ],
'_KTHREAD' : [ 0x330, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'CycleTime' : [ 0x18, ['unsigned long long']],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'KernelStack' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'ApcState' : [ 0x48, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x48, ['array', 43, ['unsigned char']]],
'Priority' : [ 0x73, ['unsigned char']],
'NextProcessor' : [ 0x74, ['unsigned short']],
'DeferredProcessor' : [ 0x76, ['unsigned short']],
'ApcQueueLock' : [ 0x78, ['unsigned long long']],
'WaitStatus' : [ 0x80, ['long long']],
'WaitBlockList' : [ 0x88, ['pointer64', ['_KWAIT_BLOCK']]],
'GateObject' : [ 0x88, ['pointer64', ['_KGATE']]],
'KernelStackResident' : [ 0x90, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x90, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessReadyQueue' : [ 0x90, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WaitNext' : [ 0x90, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x90, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Alertable' : [ 0x90, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GdiFlushActive' : [ 0x90, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x90, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x90, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x90, ['long']],
'WaitReason' : [ 0x94, ['unsigned char']],
'SwapBusy' : [ 0x95, ['unsigned char']],
'Alerted' : [ 0x96, ['array', 2, ['unsigned char']]],
'WaitListEntry' : [ 0x98, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x98, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xa8, ['pointer64', ['_KQUEUE']]],
'Teb' : [ 0xb0, ['pointer64', ['void']]],
'Timer' : [ 0xb8, ['_KTIMER']],
'TimerFill' : [ 0xb8, ['array', 60, ['unsigned char']]],
'AutoAlignment' : [ 0xf4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0xf4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'EtwStackTraceApc1Inserted' : [ 0xf4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EtwStackTraceApc2Inserted' : [ 0xf4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CycleChargePending' : [ 0xf4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'CalloutActive' : [ 0xf4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ApcQueueable' : [ 0xf4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'EnableStackSwap' : [ 0xf4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'GuiThread' : [ 0xf4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ReservedFlags' : [ 0xf4, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0xf4, ['long']],
'WaitBlock' : [ 0xf8, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill0' : [ 0xf8, ['array', 43, ['unsigned char']]],
'IdealProcessor' : [ 0x123, ['unsigned char']],
'WaitBlockFill1' : [ 0xf8, ['array', 91, ['unsigned char']]],
'PreviousMode' : [ 0x153, ['unsigned char']],
'WaitBlockFill2' : [ 0xf8, ['array', 139, ['unsigned char']]],
'ResourceIndex' : [ 0x183, ['unsigned char']],
'WaitBlockFill3' : [ 0xf8, ['array', 187, ['unsigned char']]],
'LargeStack' : [ 0x1b3, ['unsigned char']],
'WaitBlockFill4' : [ 0xf8, ['array', 44, ['unsigned char']]],
'ContextSwitches' : [ 0x124, ['unsigned long']],
'WaitBlockFill5' : [ 0xf8, ['array', 92, ['unsigned char']]],
'State' : [ 0x154, ['unsigned char']],
'NpxState' : [ 0x155, ['unsigned char']],
'WaitIrql' : [ 0x156, ['unsigned char']],
'WaitMode' : [ 0x157, ['unsigned char']],
'WaitBlockFill6' : [ 0xf8, ['array', 140, ['unsigned char']]],
'WaitTime' : [ 0x184, ['unsigned long']],
'WaitBlockFill7' : [ 0xf8, ['array', 188, ['unsigned char']]],
'KernelApcDisable' : [ 0x1b4, ['short']],
'SpecialApcDisable' : [ 0x1b6, ['short']],
'CombinedApcDisable' : [ 0x1b4, ['unsigned long']],
'QueueListEntry' : [ 0x1b8, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x1c8, ['pointer64', ['_KTRAP_FRAME']]],
'FirstArgument' : [ 0x1d0, ['pointer64', ['void']]],
'CallbackStack' : [ 0x1d8, ['pointer64', ['void']]],
'CallbackDepth' : [ 0x1d8, ['unsigned long long']],
'ApcStateIndex' : [ 0x1e0, ['unsigned char']],
'BasePriority' : [ 0x1e1, ['unsigned char']],
'PriorityDecrement' : [ 0x1e2, ['unsigned char']],
'Preempted' : [ 0x1e3, ['unsigned char']],
'AdjustReason' : [ 0x1e4, ['unsigned char']],
'AdjustIncrement' : [ 0x1e5, ['unsigned char']],
'Spare01' : [ 0x1e6, ['unsigned char']],
'Saturation' : [ 0x1e7, ['unsigned char']],
'SystemCallNumber' : [ 0x1e8, ['unsigned long']],
'FreezeCount' : [ 0x1ec, ['unsigned long']],
'UserAffinity' : [ 0x1f0, ['unsigned long long']],
'Process' : [ 0x1f8, ['pointer64', ['_KPROCESS']]],
'Affinity' : [ 0x200, ['unsigned long long']],
'ApcStatePointer' : [ 0x208, ['array', 2, ['pointer64', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x218, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x218, ['array', 43, ['unsigned char']]],
'Spare02' : [ 0x243, ['unsigned char']],
'SuspendCount' : [ 0x244, ['unsigned char']],
'UserIdealProcessor' : [ 0x245, ['unsigned char']],
'Spare03' : [ 0x246, ['unsigned char']],
'CodePatchInProgress' : [ 0x247, ['unsigned char']],
'Win32Thread' : [ 0x248, ['pointer64', ['void']]],
'StackBase' : [ 0x250, ['pointer64', ['void']]],
'SuspendApc' : [ 0x258, ['_KAPC']],
'SuspendApcFill0' : [ 0x258, ['array', 1, ['unsigned char']]],
'Spare04' : [ 0x259, ['unsigned char']],
'SuspendApcFill1' : [ 0x258, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x25b, ['unsigned char']],
'SuspendApcFill2' : [ 0x258, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x25c, ['unsigned long']],
'SuspendApcFill3' : [ 0x258, ['array', 64, ['unsigned char']]],
'WaitPrcb' : [ 0x298, ['pointer64', ['_KPRCB']]],
'SuspendApcFill4' : [ 0x258, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x2a0, ['pointer64', ['void']]],
'SuspendApcFill5' : [ 0x258, ['array', 83, ['unsigned char']]],
'PowerState' : [ 0x2ab, ['unsigned char']],
'UserTime' : [ 0x2ac, ['unsigned long']],
'SuspendSemaphore' : [ 0x2b0, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x2b0, ['array', 28, ['unsigned char']]],
'SListFaultCount' : [ 0x2cc, ['unsigned long']],
'ThreadListEntry' : [ 0x2d0, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x2e0, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x2f0, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x2f8, ['long long']],
'WriteOperationCount' : [ 0x300, ['long long']],
'OtherOperationCount' : [ 0x308, ['long long']],
'ReadTransferCount' : [ 0x310, ['long long']],
'WriteTransferCount' : [ 0x318, ['long long']],
'OtherTransferCount' : [ 0x320, ['long long']],
'MdlForLockedTeb' : [ 0x328, ['pointer64', ['void']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x250, {
'XmmSaveArea' : [ 0x0, ['_XMM_SAVE_AREA32']],
'Current' : [ 0x200, ['_KERNEL_STACK_SEGMENT']],
'Previous' : [ 0x228, ['_KERNEL_STACK_SEGMENT']],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_1119' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 25, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 61, native_type='unsigned long long')]],
'Region' : [ 0x8, ['BitField', dict(start_bit = 61, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_111e' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
'Header8' : [ 0x0, ['__unnamed_1119']],
'Header16' : [ 0x0, ['__unnamed_111e']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x60, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x100, {
'Locks' : [ 0x0, ['array', 32, ['pointer64', ['_EX_PUSH_LOCK']]]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x40, {
'WakeGate' : [ 0x0, ['_KGATE']],
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x18, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x20, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x28, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x30, ['long']],
'Flags' : [ 0x34, ['long']],
} ],
'_ETHREAD' : [ 0x450, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x330, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x338, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x338, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x348, ['long']],
'OfsChain' : [ 0x348, ['pointer64', ['void']]],
'PostBlockList' : [ 0x350, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x350, ['pointer64', ['void']]],
'StartAddress' : [ 0x358, ['pointer64', ['void']]],
'TerminationPort' : [ 0x360, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x360, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x360, ['pointer64', ['void']]],
'Win32StartParameter' : [ 0x360, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x368, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x370, ['_LIST_ENTRY']],
'Cid' : [ 0x380, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x390, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x390, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x3b0, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x3b8, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x3c8, ['unsigned long long']],
'DeviceToVerify' : [ 0x3d0, ['pointer64', ['_DEVICE_OBJECT']]],
'RateControlApc' : [ 0x3d8, ['pointer64', ['_PSP_RATE_APC']]],
'Win32StartAddress' : [ 0x3e0, ['pointer64', ['void']]],
'SparePtr0' : [ 0x3e8, ['pointer64', ['void']]],
'ThreadListEntry' : [ 0x3f0, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x400, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x408, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x410, ['unsigned long']],
'MmLockOrdering' : [ 0x414, ['long']],
'CrossThreadFlags' : [ 0x418, ['unsigned long']],
'Terminated' : [ 0x418, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x418, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x418, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x418, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x418, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x418, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x418, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x418, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x418, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x418, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x418, ['BitField', dict(start_bit = 10, end_bit = 13, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x418, ['BitField', dict(start_bit = 13, end_bit = 16, native_type='unsigned long')]],
'RundownFail' : [ 0x418, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x41c, ['unsigned long']],
'ActiveExWorker' : [ 0x41c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x41c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x41c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x41c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x41c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RateApcState' : [ 0x41c, ['BitField', dict(start_bit = 5, end_bit = 7, native_type='unsigned long')]],
'SelfTerminate' : [ 0x41c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x420, ['unsigned long']],
'Spare' : [ 0x420, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x420, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EtwPageFaultCalloutActive' : [ 0x420, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x420, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x420, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemWorkingSetExclusive' : [ 0x420, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemWorkingSetShared' : [ 0x420, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x420, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x421, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceExclusive' : [ 0x421, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x421, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x421, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Prefetching' : [ 0x421, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsDynamicMemoryShared' : [ 0x421, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsChangeControlAreaExclusive' : [ 0x421, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsChangeControlAreaShared' : [ 0x421, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Spare1' : [ 0x422, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'PriorityRegionActive' : [ 0x423, ['unsigned char']],
'CacheManagerActive' : [ 0x424, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x425, ['unsigned char']],
'ActiveFaultCount' : [ 0x426, ['unsigned char']],
'AlpcMessageId' : [ 0x428, ['unsigned long long']],
'AlpcMessage' : [ 0x430, ['pointer64', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x430, ['unsigned long']],
'AlpcWaitListEntry' : [ 0x438, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x448, ['unsigned long']],
} ],
'_EPROCESS' : [ 0x3e8, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0xc0, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0xc8, ['_LARGE_INTEGER']],
'ExitTime' : [ 0xd0, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0xd8, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0xe0, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0xe8, ['_LIST_ENTRY']],
'QuotaUsage' : [ 0xf8, ['array', 3, ['unsigned long long']]],
'QuotaPeak' : [ 0x110, ['array', 3, ['unsigned long long']]],
'CommitCharge' : [ 0x128, ['unsigned long long']],
'PeakVirtualSize' : [ 0x130, ['unsigned long long']],
'VirtualSize' : [ 0x138, ['unsigned long long']],
'SessionProcessLinks' : [ 0x140, ['_LIST_ENTRY']],
'DebugPort' : [ 0x150, ['pointer64', ['void']]],
'ExceptionPortData' : [ 0x158, ['pointer64', ['void']]],
'ExceptionPortValue' : [ 0x158, ['unsigned long long']],
'ExceptionPortState' : [ 0x158, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long long')]],
'ObjectTable' : [ 0x160, ['pointer64', ['_HANDLE_TABLE']]],
'Token' : [ 0x168, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x170, ['unsigned long long']],
'AddressCreationLock' : [ 0x178, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x180, ['pointer64', ['_ETHREAD']]],
'ForkInProgress' : [ 0x188, ['pointer64', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x190, ['unsigned long long']],
'PhysicalVadRoot' : [ 0x198, ['pointer64', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x1a0, ['pointer64', ['void']]],
'NumberOfPrivatePages' : [ 0x1a8, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x1b0, ['unsigned long long']],
'Win32Process' : [ 0x1b8, ['pointer64', ['void']]],
'Job' : [ 0x1c0, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x1c8, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x1d0, ['pointer64', ['void']]],
'QuotaBlock' : [ 0x1d8, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'WorkingSetWatch' : [ 0x1e0, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x1e8, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x1f0, ['pointer64', ['void']]],
'LdtInformation' : [ 0x1f8, ['pointer64', ['void']]],
'Spare' : [ 0x200, ['pointer64', ['void']]],
'VdmObjects' : [ 0x208, ['pointer64', ['void']]],
'DeviceMap' : [ 0x210, ['pointer64', ['void']]],
'EtwDataSource' : [ 0x218, ['pointer64', ['void']]],
'FreeTebHint' : [ 0x220, ['pointer64', ['void']]],
'PageDirectoryPte' : [ 0x228, ['_HARDWARE_PTE']],
'Filler' : [ 0x228, ['unsigned long long']],
'Session' : [ 0x230, ['pointer64', ['void']]],
'ImageFileName' : [ 0x238, ['array', 16, ['unsigned char']]],
'JobLinks' : [ 0x248, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x258, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x260, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x270, ['pointer64', ['void']]],
'Wow64Process' : [ 0x278, ['pointer64', ['_WOW64_PROCESS']]],
'ActiveThreads' : [ 0x280, ['unsigned long']],
'ImagePathHash' : [ 0x284, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x288, ['unsigned long']],
'LastThreadExitStatus' : [ 0x28c, ['long']],
'Peb' : [ 0x290, ['pointer64', ['_PEB']]],
'PrefetchTrace' : [ 0x298, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x2a0, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x2a8, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x2b0, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x2b8, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x2c0, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x2c8, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x2d0, ['unsigned long long']],
'CommitChargePeak' : [ 0x2d8, ['unsigned long long']],
'AweInfo' : [ 0x2e0, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x2e8, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x2f0, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x358, ['_LIST_ENTRY']],
'ModifiedPageCount' : [ 0x368, ['unsigned long']],
'Flags2' : [ 0x36c, ['unsigned long']],
'JobNotReallyActive' : [ 0x36c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x36c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x36c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x36c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x36c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x36c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ReportPhysicalPageChanges' : [ 0x36c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'HandleTableRundown' : [ 0x36c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x36c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x36c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'NumaAware' : [ 0x36c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtectedProcess' : [ 0x36c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x36c, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x36c, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x36c, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x36c, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x36c, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x36c, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x36c, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'Flags' : [ 0x370, ['unsigned long']],
'CreateReported' : [ 0x370, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x370, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x370, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x370, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x370, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x370, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x370, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x370, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x370, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x370, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x370, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x370, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x370, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x370, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x370, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x370, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x370, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x370, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x370, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x370, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x370, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x370, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x370, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x370, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'SmapAllowed' : [ 0x370, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x370, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x370, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x370, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SpareProcessFlags' : [ 0x370, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x374, ['long']],
'Spare7' : [ 0x378, ['unsigned short']],
'SubSystemMinorVersion' : [ 0x37a, ['unsigned char']],
'SubSystemMajorVersion' : [ 0x37b, ['unsigned char']],
'SubSystemVersion' : [ 0x37a, ['unsigned short']],
'PriorityClass' : [ 0x37c, ['unsigned char']],
'VadRoot' : [ 0x380, ['_MM_AVL_TABLE']],
'Cookie' : [ 0x3c0, ['unsigned long']],
'AlpcContext' : [ 0x3c8, ['_ALPC_PROCESS_CONTEXT']],
} ],
'__unnamed_11ea' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_11ea']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'__unnamed_11f8' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_11fd' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_11ff' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_11fd']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_120a' : [ 0x50, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_120c' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_120a']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_11f8']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_11ff']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_120c']],
} ],
'__unnamed_1212' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1216' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_121a' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_121c' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1220' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1222' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1224' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileMaximumInformation'})]],
} ],
'__unnamed_1226' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1228' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_122a' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_122e' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_1230' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1232' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1234' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1236' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1238' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_123c' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1240' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1244' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1248' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations'})]],
} ],
'__unnamed_124f' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1253' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1257' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1259' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_125b' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_125f' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber'})]],
} ],
'__unnamed_1263' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1267' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_126b' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_126f' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1277' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_127b' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_127d' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_127f' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1281' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_1212']],
'CreatePipe' : [ 0x0, ['__unnamed_1216']],
'CreateMailslot' : [ 0x0, ['__unnamed_121a']],
'Read' : [ 0x0, ['__unnamed_121c']],
'Write' : [ 0x0, ['__unnamed_121c']],
'QueryDirectory' : [ 0x0, ['__unnamed_1220']],
'NotifyDirectory' : [ 0x0, ['__unnamed_1222']],
'QueryFile' : [ 0x0, ['__unnamed_1224']],
'SetFile' : [ 0x0, ['__unnamed_1226']],
'QueryEa' : [ 0x0, ['__unnamed_1228']],
'SetEa' : [ 0x0, ['__unnamed_122a']],
'QueryVolume' : [ 0x0, ['__unnamed_122e']],
'SetVolume' : [ 0x0, ['__unnamed_122e']],
'FileSystemControl' : [ 0x0, ['__unnamed_1230']],
'LockControl' : [ 0x0, ['__unnamed_1232']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1234']],
'QuerySecurity' : [ 0x0, ['__unnamed_1236']],
'SetSecurity' : [ 0x0, ['__unnamed_1238']],
'MountVolume' : [ 0x0, ['__unnamed_123c']],
'VerifyVolume' : [ 0x0, ['__unnamed_123c']],
'Scsi' : [ 0x0, ['__unnamed_1240']],
'QueryQuota' : [ 0x0, ['__unnamed_1244']],
'SetQuota' : [ 0x0, ['__unnamed_122a']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1248']],
'QueryInterface' : [ 0x0, ['__unnamed_124f']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_1253']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1257']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1259']],
'SetLock' : [ 0x0, ['__unnamed_125b']],
'QueryId' : [ 0x0, ['__unnamed_125f']],
'QueryDeviceText' : [ 0x0, ['__unnamed_1263']],
'UsageNotification' : [ 0x0, ['__unnamed_1267']],
'WaitWake' : [ 0x0, ['__unnamed_126b']],
'PowerSequence' : [ 0x0, ['__unnamed_126f']],
'Power' : [ 0x0, ['__unnamed_1277']],
'StartDevice' : [ 0x0, ['__unnamed_127b']],
'WMI' : [ 0x0, ['__unnamed_127d']],
'Others' : [ 0x0, ['__unnamed_127f']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_1281']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x20, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x8, ['pointer64', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x10, ['pointer64', ['void']]],
'TxnParameters' : [ 0x18, ['pointer64', ['_TXN_PARAMETER_BLOCK']]],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Type' : [ 0x10, ['pointer64', ['_OBJECT_TYPE']]],
'NameInfoOffset' : [ 0x18, ['unsigned char']],
'HandleInfoOffset' : [ 0x19, ['unsigned char']],
'QuotaInfoOffset' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'ExclusiveProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'QueryReferences' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0xd8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0xb8, ['unsigned long long']],
'IrpList' : [ 0xc0, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0xd0, ['pointer64', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x48, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0xc, ['unsigned long']],
'CurrentFileIndex' : [ 0xc, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
'FirstFileEntry' : [ 0x30, ['pointer64', ['unsigned long long']]],
'Process' : [ 0x38, ['pointer64', ['_EPROCESS']]],
'SessionId' : [ 0x40, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer64', ['unsigned long long']]],
'LastPageFrameEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
} ],
'_PF_HARD_FAULT_INFO' : [ 0x38, {
'KernelTimeStamp' : [ 0x0, ['_ETW_KERNEL_TRACE_TIMESTAMP']],
'HardFaultEvent' : [ 0x10, ['_PERFINFO_HARDPAGEFAULT_INFORMATION']],
'IoTimeInTicks' : [ 0x30, ['_LARGE_INTEGER']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'__unnamed_132b' : [ 0xd0, {
'ProcessorError' : [ 0x0, ['_WHEA_GENERIC_PROCESSOR_ERROR']],
'MemoryError' : [ 0x0, ['_WHEA_MEMORY_ERROR']],
'NmiError' : [ 0x0, ['_WHEA_NMI_ERROR']],
'PciExpressError' : [ 0x0, ['_WHEA_PCIEXPRESS_ERROR']],
'PciXBusError' : [ 0x0, ['_WHEA_PCIXBUS_ERROR']],
'PciXDeviceError' : [ 0x0, ['_WHEA_PCIXDEVICE_ERROR']],
} ],
'_WHEA_ERROR_PACKET' : [ 0x119, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['_WHEA_ERROR_PACKET_FLAGS']],
'Size' : [ 0x8, ['unsigned long']],
'RawDataLength' : [ 0xc, ['unsigned long']],
'Reserved1' : [ 0x10, ['unsigned long long']],
'Context' : [ 0x18, ['unsigned long long']],
'ErrorType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevNone'})]],
'ErrorSourceId' : [ 0x28, ['unsigned long']],
'ErrorSourceType' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'Reserved2' : [ 0x30, ['unsigned long']],
'Version' : [ 0x34, ['unsigned long']],
'Cpu' : [ 0x38, ['unsigned long long']],
'u' : [ 0x40, ['__unnamed_132b']],
'RawDataFormat' : [ 0x110, ['Enumeration', dict(target = 'long', choices = {0: 'WheaRawDataFormatIPFSalRecord', 1: 'WheaRawDataFormatIA32MCA', 2: 'WheaRawDataFormatIntel64MCA', 3: 'WheaRawDataFormatAMD64MCA', 4: 'WheaRawDataFormatMemory', 5: 'WheaRawDataFormatPCIExpress', 6: 'WheaRawDataFormatNMIPort', 7: 'WheaRawDataFormatPCIXBus', 8: 'WheaRawDataFormatPCIXDevice', 9: 'WheaRawDataFormatGeneric', 10: 'WheaRawDataFormatMax'})]],
'RawDataOffset' : [ 0x114, ['unsigned long']],
'RawData' : [ 0x118, ['array', 1, ['unsigned char']]],
} ],
'_KPROCESS' : [ 0xc0, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['unsigned long long']],
'Unused0' : [ 0x30, ['unsigned long long']],
'IopmOffset' : [ 0x38, ['unsigned short']],
'ActiveProcessors' : [ 0x40, ['unsigned long long']],
'KernelTime' : [ 0x48, ['unsigned long']],
'UserTime' : [ 0x4c, ['unsigned long']],
'ReadyListHead' : [ 0x50, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x60, ['_SINGLE_LIST_ENTRY']],
'InstrumentationCallback' : [ 0x68, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x70, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x80, ['unsigned long long']],
'Affinity' : [ 0x88, ['unsigned long long']],
'AutoAlignment' : [ 0x90, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x90, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x90, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ReservedFlags' : [ 0x90, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x90, ['long']],
'BasePriority' : [ 0x94, ['unsigned char']],
'QuantumReset' : [ 0x95, ['unsigned char']],
'State' : [ 0x96, ['unsigned char']],
'ThreadSeed' : [ 0x97, ['unsigned char']],
'PowerState' : [ 0x98, ['unsigned char']],
'IdealNode' : [ 0x99, ['unsigned char']],
'Visited' : [ 0x9a, ['unsigned char']],
'Flags' : [ 0x9b, ['_KEXECUTE_OPTIONS']],
'ExecuteOptions' : [ 0x9b, ['unsigned char']],
'StackCount' : [ 0xa0, ['unsigned long long']],
'ProcessListEntry' : [ 0xa8, ['_LIST_ENTRY']],
'CycleTime' : [ 0xb8, ['unsigned long long']],
} ],
'__unnamed_13eb' : [ 0x10, {
'I386' : [ 0x0, ['_I386_LOADER_BLOCK']],
'Alpha' : [ 0x0, ['_ALPHA_LOADER_BLOCK']],
'Ia64' : [ 0x0, ['_IA64_LOADER_BLOCK']],
} ],
'_LOADER_PARAMETER_BLOCK' : [ 0xe8, {
'LoadOrderListHead' : [ 0x0, ['_LIST_ENTRY']],
'MemoryDescriptorListHead' : [ 0x10, ['_LIST_ENTRY']],
'BootDriverListHead' : [ 0x20, ['_LIST_ENTRY']],
'KernelStack' : [ 0x30, ['unsigned long long']],
'Prcb' : [ 0x38, ['unsigned long long']],
'Process' : [ 0x40, ['unsigned long long']],
'Thread' : [ 0x48, ['unsigned long long']],
'RegistryLength' : [ 0x50, ['unsigned long']],
'RegistryBase' : [ 0x58, ['pointer64', ['void']]],
'ConfigurationRoot' : [ 0x60, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ArcBootDeviceName' : [ 0x68, ['pointer64', ['unsigned char']]],
'ArcHalDeviceName' : [ 0x70, ['pointer64', ['unsigned char']]],
'NtBootPathName' : [ 0x78, ['pointer64', ['unsigned char']]],
'NtHalPathName' : [ 0x80, ['pointer64', ['unsigned char']]],
'LoadOptions' : [ 0x88, ['pointer64', ['unsigned char']]],
'NlsData' : [ 0x90, ['pointer64', ['_NLS_DATA_BLOCK']]],
'ArcDiskInformation' : [ 0x98, ['pointer64', ['_ARC_DISK_INFORMATION']]],
'OemFontFile' : [ 0xa0, ['pointer64', ['void']]],
'SetupLoaderBlock' : [ 0xa8, ['pointer64', ['_SETUP_LOADER_BLOCK']]],
'Extension' : [ 0xb0, ['pointer64', ['_LOADER_PARAMETER_EXTENSION']]],
'u' : [ 0xb8, ['__unnamed_13eb']],
'FirmwareInformation' : [ 0xc8, ['_FIRMWARE_INFORMATION_LOADER_BLOCK']],
} ],
'__unnamed_1408' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'VolatileLong' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'HardLarge' : [ 0x0, ['_MMPTE_HARDWARE_LARGEPAGE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_1408']],
} ],
'__unnamed_1417' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer64', ['void']]],
'VolatileNext' : [ 0x0, ['pointer64', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_1419' : [ 0x8, {
'Blink' : [ 0x0, ['unsigned long long']],
'ImageProtoPte' : [ 0x0, ['pointer64', ['_MMPTE']]],
'ShareCount' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_141d' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'VolatileReferenceCount' : [ 0x0, ['short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_141f' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'ByteFlags' : [ 0x2, ['unsigned char']],
'InterlockedByteFlags' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_1421' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_141d']],
'e3' : [ 0x0, ['__unnamed_141f']],
} ],
'__unnamed_1429' : [ 0x8, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 52, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'PfnImageVerified' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 57, native_type='unsigned long long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMPFN' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_1417']],
'u2' : [ 0x8, ['__unnamed_1419']],
'PteAddress' : [ 0x10, ['pointer64', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x10, ['pointer64', ['void']]],
'u3' : [ 0x18, ['__unnamed_1421']],
'UsedPageTableEntries' : [ 0x1c, ['unsigned short']],
'VaType' : [ 0x1e, ['unsigned char']],
'ViewCount' : [ 0x1f, ['unsigned char']],
'OriginalPte' : [ 0x20, ['_MMPTE']],
'AweReferenceCount' : [ 0x20, ['long']],
'u4' : [ 0x28, ['__unnamed_1429']],
} ],
'_MMPTE_FLUSH_LIST' : [ 0xa8, {
'Count' : [ 0x0, ['unsigned long']],
'MaximumCount' : [ 0x4, ['unsigned long']],
'FlushVa' : [ 0x8, ['array', 20, ['pointer64', ['void']]]],
} ],
'_MI_COLOR_BASE' : [ 0x10, {
'ColorPointer' : [ 0x0, ['pointer64', ['unsigned short']]],
'ColorMask' : [ 0x8, ['unsigned short']],
'ColorNode' : [ 0xa, ['unsigned short']],
} ],
'_MMSUPPORT' : [ 0x68, {
'WorkingSetExpansionLinks' : [ 0x0, ['_LIST_ENTRY']],
'LastTrimStamp' : [ 0x10, ['unsigned short']],
'NextPageColor' : [ 0x12, ['unsigned short']],
'Flags' : [ 0x14, ['_MMSUPPORT_FLAGS']],
'PageFaultCount' : [ 0x18, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x1c, ['unsigned long']],
'ChargedWslePages' : [ 0x20, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0x24, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x28, ['unsigned long']],
'VmWorkingSetList' : [ 0x30, ['pointer64', ['_MMWSL']]],
'Claim' : [ 0x38, ['unsigned long']],
'ActualWslePages' : [ 0x3c, ['unsigned long']],
'WorkingSetPrivateSize' : [ 0x40, ['unsigned long']],
'WorkingSetSizeOverhead' : [ 0x44, ['unsigned long']],
'WorkingSetSize' : [ 0x48, ['unsigned long']],
'ExitGate' : [ 0x50, ['pointer64', ['_KGATE']]],
'WorkingSetMutex' : [ 0x58, ['_EX_PUSH_LOCK']],
'AccessLog' : [ 0x60, ['pointer64', ['void']]],
} ],
'__unnamed_144d' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_144d']],
} ],
'_MMWSL' : [ 0x498, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer64', ['_MMWSLE']]],
'LowestPagableAddress' : [ 0x18, ['pointer64', ['void']]],
'LastInitializedWsle' : [ 0x20, ['unsigned long']],
'NextEstimationSlot' : [ 0x24, ['unsigned long']],
'NextAgingSlot' : [ 0x28, ['unsigned long']],
'EstimatedAvailable' : [ 0x2c, ['unsigned long']],
'GrowthSinceLastEstimate' : [ 0x30, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x34, ['unsigned long']],
'VadBitMapHint' : [ 0x38, ['unsigned long']],
'NonDirectCount' : [ 0x3c, ['unsigned long']],
'LastVadBit' : [ 0x40, ['unsigned long']],
'MaximumLastVadBit' : [ 0x44, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x48, ['unsigned long']],
'LastAllocationSize' : [ 0x4c, ['unsigned long']],
'NonDirectHash' : [ 0x50, ['pointer64', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x58, ['pointer64', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x60, ['pointer64', ['_MMWSLE_HASH']]],
'HighestUserAddress' : [ 0x68, ['pointer64', ['void']]],
'MaximumUserPageTablePages' : [ 0x70, ['unsigned long']],
'MaximumUserPageDirectoryPages' : [ 0x74, ['unsigned long']],
'CommittedPageTables' : [ 0x78, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectories' : [ 0x80, ['unsigned long']],
'CommittedPageDirectories' : [ 0x88, ['array', 128, ['unsigned long long']]],
'NumberOfCommittedPageDirectoryParents' : [ 0x488, ['unsigned long']],
'CommittedPageDirectoryParents' : [ 0x490, ['array', 1, ['unsigned long long']]],
} ],
'__unnamed_1467' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_1469' : [ 0x4, {
'ModifiedWriteCount' : [ 0x0, ['unsigned short']],
'FlushInProgressCount' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_146b' : [ 0x4, {
'e2' : [ 0x0, ['__unnamed_1469']],
} ],
'__unnamed_1475' : [ 0x10, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 30, native_type='unsigned long')]],
'BitMap64' : [ 0x4, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubsectionRoot' : [ 0x8, ['pointer64', ['_MM_SUBSECTION_AVL_TABLE']]],
'SeImageStub' : [ 0x8, ['pointer64', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_1477' : [ 0x10, {
'e2' : [ 0x0, ['__unnamed_1475']],
} ],
'_CONTROL_AREA' : [ 0x70, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x20, ['unsigned long long']],
'NumberOfMappedViews' : [ 0x28, ['unsigned long long']],
'NumberOfUserReferences' : [ 0x30, ['unsigned long long']],
'u' : [ 0x38, ['__unnamed_1467']],
'u1' : [ 0x3c, ['__unnamed_146b']],
'FilePointer' : [ 0x40, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x48, ['long']],
'StartingFrame' : [ 0x4c, ['unsigned long']],
'WaitingForDeletion' : [ 0x50, ['pointer64', ['_MI_SECTION_CREATION_GATE']]],
'u2' : [ 0x58, ['__unnamed_1477']],
'LockedPages' : [ 0x68, ['long long']],
} ],
'_MMPAGING_FILE' : [ 0xa0, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'PeakUsage' : [ 0x20, ['unsigned long long']],
'HighestPage' : [ 0x28, ['unsigned long long']],
'File' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x38, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x48, ['_UNICODE_STRING']],
'Bitmap' : [ 0x58, ['pointer64', ['_RTL_BITMAP']]],
'BitmapHint' : [ 0x60, ['unsigned long']],
'LastAllocationSize' : [ 0x64, ['unsigned long']],
'PageFileNumber' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'BootPartition' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Spare0' : [ 0x68, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AdriftMdls' : [ 0x6a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare1' : [ 0x6a, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'FileHandle' : [ 0x70, ['pointer64', ['void']]],
'AvailableList' : [ 0x80, ['_SLIST_HEADER']],
'NeedProcessingList' : [ 0x90, ['_SLIST_HEADER']],
} ],
'_MMPAGING_FILE_FREE_ENTRY' : [ 0x10, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'FreeBit' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_14aa' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMVAD']]],
} ],
'__unnamed_14ad' : [ 0x8, {
'LongFlags' : [ 0x0, ['unsigned long long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_14b0' : [ 0x8, {
'LongFlags3' : [ 0x0, ['unsigned long long']],
'VadFlags3' : [ 0x0, ['_MMVAD_FLAGS3']],
} ],
'_MMVAD_SHORT' : [ 0x40, {
'u1' : [ 0x0, ['__unnamed_14aa']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_14ad']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_14b0']],
} ],
'_MM_AVL_TABLE' : [ 0x40, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
'NodeFreeHint' : [ 0x38, ['pointer64', ['void']]],
} ],
'__unnamed_14ba' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x60, {
'u1' : [ 0x0, ['__unnamed_14aa']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_14ad']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_14b0']],
'u2' : [ 0x40, ['__unnamed_14ba']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'MappedSubsection' : [ 0x48, ['pointer64', ['_MSUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
} ],
'__unnamed_14ca' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_14ca']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_14cf' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_14cf']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_14d5' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
'NextToFree' : [ 0x0, ['pointer64', ['_MI_PER_SESSION_PROTOS']]],
} ],
'__unnamed_14d7' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned long']],
'NumberOfPtesToFree' : [ 0x0, ['unsigned long']],
} ],
'_MI_PER_SESSION_PROTOS' : [ 0x38, {
'u1' : [ 0x0, ['__unnamed_14d5']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'SessionId' : [ 0x18, ['unsigned long']],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'Subsection' : [ 0x18, ['pointer64', ['_SUBSECTION']]],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'SubsectionBase' : [ 0x28, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x30, ['__unnamed_14d7']],
} ],
'__unnamed_14e0' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'__unnamed_14e2' : [ 0x8, {
'KeepForever' : [ 0x0, ['unsigned long long']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x10, ['__unnamed_14e0']],
'Irp' : [ 0x20, ['pointer64', ['_IRP']]],
'u1' : [ 0x28, ['__unnamed_14e2']],
'PagingFile' : [ 0x30, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x40, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0x48, ['pointer64', ['_ERESOURCE']]],
'WriteOffset' : [ 0x50, ['_LARGE_INTEGER']],
'IssueTime' : [ 0x58, ['_LARGE_INTEGER']],
'PointerMdl' : [ 0x60, ['pointer64', ['_MDL']]],
'Mdl' : [ 0x68, ['_MDL']],
'Page' : [ 0x98, ['array', 1, ['unsigned long long']]],
} ],
'__unnamed_14ea' : [ 0x38, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x30, ['array', 1, ['unsigned long long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x68, {
'Status' : [ 0x0, ['long']],
'Priority' : [ 0x4, ['unsigned char']],
'IrpPriority' : [ 0x5, ['unsigned char']],
'CurrentTime' : [ 0x8, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x10, ['unsigned long long']],
'ModifiedPagesTotal' : [ 0x18, ['unsigned long long']],
'ModifiedPagefilePages' : [ 0x20, ['unsigned long long']],
'ModifiedNoWritePages' : [ 0x28, ['unsigned long long']],
'MdlHack' : [ 0x30, ['__unnamed_14ea']],
} ],
'_HHIVE' : [ 0x590, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileSetSize' : [ 0x28, ['pointer64', ['void']]],
'FileWrite' : [ 0x30, ['pointer64', ['void']]],
'FileRead' : [ 0x38, ['pointer64', ['void']]],
'FileFlush' : [ 0x40, ['pointer64', ['void']]],
'BaseBlock' : [ 0x48, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x50, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x60, ['unsigned long']],
'DirtyAlloc' : [ 0x64, ['unsigned long']],
'BaseBlockAlloc' : [ 0x68, ['unsigned long']],
'Cluster' : [ 0x6c, ['unsigned long']],
'Flat' : [ 0x70, ['unsigned char']],
'ReadOnly' : [ 0x71, ['unsigned char']],
'DirtyFlag' : [ 0x72, ['unsigned char']],
'HvBinHeadersUse' : [ 0x74, ['unsigned long']],
'HvFreeCellsUse' : [ 0x78, ['unsigned long']],
'HvUsedCellsUse' : [ 0x7c, ['unsigned long']],
'CmUsedCellsUse' : [ 0x80, ['unsigned long']],
'HiveFlags' : [ 0x84, ['unsigned long']],
'CurrentLog' : [ 0x88, ['unsigned long']],
'LogSize' : [ 0x8c, ['array', 2, ['unsigned long']]],
'RefreshCount' : [ 0x94, ['unsigned long']],
'StorageTypeCount' : [ 0x98, ['unsigned long']],
'Version' : [ 0x9c, ['unsigned long']],
'Storage' : [ 0xa0, ['array', 2, ['_DUAL']]],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'_CM_VIEW_OF_FILE' : [ 0x58, {
'MappedViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'PinnedViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'FlushedViewLinks' : [ 0x20, ['_LIST_ENTRY']],
'CmHive' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'Bcb' : [ 0x38, ['pointer64', ['void']]],
'ViewAddress' : [ 0x40, ['pointer64', ['void']]],
'FileOffset' : [ 0x48, ['unsigned long']],
'Size' : [ 0x4c, ['unsigned long']],
'UseCount' : [ 0x50, ['unsigned long']],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_TEB' : [ 0x1828, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes1' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['pointer64', ['void']]]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['pointer64', ['void']]],
'EtwLocalData' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'SpareBool0' : [ 0x1744, ['unsigned char']],
'SpareBool1' : [ 0x1745, ['unsigned char']],
'SpareBool2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['pointer64', ['void']]],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['pointer64', ['void']]],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'PreferredLanguages' : [ 0x17d0, ['pointer64', ['void']]],
'UserPrefLanguages' : [ 0x17d8, ['pointer64', ['void']]],
'MergedPrefLanguages' : [ 0x17e0, ['pointer64', ['void']]],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'DbgSafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'DbgInDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DbgHasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'DbgSkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'DbgWerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'DbgRanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'DbgClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'DbgSuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'RtlDisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['pointer64', ['void']]],
'TxnScopeExitCallback' : [ 0x17f8, ['pointer64', ['void']]],
'TxnScopeContext' : [ 0x1800, ['pointer64', ['void']]],
'LockCount' : [ 0x1808, ['unsigned long']],
'ProcessRundown' : [ 0x180c, ['unsigned long']],
'LastSwitchTime' : [ 0x1810, ['unsigned long long']],
'TotalSwitchOutTime' : [ 0x1818, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x1820, ['_LARGE_INTEGER']],
} ],
'_CONTEXT32_UPDATE' : [ 0x4, {
'NumberEntries' : [ 0x0, ['unsigned long']],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Period' : [ 0x38, ['long']],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Object' : [ 0x18, ['pointer64', ['void']]],
'NextWaitBlock' : [ 0x20, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x28, ['unsigned short']],
'WaitType' : [ 0x2a, ['unsigned char']],
'SpareByte' : [ 0x2b, ['unsigned char']],
'SpareLong' : [ 0x2c, ['long']],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x18, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'Time' : [ 0x10, ['_ULARGE_INTEGER']],
} ],
'__unnamed_15bb' : [ 0x8, {
'IdleTransitionTime' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_15bd' : [ 0x8, {
'LastIdleCheck' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_15c4' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'PStateDomain' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'PStateDomainIdleAccounting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x118, {
'IdleStates' : [ 0x0, ['pointer64', ['_PPM_IDLE_STATES']]],
'LastTimeCheck' : [ 0x8, ['unsigned long long']],
'IdleTimeAccumulated' : [ 0x10, ['unsigned long long']],
'Native' : [ 0x18, ['__unnamed_15bb']],
'Hv' : [ 0x18, ['__unnamed_15bd']],
'IdleAccounting' : [ 0x20, ['pointer64', ['PPM_IDLE_ACCOUNTING']]],
'PerfStates' : [ 0x28, ['pointer64', ['_PPM_PERF_STATES']]],
'LastKernelUserTime' : [ 0x30, ['unsigned long']],
'LastIdleThreadKTime' : [ 0x34, ['unsigned long']],
'LastGlobalTimeHv' : [ 0x38, ['unsigned long long']],
'LastProcessorTimeHv' : [ 0x40, ['unsigned long long']],
'ThermalConstraint' : [ 0x48, ['unsigned char']],
'LastBusyPercentage' : [ 0x49, ['unsigned char']],
'Flags' : [ 0x4a, ['__unnamed_15c4']],
'PerfTimer' : [ 0x50, ['_KTIMER']],
'PerfDpc' : [ 0x90, ['_KDPC']],
'LastSysTime' : [ 0xd0, ['unsigned long']],
'PStateMaster' : [ 0xd8, ['pointer64', ['_KPRCB']]],
'PStateSet' : [ 0xe0, ['unsigned long long']],
'CurrentPState' : [ 0xe8, ['unsigned long']],
'DesiredPState' : [ 0xec, ['unsigned long']],
'PStateIdleStartTime' : [ 0xf0, ['unsigned long']],
'PStateIdleTime' : [ 0xf4, ['unsigned long']],
'LastPStateIdleTime' : [ 0xf8, ['unsigned long']],
'PStateStartTime' : [ 0xfc, ['unsigned long']],
'DiaIndex' : [ 0x100, ['unsigned long']],
'Reserved0' : [ 0x104, ['unsigned long']],
'WmiDispatchPtr' : [ 0x108, ['unsigned long long']],
'WmiInterfaceEnabled' : [ 0x110, ['long']],
} ],
'_KEXCEPTION_FRAME' : [ 0x140, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['unsigned long long']],
'Xmm6' : [ 0x30, ['_M128A']],
'Xmm7' : [ 0x40, ['_M128A']],
'Xmm8' : [ 0x50, ['_M128A']],
'Xmm9' : [ 0x60, ['_M128A']],
'Xmm10' : [ 0x70, ['_M128A']],
'Xmm11' : [ 0x80, ['_M128A']],
'Xmm12' : [ 0x90, ['_M128A']],
'Xmm13' : [ 0xa0, ['_M128A']],
'Xmm14' : [ 0xb0, ['_M128A']],
'Xmm15' : [ 0xc0, ['_M128A']],
'TrapFrame' : [ 0xd0, ['unsigned long long']],
'CallbackStack' : [ 0xd8, ['unsigned long long']],
'OutputBuffer' : [ 0xe0, ['unsigned long long']],
'OutputLength' : [ 0xe8, ['unsigned long long']],
'MxCsr' : [ 0xf0, ['unsigned long long']],
'Rbp' : [ 0xf8, ['unsigned long long']],
'Rbx' : [ 0x100, ['unsigned long long']],
'Rdi' : [ 0x108, ['unsigned long long']],
'Rsi' : [ 0x110, ['unsigned long long']],
'R12' : [ 0x118, ['unsigned long long']],
'R13' : [ 0x120, ['unsigned long long']],
'R14' : [ 0x128, ['unsigned long long']],
'R15' : [ 0x130, ['unsigned long long']],
'Return' : [ 0x138, ['unsigned long long']],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStampCKCL' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'LastBranchControl' : [ 0x108, ['unsigned long long']],
'LastBranchMSR' : [ 0x110, ['unsigned long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'TimeStampKlog' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill0' : [ 0x172, ['unsigned char']],
'Logging' : [ 0x173, ['unsigned char']],
'Fill1' : [ 0x174, ['array', 2, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['unsigned short']],
'CodePatchCycle' : [ 0x18c, ['long']],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x50, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'DispatchedCount' : [ 0x8, ['unsigned long']],
'DispatchedList' : [ 0x10, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x20, ['_KSEMAPHORE']],
'CompletedList' : [ 0x40, ['_LIST_ENTRY']],
} ],
'__unnamed_15f5' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_15f5']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'__unnamed_1607' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1609' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_160d' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x220, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'Level' : [ 0x20, ['unsigned long']],
'Notify' : [ 0x28, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0x68, ['_PO_IRP_MANAGER']],
'State' : [ 0x88, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x8c, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x90, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0xe0, ['unsigned long']],
'CompletionStatus' : [ 0xe4, ['long']],
'PendingIrp' : [ 0xe8, ['pointer64', ['_IRP']]],
'Flags' : [ 0xf0, ['unsigned long']],
'UserFlags' : [ 0xf4, ['unsigned long']],
'Problem' : [ 0xf8, ['unsigned long']],
'PhysicalDeviceObject' : [ 0x100, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceList' : [ 0x108, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0x110, ['pointer64', ['_CM_RESOURCE_LIST']]],
'InstancePath' : [ 0x118, ['_UNICODE_STRING']],
'ServiceName' : [ 0x128, ['_UNICODE_STRING']],
'DuplicatePDO' : [ 0x138, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0x140, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0x148, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x14c, ['unsigned long']],
'ChildInterfaceType' : [ 0x150, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x154, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x158, ['unsigned short']],
'RemovalPolicy' : [ 0x15a, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x15b, ['unsigned char']],
'TargetDeviceNotify' : [ 0x160, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x170, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x180, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x190, ['unsigned short']],
'QueryTranslatorMask' : [ 0x192, ['unsigned short']],
'NoArbiterMask' : [ 0x194, ['unsigned short']],
'QueryArbiterMask' : [ 0x196, ['unsigned short']],
'OverUsed1' : [ 0x198, ['__unnamed_1607']],
'OverUsed2' : [ 0x1a0, ['__unnamed_1609']],
'BootResources' : [ 0x1a8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x1b0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x1b8, ['unsigned long']],
'DockInfo' : [ 0x1c0, ['__unnamed_160d']],
'DisableableDepends' : [ 0x1e0, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x1e8, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x1f8, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x208, ['unsigned long']],
'PreviousParent' : [ 0x210, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x218, ['unsigned long']],
'NumaNodeIndex' : [ 0x21c, ['unsigned long']],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0x10, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x40, {
'PhysicalDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'AllocationType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0x10, ['unsigned long']],
'Position' : [ 0x14, ['unsigned long']],
'ResourceRequirements' : [ 0x18, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x20, ['pointer64', ['void']]],
'ResourceAssignment' : [ 0x28, ['pointer64', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x30, ['pointer64', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x38, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_16ad' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_16ad']],
} ],
'__unnamed_16b4' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_16b4']],
} ],
'_CPU_INFO' : [ 0x10, {
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_VOLUME_CACHE_MAP' : [ 0x28, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0x10, ['_LIST_ENTRY']],
'Flags' : [ 0x20, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x1d0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObjectFastRef' : [ 0x60, ['_EX_FAST_REF']],
'ActiveVacb' : [ 0x68, ['pointer64', ['_VACB']]],
'NeedToZero' : [ 0x70, ['pointer64', ['void']]],
'ActivePage' : [ 0x78, ['unsigned long']],
'NeedToZeroPage' : [ 0x7c, ['unsigned long']],
'ActiveVacbSpinLock' : [ 0x80, ['unsigned long long']],
'VacbActiveCount' : [ 0x88, ['unsigned long']],
'DirtyPages' : [ 0x8c, ['unsigned long']],
'SharedCacheMapLinks' : [ 0x90, ['_LIST_ENTRY']],
'Flags' : [ 0xa0, ['unsigned long']],
'Status' : [ 0xa4, ['long']],
'Mbcb' : [ 0xa8, ['pointer64', ['_MBCB']]],
'Section' : [ 0xb0, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xc0, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc8, ['unsigned long']],
'BeyondLastFlush' : [ 0xd0, ['long long']],
'Callbacks' : [ 0xd8, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xe0, ['pointer64', ['void']]],
'PrivateList' : [ 0xe8, ['_LIST_ENTRY']],
'LogHandle' : [ 0xf8, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0x100, ['pointer64', ['void']]],
'DirtyPageThreshold' : [ 0x108, ['unsigned long']],
'LazyWritePassCount' : [ 0x10c, ['unsigned long']],
'UninitializeEvent' : [ 0x110, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'NeedToZeroVacb' : [ 0x118, ['pointer64', ['_VACB']]],
'BcbSpinLock' : [ 0x120, ['unsigned long long']],
'Reserved' : [ 0x128, ['pointer64', ['void']]],
'Event' : [ 0x130, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0x148, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0x150, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x1b8, ['pointer64', ['void']]],
'VolumeCacheMap' : [ 0x1c0, ['pointer64', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x1c8, ['unsigned long']],
'MappedWritesInProgress' : [ 0x1cc, ['unsigned long']],
} ],
'__unnamed_16f6' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x30, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_16f6']],
'LruList' : [ 0x18, ['_LIST_ENTRY']],
'ArrayHead' : [ 0x28, ['pointer64', ['_VACB_ARRAY_HEADER']]],
} ],
'__unnamed_1704' : [ 0x8, {
'FileObject' : [ 0x0, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_1706' : [ 0x8, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_1708' : [ 0x8, {
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
} ],
'__unnamed_170a' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_170c' : [ 0x8, {
'Read' : [ 0x0, ['__unnamed_1704']],
'Write' : [ 0x0, ['__unnamed_1706']],
'Event' : [ 0x0, ['__unnamed_1708']],
'Notification' : [ 0x0, ['__unnamed_170a']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x30, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'CoalescedWorkQueueLinks' : [ 0x10, ['_LIST_ENTRY']],
'Parameters' : [ 0x20, ['__unnamed_170c']],
'Function' : [ 0x28, ['unsigned char']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x38, {
'ExtendedLookup' : [ 0x0, ['pointer64', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x8, ['unsigned long']],
'ExtraItem' : [ 0xc, ['unsigned long']],
'ItemCount' : [ 0x10, ['unsigned long']],
'OutOfRangeItems' : [ 0x14, ['unsigned long']],
'BaseIndex' : [ 0x18, ['unsigned long']],
'ListHead' : [ 0x20, ['pointer64', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x28, ['pointer64', ['unsigned long']]],
'ListHints' : [ 0x30, ['pointer64', ['pointer64', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x1f8, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
'Flags' : [ 0x70, ['unsigned long']],
'ForceFlags' : [ 0x74, ['unsigned long']],
'CompatibilityFlags' : [ 0x78, ['unsigned long']],
'EncodeFlagMask' : [ 0x7c, ['unsigned long']],
'Encoding' : [ 0x80, ['_HEAP_ENTRY']],
'PointerKey' : [ 0x90, ['unsigned long long']],
'Interceptor' : [ 0x98, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x9c, ['unsigned long']],
'Signature' : [ 0xa0, ['unsigned long']],
'SegmentReserve' : [ 0xa8, ['unsigned long long']],
'SegmentCommit' : [ 0xb0, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0xb8, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0xc0, ['unsigned long long']],
'TotalFreeSize' : [ 0xc8, ['unsigned long long']],
'MaximumAllocationSize' : [ 0xd0, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0xd8, ['unsigned short']],
'HeaderValidateLength' : [ 0xda, ['unsigned short']],
'HeaderValidateCopy' : [ 0xe0, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0xe8, ['unsigned short']],
'MaximumTagIndex' : [ 0xea, ['unsigned short']],
'TagEntries' : [ 0xf0, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0xf8, ['_LIST_ENTRY']],
'AlignRound' : [ 0x108, ['unsigned long long']],
'AlignMask' : [ 0x110, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x118, ['_LIST_ENTRY']],
'SegmentList' : [ 0x128, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0x138, ['unsigned short']],
'NonDedicatedListLength' : [ 0x13c, ['unsigned long']],
'BlocksIndex' : [ 0x140, ['pointer64', ['void']]],
'UCRIndex' : [ 0x148, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x150, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x158, ['_LIST_ENTRY']],
'LockVariable' : [ 0x168, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0x170, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0x178, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0x180, ['unsigned short']],
'FrontEndHeapType' : [ 0x182, ['unsigned char']],
'Counters' : [ 0x188, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x1e8, ['_HEAP_TUNING_PARAMETERS']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x70, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0xc8, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'LoadedImports' : [ 0x80, ['pointer64', ['void']]],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ForwarderLinks' : [ 0x98, ['_LIST_ENTRY']],
'ServiceTagLinks' : [ 0xa8, ['_LIST_ENTRY']],
'StaticLinks' : [ 0xb8, ['_LIST_ENTRY']],
} ],
'_HEAP_SUBSEGMENT' : [ 0x30, {
'LocalInfo' : [ 0x0, ['pointer64', ['_HEAP_LOCAL_SEGMENT_INFO']]],
'UserBlocks' : [ 0x8, ['pointer64', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x10, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x18, ['unsigned short']],
'Flags' : [ 0x1a, ['unsigned short']],
'BlockCount' : [ 0x1c, ['unsigned short']],
'SizeIndex' : [ 0x1e, ['unsigned char']],
'AffinityIndex' : [ 0x1f, ['unsigned char']],
'Alignment' : [ 0x18, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x28, ['unsigned long']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x370, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x8, ['pointer64', ['void']]],
'LoggerThread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'LoggerStatus' : [ 0x18, ['long']],
'LoggerId' : [ 0x1c, ['unsigned long']],
'NBQHead' : [ 0x20, ['pointer64', ['void']]],
'OverflowNBQHead' : [ 0x28, ['pointer64', ['void']]],
'QueueBlockFreeList' : [ 0x30, ['_SLIST_HEADER']],
'GlobalList' : [ 0x40, ['_SLIST_HEADER']],
'BatchedBufferList' : [ 0x50, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'LoggerName' : [ 0x58, ['_UNICODE_STRING']],
'LogFileName' : [ 0x68, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x78, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0x88, ['_UNICODE_STRING']],
'ClockType' : [ 0x98, ['unsigned long']],
'CollectionOn' : [ 0x9c, ['long']],
'MaximumFileSize' : [ 0xa0, ['unsigned long']],
'LoggerMode' : [ 0xa4, ['unsigned long']],
'LastFlushedBuffer' : [ 0xa8, ['unsigned long']],
'FlushTimer' : [ 0xac, ['unsigned long']],
'FlushThreshold' : [ 0xb0, ['unsigned long']],
'ByteOffset' : [ 0xb8, ['_LARGE_INTEGER']],
'FlushTimeStamp' : [ 0xc0, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0xc8, ['unsigned long']],
'BuffersAvailable' : [ 0xcc, ['long']],
'NumberOfBuffers' : [ 0xd0, ['long']],
'MaximumBuffers' : [ 0xd4, ['unsigned long']],
'EventsLost' : [ 0xd8, ['unsigned long']],
'BuffersWritten' : [ 0xdc, ['unsigned long']],
'LogBuffersLost' : [ 0xe0, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0xe4, ['unsigned long']],
'RealTimeBuffersLost' : [ 0xe8, ['unsigned long']],
'BufferSize' : [ 0xec, ['unsigned long']],
'MaximumEventSize' : [ 0xf0, ['unsigned long']],
'SequencePtr' : [ 0xf8, ['pointer64', ['long']]],
'LocalSequence' : [ 0x100, ['unsigned long']],
'InstanceGuid' : [ 0x104, ['_GUID']],
'GetCpuClock' : [ 0x118, ['pointer64', ['void']]],
'FileCounter' : [ 0x120, ['long']],
'BufferCallback' : [ 0x128, ['pointer64', ['void']]],
'PoolType' : [ 0x130, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0x138, ['_ETW_REF_CLOCK']],
'RealtimeLoggerContextFreed' : [ 0x148, ['unsigned char']],
'Consumers' : [ 0x150, ['_LIST_ENTRY']],
'NumConsumers' : [ 0x160, ['unsigned long']],
'Connecting' : [ 0x168, ['_LIST_ENTRY']],
'NewConsumer' : [ 0x178, ['unsigned char']],
'RealtimeLogfileHandle' : [ 0x180, ['pointer64', ['void']]],
'RealtimeLogfileName' : [ 0x188, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x198, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x1a0, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x1a8, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x1b0, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x1b8, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x1c0, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x1c8, ['_ETW_REF_CLOCK']],
'RealtimeDisconnectProcessId' : [ 0x1d8, ['unsigned long']],
'RealtimeDisconnectConsumerId' : [ 0x1dc, ['unsigned long']],
'NewRTEventsLost' : [ 0x1e0, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x1e8, ['_KEVENT']],
'FlushEvent' : [ 0x200, ['_KEVENT']],
'FlushDpc' : [ 0x218, ['_KDPC']],
'LoggerMutex' : [ 0x258, ['_KMUTANT']],
'LoggerLock' : [ 0x290, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x298, ['_SECURITY_CLIENT_CONTEXT']],
'SecurityDescriptor' : [ 0x2e0, ['_EX_FAST_REF']],
'DummyBufferForMarker' : [ 0x2e8, ['_WMI_BUFFER_HEADER']],
'BufferSequenceNumber' : [ 0x330, ['long long']],
'AcceptNewEvents' : [ 0x338, ['long']],
'Flags' : [ 0x33c, ['unsigned long']],
'Persistent' : [ 0x33c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x33c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x33c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x33c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x33c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x33c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x33c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'RequestFlag' : [ 0x340, ['unsigned long']],
'RequestNewFie' : [ 0x340, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RequestUpdateFile' : [ 0x340, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'RequestFlush' : [ 0x340, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RequestDisableRealtime' : [ 0x340, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequestDisconnectConsumer' : [ 0x340, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'StackTraceFilterHookCount' : [ 0x344, ['unsigned short']],
'StackTraceFilter' : [ 0x346, ['array', 16, ['unsigned short']]],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'Wnode' : [ 0x0, ['_WNODE_HEADER']],
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'Padding0' : [ 0x20, ['array', 2, ['unsigned long']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'StartTime' : [ 0x38, ['_LARGE_INTEGER']],
'Entry' : [ 0x38, ['_LIST_ENTRY']],
'Padding2' : [ 0x38, ['pointer64', ['void']]],
'GlobalEntry' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer64', ['void']]],
'Pointer1' : [ 0x40, ['pointer64', ['void']]],
} ],
'_SYSTEM_TRACE_HEADER' : [ 0x20, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'SystemTime' : [ 0x10, ['_LARGE_INTEGER']],
'KernelTime' : [ 0x18, ['unsigned long']],
'UserTime' : [ 0x1c, ['unsigned long']],
} ],
'_PERFINFO_TRACE_HEADER' : [ 0x18, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'TS' : [ 0x8, ['unsigned long long']],
'SystemTime' : [ 0x8, ['_LARGE_INTEGER']],
'Data' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_TRACE_ENABLE_CONTEXT' : [ 0x8, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
} ],
'_TRACE_ENABLE_CONTEXT_EX' : [ 0x10, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
'EnableFlagsHigh' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_ETW_GUID_ENTRY' : [ 0x170, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x10, ['long']],
'Guid' : [ 0x14, ['_GUID']],
'RegListHead' : [ 0x28, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'LastEnable' : [ 0x40, ['_ETW_LAST_ENABLE_INFO']],
'ProviderEnableInfo' : [ 0x50, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x70, ['array', 8, ['_TRACE_ENABLE_INFO']]],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_17fa' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_17fc' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_17fa']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_17fe' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1800' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_17fe']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_17fc']],
'u2' : [ 0x4, ['__unnamed_1800']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_BLOB_TYPE' : [ 0x38, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'CreatedObjects' : [ 0xc, ['unsigned long']],
'DeletedObjects' : [ 0x10, ['unsigned long']],
'DeleteProcedure' : [ 0x18, ['pointer64', ['void']]],
'DestroyProcedure' : [ 0x20, ['pointer64', ['void']]],
'UsualSize' : [ 0x28, ['unsigned long long']],
'LookasideIndex' : [ 0x30, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1817' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_1819' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_1817']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x20, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_1819']],
'ResourceId' : [ 0x11, ['unsigned char']],
'CachedReferences' : [ 0x12, ['short']],
'ReferenceCount' : [ 0x14, ['long']],
'Lock' : [ 0x18, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_1824' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1826' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1824']],
} ],
'_KALPC_SECTION' : [ 0x50, {
'u1' : [ 0x0, ['__unnamed_1826']],
'SectionObject' : [ 0x8, ['pointer64', ['void']]],
'Size' : [ 0x10, ['unsigned long long']],
'HandleTable' : [ 0x18, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0x20, ['pointer64', ['void']]],
'OwnerProcess' : [ 0x28, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x30, ['pointer64', ['_ALPC_PORT']]],
'NumberOfRegions' : [ 0x38, ['unsigned long']],
'RegionListHead' : [ 0x40, ['_LIST_ENTRY']],
} ],
'__unnamed_1833' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_1835' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1833']],
} ],
'_KALPC_REGION' : [ 0x60, {
'u1' : [ 0x0, ['__unnamed_1835']],
'RegionListEntry' : [ 0x8, ['_LIST_ENTRY']],
'Section' : [ 0x18, ['pointer64', ['_KALPC_SECTION']]],
'Offset' : [ 0x20, ['unsigned long long']],
'Size' : [ 0x28, ['unsigned long long']],
'ViewSize' : [ 0x30, ['unsigned long long']],
'ReadOnlyView' : [ 0x38, ['pointer64', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x40, ['pointer64', ['_KALPC_VIEW']]],
'NumberOfViews' : [ 0x48, ['unsigned long']],
'ViewListHead' : [ 0x50, ['_LIST_ENTRY']],
} ],
'__unnamed_183b' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_183d' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_183b']],
} ],
'_KALPC_VIEW' : [ 0x68, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_183d']],
'Region' : [ 0x18, ['pointer64', ['_KALPC_REGION']]],
'OwnerPort' : [ 0x20, ['pointer64', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x28, ['pointer64', ['_EPROCESS']]],
'Address' : [ 0x30, ['pointer64', ['void']]],
'Size' : [ 0x38, ['unsigned long long']],
'SecureViewHandle' : [ 0x40, ['pointer64', ['void']]],
'WriteAccessHandle' : [ 0x48, ['pointer64', ['void']]],
'NumberOfOwnerMessages' : [ 0x50, ['unsigned long']],
'ProcessViewListEntry' : [ 0x58, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x48, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x8, ['pointer64', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x10, ['pointer64', ['_ALPC_PORT']]],
'CommunicationList' : [ 0x18, ['_LIST_ENTRY']],
'HandleTable' : [ 0x28, ['_ALPC_HANDLE_TABLE']],
} ],
'__unnamed_1855' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
} ],
'__unnamed_1857' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1855']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0x198, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'SequenceNo' : [ 0x20, ['unsigned long']],
'CompletionPort' : [ 0x28, ['pointer64', ['void']]],
'CompletionKey' : [ 0x30, ['pointer64', ['void']]],
'CompletionPacketLookaside' : [ 0x38, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x40, ['pointer64', ['void']]],
'StaticSecurity' : [ 0x48, ['_SECURITY_CLIENT_CONTEXT']],
'MainQueue' : [ 0x90, ['_LIST_ENTRY']],
'PendingQueue' : [ 0xa0, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0xb0, ['_LIST_ENTRY']],
'WaitQueue' : [ 0xc0, ['_LIST_ENTRY']],
'Semaphore' : [ 0xd0, ['pointer64', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0xd0, ['pointer64', ['_KEVENT']]],
'Lock' : [ 0xd8, ['_EX_PUSH_LOCK']],
'PortAttributes' : [ 0xe0, ['_ALPC_PORT_ATTRIBUTES']],
'ResourceListLock' : [ 0x128, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0x130, ['_LIST_ENTRY']],
'CompletionList' : [ 0x140, ['pointer64', ['_ALPC_COMPLETION_LIST']]],
'MessageZone' : [ 0x148, ['pointer64', ['_ALPC_MESSAGE_ZONE']]],
'CanceledQueue' : [ 0x150, ['_LIST_ENTRY']],
'u1' : [ 0x160, ['__unnamed_1857']],
'TargetQueuePort' : [ 0x168, ['pointer64', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0x170, ['pointer64', ['_ALPC_PORT']]],
'Message' : [ 0x178, ['pointer64', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0x180, ['unsigned long']],
'PendingQueueLength' : [ 0x184, ['unsigned long']],
'LargeMessageQueueLength' : [ 0x188, ['unsigned long']],
'CanceledQueueLength' : [ 0x18c, ['unsigned long']],
'WaitQueueLength' : [ 0x190, ['unsigned long']],
} ],
'_PORT_MESSAGE32' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_17fc']],
'u2' : [ 0x4, ['__unnamed_1800']],
'ClientId' : [ 0x8, ['_CLIENT_ID32']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1873' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
} ],
'__unnamed_1875' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1873']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x108, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtensionBuffer' : [ 0x10, ['pointer64', ['void']]],
'ExtensionBufferSize' : [ 0x18, ['unsigned long long']],
'QuotaProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'QuotaBlock' : [ 0x20, ['pointer64', ['void']]],
'SequenceNo' : [ 0x28, ['long']],
'u1' : [ 0x2c, ['__unnamed_1875']],
'CancelSequencePort' : [ 0x30, ['pointer64', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x38, ['pointer64', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x40, ['long']],
'CancelListEntry' : [ 0x48, ['_LIST_ENTRY']],
'WaitingThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'Reserve' : [ 0x60, ['pointer64', ['_KALPC_RESERVE']]],
'PortQueue' : [ 0x68, ['pointer64', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x70, ['pointer64', ['_ALPC_PORT']]],
'UniqueTableEntry' : [ 0x78, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'MessageAttributes' : [ 0x80, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0xb8, ['pointer64', ['void']]],
'DataSystemVa' : [ 0xc0, ['pointer64', ['void']]],
'CommunicationInfo' : [ 0xc8, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0xd0, ['pointer64', ['_ALPC_PORT']]],
'ServerThread' : [ 0xd8, ['pointer64', ['_ETHREAD']]],
'PortMessage' : [ 0xe0, ['_PORT_MESSAGE']],
} ],
'_REMOTE_PORT_VIEW' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x8, ['unsigned long long']],
'ViewBase' : [ 0x10, ['pointer64', ['void']]],
} ],
'_KALPC_HANDLE_DATA' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['pointer64', ['_OB_DUPLICATE_OBJECT_STATE']]],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x38, {
'ClientContext' : [ 0x0, ['pointer64', ['void']]],
'ServerContext' : [ 0x8, ['pointer64', ['void']]],
'PortContext' : [ 0x10, ['pointer64', ['void']]],
'CancelPortContext' : [ 0x18, ['pointer64', ['void']]],
'SecurityData' : [ 0x20, ['pointer64', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x28, ['pointer64', ['_KALPC_VIEW']]],
'HandleData' : [ 0x30, ['pointer64', ['_KALPC_HANDLE_DATA']]],
} ],
'__unnamed_18b4' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_18b6' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_18b4']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x70, {
'HandleTable' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x8, ['pointer64', ['void']]],
'OwningProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x68, ['__unnamed_18b6']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x38, {
'PortObject' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'Message' : [ 0x8, ['pointer64', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'Flags' : [ 0x18, ['unsigned long']],
'TargetThread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'TargetPort' : [ 0x28, ['pointer64', ['_ALPC_PORT']]],
'TotalLength' : [ 0x30, ['unsigned short']],
'Type' : [ 0x32, ['unsigned short']],
'DataInfoOffset' : [ 0x34, ['unsigned short']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x48, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long long']],
'MemoryBandwidth' : [ 0x18, ['unsigned long long']],
'MaxPoolUsage' : [ 0x20, ['unsigned long long']],
'MaxSectionSize' : [ 0x28, ['unsigned long long']],
'MaxViewSize' : [ 0x30, ['unsigned long long']],
'MaxTotalSectionSize' : [ 0x38, ['unsigned long long']],
'DupObjectTypes' : [ 0x40, ['unsigned long']],
'Reserved' : [ 0x44, ['unsigned long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x318, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'ModifiedId' : [ 0x38, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x74, ['unsigned long']],
'UserAndGroupCount' : [ 0x78, ['unsigned long']],
'RestrictedSidCount' : [ 0x7c, ['unsigned long']],
'VariableLength' : [ 0x80, ['unsigned long']],
'DynamicCharged' : [ 0x84, ['unsigned long']],
'DynamicAvailable' : [ 0x88, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x8c, ['unsigned long']],
'UserAndGroups' : [ 0x90, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x98, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0xa0, ['pointer64', ['void']]],
'DynamicPart' : [ 0xa8, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0xb0, ['pointer64', ['_ACL']]],
'TokenType' : [ 0xb8, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xbc, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xc0, ['unsigned long']],
'TokenInUse' : [ 0xc4, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xc8, ['unsigned long']],
'MandatoryPolicy' : [ 0xcc, ['unsigned long']],
'ProxyData' : [ 0xd0, ['pointer64', ['_SECURITY_TOKEN_PROXY_DATA']]],
'AuditData' : [ 0xd8, ['pointer64', ['_SECURITY_TOKEN_AUDIT_DATA']]],
'LogonSession' : [ 0xe0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xe8, ['_LUID']],
'SidHash' : [ 0xf0, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x200, ['_SID_AND_ATTRIBUTES_HASH']],
'VariablePart' : [ 0x310, ['unsigned long long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x50, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'BuddyLogonId' : [ 0x10, ['_LUID']],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'Flags' : [ 0x1c, ['unsigned long']],
'pDeviceMap' : [ 0x20, ['pointer64', ['_DEVICE_MAP']]],
'Token' : [ 0x28, ['pointer64', ['void']]],
'AccountName' : [ 0x30, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x40, ['_UNICODE_STRING']],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
'HashIndex' : [ 0x14, ['unsigned short']],
'DirectoryLocked' : [ 0x16, ['unsigned char']],
'LockStateSignature' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x150, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'SessionId' : [ 0x138, ['unsigned long']],
'NamespaceEntry' : [ 0x140, ['pointer64', ['void']]],
'Flags' : [ 0x148, ['unsigned long']],
} ],
'_OBJECT_TYPE' : [ 0x238, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x10, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x20, ['pointer64', ['void']]],
'Index' : [ 0x28, ['unsigned long']],
'TotalNumberOfObjects' : [ 0x2c, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x30, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x34, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x38, ['unsigned long']],
'TypeInfo' : [ 0x40, ['_OBJECT_TYPE_INITIALIZER']],
'Mutex' : [ 0xb0, ['_ERESOURCE']],
'TypeLock' : [ 0x118, ['_EX_PUSH_LOCK']],
'Key' : [ 0x120, ['unsigned long']],
'ObjectLocks' : [ 0x128, ['array', 32, ['_EX_PUSH_LOCK']]],
'CallbackList' : [ 0x228, ['_LIST_ENTRY']],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x8, {
'ImpersonationData' : [ 0x0, ['unsigned long long']],
'ImpersonationToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
} ],
'_MMVAD_FLAGS3' : [ 0x8, {
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned long long')]],
'Teb' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'SequentialAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'LastSequentialTrim' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 24, native_type='unsigned long long')]],
'Spare2' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long long')]],
'LargePageCreating' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 33, native_type='unsigned long long')]],
'Spare3' : [ 0x0, ['BitField', dict(start_bit = 33, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'Reserved1' : [ 0xa8, ['array', 85, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'SharedWaiters' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerEntry' : [ 0x30, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x40, ['unsigned long']],
'ContentionCount' : [ 0x44, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x48, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x4c, ['unsigned long']],
'Reserved2' : [ 0x50, ['pointer64', ['void']]],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x260, ['unsigned long']],
'FreeBins' : [ 0x268, ['_LIST_ENTRY']],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Type' : [ 0x0, ['unsigned char']],
'Abandoned' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['unsigned char']],
'NpxIrql' : [ 0x1, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'Hand' : [ 0x2, ['unsigned char']],
'Inserted' : [ 0x3, ['unsigned char']],
'DebugActive' : [ 0x3, ['unsigned char']],
'DpcActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'PointerProtoPte' : [ 0x0, ['pointer64', ['void']]],
} ],
'_HEAP_COUNTERS' : [ 0x60, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long long']],
'TotalMemoryCommitted' : [ 0x8, ['unsigned long long']],
'TotalMemoryLargeUCR' : [ 0x10, ['unsigned long long']],
'TotalSizeInVirtualBlocks' : [ 0x18, ['unsigned long long']],
'TotalSegments' : [ 0x20, ['unsigned long']],
'TotalUCRs' : [ 0x24, ['unsigned long']],
'CommittOps' : [ 0x28, ['unsigned long']],
'DeCommitOps' : [ 0x2c, ['unsigned long']],
'LockAcquires' : [ 0x30, ['unsigned long']],
'LockCollisions' : [ 0x34, ['unsigned long']],
'CommitRate' : [ 0x38, ['unsigned long']],
'DecommittRate' : [ 0x3c, ['unsigned long']],
'CommitFailures' : [ 0x40, ['unsigned long']],
'InBlockCommitFailures' : [ 0x44, ['unsigned long']],
'CompactHeapCalls' : [ 0x48, ['unsigned long']],
'CompactedUCRs' : [ 0x4c, ['unsigned long']],
'InBlockDeccommits' : [ 0x50, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x58, ['unsigned long long']],
} ],
'_SYSPTES_HEADER' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x10, ['unsigned long long']],
'NumberOfEntries' : [ 0x18, ['unsigned long long']],
'NumberOfEntriesPeak' : [ 0x20, ['unsigned long long']],
} ],
'_PERFINFO_HARDPAGEFAULT_INFORMATION' : [ 0x20, {
'ReadOffset' : [ 0x0, ['_LARGE_INTEGER']],
'VirtualAddress' : [ 0x8, ['pointer64', ['void']]],
'FileObject' : [ 0x10, ['pointer64', ['void']]],
'ThreadId' : [ 0x18, ['unsigned long']],
'ByteCount' : [ 0x1c, ['unsigned long']],
} ],
'_I386_LOADER_BLOCK' : [ 0x10, {
'CommonDataArea' : [ 0x0, ['pointer64', ['void']]],
'MachineType' : [ 0x8, ['unsigned long']],
'VirtualBias' : [ 0xc, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_ARC_DISK_INFORMATION' : [ 0x10, {
'DiskSignatures' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x10, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x8, ['unsigned long long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_DEVPROPKEY' : [ 0x14, {
'fmtid' : [ 0x0, ['_GUID']],
'pid' : [ 0x10, ['unsigned long']],
} ],
'_WHEA_NMI_ERROR' : [ 0xc, {
'Data' : [ 0x0, ['array', 8, ['unsigned char']]],
'Flags' : [ 0x8, ['_WHEA_NMI_ERROR_FLAGS']],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_HANDLE_TABLE' : [ 0x60, {
'TableCode' : [ 0x0, ['unsigned long long']],
'QuotaProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x10, ['pointer64', ['void']]],
'HandleLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'HandleTableList' : [ 0x20, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x30, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x38, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x40, ['long']],
'Flags' : [ 0x44, ['unsigned long']],
'StrictFIFO' : [ 0x44, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FirstFreeHandle' : [ 0x48, ['long']],
'LastFreeHandleEntry' : [ 0x50, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x58, ['long']],
'NextHandleNeedingPool' : [ 0x5c, ['unsigned long']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['unsigned long']],
'PoolType' : [ 0xc, ['unsigned long']],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_VI_CANCEL_GLOBALS' : [ 0x78, {
'CancelLock' : [ 0x0, ['unsigned long long']],
'IssueLock' : [ 0x8, ['unsigned long long']],
'Counters' : [ 0x10, ['array', 25, ['long']]],
} ],
'_KALPC_RESERVE' : [ 0x28, {
'OwnerPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'HandleTable' : [ 0x8, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Message' : [ 0x18, ['pointer64', ['_KALPC_MESSAGE']]],
'Active' : [ 0x20, ['long']],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_CM_KEY_BODY' : [ 0x58, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
'Flags' : [ 0x30, ['unsigned long']],
'KtmTrans' : [ 0x38, ['pointer64', ['void']]],
'KtmUow' : [ 0x40, ['pointer64', ['_GUID']]],
'ContextListHead' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_XMM_SAVE_AREA32' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CpuValid' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x54, ['unsigned long']],
} ],
'__unnamed_19bc' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_19be' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_19bc']],
'Private' : [ 0x0, ['__unnamed_19be']],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x20, {
'IssueType' : [ 0x0, ['unsigned long long']],
'Address' : [ 0x8, ['pointer64', ['void']]],
'Parameters' : [ 0x10, ['array', 2, ['unsigned long long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DirtyPages' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'Spare' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'_OBJECT_REF_INFO' : [ 0x28, {
'ObjectHeader' : [ 0x0, ['pointer64', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x10, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x20, ['unsigned short']],
'MaxStacks' : [ 0x22, ['unsigned short']],
'StackInfo' : [ 0x24, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_CMHIVE' : [ 0xb48, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x590, ['array', 6, ['pointer64', ['void']]]],
'NotifyList' : [ 0x5c0, ['_LIST_ENTRY']],
'HiveList' : [ 0x5d0, ['_LIST_ENTRY']],
'HiveLock' : [ 0x5e0, ['pointer64', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x5e8, ['_EX_PUSH_LOCK']],
'ViewLockOwner' : [ 0x5f0, ['pointer64', ['_KTHREAD']]],
'ViewLockLast' : [ 0x5f8, ['unsigned long']],
'ViewUnLockLast' : [ 0x5fc, ['unsigned long']],
'WriterLock' : [ 0x600, ['pointer64', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0x608, ['_EX_PUSH_LOCK']],
'SecurityLock' : [ 0x610, ['_EX_PUSH_LOCK']],
'MappedViewList' : [ 0x618, ['_LIST_ENTRY']],
'PinnedViewList' : [ 0x628, ['_LIST_ENTRY']],
'FlushedViewList' : [ 0x638, ['_LIST_ENTRY']],
'MappedViewCount' : [ 0x648, ['unsigned short']],
'PinnedViewCount' : [ 0x64a, ['unsigned short']],
'UseCount' : [ 0x64c, ['unsigned long']],
'ViewsPerHive' : [ 0x650, ['unsigned long']],
'FileObject' : [ 0x658, ['pointer64', ['_FILE_OBJECT']]],
'LastShrinkHiveSize' : [ 0x660, ['unsigned long']],
'ActualFileSize' : [ 0x668, ['_LARGE_INTEGER']],
'FileFullPath' : [ 0x670, ['_UNICODE_STRING']],
'FileUserName' : [ 0x680, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0x690, ['_UNICODE_STRING']],
'SecurityCount' : [ 0x6a0, ['unsigned long']],
'SecurityCacheSize' : [ 0x6a4, ['unsigned long']],
'SecurityHitHint' : [ 0x6a8, ['long']],
'SecurityCache' : [ 0x6b0, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x6b8, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0xab8, ['unsigned long']],
'UnloadEventArray' : [ 0xac0, ['pointer64', ['pointer64', ['_KEVENT']]]],
'RootKcb' : [ 0xac8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0xad0, ['unsigned char']],
'UnloadWorkItem' : [ 0xad8, ['pointer64', ['_CM_WORKITEM']]],
'GrowOnlyMode' : [ 0xae0, ['unsigned char']],
'GrowOffset' : [ 0xae4, ['unsigned long']],
'KcbConvertListHead' : [ 0xae8, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0xaf8, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0xb08, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0xb10, ['unsigned long']],
'TrustClassEntry' : [ 0xb18, ['_LIST_ENTRY']],
'FlushCount' : [ 0xb28, ['unsigned long']],
'CmRm' : [ 0xb30, ['pointer64', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0xb38, ['unsigned long']],
'CmRmInitFailStatus' : [ 0xb3c, ['long']],
'CreatorOwner' : [ 0xb40, ['pointer64', ['_KTHREAD']]],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0x18, {
'SecurityContext' : [ 0x0, ['_IMAGE_SECURITY_CONTEXT']],
'DynamicRelocations' : [ 0x8, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x10, ['long']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'__unnamed_19ed' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_19f3' : [ 0x8, {
'Banked' : [ 0x0, ['pointer64', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x78, {
'u1' : [ 0x0, ['__unnamed_14aa']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_14ad']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_14b0']],
'u2' : [ 0x40, ['__unnamed_14ba']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'u3' : [ 0x60, ['__unnamed_19ed']],
'u4' : [ 0x70, ['__unnamed_19f3']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_EJOB' : [ 0x1b0, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xb8, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0xc0, ['unsigned long']],
'TotalProcesses' : [ 0xc4, ['unsigned long']],
'ActiveProcesses' : [ 0xc8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xcc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xd0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xd8, ['_LARGE_INTEGER']],
'LimitFlags' : [ 0xe0, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0xe8, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xf0, ['unsigned long long']],
'ActiveProcessLimit' : [ 0xf8, ['unsigned long']],
'Affinity' : [ 0x100, ['unsigned long long']],
'PriorityClass' : [ 0x108, ['unsigned char']],
'AccessState' : [ 0x110, ['pointer64', ['_JOB_ACCESS_STATE']]],
'UIRestrictionsClass' : [ 0x118, ['unsigned long']],
'EndOfJobTimeAction' : [ 0x11c, ['unsigned long']],
'CompletionPort' : [ 0x120, ['pointer64', ['void']]],
'CompletionKey' : [ 0x128, ['pointer64', ['void']]],
'SessionId' : [ 0x130, ['unsigned long']],
'SchedulingClass' : [ 0x134, ['unsigned long']],
'ReadOperationCount' : [ 0x138, ['unsigned long long']],
'WriteOperationCount' : [ 0x140, ['unsigned long long']],
'OtherOperationCount' : [ 0x148, ['unsigned long long']],
'ReadTransferCount' : [ 0x150, ['unsigned long long']],
'WriteTransferCount' : [ 0x158, ['unsigned long long']],
'OtherTransferCount' : [ 0x160, ['unsigned long long']],
'ProcessMemoryLimit' : [ 0x168, ['unsigned long long']],
'JobMemoryLimit' : [ 0x170, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x178, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x180, ['unsigned long long']],
'CurrentJobMemoryUsed' : [ 0x188, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x190, ['_EX_PUSH_LOCK']],
'JobSetLinks' : [ 0x198, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x1a8, ['unsigned long']],
'JobFlags' : [ 0x1ac, ['unsigned long']],
} ],
'__unnamed_1a06' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Hypervisor' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'HvMaxCState' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_IDLE_STATES' : [ 0x48, {
'Type' : [ 0x0, ['unsigned long']],
'Count' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['__unnamed_1a06']],
'TargetState' : [ 0xc, ['unsigned long']],
'ActualState' : [ 0x10, ['unsigned long']],
'OldState' : [ 0x14, ['unsigned long']],
'TargetProcessors' : [ 0x18, ['unsigned long long']],
'State' : [ 0x20, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'_PEB' : [ 0x368, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'IFEOKey' : [ 0x48, ['pointer64', ['void']]],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'UserSharedInfoPtr' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'SparePebPtr0' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'HotpatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['pointer64', ['void']]],
'WerShipAssertPtr' : [ 0x360, ['pointer64', ['void']]],
} ],
'__unnamed_1a1f' : [ 0x18, {
'EfiInformation' : [ 0x0, ['_EFI_FIRMWARE_INFORMATION']],
'PcatInformation' : [ 0x0, ['_PCAT_FIRMWARE_INFORMATION']],
} ],
'_FIRMWARE_INFORMATION_LOADER_BLOCK' : [ 0x20, {
'FirmwareTypeEfi' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x8, ['__unnamed_1a1f']],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x10, ['_LIST_ENTRY']],
'Address' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_POOL_DESCRIPTOR' : [ 0x1048, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PoolIndex' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x8, ['long']],
'RunningDeAllocs' : [ 0xc, ['long']],
'TotalPages' : [ 0x10, ['long']],
'TotalBigPages' : [ 0x14, ['long']],
'Threshold' : [ 0x18, ['unsigned long']],
'LockAddress' : [ 0x20, ['pointer64', ['void']]],
'PendingFrees' : [ 0x28, ['pointer64', ['pointer64', ['void']]]],
'ThreadsProcessingDeferrals' : [ 0x30, ['long']],
'PendingFreeDepth' : [ 0x34, ['long']],
'TotalBytes' : [ 0x38, ['unsigned long long']],
'Spare0' : [ 0x40, ['unsigned long long']],
'ListHeads' : [ 0x48, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevNone'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x20, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x8, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0x18, ['unsigned long long']],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_KINTERRUPT' : [ 0xa0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'MessageServiceRoutine' : [ 0x20, ['pointer64', ['void']]],
'MessageIndex' : [ 0x28, ['unsigned long']],
'ServiceContext' : [ 0x30, ['pointer64', ['void']]],
'SpinLock' : [ 0x38, ['unsigned long long']],
'TickCount' : [ 0x40, ['unsigned long']],
'ActualLock' : [ 0x48, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x50, ['pointer64', ['void']]],
'Vector' : [ 0x58, ['unsigned long']],
'Irql' : [ 0x5c, ['unsigned char']],
'SynchronizeIrql' : [ 0x5d, ['unsigned char']],
'FloatingSave' : [ 0x5e, ['unsigned char']],
'Connected' : [ 0x5f, ['unsigned char']],
'Number' : [ 0x60, ['unsigned char']],
'ShareVector' : [ 0x61, ['unsigned char']],
'Mode' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptActiveHigh', 2: 'InterruptActiveLow'})]],
'ServiceCount' : [ 0x6c, ['unsigned long']],
'DispatchCount' : [ 0x70, ['unsigned long']],
'Rsvd1' : [ 0x78, ['unsigned long long']],
'TrapFrame' : [ 0x80, ['pointer64', ['_KTRAP_FRAME']]],
'Reserved' : [ 0x88, ['pointer64', ['void']]],
'DispatchCode' : [ 0x90, ['array', 4, ['unsigned long']]],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long long']],
'GrantedAccess' : [ 0x8, ['unsigned long']],
'GrantedAccessIndex' : [ 0x8, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xa, ['unsigned short']],
'NextFreeTableEntry' : [ 0x8, ['long']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x38, {
'FileName' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'RegRootName' : [ 0x10, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x18, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x20, ['unsigned long']],
'CmHiveFlags' : [ 0x24, ['unsigned long']],
'CmHive2' : [ 0x28, ['pointer64', ['_CMHIVE']]],
'ThreadFinished' : [ 0x30, ['unsigned char']],
'ThreadStarted' : [ 0x31, ['unsigned char']],
'Allocate' : [ 0x32, ['unsigned char']],
'WinPERequired' : [ 0x33, ['unsigned char']],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XMM_SAVE_AREA32']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x20, {
'Flags' : [ 0x0, ['unsigned long']],
'Handles' : [ 0x8, ['pointer64', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x10, ['unsigned long']],
'Lock' : [ 0x18, ['_EX_PUSH_LOCK']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x200, {
'Irp' : [ 0x0, ['pointer64', ['_IRP']]],
'StackTrace' : [ 0x8, ['array', 63, ['pointer64', ['void']]]],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0x98, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Mdl' : [ 0x18, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x20, ['pointer64', ['void']]],
'UserLimit' : [ 0x28, ['pointer64', ['void']]],
'DataUserVa' : [ 0x30, ['pointer64', ['void']]],
'SystemVa' : [ 0x38, ['pointer64', ['void']]],
'TotalSize' : [ 0x40, ['unsigned long long']],
'Header' : [ 0x48, ['pointer64', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x50, ['pointer64', ['void']]],
'ListSize' : [ 0x58, ['unsigned long long']],
'Bitmap' : [ 0x60, ['pointer64', ['void']]],
'BitmapSize' : [ 0x68, ['unsigned long long']],
'Data' : [ 0x70, ['pointer64', ['void']]],
'DataSize' : [ 0x78, ['unsigned long long']],
'BitmapLimit' : [ 0x80, ['unsigned long']],
'BitmapNextHint' : [ 0x84, ['unsigned long']],
'ConcurrencyCount' : [ 0x88, ['unsigned long']],
'AttributeFlags' : [ 0x8c, ['unsigned long']],
'AttributeSize' : [ 0x90, ['unsigned long']],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_LAZY_WRITER' : [ 0x98, {
'WorkQueue' : [ 0x0, ['_LIST_ENTRY']],
'ScanDpc' : [ 0x10, ['_KDPC']],
'ScanTimer' : [ 0x50, ['_KTIMER']],
'ScanActive' : [ 0x90, ['unsigned char']],
'OtherWork' : [ 0x91, ['unsigned char']],
'PendingTeardown' : [ 0x92, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_CM_RM' : [ 0x88, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x10, ['_LIST_ENTRY']],
'TmHandle' : [ 0x20, ['pointer64', ['void']]],
'Tm' : [ 0x28, ['pointer64', ['void']]],
'RmHandle' : [ 0x30, ['pointer64', ['void']]],
'KtmRm' : [ 0x38, ['pointer64', ['void']]],
'RefCount' : [ 0x40, ['unsigned long']],
'ContainerNum' : [ 0x44, ['unsigned long']],
'ContainerSize' : [ 0x48, ['unsigned long long']],
'CmHive' : [ 0x50, ['pointer64', ['_CMHIVE']]],
'LogFileObject' : [ 0x58, ['pointer64', ['void']]],
'MarshallingContext' : [ 0x60, ['pointer64', ['void']]],
'RmFlags' : [ 0x68, ['unsigned long']],
'LogStartStatus1' : [ 0x6c, ['long']],
'LogStartStatus2' : [ 0x70, ['long']],
'BaseLsn' : [ 0x78, ['unsigned long long']],
'RmLock' : [ 0x80, ['pointer64', ['_ERESOURCE']]],
} ],
'_MMVAD_FLAGS' : [ 0x8, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 51, native_type='unsigned long long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 61, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 61, end_bit = 63, native_type='unsigned long long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PCIXDEVICE_ERROR' : [ 0x68, {
'ValidBits' : [ 0x0, ['_WHEA_PCIXDEVICE_ERROR_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'IdInfo' : [ 0x10, ['_WHEA_PCIXDEVICE_ID']],
'MemoryNumber' : [ 0x20, ['unsigned long']],
'IoNumber' : [ 0x24, ['unsigned long']],
'RegisterDataPairs' : [ 0x28, ['array', 4, ['WHEA_PCIXDEVICE_REGISTER_PAIR']]],
} ],
'_MMWSLE_HASH' : [ 0x4, {
'Index' : [ 0x0, ['unsigned long']],
} ],
'_UNEXPECTED_INTERRUPT' : [ 0x10, {
'PushImmOp' : [ 0x0, ['unsigned char']],
'PushImm' : [ 0x1, ['unsigned long']],
'PushRbp' : [ 0x5, ['unsigned char']],
'JmpOp' : [ 0x6, ['unsigned char']],
'JmpOffset' : [ 0x7, ['long']],
} ],
'__unnamed_1aa7' : [ 0x28, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
} ],
'_HEAP_LOCK' : [ 0x28, {
'Lock' : [ 0x0, ['__unnamed_1aa7']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_DRIVER_EXTENSION' : [ 0x38, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_VF_TRACKER_STAMP' : [ 0x10, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'Flags' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x9, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0xa, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'OldIrql' : [ 0x8, ['unsigned char']],
'NewIrql' : [ 0x9, ['unsigned char']],
'Processor' : [ 0xa, ['unsigned char']],
'TickCount' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 5, ['pointer64', ['void']]]],
} ],
'_PEB_LDR_DATA' : [ 0x58, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
'ShutdownInProgress' : [ 0x48, ['unsigned char']],
'ShutdownThreadId' : [ 0x50, ['pointer64', ['void']]],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_NLS_DATA_BLOCK' : [ 0x18, {
'AnsiCodePageData' : [ 0x0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0x8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0x10, ['pointer64', ['void']]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0x100, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'DelayedCloseIndex' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 22, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'KeyHash' : [ 0x8, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x18, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x20, ['unsigned long']],
'ParentKcb' : [ 0x28, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x30, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x38, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x40, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x50, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x50, ['unsigned long']],
'SubKeyCount' : [ 0x50, ['unsigned long']],
'KeyBodyListHead' : [ 0x58, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x58, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x68, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'DelayCloseEntry' : [ 0x88, ['pointer64', ['void']]],
'KcbLastWriteTime' : [ 0x90, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x98, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x9a, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x9c, ['unsigned long']],
'KcbUserFlags' : [ 0xa0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0xa0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0xa0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0xa0, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'RealKeyName' : [ 0xa8, ['pointer64', ['unsigned char']]],
'KCBUoWListHead' : [ 0xb0, ['_LIST_ENTRY']],
'TransKCBOwner' : [ 0xc0, ['pointer64', ['_CM_TRANS']]],
'KCBLock' : [ 0xc8, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0xd8, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0xe8, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0xf0, ['pointer64', ['_CM_TRANS']]],
'FullKCBName' : [ 0xf8, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_SECURITY_TOKEN_AUDIT_DATA' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'GrantMask' : [ 0x4, ['unsigned long']],
'DenyMask' : [ 0x8, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['long']],
'NonPagedFrees' : [ 0x8, ['long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_MM_SUBSECTION_AVL_TABLE' : [ 0x38, {
'BalancedRoot' : [ 0x0, ['_MMSUBSECTION_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0x8, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Rom' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'KernelStack' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_SEGMENT_OBJECT' : [ 0x40, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x18, ['unsigned long']],
'ImageCommitment' : [ 0x1c, ['unsigned long']],
'ControlArea' : [ 0x20, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
'MmSectionFlags' : [ 0x30, ['pointer64', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x38, ['pointer64', ['_MMSUBSECTION_FLAGS']]],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'SessionSpace' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ModwriterAttached' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SessionMaster' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'TrimmerAttached' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'TrimmerDetaching' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Available' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
} ],
'PPM_IDLE_ACCOUNTING' : [ 0x48, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'State' : [ 0x18, ['array', 1, ['PPM_IDLE_STATE_ACCOUNTING']]],
} ],
'PPM_IDLE_STATE_ACCOUNTING' : [ 0x30, {
'IdleTransitions' : [ 0x0, ['unsigned long']],
'FailedTransitions' : [ 0x4, ['unsigned long']],
'InvalidBucketIndex' : [ 0x8, ['unsigned long']],
'TotalTime' : [ 0x10, ['unsigned long long']],
'IdleTimeBuckets' : [ 0x18, ['array', 6, ['unsigned long']]],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x38, {
'Lock' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'ActiveCount' : [ 0xc, ['unsigned long']],
'PendingNullCount' : [ 0x10, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x14, ['unsigned long']],
'PendingDelete' : [ 0x18, ['unsigned long']],
'FreeListHead' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x28, ['pointer64', ['void']]],
'CompletionKey' : [ 0x30, ['pointer64', ['void']]],
'Entry' : [ 0x38, ['array', 0, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderReserve', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderMaximum'})]],
'BasePage' : [ 0x18, ['unsigned long long']],
'PageCount' : [ 0x20, ['unsigned long long']],
} ],
'_CM_INTENT_LOCK' : [ 0x10, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x8, ['pointer64', ['pointer64', ['_CM_KCB_UOW']]]],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'LastSubsectionHint' : [ 0x30, ['pointer64', ['_MSUBSECTION']]],
} ],
'_TEB64' : [ 0x1828, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes1' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'EtwLocalData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'SpareBool0' : [ 0x1744, ['unsigned char']],
'SpareBool1' : [ 0x1745, ['unsigned char']],
'SpareBool2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'DbgSafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'DbgInDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DbgHasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'DbgSkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'DbgWerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'DbgRanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'DbgClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'DbgSuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'RtlDisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'ProcessRundown' : [ 0x180c, ['unsigned long']],
'LastSwitchTime' : [ 0x1810, ['unsigned long long']],
'TotalSwitchOutTime' : [ 0x1818, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x1820, ['_LARGE_INTEGER']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_VI_FAULT_TRACE' : [ 0x48, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_WHEA_PCIXBUS_ERROR' : [ 0x48, {
'ValidBits' : [ 0x0, ['_WHEA_PCIXBUS_ERROR_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'ErrorType' : [ 0x10, ['unsigned short']],
'BusId' : [ 0x12, ['_WHEA_PCIXBUS_ID']],
'Reserved' : [ 0x14, ['unsigned long']],
'BusAddress' : [ 0x18, ['unsigned long long']],
'BusData' : [ 0x20, ['unsigned long long']],
'BusCommand' : [ 0x28, ['_WHEA_PCIXBUS_COMMAND']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'CompleterId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_SECURITY_TOKEN_PROXY_DATA' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'ProxyClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ProxyFull', 1: 'ProxyService', 2: 'ProxyTree', 3: 'ProxyDirectory'})]],
'PathInfo' : [ 0x8, ['_UNICODE_STRING']],
'ContainerMask' : [ 0x18, ['unsigned long']],
'ObjectMask' : [ 0x1c, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevNone'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'OwnerCount' : [ 0x8, ['long']],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_MI_SECTION_CREATION_GATE' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_MI_SECTION_CREATION_GATE']]],
'Gate' : [ 0x8, ['_KGATE']],
} ],
'_ETIMER' : [ 0x108, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x40, ['_KAPC']],
'TimerDpc' : [ 0x98, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Lock' : [ 0xe8, ['unsigned long long']],
'Period' : [ 0xf0, ['long']],
'ApcAssociated' : [ 0xf4, ['unsigned char']],
'WakeTimer' : [ 0xf5, ['unsigned char']],
'WakeTimerListEntry' : [ 0xf8, ['_LIST_ENTRY']],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x20, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_WHEA_PCIXBUS_ERROR_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'BusId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'BusAddress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'BusData' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'BusCommand' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'CompleterId' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1b88' : [ 0x8, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'StartVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'u1' : [ 0x0, ['__unnamed_1b88']],
'EndVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_ARBITER_INSTANCE' : [ 0x698, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'OrderingName' : [ 0x18, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x20, ['long']],
'Allocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x30, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x38, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x48, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x58, ['long']],
'Interface' : [ 0x60, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x68, ['unsigned long']],
'AllocationStack' : [ 0x70, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x78, ['pointer64', ['void']]],
'PackResource' : [ 0x80, ['pointer64', ['void']]],
'UnpackResource' : [ 0x88, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x90, ['pointer64', ['void']]],
'TestAllocation' : [ 0x98, ['pointer64', ['void']]],
'RetestAllocation' : [ 0xa0, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa8, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xb0, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb8, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xc0, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc8, ['pointer64', ['void']]],
'AddReserved' : [ 0xd0, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd8, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xe0, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe8, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xf0, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf8, ['pointer64', ['void']]],
'AddAllocation' : [ 0x100, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x108, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x110, ['pointer64', ['void']]],
'InitializeRangeList' : [ 0x118, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x120, ['unsigned char']],
'TransactionEvent' : [ 0x128, ['pointer64', ['_KEVENT']]],
'Extension' : [ 0x130, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x138, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x140, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x148, ['pointer64', ['void']]],
'PdoDescriptionString' : [ 0x150, ['array', 336, ['wchar']]],
'PdoSymbolicNameString' : [ 0x3f0, ['array', 672, ['unsigned char']]],
'PdoAddressString' : [ 0x690, ['array', 1, ['wchar']]],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'_HMAP_TABLE' : [ 0x4000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_WHEA_MEMORY_ERROR' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
# Kernel guarded mutex. Note the union at 0x30: the two shorts
# KernelApcDisable (0x30) and SpecialApcDisable (0x32) overlay
# CombinedApcDisable (unsigned long at 0x30).
'_KGUARDED_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KGATE']],
'KernelApcDisable' : [ 0x30, ['short']],
'SpecialApcDisable' : [ 0x32, ['short']],
'CombinedApcDisable' : [ 0x30, ['unsigned long']],
} ],
# Placeholder loader block for the Alpha architecture (single ULONG).
'_ALPHA_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
# The '__unnamed_1bf2'..'__unnamed_1c08' types below are the anonymous
# structs/unions referenced by _IO_RESOURCE_DESCRIPTOR.u: every variant
# starts at offset 0 of the same 0x18-byte storage.
'__unnamed_1bf2' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1bf8' : [ 0x18, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPolicyMachineDefault', 1: 'IrqPolicyAllCloseProcessors', 2: 'IrqPolicyOneCloseProcessor', 3: 'IrqPolicyAllProcessorsInMachine', 4: 'IrqPolicySpecifiedProcessors', 5: 'IrqPolicySpreadMessagesAcrossAllProcessors'})]],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_1bfa' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1bfc' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1bfe' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1c00' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1c02' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1c04' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1c06' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
# Union of all the variants above (all members at offset 0).
'__unnamed_1c08' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1bf2']],
'Memory' : [ 0x0, ['__unnamed_1bf2']],
'Interrupt' : [ 0x0, ['__unnamed_1bf8']],
'Dma' : [ 0x0, ['__unnamed_1bfa']],
'Generic' : [ 0x0, ['__unnamed_1bf2']],
'DevicePrivate' : [ 0x0, ['__unnamed_1bfc']],
'BusNumber' : [ 0x0, ['__unnamed_1bfe']],
'ConfigData' : [ 0x0, ['__unnamed_1c00']],
'Memory40' : [ 0x0, ['__unnamed_1c02']],
'Memory48' : [ 0x0, ['__unnamed_1c04']],
'Memory64' : [ 0x0, ['__unnamed_1c06']],
} ],
# I/O resource requirement descriptor; 'u' selects a variant per 'Type'.
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1c08']],
} ],
# Power-manager thermal zone bookkeeping structure.
'_POP_THERMAL_ZONE' : [ 0x128, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x10, ['unsigned char']],
'Flags' : [ 0x11, ['unsigned char']],
'Mode' : [ 0x12, ['unsigned char']],
'PendingMode' : [ 0x13, ['unsigned char']],
'ActivePoint' : [ 0x14, ['unsigned char']],
'PendingActivePoint' : [ 0x15, ['unsigned char']],
'Throttle' : [ 0x18, ['long']],
'LastTime' : [ 0x20, ['unsigned long long']],
'SampleRate' : [ 0x28, ['unsigned long']],
'LastTemp' : [ 0x2c, ['unsigned long']],
'PassiveTimer' : [ 0x30, ['_KTIMER']],
'PassiveDpc' : [ 0x70, ['_KDPC']],
'OverThrottled' : [ 0xb0, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0xc8, ['pointer64', ['_IRP']]],
'Info' : [ 0xd0, ['_THERMAL_INFORMATION_EX']],
} ],
# PTE in the free-list form: all members are bitfields of one 64-bit word.
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
# Driver Verifier pool page header.
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
# Handle tracing info; TraceDb is a variable-length array declared as [1].
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
# Memory Descriptor List header.
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
# WHEA PCI-X bus command: bitfields overlaying AsULONGLONG (union).
'_WHEA_PCIXBUS_COMMAND' : [ 0x8, {
'Command' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 56, native_type='unsigned long long')]],
'PCIXCommand' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 57, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
# Configuration Manager (registry) KTM transaction record.
'_CM_TRANS' : [ 0xb0, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x10, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x30, ['pointer64', ['void']]],
'CmRm' : [ 0x38, ['pointer64', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x40, ['pointer64', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x48, ['pointer64', ['void']]],
'KtmUow' : [ 0x50, ['_GUID']],
'StartLsn' : [ 0x60, ['unsigned long long']],
'TransState' : [ 0x68, ['unsigned long']],
'HiveCount' : [ 0x6c, ['unsigned long']],
'HiveArray' : [ 0x70, ['array', 8, ['pointer64', ['_CMHIVE']]]],
} ],
# WHEA error record header validity bits (union with AsULONG).
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
# Variable-length resource list; PartialDescriptors declared as [1].
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
# Captured OBJECT_ATTRIBUTES used during object creation.
'_OBJECT_CREATE_INFORMATION' : [ 0x48, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ParseContext' : [ 0x10, ['pointer64', ['void']]],
'ProbeMode' : [ 0x18, ['unsigned char']],
'PagedPoolCharge' : [ 0x1c, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x20, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x24, ['unsigned long']],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'SecurityQos' : [ 0x30, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x38, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
# Pool header plus the first 8 ULONGs of a pool allocation.
'_POOL_HACKER' : [ 0x30, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x10, ['array', 8, ['unsigned long']]],
} ],
# Per-file section object pointers (heavily used by Volatility file scans).
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1c, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1b, ['unsigned char']],
} ],
# Timestamp-form PTE: bitfields of one 64-bit word.
'_MMPTE_TIMESTAMP' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x110, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x10, ['array', 32, ['unsigned long long']]],
} ],
# Cache manager master bitmap control block (dirty-page tracking).
'_MBCB' : [ 0xb8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'BitmapRange1' : [ 0x28, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x58, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x88, ['_BITMAP_RANGE']],
} ],
# Anonymous flags byte used by PROCESSOR_PERFSTATE_POLICY.Flags below
# (union: AsUCHAR overlays the bitfields).
'__unnamed_1c4b' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
# Processor performance-state (throttling) policy.
# Note 'Spare' and 'Flags' share offset 0x7 (union).
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_1c4b']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
# Registry KCB child cache; ValueList/RealKcb share offset 0x8 (union).
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
# Device queue; Busy/Reserved/Hint overlay the 64 bits at 0x20.
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
# System power-state transition context (bitfields over ContextAsUlong).
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
# Object-type initializer: type flags (the bitfields overlay
# ObjectTypeFlags at 0x2) and the table of type callback procedures.
'_OBJECT_TYPE_INITIALIZER' : [ 0x70, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
} ],
# Saved processor state (special registers plus a full CONTEXT frame).
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
# Deferred Procedure Call object.
'_KDPC' : [ 0x40, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x8, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x28, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
'ActualLimit' : [ 0x20, ['unsigned long long']],
} ],
# Per-process NX/DEP execute options packed into one byte of bitfields.
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
# Token privilege bitmasks (present / enabled / enabled-by-default).
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
# WHEA PCI-X device error valid bits (union with ValidBits).
'_WHEA_PCIXDEVICE_ERROR_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'IdInfo' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'MemoryNumber' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'IoNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'RegisterDataPairs' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'WHEA_PCIXDEVICE_REGISTER_PAIR' : [ 0x10, {
'Register' : [ 0x0, ['unsigned long long']],
'Data' : [ 0x8, ['unsigned long long']],
} ],
# PnP arbiter allocation state for a resource range.
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x18, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'MappingCount' : [ 0x10, ['unsigned long']],
'Reserved' : [ 0x14, ['unsigned long']],
} ],
# Working-set list entry: bitfields of one 64-bit word.
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'LockedInWs' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'LockedInMemory' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
# WOW64 bookkeeping pointer hung off the 64-bit _EPROCESS.
'_WOW64_PROCESS' : [ 0x8, {
'Wow64' : [ 0x0, ['pointer64', ['void']]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
# 32-bit form of the kernel debugger version block.
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
# Heap lookaside interlock sequence; the shorts, the ULONG and the
# 64-bit Exchg all overlay the same storage.
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
# WHEA BCD-style timestamp packed into 64 bits (union with a LARGE_INTEGER).
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
# 32-bit Process Environment Block as seen by WOW64 processes; all
# pointers are stored as 32-bit ULONGs. Note the unions: the named
# bitfields overlay 'BitField' (0x3) and 'CrossProcessFlags' (0x28),
# and KernelCallbackTable/UserSharedInfoPtr share offset 0x2c.
'_PEB32' : [ 0x238, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'IFEOKey' : [ 0x24, ['unsigned long']],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'UserSharedInfoPtr' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x34, ['unsigned long']],
'SparePebPtr0' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'HotpatchInformation' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['unsigned long']],
'WerShipAssertPtr' : [ 0x234, ['unsigned long']],
} ],
# Volume Parameter Block linking a file system device to a real device.
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['wchar']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
# Security context captured for an access check.
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
# Generic-to-specific access right mapping for an object type.
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
# Bugcheck-in-progress state (bitfields overlay 'Value').
'_KBUGCHECK_ACTIVE_STATE' : [ 0x4, {
'BugCheckState' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'RecursionCount' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'BugCheckOwner' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['long']],
} ],
# Prefetcher (SuperFetch) global state.
'_PF_KERNEL_GLOBALS' : [ 0x60, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0x10, ['_KEVENT']],
'AccessBufferMax' : [ 0x28, ['unsigned long']],
'AccessBufferList' : [ 0x40, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x50, ['long']],
'Flags' : [ 0x54, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x58, ['long']],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
# SEH registration record (linked list of handlers).
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
# WHEA PCI-X bus id; AsUSHORT overlays the two bytes (union).
'_WHEA_PCIXBUS_ID' : [ 0x2, {
'BusNumber' : [ 0x0, ['unsigned char']],
'BusSegment' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
# State carried through an object-handle duplication operation.
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x30, {
'SourceProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'SourceHandle' : [ 0x8, ['pointer64', ['void']]],
'Object' : [ 0x10, ['pointer64', ['void']]],
'ObjectType' : [ 0x18, ['pointer64', ['_OBJECT_TYPE']]],
'TargetAccess' : [ 0x20, ['unsigned long']],
'ObjectInfo' : [ 0x24, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x28, ['unsigned long']],
} ],
# Subsection-form PTE: bitfields of one 64-bit word; the subsection
# pointer is recovered from the signed 48-bit field at bit 16.
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
# POWER_STATE union: system and device interpretations of the same ULONG.
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EFI_FIRMWARE_INFORMATION' : [ 0x18, {
'FirmwareVersion' : [ 0x0, ['unsigned long']],
'VirtualEfiRuntimeServices' : [ 0x8, ['pointer64', ['_VIRTUAL_EFI_RUNTIME_SERVICES']]],
'SetVirtualAddressMapStatus' : [ 0x10, ['long']],
'MissedMappingsCount' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1cec' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cee' : [ 0x10, {
'Level' : [ 0x0, ['unsigned long']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1cf0' : [ 0x10, {
'Reserved' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1cf2' : [ 0x10, {
'Raw' : [ 0x0, ['__unnamed_1cf0']],
'Translated' : [ 0x0, ['__unnamed_1cee']],
} ],
'__unnamed_1cf4' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cf6' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cf8' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cfa' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cfc' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1cfe' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1d00' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_1cec']],
'Port' : [ 0x0, ['__unnamed_1cec']],
'Interrupt' : [ 0x0, ['__unnamed_1cee']],
'MessageInterrupt' : [ 0x0, ['__unnamed_1cf2']],
'Memory' : [ 0x0, ['__unnamed_1cec']],
'Dma' : [ 0x0, ['__unnamed_1cf4']],
'DevicePrivate' : [ 0x0, ['__unnamed_1bfc']],
'BusNumber' : [ 0x0, ['__unnamed_1cf6']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_1cf8']],
'Memory40' : [ 0x0, ['__unnamed_1cfa']],
'Memory48' : [ 0x0, ['__unnamed_1cfc']],
'Memory64' : [ 0x0, ['__unnamed_1cfe']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_1d00']],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'__unnamed_1d07' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_1d07']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_MMPTE_HARDWARE_LARGEPAGE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PAT' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 21, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 48, native_type='unsigned long long')]],
'reserved2' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KUSER_SHARED_DATA' : [ 0x3b8, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgSystemDllRelocated' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgSEHValidationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'Cookie' : [ 0x330, ['unsigned long']],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'Wow64SharedInformation' : [ 0x340, ['array', 16, ['unsigned long']]],
'UserModeGlobalLogger' : [ 0x380, ['array', 8, ['unsigned short']]],
'HeapTracingPid' : [ 0x390, ['array', 2, ['unsigned long']]],
'CritSecTracingPid' : [ 0x398, ['array', 2, ['unsigned long']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'AffinityPad' : [ 0x3a8, ['unsigned long long']],
'ActiveProcessorAffinity' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
} ],
'__unnamed_1d24' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_1d24']],
} ],
'_CONFIGURATION_COMPONENT_DATA' : [ 0x48, {
'Parent' : [ 0x0, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Child' : [ 0x8, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Sibling' : [ 0x10, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ComponentEntry' : [ 0x18, ['_CONFIGURATION_COMPONENT']],
'ConfigurationData' : [ 0x40, ['pointer64', ['void']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1d2e' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'_MMSUBSECTION_NODE' : [ 0x28, {
'u' : [ 0x0, ['__unnamed_14cf']],
'StartingSector' : [ 0x4, ['unsigned long']],
'NumberOfFullSectors' : [ 0x8, ['unsigned long']],
'u1' : [ 0x10, ['__unnamed_1d2e']],
'LeftChild' : [ 0x18, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x20, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'__unnamed_1d34' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1d36' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_1d34']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x98, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'TotalBusyCount' : [ 0x8, ['unsigned long']],
'ConservationIdleTime' : [ 0xc, ['unsigned long']],
'PerformanceIdleTime' : [ 0x10, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x20, ['_LIST_ENTRY']],
'DeviceType' : [ 0x30, ['unsigned char']],
'IdleState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'NotifySourceList' : [ 0x40, ['_LIST_ENTRY']],
'NotifyTargetList' : [ 0x50, ['_LIST_ENTRY']],
'PowerChannelSummary' : [ 0x60, ['_POWER_CHANNEL_SUMMARY']],
'Volume' : [ 0x80, ['_LIST_ENTRY']],
'Specific' : [ 0x90, ['__unnamed_1d36']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KENLISTMENT' : [ 0x1e0, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x8, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x30, ['_GUID']],
'Mutex' : [ 0x40, ['_KMUTANT']],
'NextSameTx' : [ 0x78, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x88, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x98, ['pointer64', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0xa0, ['pointer64', ['_KTRANSACTION']]],
'State' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0xac, ['unsigned long']],
'NotificationMask' : [ 0xb0, ['unsigned long']],
'Key' : [ 0xb8, ['pointer64', ['void']]],
'KeyRefCount' : [ 0xc0, ['unsigned long']],
'RecoveryInformation' : [ 0xc8, ['pointer64', ['void']]],
'RecoveryInformationLength' : [ 0xd0, ['unsigned long']],
'DynamicNameInformation' : [ 0xd8, ['pointer64', ['void']]],
'DynamicNameInformationLength' : [ 0xe0, ['unsigned long']],
'FinalNotification' : [ 0xe8, ['pointer64', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0xf8, ['pointer64', ['void']]],
'SubordinateTxHandle' : [ 0x100, ['pointer64', ['void']]],
'CrmEnlistmentEnId' : [ 0x108, ['_GUID']],
'CrmEnlistmentTmId' : [ 0x118, ['_GUID']],
'CrmEnlistmentRmId' : [ 0x128, ['_GUID']],
'NextHistory' : [ 0x138, ['unsigned long']],
'History' : [ 0x13c, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x28, ['unsigned char']],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_IA64_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x300, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x80, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x88, ['unsigned long']],
'LastCallbackId' : [ 0x8c, ['unsigned long']],
'PostCount' : [ 0x100, ['unsigned long']],
'ReturnCount' : [ 0x180, ['unsigned long']],
'LogSequenceNumber' : [ 0x200, ['unsigned long']],
'UserLock' : [ 0x280, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x288, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_DEVICE_MAP' : [ 0x38, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'ReferenceCount' : [ 0x10, ['unsigned long']],
'DriveMap' : [ 0x14, ['unsigned long']],
'DriveType' : [ 0x18, ['array', 32, ['unsigned char']]],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_ETW_KERNEL_TRACE_TIMESTAMP' : [ 0x10, {
'KernelTraceTimeStamp' : [ 0x0, ['array', 2, ['_LARGE_INTEGER']]],
} ],
'_HEAP_DEBUGGING_INFORMATION' : [ 0x30, {
'InterceptorFunction' : [ 0x0, ['pointer64', ['void']]],
'InterceptorValue' : [ 0x8, ['unsigned short']],
'ExtendedOptions' : [ 0xc, ['unsigned long']],
'StackTraceDepth' : [ 0x10, ['unsigned long']],
'MinTotalBlockSize' : [ 0x18, ['unsigned long long']],
'MaxTotalBlockSize' : [ 0x20, ['unsigned long long']],
'HeapLeakEnumerationRoutine' : [ 0x28, ['pointer64', ['void']]],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x38, {
'BasePhysicalPage' : [ 0x0, ['unsigned long long']],
'BasedPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BankSize' : [ 0x10, ['unsigned long']],
'BankShift' : [ 0x14, ['unsigned long']],
'BankedRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'CurrentMappedPte' : [ 0x28, ['pointer64', ['_MMPTE']]],
'BankTemplate' : [ 0x30, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WHEA_PCIEXPRESS_ERROR' : [ 0xd0, {
'ValidBits' : [ 0x0, ['_WHEA_PCIEXPRESS_ERROR_VALIDBITS']],
'PortType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'WheaPciExpressEndpoint', 1: 'WheaPciExpressLegacyEndpoint', 4: 'WheaPciExpressRootPort', 5: 'WheaPciExpressUpstreamSwitchPort', 6: 'WheaPciExpressDownstreamSwitchPort', 7: 'WheaPciExpressToPciXBridge', 8: 'WheaPciXToExpressBridge', 9: 'WheaPciExpressRootComplexIntegratedEndpoint', 10: 'WheaPciExpressRootComplexEventCollector'})]],
'Version' : [ 0xc, ['_WHEA_PCIEXPRESS_VERSION']],
'CommandStatus' : [ 0x10, ['_WHEA_PCIEXPRESS_COMMAND_STATUS']],
'Reserved' : [ 0x14, ['unsigned long']],
'DeviceId' : [ 0x18, ['_WHEA_PCIEXPRESS_DEVICE_ID']],
'DeviceSerialNumber' : [ 0x28, ['unsigned long long']],
'BridgeControlStatus' : [ 0x30, ['_WHEA_PCIEXPRESS_BRIDGE_CONTROL_STATUS']],
'ExpressCapability' : [ 0x34, ['array', 60, ['unsigned char']]],
'AerInfo' : [ 0x70, ['array', 96, ['unsigned char']]],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_1da8' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'UsingHypervisor' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_PERF_STATES' : [ 0x98, {
'Count' : [ 0x0, ['unsigned long']],
'MaxFrequency' : [ 0x4, ['unsigned long']],
'PStateCap' : [ 0x8, ['unsigned long']],
'TStateCap' : [ 0xc, ['unsigned long']],
'MaxPerfState' : [ 0x10, ['unsigned long']],
'MinPerfState' : [ 0x14, ['unsigned long']],
'LowestPState' : [ 0x18, ['unsigned long']],
'IncreaseTime' : [ 0x1c, ['unsigned long']],
'DecreaseTime' : [ 0x20, ['unsigned long']],
'BusyAdjThreshold' : [ 0x24, ['unsigned char']],
'Reserved' : [ 0x25, ['unsigned char']],
'ThrottleStatesOnly' : [ 0x26, ['unsigned char']],
'PolicyType' : [ 0x27, ['unsigned char']],
'TimerInterval' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['__unnamed_1da8']],
'TargetProcessors' : [ 0x30, ['unsigned long long']],
'PStateHandler' : [ 0x38, ['pointer64', ['void']]],
'PStateContext' : [ 0x40, ['unsigned long long']],
'TStateHandler' : [ 0x48, ['pointer64', ['void']]],
'TStateContext' : [ 0x50, ['unsigned long long']],
'FeedbackHandler' : [ 0x58, ['pointer64', ['void']]],
'DiaStats' : [ 0x60, ['pointer64', ['_PPM_DIA_STATS']]],
'DiaStatsCount' : [ 0x68, ['unsigned long']],
'State' : [ 0x70, ['array', 1, ['_PPM_PERF_STATE']]],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_SECTION_OBJECT' : [ 0x30, {
'StartingVa' : [ 0x0, ['pointer64', ['void']]],
'EndingVa' : [ 0x8, ['pointer64', ['void']]],
'Parent' : [ 0x10, ['pointer64', ['void']]],
'LeftChild' : [ 0x18, ['pointer64', ['void']]],
'RightChild' : [ 0x20, ['pointer64', ['void']]],
'Segment' : [ 0x28, ['pointer64', ['_SEGMENT_OBJECT']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['wchar']]],
} ],
'_PPM_PERF_STATE' : [ 0x28, {
'Frequency' : [ 0x0, ['unsigned long']],
'Power' : [ 0x4, ['unsigned long']],
'PercentFrequency' : [ 0x8, ['unsigned char']],
'IncreaseLevel' : [ 0x9, ['unsigned char']],
'DecreaseLevel' : [ 0xa, ['unsigned char']],
'Type' : [ 0xb, ['unsigned char']],
'Control' : [ 0x10, ['unsigned long long']],
'Status' : [ 0x18, ['unsigned long long']],
'TotalHitCount' : [ 0x20, ['unsigned long']],
'DesiredCount' : [ 0x24, ['unsigned long']],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'RequestSummary' : [ 0x0, ['long long']],
'RequestPacket' : [ 0x8, ['_KREQUEST_PACKET']],
'Virtual' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x60, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_WHEA_NMI_ERROR_FLAGS' : [ 0x4, {
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_WMI_TRACE_PACKET' : [ 0x4, {
'Size' : [ 0x0, ['unsigned short']],
'HookId' : [ 0x2, ['unsigned short']],
'Type' : [ 0x2, ['unsigned char']],
'Group' : [ 0x3, ['unsigned char']],
} ],
'_RTL_ATOM_TABLE' : [ 0x70, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x8, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x30, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x60, ['unsigned long']],
'Buckets' : [ 0x68, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_POP_POWER_ACTION' : [ 0xb0, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x38, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'DisplayResumeContext' : [ 0x40, ['pointer64', ['_POP_DISPLAY_RESUME_CONTEXT']]],
'HiberContext' : [ 0x48, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x50, ['unsigned long long']],
'SleepTime' : [ 0x58, ['unsigned long long']],
'FilteredCapabilities' : [ 0x60, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x40, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x18, ['unsigned char']],
'DeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x28, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x30, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x38, ['unsigned long']],
'ActiveChild' : [ 0x3c, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x8, {
'PageHashes' : [ 0x0, ['pointer64', ['void']]],
'Value' : [ 0x0, ['unsigned long long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_1e04' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1e06' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0x10, ['__unnamed_1e04']],
'Button' : [ 0x10, ['__unnamed_1e06']],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_LOADER_PARAMETER_EXTENSION' : [ 0xc8, {
'Size' : [ 0x0, ['unsigned long']],
'Profile' : [ 0x4, ['_PROFILE_PARAMETER_BLOCK']],
'MajorVersion' : [ 0x14, ['unsigned long']],
'MinorVersion' : [ 0x18, ['unsigned long']],
'EmInfFileImage' : [ 0x20, ['pointer64', ['void']]],
'EmInfFileSize' : [ 0x28, ['unsigned long']],
'TriageDumpBlock' : [ 0x30, ['pointer64', ['void']]],
'LoaderPagesSpanned' : [ 0x38, ['unsigned long long']],
'HeadlessLoaderBlock' : [ 0x40, ['pointer64', ['_HEADLESS_LOADER_BLOCK']]],
'SMBiosEPSHeader' : [ 0x48, ['pointer64', ['_SMBIOS_TABLE_HEADER']]],
'DrvDBImage' : [ 0x50, ['pointer64', ['void']]],
'DrvDBSize' : [ 0x58, ['unsigned long']],
'NetworkLoaderBlock' : [ 0x60, ['pointer64', ['_NETWORK_LOADER_BLOCK']]],
'FirmwareDescriptorListHead' : [ 0x68, ['_LIST_ENTRY']],
'AcpiTable' : [ 0x78, ['pointer64', ['void']]],
'AcpiTableSize' : [ 0x80, ['unsigned long']],
'BootViaWinload' : [ 0x84, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x84, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'LoaderPerformanceData' : [ 0x88, ['pointer64', ['_LOADER_PERFORMANCE_DATA']]],
'BootApplicationPersistentData' : [ 0x90, ['_LIST_ENTRY']],
'WmdTestResult' : [ 0xa0, ['pointer64', ['void']]],
'BootIdentifier' : [ 0xa8, ['_GUID']],
'ResumePages' : [ 0xb8, ['unsigned long']],
'DumpHeader' : [ 0xc0, ['pointer64', ['void']]],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x20, ['pointer64', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_WHEA_PCIEXPRESS_VERSION' : [ 0x4, {
'MinorVersion' : [ 0x0, ['unsigned char']],
'MajorVersion' : [ 0x1, ['unsigned char']],
'Reserved' : [ 0x2, ['unsigned short']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x3f8, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x3f0, ['unsigned long long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'_MI_VERIFIER_DRIVER_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
'StartAddress' : [ 0x28, ['pointer64', ['void']]],
'EndAddress' : [ 0x30, ['pointer64', ['void']]],
'Flags' : [ 0x38, ['unsigned long']],
'Signature' : [ 0x40, ['unsigned long long']],
'PoolPageHeaders' : [ 0x50, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x60, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x70, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x74, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x78, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x7c, ['unsigned long']],
'PagedBytes' : [ 0x80, ['unsigned long long']],
'NonPagedBytes' : [ 0x88, ['unsigned long long']],
'PeakPagedBytes' : [ 0x90, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x98, ['unsigned long long']],
} ],
'_RTL_SRWLOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_ALPC_MESSAGE_ZONE' : [ 0x30, {
'Mdl' : [ 0x0, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x8, ['pointer64', ['void']]],
'UserLimit' : [ 0x10, ['pointer64', ['void']]],
'SystemVa' : [ 0x18, ['pointer64', ['void']]],
'SystemLimit' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x28, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x20, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_KSPECIAL_REGISTERS' : [ 0xd8, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POOL_HEADER' : [ 0x10, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'BlockSize' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'PoolType' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'ProcessBilled' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'AllocatorBackTraceIndex' : [ 0x8, ['unsigned short']],
'PoolTagHash' : [ 0xa, ['unsigned short']],
} ],
'_ETW_PROVIDER_TABLE_ENTRY' : [ 0x18, {
'RefCount' : [ 0x0, ['long']],
'State' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'EtwProviderStateFree', 1: 'EtwProviderStateTransition', 2: 'EtwProviderStateActive', 3: 'EtwProviderStateMax'})]],
'RegEntry' : [ 0x8, ['pointer64', ['_ETW_REG_ENTRY']]],
'Caller' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PEB64' : [ 0x368, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'IFEOKey' : [ 0x48, ['unsigned long long']],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'UserSharedInfoPtr' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'SparePebPtr0' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'HotpatchInformation' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['unsigned long long']],
'WerShipAssertPtr' : [ 0x360, ['unsigned long long']],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x80, {
'Address' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x18, ['array', 13, ['pointer64', ['void']]]],
} ],
'__unnamed_1eac' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1e00, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_1eac']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x20, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x28, ['unsigned long long']],
'NonPagablePages' : [ 0x30, ['unsigned long long']],
'CommittedPages' : [ 0x38, ['unsigned long long']],
'PagedPoolStart' : [ 0x40, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x48, ['pointer64', ['void']]],
'SessionObject' : [ 0x50, ['pointer64', ['void']]],
'SessionObjectHandle' : [ 0x58, ['pointer64', ['void']]],
'ResidentProcessCount' : [ 0x60, ['long']],
'ImageLoadingCount' : [ 0x64, ['long']],
'SessionPoolAllocationFailures' : [ 0x68, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x78, ['_LIST_ENTRY']],
'LocaleId' : [ 0x88, ['unsigned long']],
'AttachCount' : [ 0x8c, ['unsigned long']],
'AttachGate' : [ 0x90, ['_KGATE']],
'WsListEntry' : [ 0xa8, ['_LIST_ENTRY']],
'Lookaside' : [ 0xc0, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb40, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xb98, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xc00, ['_MMSUPPORT']],
'Wsle' : [ 0xc68, ['pointer64', ['_MMWSLE']]],
'DriverUnload' : [ 0xc70, ['pointer64', ['void']]],
'PagedPool' : [ 0xc78, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1cc0, ['_MMPTE']],
'SessionVaLock' : [ 0x1cc8, ['_KGUARDED_MUTEX']],
'DynamicVaBitMap' : [ 0x1d00, ['_RTL_BITMAP']],
'DynamicVaHint' : [ 0x1d10, ['unsigned long']],
'SpecialPool' : [ 0x1d18, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1d48, ['_KGUARDED_MUTEX']],
'PoolBigEntriesInUse' : [ 0x1d80, ['long']],
'PagedPoolPdeCount' : [ 0x1d84, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1d88, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1d8c, ['unsigned long']],
'SystemPteInfo' : [ 0x1d90, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1dd8, ['pointer64', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1de0, ['unsigned long long']],
'PoolTrackBigPages' : [ 0x1de8, ['pointer64', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1df0, ['unsigned long long']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x68, {
'Mutex' : [ 0x0, ['_KGUARDED_MUTEX']],
'PagedPoolAllocationMap' : [ 0x38, ['_RTL_BITMAP']],
'FirstPteForPagedPool' : [ 0x48, ['pointer64', ['_MMPTE']]],
'PagedPoolHint' : [ 0x50, ['unsigned long']],
'PagedPoolCommit' : [ 0x58, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x60, ['unsigned long long']],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'_WHEA_GENERIC_PROCESSOR_ERROR' : [ 0xc0, {
'ValidBits' : [ 0x0, ['_WHEA_GENERIC_PROCESSOR_ERROR_VALIDBITS']],
'ProcessorType' : [ 0x8, ['unsigned char']],
'InstructionSet' : [ 0x9, ['unsigned char']],
'ErrorType' : [ 0xa, ['unsigned char']],
'Operation' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned char']],
'Level' : [ 0xd, ['unsigned char']],
'Reserved' : [ 0xe, ['unsigned short']],
'CPUVersion' : [ 0x10, ['unsigned long long']],
'CPUBrandString' : [ 0x18, ['array', 128, ['unsigned char']]],
'ProcessorId' : [ 0x98, ['unsigned long long']],
'TargetAddress' : [ 0xa0, ['unsigned long long']],
'RequesterId' : [ 0xa8, ['unsigned long long']],
'ResponderId' : [ 0xb0, ['unsigned long long']],
'InstructionPointer' : [ 0xb8, ['unsigned long long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x30, {
'PteBase' : [ 0x0, ['pointer64', ['_MMPTE']]],
'FreePteHead' : [ 0x8, ['_MMPTE']],
'FreePteTail' : [ 0x10, ['_MMPTE']],
'PagesInUse' : [ 0x18, ['long long']],
'SpecialPoolPdes' : [ 0x20, ['_RTL_BITMAP']],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_KGUARDED_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x20, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x10, ['_PO_IRP_QUEUE']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_WHEA_PCIEXPRESS_BRIDGE_CONTROL_STATUS' : [ 0x4, {
'BridgeSecondaryStatus' : [ 0x0, ['unsigned short']],
'BridgeControl' : [ 0x2, ['unsigned short']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_KDPC_DATA' : [ 0x20, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_CM_WORKITEM' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1f26' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1f28' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_1f26']],
'Merged' : [ 0x10, ['__unnamed_1f28']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0x18, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x8, ['pointer64', ['void']]],
'Lookaside' : [ 0x10, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'__unnamed_1f31' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_1f31']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_MSUBSECTION' : [ 0x68, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'NextMappedSubsection' : [ 0x10, ['pointer64', ['_MSUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_14cf']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
'u1' : [ 0x38, ['__unnamed_1d2e']],
'LeftChild' : [ 0x40, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x48, ['pointer64', ['_MMSUBSECTION_NODE']]],
'DereferenceList' : [ 0x50, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x60, ['unsigned long long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_VIRTUAL_EFI_RUNTIME_SERVICES' : [ 0x70, {
'GetTime' : [ 0x0, ['unsigned long long']],
'SetTime' : [ 0x8, ['unsigned long long']],
'GetWakeupTime' : [ 0x10, ['unsigned long long']],
'SetWakeupTime' : [ 0x18, ['unsigned long long']],
'SetVirtualAddressMap' : [ 0x20, ['unsigned long long']],
'ConvertPointer' : [ 0x28, ['unsigned long long']],
'GetVariable' : [ 0x30, ['unsigned long long']],
'GetNextVariableName' : [ 0x38, ['unsigned long long']],
'SetVariable' : [ 0x40, ['unsigned long long']],
'GetNextHighMonotonicCount' : [ 0x48, ['unsigned long long']],
'ResetSystem' : [ 0x50, ['unsigned long long']],
'UpdateCapsule' : [ 0x58, ['unsigned long long']],
'QueryCapsuleCapabilities' : [ 0x60, ['unsigned long long']],
'QueryVariableInfo' : [ 0x68, ['unsigned long long']],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'spare2' : [ 0x12, ['array', 3, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_WHEA_MEMORY_ERROR_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_WHEA_PCIEXPRESS_DEVICE_ID' : [ 0x10, {
'VendorID' : [ 0x0, ['unsigned short']],
'DeviceID' : [ 0x2, ['unsigned short']],
'ClassCode' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'FunctionNumber' : [ 0x4, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'DeviceNumber' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Segment' : [ 0x8, ['BitField', dict(start_bit = 8, end_bit = 24, native_type='unsigned long')]],
'PrimaryBusNumber' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'SecondaryBusNumber' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Reserved1' : [ 0xc, ['BitField', dict(start_bit = 8, end_bit = 10, native_type='unsigned long')]],
'SlotNumber' : [ 0xc, ['BitField', dict(start_bit = 10, end_bit = 24, native_type='unsigned long')]],
'Reserved2' : [ 0xc, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_WNODE_HEADER' : [ 0x30, {
'BufferSize' : [ 0x0, ['unsigned long']],
'ProviderId' : [ 0x4, ['unsigned long']],
'HistoricalContext' : [ 0x8, ['unsigned long long']],
'Version' : [ 0x8, ['unsigned long']],
'Linkage' : [ 0xc, ['unsigned long']],
'CountLost' : [ 0x10, ['unsigned long']],
'KernelHandle' : [ 0x10, ['pointer64', ['void']]],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'Guid' : [ 0x18, ['_GUID']],
'ClientContext' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['unsigned long']],
} ],
'__unnamed_1f51' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_1f55' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x50, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'u1' : [ 0x30, ['__unnamed_1f51']],
'u2' : [ 0x38, ['__unnamed_1f55']],
'PrototypePte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'ThePtes' : [ 0x48, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_PCIXDEVICE_ID' : [ 0x10, {
'VendorId' : [ 0x0, ['unsigned short']],
'DeviceId' : [ 0x2, ['unsigned short']],
'ClassCode' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'FunctionNumber' : [ 0x4, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'DeviceNumber' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'BusNumber' : [ 0x8, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'SegmentNumber' : [ 0x8, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'Reserved1' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Reserved2' : [ 0xc, ['unsigned long']],
} ],
'_PCAT_FIRMWARE_INFORMATION' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x68, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'ReadAheadOffset' : [ 0x30, ['array', 2, ['_LARGE_INTEGER']]],
'ReadAheadLength' : [ 0x40, ['array', 2, ['unsigned long']]],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long long']],
'PrivateLinks' : [ 0x50, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x60, ['pointer64', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_RTL_HANDLE_TABLE' : [ 0x30, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x18, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x20, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x28, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_PTE_TRACKER' : [ 0x58, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x10, ['pointer64', ['_MDL']]],
'Count' : [ 0x18, ['unsigned long long']],
'SystemVa' : [ 0x20, ['pointer64', ['void']]],
'StartVa' : [ 0x28, ['pointer64', ['void']]],
'Offset' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
'Page' : [ 0x38, ['unsigned long long']],
'IoMapping' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x40, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x40, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Spare' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'CallingAddress' : [ 0x48, ['pointer64', ['void']]],
'CallersCaller' : [ 0x50, ['pointer64', ['void']]],
} ],
'_MMPFNLIST' : [ 0x20, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
'DependentList' : [ 0x50, ['_LIST_ENTRY']],
'ProviderList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_WHEA_PCIEXPRESS_COMMAND_STATUS' : [ 0x4, {
'Command' : [ 0x0, ['unsigned short']],
'Status' : [ 0x2, ['unsigned short']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x20, {
'BlockAddress' : [ 0x0, ['unsigned long long']],
'BinAddress' : [ 0x8, ['unsigned long long']],
'CmView' : [ 0x10, ['pointer64', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0x18, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x18, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned char']],
'NameLength' : [ 0xf, ['unsigned char']],
'Name' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x8, ['pointer64', ['void']]],
} ],
'_LOADER_PERFORMANCE_DATA' : [ 0x10, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
} ],
'_MMSESSION' : [ 0x58, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x38, ['pointer64', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewTable' : [ 0x40, ['pointer64', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x48, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x4c, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x50, ['unsigned long']],
'BitmapFailures' : [ 0x54, ['unsigned long']],
} ],
'_WHEA_PCIEXPRESS_ERROR_VALIDBITS' : [ 0x8, {
'PortType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Version' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'CommandStatus' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'DeviceId' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'DeviceSerialNumber' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'BridgeControlStatus' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'ExpressCapability' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'AerInfo' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_ETW_REG_ENTRY' : [ 0x50, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x10, ['pointer64', ['_ETW_GUID_ENTRY']]],
'Index' : [ 0x18, ['unsigned short']],
'Flags' : [ 0x1a, ['unsigned short']],
'EnableMask' : [ 0x1c, ['unsigned char']],
'ReplyQueue' : [ 0x20, ['pointer64', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x20, ['array', 4, ['pointer64', ['_ETW_REG_ENTRY']]]],
'Process' : [ 0x40, ['pointer64', ['_EPROCESS']]],
'Callback' : [ 0x40, ['pointer64', ['void']]],
'CallbackContext' : [ 0x48, ['pointer64', ['void']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_KNODE' : [ 0xc0, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x10, ['array', 3, ['_SLIST_HEADER']]],
'PfnDereferenceSListHead' : [ 0x40, ['_SLIST_HEADER']],
'ProcessorMask' : [ 0x50, ['unsigned long long']],
'Color' : [ 0x58, ['unsigned char']],
'Seed' : [ 0x59, ['unsigned char']],
'NodeNumber' : [ 0x5a, ['unsigned char']],
'Flags' : [ 0x5b, ['_flags']],
'MmShiftedColor' : [ 0x5c, ['unsigned long']],
'FreeCount' : [ 0x60, ['array', 2, ['unsigned long long']]],
'PfnDeferredList' : [ 0x70, ['pointer64', ['_SLIST_ENTRY']]],
'Right' : [ 0x78, ['unsigned long']],
'Left' : [ 0x7c, ['unsigned long']],
'CachedKernelStacks' : [ 0x80, ['_CACHED_KSTACK_LIST']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x20, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x10, ['long']],
'Misses' : [ 0x14, ['unsigned long']],
'MissesLast' : [ 0x18, ['unsigned long']],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x2b8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'AbortEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'ReadySemaphore' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x28, ['pointer64', ['_KSEMAPHORE']]],
'GetNewDeviceList' : [ 0x30, ['unsigned char']],
'Order' : [ 0x38, ['_PO_DEVICE_NOTIFY_ORDER']],
'NotifyGdiLevelForPowerOn' : [ 0x288, ['long']],
'NotifyGdiLevelForResumeUI' : [ 0x28c, ['long']],
'Pending' : [ 0x290, ['_LIST_ENTRY']],
'Status' : [ 0x2a0, ['long']],
'FailedDevice' : [ 0x2a8, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x2b0, ['unsigned char']],
'Cancelled' : [ 0x2b1, ['unsigned char']],
'IgnoreErrors' : [ 0x2b2, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x2b3, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x2b4, ['unsigned char']],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WatchProto' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DefaultProtectionMask' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 22, native_type='unsigned long')]],
'ContainsPxeSubsection' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Binary32' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_EX_WORK_QUEUE' : [ 0x58, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x40, ['unsigned long']],
'WorkItemsProcessed' : [ 0x44, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x48, ['unsigned long']],
'QueueDepthLastPass' : [ 0x4c, ['unsigned long']],
'Info' : [ 0x50, ['EX_QUEUE_WORKER_INFO']],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0x18, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_TEB32' : [ 0xff8, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes1' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'EtwLocalData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'SpareBool0' : [ 0xf74, ['unsigned char']],
'SpareBool1' : [ 0xf75, ['unsigned char']],
'SpareBool2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'ImpersonationLocale' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'DbgSafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'DbgInDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DbgHasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'DbgSkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'DbgWerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'DbgRanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'DbgClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'DbgSuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'RtlDisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'ProcessRundown' : [ 0xfdc, ['unsigned long']],
'LastSwitchTime' : [ 0xfe0, ['unsigned long long']],
'TotalSwitchOutTime' : [ 0xfe8, ['unsigned long long']],
'WaitReasonBitMap' : [ 0xff0, ['_LARGE_INTEGER']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x30, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
} ],
'_PPM_IDLE_STATE' : [ 0x28, {
'IdleHandler' : [ 0x0, ['pointer64', ['void']]],
'Context' : [ 0x8, ['pointer64', ['void']]],
'Latency' : [ 0x10, ['unsigned long']],
'Power' : [ 0x14, ['unsigned long']],
'TimeCheck' : [ 0x18, ['unsigned long']],
'StateFlags' : [ 0x1c, ['unsigned long']],
'PromotePercent' : [ 0x20, ['unsigned char']],
'DemotePercent' : [ 0x21, ['unsigned char']],
'PromotePercentBase' : [ 0x22, ['unsigned char']],
'DemotePercentBase' : [ 0x23, ['unsigned char']],
'StateType' : [ 0x24, ['unsigned char']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_KRESOURCEMANAGER' : [ 0x250, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'State' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x20, ['unsigned long']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x88, ['_GUID']],
'NotificationQueue' : [ 0x98, ['_KQUEUE']],
'NotificationMutex' : [ 0xd8, ['_KMUTANT']],
'EnlistmentHead' : [ 0x110, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x120, ['unsigned long']],
'NotificationRoutine' : [ 0x128, ['pointer64', ['void']]],
'Key' : [ 0x130, ['pointer64', ['void']]],
'ProtocolListHead' : [ 0x138, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0x148, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0x158, ['_LIST_ENTRY']],
'Tm' : [ 0x168, ['pointer64', ['_KTM']]],
'Description' : [ 0x170, ['_UNICODE_STRING']],
'Enlistments' : [ 0x180, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x228, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x90, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'OptionChanges' : [ 0x78, ['unsigned long']],
'VerifyMode' : [ 0x7c, ['unsigned long']],
'PreviousBucketName' : [ 0x80, ['_UNICODE_STRING']],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x8168, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer64', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x18, ['unsigned long long']],
'ResourceAddressRange' : [ 0x20, ['array', 1023, ['_VI_DEADLOCK_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x4010, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x4018, ['unsigned long long']],
'ThreadAddressRange' : [ 0x4020, ['array', 1023, ['_VI_DEADLOCK_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x8010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x8014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x8018, ['unsigned long']],
'NodesSearched' : [ 0x801c, ['unsigned long']],
'MaxNodesSearched' : [ 0x8020, ['unsigned long']],
'SequenceNumber' : [ 0x8024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x8028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x802c, ['unsigned long']],
'DepthLimitHits' : [ 0x8030, ['unsigned long']],
'SearchLimitHits' : [ 0x8034, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x8038, ['unsigned long']],
'OutOfOrderReleases' : [ 0x803c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x8040, ['unsigned long']],
'TotalReleases' : [ 0x8044, ['unsigned long']],
'RootNodesDeleted' : [ 0x8048, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x804c, ['unsigned long']],
'Instigator' : [ 0x8050, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0x8058, ['unsigned long']],
'Participant' : [ 0x8060, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x8160, ['long']],
} ],
'_POP_DISPLAY_RESUME_CONTEXT' : [ 0x80, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'WorkerThread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'PrepareUIEvent' : [ 0x28, ['_KEVENT']],
'PowerOnEvent' : [ 0x40, ['_KEVENT']],
'DoneEvent' : [ 0x58, ['_KEVENT']],
'WorkerQueued' : [ 0x70, ['unsigned long']],
'WorkerAbort' : [ 0x74, ['unsigned long']],
'NoResumeUI' : [ 0x78, ['unsigned long']],
} ],
'_KPCR' : [ 0x3ca0, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'UserRsp' : [ 0x10, ['unsigned long long']],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_KTM' : [ 0x3a0, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x8, ['_KMUTANT']],
'State' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x48, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x70, ['_GUID']],
'Flags' : [ 0x80, ['unsigned long']],
'VolatileFlags' : [ 0x84, ['unsigned long']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x98, ['pointer64', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0xa0, ['pointer64', ['void']]],
'LogManagementContext' : [ 0xa8, ['pointer64', ['void']]],
'Transactions' : [ 0xb0, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0x158, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x200, ['_KMUTANT']],
'LsnOrderedList' : [ 0x238, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x248, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x250, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x288, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x290, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x298, ['_CLS_LSN']],
'TmRmHandle' : [ 0x2a0, ['pointer64', ['void']]],
'TmRm' : [ 0x2a8, ['pointer64', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x2b0, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x2c8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x2e8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x2f0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x310, ['_ERESOURCE']],
'LogFlags' : [ 0x378, ['unsigned long']],
'LogFullStatus' : [ 0x37c, ['long']],
'RecoveryStatus' : [ 0x380, ['long']],
'LastCheckBaseLsn' : [ 0x388, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x390, ['_LIST_ENTRY']],
} ],
}
| gpl-2.0 |
kdwink/intellij-community | python/lib/Lib/codeop.py | 110 | 5243 | r"""Utilities to compile possibly incomplete Python source code.
This module provides two interfaces, broadly similar to the builtin
function compile(), that take progam text, a filename and a 'mode'
and:
- Return a code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
syntax error (OverflowError and ValueError can be produced by
malformed literals).
Approach:
First, check if the source consists entirely of blank lines and
comments; if so, replace it with 'pass', because the built-in
parser doesn't always do the right thing for these.
Compile three times: as is, with \n, and with \n\n appended. If it
compiles as is, it's complete. If it compiles with one \n appended,
we expect more. If it doesn't compile either way, we compare the
error we get when compiling with \n or \n\n appended. If the errors
are the same, the code is broken. But if the errors are different, we
expect more. Not intuitive; not even guaranteed to hold in future
releases; but this matches the compiler's behavior from Python 1.4
through 2.2, at least.
Caveat:
It is possible (but not likely) that the parser stops parsing with a
successful outcome before reaching the end of the source; in this
case, trailing symbols may be ignored instead of causing an error.
For example, a backslash followed by two newlines may be followed by
arbitrary garbage. This will be fixed once the API for the parser is
better.
The two interfaces are:
compile_command(source, filename, symbol):
Compiles a single command in the manner described above.
CommandCompiler():
Instances of this class have __call__ methods identical in
signature to compile_command; the difference is that if the
instance compiles program text containing a __future__ statement,
the instance 'remembers' and compiles all subsequent program texts
with the statement in force.
The module also provides another class:
Compile():
Instances of this class act like the built-in function compile,
but with 'memory' in the sense described above.
"""
# import internals, not guaranteed interface
from org.python.core import Py,CompilerFlags,CompileMode
from org.python.core.CompilerFlags import PyCF_DONT_IMPLY_DEDENT
# public interface
__all__ = ["compile_command", "Compile", "CommandCompiler"]
def compile_command(source, filename="<input>", symbol="single"):
r"""Compile a command and determine whether it is incomplete.
Arguments:
source -- the source string; may contain \n characters
filename -- optional filename from which source was read; default
"<input>"
symbol -- optional grammar start symbol; "single" (default) or "eval"
Return value / exceptions raised:
- Return a code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
syntax error (OverflowError and ValueError can be produced by
malformed literals).
"""
if symbol not in ['single','eval']:
raise ValueError,"symbol arg must be either single or eval"
symbol = CompileMode.getMode(symbol)
return Py.compile_command_flags(source,filename,symbol,Py.getCompilerFlags(),0)
class Compile:
"""Instances of this class behave much like the built-in compile
function, but if one is used to compile text containing a future
statement, it "remembers" and compiles all subsequent program texts
with the statement in force."""
def __init__(self):
self._cflags = CompilerFlags()
def __call__(self, source, filename, symbol):
symbol = CompileMode.getMode(symbol)
return Py.compile_flags(source, filename, symbol, self._cflags)
class CommandCompiler:
"""Instances of this class have __call__ methods identical in
signature to compile_command; the difference is that if the
instance compiles program text containing a __future__ statement,
the instance 'remembers' and compiles all subsequent program texts
with the statement in force."""
def __init__(self,):
self._cflags = CompilerFlags()
def __call__(self, source, filename="<input>", symbol="single"):
r"""Compile a command and determine whether it is incomplete.
Arguments:
source -- the source string; may contain \n characters
filename -- optional filename from which source was read;
default "<input>"
symbol -- optional grammar start symbol; "single" (default) or
"eval"
Return value / exceptions raised:
- Return a code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
syntax error (OverflowError and ValueError can be produced by
malformed literals).
"""
if symbol not in ['single','eval']:
raise ValueError,"symbol arg must be either single or eval"
symbol = CompileMode.getMode(symbol)
return Py.compile_command_flags(source,filename,symbol,self._cflags,0)
| apache-2.0 |
alrusdi/lettuce | tests/integration/lib/Django-1.3/django/contrib/staticfiles/storage.py | 154 | 2080 | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.utils.importlib import import_module
from django.contrib.staticfiles import utils
class StaticFilesStorage(FileSystemStorage):
"""
Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.STATIC_ROOT
if base_url is None:
base_url = settings.STATIC_URL
if not location:
raise ImproperlyConfigured("You're using the staticfiles app "
"without having set the STATIC_ROOT setting.")
# check for None since we might use a root URL (``/``)
if base_url is None:
raise ImproperlyConfigured("You're using the staticfiles app "
"without having set the STATIC_URL setting.")
utils.check_settings()
super(StaticFilesStorage, self).__init__(location, base_url, *args, **kwargs)
class AppStaticStorage(FileSystemStorage):
"""
A file system storage backend that takes an app module and works
for the ``static`` directory of it.
"""
prefix = None
source_dir = 'static'
def __init__(self, app, *args, **kwargs):
"""
Returns a static file storage if available in the given app.
"""
# app is the actual app module
self.app_module = app
# We special case the admin app here since it has its static files
# in 'media' for historic reasons.
if self.app_module == 'django.contrib.admin':
self.prefix = 'admin'
self.source_dir = 'media'
mod = import_module(self.app_module)
mod_path = os.path.dirname(mod.__file__)
location = os.path.join(mod_path, self.source_dir)
super(AppStaticStorage, self).__init__(location, *args, **kwargs)
| gpl-3.0 |
loco-odoo/localizacion_co | openerp/addons-extra/odoo-pruebas/odoo-server/addons/mrp/report/mrp_report.py | 341 | 3839 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class report_workcenter_load(osv.osv):
_name="report.workcenter.load"
_description="Work Center Load"
_auto = False
_log_access = False
_columns = {
'name': fields.char('Week', required=True),
'workcenter_id': fields.many2one('mrp.workcenter', 'Work Center', required=True),
'cycle': fields.float('Number of Cycles'),
'hour': fields.float('Number of Hours'),
}
def init(self, cr):
cr.execute("""
create or replace view report_workcenter_load as (
SELECT
min(wl.id) as id,
to_char(p.date_planned,'YYYY:mm:dd') as name,
SUM(wl.hour) AS hour,
SUM(wl.cycle) AS cycle,
wl.workcenter_id as workcenter_id
FROM
mrp_production_workcenter_line wl
LEFT JOIN mrp_production p
ON p.id = wl.production_id
GROUP BY
wl.workcenter_id,
to_char(p.date_planned,'YYYY:mm:dd')
)""")
class report_mrp_inout(osv.osv):
_name="report.mrp.inout"
_description="Stock value variation"
_auto = False
_log_access = False
_rec_name = 'date'
_columns = {
'date': fields.char('Week', required=True),
'value': fields.float('Stock value', required=True, digits=(16,2)),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
def init(self, cr):
cr.execute("""
create or replace view report_mrp_inout as (
select
min(sm.id) as id,
to_char(sm.date,'YYYY:IW') as date,
sum(case when (sl.usage='internal') then
sm.price_unit * sm.product_qty
else
0.0
end - case when (sl2.usage='internal') then
sm.price_unit * sm.product_qty
else
0.0
end) as value,
sm.company_id
from
stock_move sm
left join product_product pp
on (pp.id = sm.product_id)
left join product_template pt
on (pt.id = pp.product_tmpl_id)
left join stock_location sl
on ( sl.id = sm.location_id)
left join stock_location sl2
on ( sl2.id = sm.location_dest_id)
where
sm.state = 'done'
group by
to_char(sm.date,'YYYY:IW'), sm.company_id
)""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
eqmcc/clamav_decode | contrib/phishing/regex_opt.py | 14 | 1170 | #!/usr/bin/env python
def strlen(a,b):
if len(a)<len(b):
return -1;
elif len(a)>len(b):
return 1;
else:
return 0;
def getcommon_prefix(a,b):
if a==b:
return b;
if a[:-1]==b[:-1]:
return a[:-1];
else:
return ""
fil = file("iana_tld.h")
left = fil.read().split("(")
out=[]
for i in range(1,len(left)):
right = left[i].split(")")
regex_split = right[0].split("|")
regex_split.sort()
regex_split.sort(strlen)
prefix=''
prefixlen=0;
c_map=''
list=[]
for val in regex_split:
if val[:prefixlen] == prefix:
if len(val) == (prefixlen+1):
c_map = c_map+val[prefixlen]
else:
if len(c_map)>1:
c_map = "["+c_map+"]"
if len(prefix+c_map)>0:
list.append(prefix+c_map)
prefix = val[:-1]
prefixlen=len(prefix)
c_map=val[prefixlen]
else:
if len(c_map)>1:
c_map = "["+c_map+"]"
list.append(prefix+c_map)
prefix = getcommon_prefix(prefix,val)
if len(prefix)==0:
prefix=val[:-1]
prefixlen=len(prefix)
c_map=val[prefixlen]
if i==1:
left0=left[0]
else:
left0=""
out.append(left0)
out.append("(")
out.append("|".join(list))
out.append(")")
out.append(right[1])
print "".join(out)
| gpl-2.0 |
cyrixhero/bitcoin | qa/rpc-tests/merkle_blocks.py | 133 | 4001 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test merkleblock fetch/validation
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import os
import shutil
class MerkleBlockTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self):
self.nodes = []
# Nodes 0/1 are "wallet" nodes
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"]))
# Nodes 2/3 are used for testing
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(3, self.options.tmpdir, ["-debug", "-txindex"]))
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.is_network_split = False
self.sync_all()
def run_test(self):
print "Mining blocks..."
self.nodes[0].generate(105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
node0utxos = self.nodes[0].listunspent(1)
tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 50})
txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx1)["hex"])
tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 50})
txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx2)["hex"])
assert_raises(JSONRPCException, self.nodes[0].gettxoutproof, [txid1])
self.nodes[0].generate(1)
blockhash = self.nodes[0].getblockhash(chain_height + 1)
self.sync_all()
txlist = []
blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
txlist.append(blocktxn[1])
txlist.append(blocktxn[2])
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1])), [txid1])
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist)
txin_spent = self.nodes[1].listunspent(1).pop()
tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 50})
self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransaction(tx3)["hex"])
self.nodes[0].generate(1)
self.sync_all()
txid_spent = txin_spent["txid"]
txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2
# We cant find the block from a fully-spent tx
assert_raises(JSONRPCException, self.nodes[2].gettxoutproof, [txid_spent])
# ...but we can if we specify the block
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# ...or if the first tx is not fully-spent
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent])
try:
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
except JSONRPCException:
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid2, txid1])), txlist)
# ...or if we have a -txindex
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent])
if __name__ == '__main__':
MerkleBlockTest().main()
| mit |
rafiqsaleh/VERCE | verce-hpc-pe/src/networkx/algorithms/centrality/current_flow_betweenness.py | 31 | 13429 | """
Current-flow betweenness centrality measures.
"""
# Copyright (C) 2010-2012 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import random
import networkx as nx
from networkx.algorithms.centrality.flow_matrix import *
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
__all__ = ['current_flow_betweenness_centrality',
'approximate_current_flow_betweenness_centrality',
'edge_current_flow_betweenness_centrality']
def approximate_current_flow_betweenness_centrality(G, normalized=True,
                                                    weight='weight',
                                                    dtype=float, solver='full',
                                                    epsilon=0.5, kmax=10000):
    r"""Compute the approximate current-flow betweenness centrality for nodes.

    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).

    epsilon : float
      Absolute error tolerance.

    kmax : int
      Maximum number of sample node pairs to use for approximation.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    current_flow_betweenness_centrality

    Notes
    -----
    The running time is `O((1/\epsilon^2)m{\sqrt k} \log n)`
    and the space required is `O(m)` for n nodes and m edges.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        # NOTE: the original passed two arguments to ImportError, which turned
        # the message into a tuple; implicit literal concatenation fixes that.
        raise ImportError('current_flow_betweenness_centrality requires NumPy '
                          'http://scipy.org/')
    try:
        # Imported only to fail fast when SciPy is absent; the inverse-
        # Laplacian solver classes below need it.
        from scipy import sparse
        from scipy.sparse import linalg
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy '
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('current_flow_betweenness_centrality() '
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {"full": FullInverseLaplacian,
                  "lu": SuperLUInverseLaplacian,
                  "cg": CGInverseLaplacian}
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight,
                                dtype=dtype, format='csc')
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization factor
    cstar = n * (n - 1) / nb
    oversample = 1  # oversampling parameter in the approximation, adjustable
    k = oversample * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
    if k > kmax:
        raise nx.NetworkXError('Number random pairs k>kmax (%d>%d) '
                               'Increase kmax or epsilon' % (k, kmax))
    cstar2k = cstar / (2 * k)
    for i in range(k):
        # sample a random source/target pair and solve the potentials
        s, t = random.sample(range(n), 2)
        b = np.zeros(n, dtype=dtype)
        b[s] = 1
        b[t] = -1
        p = C.solve(b)
        for v in H:
            if v == s or v == t:
                continue
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k
    if normalized:
        factor = 1.0
    else:
        factor = nb / 2.0
    # remap to original node names and "unnormalize" if required
    return dict((ordering[node], float(bc * factor))
                for node, bc in betweenness.items())
def current_flow_betweenness_centrality(G, normalized=True, weight='weight',
                                        dtype=float, solver='full'):
    r"""Compute current-flow betweenness centrality for nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)`
    time [1]_, where `I(n-1)` is the time needed to compute the
    inverse Laplacian.  For a full matrix this is `O(n^3)` but using
    sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the
    Laplacian matrix condition number.

    The space required is `O(nw)` where `w` is the width of the sparse
    Laplacian matrix.  Worse case is `w=n` for `O(n^2)`.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        # Single message string; the original passed two args, producing a
        # tuple-valued exception message.
        raise ImportError('current_flow_betweenness_centrality requires NumPy '
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy '
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('current_flow_betweenness_centrality() '
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype,
                                       solver=solver):
        # rank positions of the current values, largest first
        pos = dict(zip(row.argsort()[::-1], range(n)))
        for i in range(n):
            betweenness[s] += (i - pos[i]) * row[i]
            betweenness[t] += (n - i - 1 - pos[i]) * row[i]
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for i, v in enumerate(H):  # map integers to nodes
        betweenness[v] = float((betweenness[v] - i) * 2.0 / nb)
    return dict((ordering[node], bc) for node, bc in betweenness.items())
def edge_current_flow_betweenness_centrality(G, normalized=True,
                                             weight='weight',
                                             dtype=float, solver='full'):
    r"""Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
      Type of linear solver to use for computing the flow matrix.
      Options are "full" (uses most memory), "lu" (recommended), and
      "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)`
    time [1]_, where `I(n-1)` is the time needed to compute the
    inverse Laplacian.  For a full matrix this is `O(n^3)` but using
    sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the
    Laplacian matrix condition number.

    The space required is `O(nw)` where `w` is the width of the sparse
    Laplacian matrix.  Worse case is `w=n` for `O(n^2)`.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    # NOTE: the docstring is now a raw string; it contains \log and \sqrt,
    # which would otherwise trigger invalid-escape warnings.
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy '
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy '
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('edge_current_flow_betweenness_centrality '
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H.edges(), 0.0)
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, e in flow_matrix_row(H, weight=weight, dtype=dtype,
                                  solver=solver):
        pos = dict(zip(row.argsort()[::-1], range(1, n + 1)))
        for i in range(n):
            betweenness[e] += (i + 1 - pos[i]) * row[i]
            betweenness[e] += (n - i - pos[i]) * row[i]
        betweenness[e] /= nb
    # map the integer labels back to the caller's node names
    return dict(((ordering[s], ordering[t]), float(v))
                for (s, t), v in betweenness.items())
# fixture for nose tests
def setup_module(module):
    """Nose fixture: skip the whole module when NumPy or SciPy is missing."""
    from nose import SkipTest
    try:
        import numpy
        import scipy
    except ImportError:
        # Catch only ImportError (the original bare `except:` swallowed
        # everything, including KeyboardInterrupt), and name both packages
        # in the skip message since both are required.
        raise SkipTest("NumPy or SciPy not available")
| mit |
stone5495/NewsBlur | apps/rss_feeds/migrations/0048_has_page.py | 18 | 5524 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the boolean ``has_page`` column
    (default True) to the ``feeds`` table."""

    def forwards(self, orm):
        # Adding field 'Feed.has_page'
        db.add_column('feeds', 'has_page', self.gf('django.db.models.fields.BooleanField')(default=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Feed.has_page'
        db.delete_column('feeds', 'has_page')

    # Frozen ORM snapshot used by South while applying this migration.
    # Auto-generated by `schemamigration`; do not edit by hand.
    models = {
        'rss_feeds.duplicatefeed': {
            'Meta': {'object_name': 'DuplicateFeed'},
            'duplicate_address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'duplicate_feed_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'duplicate_addresses'", 'to': "orm['rss_feeds.Feed']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'rss_feeds.feed': {
            'Meta': {'ordering': "['feed_title']", 'object_name': 'Feed', 'db_table': "'feeds'"},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'active_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'db_index': 'True'}),
            'average_stories_per_month': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'creation': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'days_to_trim': ('django.db.models.fields.IntegerField', [], {'default': '90'}),
            'etag': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'exception_code': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'favicon_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'favicon_not_found': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'feed_address': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
            'feed_link': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '1000', 'null': 'True', 'blank': 'True'}),
            'feed_link_locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'feed_title': ('django.db.models.fields.CharField', [], {'default': "'[Untitled]'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'fetched_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'has_feed_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'has_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'has_page_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_load_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'min_to_decay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'next_scheduled_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'num_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'premium_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'queued_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'stories_last_month': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'rss_feeds.feeddata': {
            'Meta': {'object_name': 'FeedData'},
            'feed': ('utils.fields.AutoOneToOneField', [], {'related_name': "'data'", 'unique': 'True', 'to': "orm['rss_feeds.Feed']"}),
            'feed_classifier_counts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'feed_tagline': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'popular_authors': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'popular_tags': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'story_count_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        'rss_feeds.feedloadtime': {
            'Meta': {'object_name': 'FeedLoadtime'},
            'date_accessed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'loadtime': ('django.db.models.fields.FloatField', [], {})
        }
    }

    complete_apps = ['rss_feeds']
| mit |
alo-alt/airflow | https_operator.py | 1 | 2607 | import logging
from airflow.exceptions import AirflowException
from airflow.hooks import HttpsHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class SimpleHttpsOperator(BaseOperator):
    """Execute an action against an endpoint on an HTTPS system.

    :param https_conn_id: The connection to run the sensor against
    :type https_conn_id: string
    :param endpoint: The relative part of the full url
    :type endpoint: string
    :param method: The HTTP method to use, default = "POST"
    :type method: string
    :param data: The data to pass. POST-data in POST/PUT and params
        in the URL for a GET request.
    :type data: For POST/PUT, depends on the content-type parameter,
        for GET a dictionary of key/value string pairs
    :param headers: The HTTP headers to be added to the GET request
    :type headers: a dictionary of string key/value pairs
    :param response_check: A check against the 'requests' response object.
        Returns True for 'pass' and False otherwise.
    :type response_check: A lambda or defined function.
    :param extra_options: Extra options for the 'requests' library, see the
        'requests' documentation (options to modify timeout, ssl, etc.)
    :type extra_options: A dictionary of options, where key is string and
        value depends on the option that's being modified.
    """

    template_fields = ('endpoint', 'data',)
    template_ext = ()
    ui_color = '#f4a578'

    @apply_defaults
    def __init__(self,
                 endpoint,
                 method='POST',
                 data=None,
                 headers=None,
                 response_check=None,
                 extra_options=None,
                 https_conn_id='https_default', *args, **kwargs):
        super(SimpleHttpsOperator, self).__init__(*args, **kwargs)
        # Connection / request description.
        self.https_conn_id = https_conn_id
        self.endpoint = endpoint
        self.method = method
        self.response_check = response_check
        # Fall back to fresh empty dicts so instances never share state.
        self.data = data if data else {}
        self.headers = headers if headers else {}
        self.extra_options = extra_options if extra_options else {}

    def execute(self, context):
        hook = HttpsHook(self.method, https_conn_id=self.https_conn_id)
        logging.info("Calling HTTP method")
        response = hook.run(self.endpoint, self.data, self.headers,
                            self.extra_options)
        # Optional user-supplied validation of the response object.
        if self.response_check and not self.response_check(response):
            raise AirflowException("Response check returned False.")
| apache-2.0 |
richardtran415/pymatgen | pymatgen/io/vasp/tests/test_outputs.py | 2 | 87130 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import gzip
import json
import os
import unittest
import warnings
import xml.etree.cElementTree as ET
from pathlib import Path
from shutil import copyfile, copyfileobj
import numpy as np
import pytest
from monty.tempfile import ScratchDir
from pymatgen.core.lattice import Lattice
from pymatgen.electronic_structure.core import Orbital, Spin
from pymatgen.core.structure import Structure
from pymatgen.core import Element
from pymatgen.electronic_structure.core import Magmom, OrbitalType
from pymatgen.entries.compatibility import MaterialsProjectCompatibility
from pymatgen.io.vasp.inputs import Kpoints, Poscar
from pymatgen.io.vasp.outputs import (
BSVasprun,
Chgcar,
Dynmat,
Eigenval,
Elfcar,
Locpot,
Oszicar,
Outcar,
Procar,
UnconvergedVASPWarning,
VaspParserError,
Vasprun,
Wavecar,
Waveder,
Xdatcar,
)
from pymatgen.io.wannier90 import Unk
from pymatgen.util.testing import PymatgenTest
class VasprunTest(PymatgenTest):
_multiprocess_shared_ = True
def setUp(self):
    # Silence all warnings while each test runs; restored in tearDown.
    warnings.simplefilter("ignore")
def tearDown(self):
    # Restore the default warning behavior after each test.
    warnings.simplefilter("default")
def test_multiple_dielectric(self):
    # A GW0 vasprun writes several dielectric functions; all of the
    # extra ones should land in `other_dielectric`.
    v = Vasprun(self.TEST_FILES_DIR / "vasprun.GW0.xml")
    self.assertEqual(len(v.other_dielectric), 3)
def test_charge_charge_dielectric(self):
    """
    VASP 5.4.4 writes out two dielectric functions to vasprun.xml
    These are the "density-density" and "velocity-velocity" linear response
    functions. See the comments in `linear_optics.F` for details.
    """
    v = Vasprun(
        self.TEST_FILES_DIR / "vasprun.xml.dielectric_5.4.4",
        parse_potcar_file=False,
    )
    # Use the dedicated unittest assertions instead of comparing boolean
    # expressions against True (clearer failure messages, same checks).
    self.assertIsNotNone(v.dielectric)
    self.assertIn("density", v.dielectric_data)
    self.assertIn("velocity", v.dielectric_data)
def test_optical_absorption_coeff(self):
    # BSE run: spot-check one value of the derived absorption coefficient.
    v = Vasprun(self.TEST_FILES_DIR / "vasprun.BSE.xml.gz")
    absorption_coeff = v.optical_absorption_coeff
    self.assertEqual(absorption_coeff[1], 24966408728.917931)
def test_vasprun_with_more_than_two_unlabelled_dielectric_functions(self):
    # More than two unlabelled dielectric functions is unsupported and
    # must raise NotImplementedError at parse time.
    with self.assertRaises(NotImplementedError):
        Vasprun(
            self.TEST_FILES_DIR / "vasprun.xml.dielectric_bad",
            parse_potcar_file=False,
        )
def test_bad_vasprun(self):
    # A truncated/malformed vasprun.xml raises by default, but with
    # exception_on_bad_xml=False it parses what it can and warns.
    self.assertRaises(ET.ParseError, Vasprun, self.TEST_FILES_DIR / "bad_vasprun.xml")

    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        v = Vasprun(self.TEST_FILES_DIR / "bad_vasprun.xml", exception_on_bad_xml=False)
        # Verify some things
        self.assertEqual(len(v.ionic_steps), 1)
        self.assertAlmostEqual(v.final_energy, -269.00551374)
        self.assertTrue(issubclass(w[-1].category, UserWarning))
def test_runtype(self):
    """Verify run_type detection across a range of functionals.

    The original body repeated the same two lines eleven times; the
    (file, expected run_type) pairs are now table-driven.
    """
    cases = [
        ("vasprun.GW0.xml", "HF"),
        ("vasprun.xml.pbesol_vdw", "PBEsol+vdW-DFT-D3-BJ"),
        ("vasprun.xml.hse06", "HSE06"),
        ("vasprun.xml.scan_rvv10", "SCAN+rVV10"),
        ("vasprun.xml.dfpt.ionic", "GGA"),
        ("vasprun.xml.dfpt", "GGA+U"),
        ("vasprun.xml.r2scan", "R2SCAN"),
        ("vasprun.xml.scan", "SCAN"),
        ("vasprun.xml.pbesol", "PBEsol"),
        ("vasprun.xml.rscan", "RSCAN"),
        ("vasprun.xml.random", "RANDOMFUNCTIONAL"),
    ]
    for fname, expected in cases:
        v = Vasprun(self.TEST_FILES_DIR / fname)
        self.assertIn(v.run_type, expected)

    # An unrecognized functional warns and falls back to "unknown".
    v = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.unknown")
    with pytest.warns(UserWarning, match="Unknown run type!"):
        self.assertIn(v.run_type, "unknown")
def test_vdw(self):
    # vdW-corrected run: check the parsed final energy.
    v = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.vdw")
    self.assertAlmostEqual(v.final_energy, -9.78310677)
def test_nonlmn(self):
    # vasprun with a non-lm-decomposed DOS: orbital types (e.g. s) should
    # still be present in the projected DOS of the first site.
    filepath = self.TEST_FILES_DIR / "vasprun.xml.nonlm"
    vasprun = Vasprun(filepath, parse_potcar_file=False)
    orbs = list(vasprun.complete_dos.pdos[vasprun.final_structure[0]].keys())
    self.assertIn(OrbitalType.s, orbs)
def test_standard(self):
    """End-to-end parse of a standard LiFePO4 relaxation vasprun.xml,
    plus a GGA+U run and the ionic-step skip/offset options."""
    filepath = self.TEST_FILES_DIR / "vasprun.xml"
    vasprun = Vasprun(filepath, parse_potcar_file=False)

    # Test NELM parsing.
    self.assertEqual(vasprun.parameters["NELM"], 60)
    # test pdos parsing
    pdos0 = vasprun.complete_dos.pdos[vasprun.final_structure[0]]
    self.assertAlmostEqual(pdos0[Orbital.s][Spin.up][16], 0.0026)
    self.assertAlmostEqual(pdos0[Orbital.pz][Spin.down][16], 0.0012)
    self.assertEqual(pdos0[Orbital.s][Spin.up].shape, (301,))

    filepath2 = self.TEST_FILES_DIR / "lifepo4.xml"
    vasprun_ggau = Vasprun(filepath2, parse_projected_eigen=True, parse_potcar_file=False)
    # sum over a generator instead of building a throwaway list
    totalscsteps = sum(len(i["electronic_steps"]) for i in vasprun.ionic_steps)
    self.assertEqual(29, len(vasprun.ionic_steps))
    self.assertEqual(len(vasprun.structures), len(vasprun.ionic_steps))

    trajectory = vasprun.get_trajectory()
    self.assertEqual(len(trajectory), len(vasprun.ionic_steps))
    self.assertIn("forces", trajectory[0].site_properties)

    for i, step in enumerate(vasprun.ionic_steps):
        self.assertEqual(vasprun.structures[i], step["structure"])

    self.assertTrue(
        all(vasprun.structures[i] == vasprun.ionic_steps[i]["structure"] for i in range(len(vasprun.ionic_steps)))
    )

    self.assertEqual(308, totalscsteps, "Incorrect number of energies read from vasprun.xml")

    self.assertEqual(["Li"] + 4 * ["Fe"] + 4 * ["P"] + 16 * ["O"], vasprun.atomic_symbols)
    self.assertEqual(vasprun.final_structure.composition.reduced_formula, "LiFe4(PO4)4")
    self.assertIsNotNone(vasprun.incar, "Incar cannot be read")
    self.assertIsNotNone(vasprun.kpoints, "Kpoints cannot be read")
    self.assertIsNotNone(vasprun.eigenvalues, "Eigenvalues cannot be read")
    self.assertAlmostEqual(vasprun.final_energy, -269.38319884, 7)
    self.assertAlmostEqual(vasprun.tdos.get_gap(), 2.0589, 4)
    # (gap, cbm, vbm, is_direct)
    expectedans = (2.539, 4.0906, 1.5516, False)
    (gap, cbm, vbm, direct) = vasprun.eigenvalue_band_properties
    self.assertAlmostEqual(gap, expectedans[0])
    self.assertAlmostEqual(cbm, expectedans[1])
    self.assertAlmostEqual(vbm, expectedans[2])
    self.assertEqual(direct, expectedans[3])
    self.assertFalse(vasprun.is_hubbard)
    self.assertEqual(
        vasprun.potcar_symbols,
        [
            "PAW_PBE Li 17Jan2003",
            "PAW_PBE Fe 06Sep2000",
            "PAW_PBE Fe 06Sep2000",
            "PAW_PBE P 17Jan2003",
            "PAW_PBE O 08Apr2002",
        ],
    )
    self.assertIsNotNone(vasprun.kpoints, "Kpoints cannot be read")
    self.assertIsNotNone(vasprun.actual_kpoints, "Actual kpoints cannot be read")
    self.assertIsNotNone(vasprun.actual_kpoints_weights, "Actual kpoints weights cannot be read")
    for atomdoses in vasprun.pdos:
        for orbitaldos in atomdoses:
            self.assertIsNotNone(orbitaldos, "Partial Dos cannot be read")

    # test skipping ionic steps.
    vasprun_skip = Vasprun(filepath, 3, parse_potcar_file=False)
    self.assertEqual(vasprun_skip.nionic_steps, 29)
    self.assertEqual(len(vasprun_skip.ionic_steps), int(vasprun.nionic_steps / 3) + 1)
    self.assertEqual(len(vasprun_skip.ionic_steps), len(vasprun_skip.structures))
    self.assertEqual(len(vasprun_skip.ionic_steps), int(vasprun.nionic_steps / 3) + 1)
    # Check that nionic_steps is preserved no matter what.
    self.assertEqual(vasprun_skip.nionic_steps, vasprun.nionic_steps)
    self.assertNotAlmostEqual(vasprun_skip.final_energy, vasprun.final_energy)

    # Test with ionic_step_offset
    vasprun_offset = Vasprun(filepath, 3, 6, parse_potcar_file=False)
    self.assertEqual(len(vasprun_offset.ionic_steps), int(len(vasprun.ionic_steps) / 3) - 1)
    self.assertEqual(vasprun_offset.structures[0], vasprun_skip.structures[2])

    # GGA+U run: Hubbard flags, projections, and as_dict round-trip.
    self.assertTrue(vasprun_ggau.is_hubbard)
    self.assertEqual(vasprun_ggau.hubbards["Fe"], 4.3)
    self.assertAlmostEqual(vasprun_ggau.projected_eigenvalues[Spin.up][0][0][96][0], 0.0032)
    d = vasprun_ggau.as_dict()
    self.assertEqual(d["elements"], ["Fe", "Li", "O", "P"])
    self.assertEqual(d["nelements"], 4)
def test_unconverged(self):
    # An electronically-unconverged run must emit UnconvergedVASPWarning
    # and report converged_electronic/converged as False.
    filepath = self.TEST_FILES_DIR / "vasprun.xml.unconverged"
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        vasprun_unconverged = Vasprun(filepath, parse_potcar_file=False)
        # Verify some things
        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, UnconvergedVASPWarning))

        self.assertTrue(vasprun_unconverged.converged_ionic)
        self.assertFalse(vasprun_unconverged.converged_electronic)
        self.assertFalse(vasprun_unconverged.converged)
def test_dfpt(self):
    # DFPT run: static dielectric tensors (with and without local-field
    # effects) and a MaterialsProject-compatible computed entry.
    filepath = self.TEST_FILES_DIR / "vasprun.xml.dfpt"
    vasprun_dfpt = Vasprun(filepath, parse_potcar_file=False)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static[0][0], 3.26105533)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static[0][1], -0.00459066)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static[2][2], 3.24330517)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[0][0], 3.33402531)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[0][1], -0.00559998)
    self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[2][2], 3.31237357)
    self.assertTrue(vasprun_dfpt.converged)

    entry = vasprun_dfpt.get_computed_entry()
    entry = MaterialsProjectCompatibility(check_potcar_hash=False).process_entry(entry)
    self.assertAlmostEqual(entry.uncorrected_energy + entry.correction, entry.energy)
def test_dfpt_ionic(self):
    # DFPT with ionic contributions: spot-check the ionic dielectric tensor.
    filepath = self.TEST_FILES_DIR / "vasprun.xml.dfpt.ionic"
    vasprun_dfpt_ionic = Vasprun(filepath, parse_potcar_file=False)
    self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[0][0], 515.73485838)
    self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[0][1], -0.00263523)
    self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[2][2], 19.02110169)
def test_dfpt_unconverged(self):
    # Unconverged DFPT run: electronic convergence False, ionic True.
    filepath = self.TEST_FILES_DIR / "vasprun.xml.dfpt.unconverged"
    vasprun_dfpt_unconv = Vasprun(filepath, parse_potcar_file=False)
    self.assertFalse(vasprun_dfpt_unconv.converged_electronic)
    self.assertTrue(vasprun_dfpt_unconv.converged_ionic)
    self.assertFalse(vasprun_dfpt_unconv.converged)
def test_uniform(self):
    # Uniform (static) run: kpoints style should be Reciprocal.
    vasprun_uniform = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.uniform", parse_potcar_file=False)
    self.assertEqual(vasprun_uniform.kpoints.style, Kpoints.supported_modes.Reciprocal)
def test_no_projected(self):
    # Run without projected eigenvalues: complete_dos still builds cleanly.
    vasprun_no_pdos = Vasprun(self.TEST_FILES_DIR / "Li_no_projected.xml", parse_potcar_file=False)
    self.assertIsNotNone(vasprun_no_pdos.complete_dos)
    self.assertFalse(vasprun_no_pdos.dos_has_errors)
def test_dielectric(self):
    # Frequency-dependent dielectric function: spot-check the energy grid,
    # the real part, and the imaginary part at fixed indices.
    vasprun_diel = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.dielectric", parse_potcar_file=False)
    self.assertAlmostEqual(0.4294, vasprun_diel.dielectric[0][10])
    self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][0])
    self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][1])
    self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][2])
    self.assertAlmostEqual(0.0, vasprun_diel.dielectric[1][51][3])
    self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][0])
    self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][1])
    self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][2])
    self.assertAlmostEqual(0.0, vasprun_diel.dielectric[2][85][3])
def test_dielectric_vasp608(self):
    # test reading dielectric constant in vasp 6.0.8
    # (both "density" and "velocity" response functions are labelled)
    vasprun_diel = Vasprun(
        self.TEST_FILES_DIR / "vasprun.xml.dielectric_6.0.8",
        parse_potcar_file=False,
    )
    self.assertAlmostEqual(0.4338, vasprun_diel.dielectric[0][10])
    self.assertAlmostEqual(5.267, vasprun_diel.dielectric[1][51][0])
    self.assertAlmostEqual(0.4338, vasprun_diel.dielectric_data["density"][0][10])
    self.assertAlmostEqual(5.267, vasprun_diel.dielectric_data["density"][1][51][0])
    self.assertAlmostEqual(0.4338, vasprun_diel.dielectric_data["velocity"][0][10])
    self.assertAlmostEqual(1.0741, vasprun_diel.dielectric_data["velocity"][1][51][0])
    self.assertEqual(len(vasprun_diel.other_dielectric), 0)
def test_indirect_vasprun(self):
    """An indirect-gap run must report is_direct == False."""
    v = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.indirect.gz")
    # Only the directness flag matters here; the original bound three
    # unused locals (gap, cbm, vbm).
    _, _, _, direct = v.eigenvalue_band_properties
    self.assertFalse(direct)
def test_optical_vasprun(self):
    # Optical transitions: spot-check transition energies (column 0) and
    # oscillator strengths (column 1) at several indices.
    vasprun_optical = Vasprun(
        self.TEST_FILES_DIR / "vasprun.xml.opticaltransitions",
        parse_potcar_file=False,
    )
    self.assertAlmostEqual(3.084, vasprun_optical.optical_transition[0][0])
    self.assertAlmostEqual(3.087, vasprun_optical.optical_transition[3][0])
    self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[0][1])
    self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[1][1])
    self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[7][1])
    self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[19][1])
    self.assertAlmostEqual(3.3799999999, vasprun_optical.optical_transition[54][0])
    self.assertAlmostEqual(3.381, vasprun_optical.optical_transition[55][0])
    self.assertAlmostEqual(3.381, vasprun_optical.optical_transition[56][0])
    self.assertAlmostEqual(10554.9860, vasprun_optical.optical_transition[54][1])
    self.assertAlmostEqual(0.0, vasprun_optical.optical_transition[55][1])
    self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[56][1])
def test_force_constants(self):
    # DFPT phonon run: force-constant matrix shape and values, plus
    # normal-mode eigenvalues/eigenvectors against frozen fixtures.
    vasprun_fc = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.dfpt.phonon", parse_potcar_file=False)
    # expected 3x3 force-constant block between atoms 8 and 9
    fc_ans = [
        [-0.00184451, -0.0, -0.0],
        [-0.0, -0.00933824, -0.03021279],
        [-0.0, -0.03021279, 0.01202547],
    ]
    # expected eigenvector of normal mode 33 (one 3-vector per atom)
    nm_ans = [
        [0.0884346, -0.08837289, -0.24995639],
        [-0.0884346, 0.08837289, 0.24995639],
        [0.15306645, -0.05105771, -0.14441306],
        [-0.15306645, 0.05105771, 0.14441306],
        [-0.0884346, 0.08837289, 0.24995639],
        [0.0884346, -0.08837289, -0.24995639],
        [-0.15306645, 0.05105771, 0.14441306],
        [0.15306645, -0.05105771, -0.14441306],
        [-0.0884346, 0.08837289, 0.24995639],
        [0.0884346, -0.08837289, -0.24995639],
        [-0.15306645, 0.05105771, 0.14441306],
        [0.15306645, -0.05105771, -0.14441306],
        [0.0884346, -0.08837289, -0.24995639],
        [-0.0884346, 0.08837289, 0.24995639],
        [0.15306645, -0.05105771, -0.14441306],
        [-0.15306645, 0.05105771, 0.14441306],
    ]
    # expected normal-mode eigenvalues for modes 17..28
    nm_eigenval_ans = [
        -0.59067079,
        -0.59067079,
        -0.59067003,
        -0.59067003,
        -0.59067003,
        -0.59067003,
        -0.585009,
        -0.585009,
        -0.58500895,
        -0.58500883,
        -0.5062956,
        -0.5062956,
    ]
    self.assertEqual(vasprun_fc.force_constants.shape, (16, 16, 3, 3))
    self.assertTrue(np.allclose(vasprun_fc.force_constants[8, 9], fc_ans))
    self.assertEqual(vasprun_fc.normalmode_eigenvals.size, 48)
    self.assertTrue(np.allclose(vasprun_fc.normalmode_eigenvals[17:29], nm_eigenval_ans))
    self.assertEqual(vasprun_fc.normalmode_eigenvecs.shape, (48, 16, 3))
    self.assertTrue(np.allclose(vasprun_fc.normalmode_eigenvecs[33], nm_ans))
def test_Xe(self):
    # Single-element (noble gas) run parses its atomic symbols correctly.
    vr = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.xe", parse_potcar_file=False)
    self.assertEqual(vr.atomic_symbols, ["Xe"])
def test_invalid_element(self):
    # An unknown chemical symbol in the vasprun must raise ValueError.
    self.assertRaises(ValueError, Vasprun, self.TEST_FILES_DIR / "vasprun.xml.wrong_sp")
def test_selective_dynamics(self):
    # Selective-dynamics flags must survive into the final structure's
    # site properties (first site free, second site fixed).
    vsd = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.indirect.gz")
    np.testing.assert_array_equal(
        vsd.final_structure.site_properties.get("selective_dynamics"),
        [[True] * 3, [False] * 3],
        "Selective dynamics parsing error",
    )
def test_as_dict(self):
    # as_dict() must be JSON-serializable and preserve input metadata.
    filepath = self.TEST_FILES_DIR / "vasprun.xml"
    vasprun = Vasprun(filepath, parse_potcar_file=False)
    # Test that as_dict() is json-serializable
    self.assertIsNotNone(json.dumps(vasprun.as_dict()))
    self.assertEqual(
        vasprun.as_dict()["input"]["potcar_type"],
        ["PAW_PBE", "PAW_PBE", "PAW_PBE", "PAW_PBE", "PAW_PBE"],
    )
    self.assertEqual(vasprun.as_dict()["input"]["nkpoints"], 24)
def test_get_band_structure(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
filepath = self.TEST_FILES_DIR / "vasprun_Si_bands.xml"
vasprun = Vasprun(filepath, parse_projected_eigen=True, parse_potcar_file=False)
bs = vasprun.get_band_structure(kpoints_filename=self.TEST_FILES_DIR / "KPOINTS_Si_bands")
cbm = bs.get_cbm()
vbm = bs.get_vbm()
self.assertEqual(cbm["kpoint_index"], [13], "wrong cbm kpoint index")
self.assertAlmostEqual(cbm["energy"], 6.2301, "wrong cbm energy")
self.assertEqual(cbm["band_index"], {Spin.up: [4], Spin.down: [4]}, "wrong cbm bands")
self.assertEqual(vbm["kpoint_index"], [0, 63, 64])
self.assertAlmostEqual(vbm["energy"], 5.6158, "wrong vbm energy")
self.assertEqual(
vbm["band_index"],
{Spin.up: [1, 2, 3], Spin.down: [1, 2, 3]},
"wrong vbm bands",
)
self.assertEqual(vbm["kpoint"].label, "\\Gamma", "wrong vbm label")
self.assertEqual(cbm["kpoint"].label, None, "wrong cbm label")
projected = bs.get_projection_on_elements()
self.assertAlmostEqual(projected[Spin.up][0][0]["Si"], 0.4238)
projected = bs.get_projections_on_elements_and_orbitals({"Si": ["s"]})
self.assertAlmostEqual(projected[Spin.up][0][0]["Si"]["s"], 0.4238)
# Test compressed files case 1: compressed KPOINTS in current dir
with ScratchDir("./"):
copyfile(self.TEST_FILES_DIR / "vasprun_Si_bands.xml", "vasprun.xml")
# Check for error if no KPOINTS file
vasprun = Vasprun("vasprun.xml", parse_projected_eigen=True, parse_potcar_file=False)
with self.assertRaises(VaspParserError):
_ = vasprun.get_band_structure(line_mode=True)
# Check KPOINTS.gz succesfully inferred and used if present
with open(self.TEST_FILES_DIR / "KPOINTS_Si_bands", "rb") as f_in:
with gzip.open("KPOINTS.gz", "wb") as f_out:
copyfileobj(f_in, f_out)
bs_kpts_gzip = vasprun.get_band_structure()
self.assertEqual(bs.efermi, bs_kpts_gzip.efermi)
self.assertEqual(bs.as_dict(), bs_kpts_gzip.as_dict())
# Test compressed files case 2: compressed vasprun in another dir
with ScratchDir("./"):
os.mkdir("deeper")
copyfile(self.TEST_FILES_DIR / "KPOINTS_Si_bands", Path("deeper") / "KPOINTS")
with open(self.TEST_FILES_DIR / "vasprun_Si_bands.xml", "rb") as f_in:
with gzip.open(os.path.join("deeper", "vasprun.xml.gz"), "wb") as f_out:
copyfileobj(f_in, f_out)
vasprun = Vasprun(
os.path.join("deeper", "vasprun.xml.gz"),
parse_projected_eigen=True,
parse_potcar_file=False,
)
bs_vasprun_gzip = vasprun.get_band_structure(line_mode=True)
self.assertEqual(bs.efermi, bs_vasprun_gzip.efermi)
self.assertEqual(bs.as_dict(), bs_vasprun_gzip.as_dict())
# test hybrid band structures
vasprun.actual_kpoints_weights[-1] = 0.0
bs = vasprun.get_band_structure(kpoints_filename=self.TEST_FILES_DIR / "KPOINTS_Si_bands")
cbm = bs.get_cbm()
vbm = bs.get_vbm()
self.assertEqual(cbm["kpoint_index"], [0])
self.assertAlmostEqual(cbm["energy"], 6.3676)
self.assertEqual(cbm["kpoint"].label, None)
self.assertEqual(vbm["kpoint_index"], [0])
self.assertAlmostEqual(vbm["energy"], 2.8218)
self.assertEqual(vbm["kpoint"].label, None)
# test self-consistent band structure calculation for non-hybrid functionals
vasprun = Vasprun(
self.TEST_FILES_DIR / "vasprun.xml.forcehybridlikecalc",
parse_projected_eigen=True,
parse_potcar_file=False,
)
bs = vasprun.get_band_structure(
kpoints_filename=self.TEST_FILES_DIR / "KPOINTS.forcehybridlikecalc",
force_hybrid_mode=True,
line_mode=True,
)
dict_to_test = bs.get_band_gap()
self.assertTrue(dict_to_test["direct"])
self.assertAlmostEqual(dict_to_test["energy"], 6.007899999999999)
self.assertEqual(dict_to_test["transition"], "\\Gamma-\\Gamma")
self.assertEqual(bs.get_branch(0)[0]["start_index"], 0)
self.assertEqual(bs.get_branch(0)[0]["end_index"], 0)
def test_projected_magnetisation(self):
filepath = self.TEST_FILES_DIR / "vasprun.lvel.Si2H.xml"
vasprun = Vasprun(filepath, parse_projected_eigen=True)
self.assertTrue(vasprun.projected_magnetisation is not None)
self.assertEqual(vasprun.projected_magnetisation.shape, (76, 240, 4, 9, 3))
self.assertAlmostEqual(vasprun.projected_magnetisation[0, 0, 0, 0, 0], -0.0712)
def test_smart_efermi(self):
# branch 1 - E_fermi does not cross a band
vrun = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.LiF")
smart_fermi = vrun.calculate_efermi()
self.assertAlmostEqual(smart_fermi, vrun.efermi, places=4)
eigen_gap = vrun.eigenvalue_band_properties[0]
bs_gap = vrun.get_band_structure(efermi=smart_fermi).get_band_gap()["energy"]
self.assertAlmostEqual(bs_gap, eigen_gap, places=3)
# branch 2 - E_fermi crosses a band but bandgap=0
vrun = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.Al")
smart_fermi = vrun.calculate_efermi()
self.assertAlmostEqual(smart_fermi, vrun.efermi, places=4)
eigen_gap = vrun.eigenvalue_band_properties[0]
bs_gap = vrun.get_band_structure(efermi=smart_fermi).get_band_gap()["energy"]
self.assertAlmostEqual(bs_gap, eigen_gap, places=3)
# branch 3 - E_fermi crosses a band in an insulator
vrun = Vasprun(self.TEST_FILES_DIR / "vasprun.xml.LiH_bad_efermi")
smart_fermi = vrun.calculate_efermi()
self.assertNotAlmostEqual(smart_fermi, vrun.efermi, places=4)
eigen_gap = vrun.eigenvalue_band_properties[0]
bs_gap = vrun.get_band_structure(efermi="smart").get_band_gap()["energy"]
self.assertAlmostEqual(bs_gap, eigen_gap, places=3)
self.assertNotAlmostEqual(vrun.get_band_structure(efermi=None).get_band_gap()["energy"], eigen_gap, places=3)
self.assertNotEqual(bs_gap, 0)
def test_sc_step_overflow(self):
filepath = self.TEST_FILES_DIR / "vasprun.xml.sc_overflow"
# with warnings.catch_warnings(record=True) as w:
# warnings.simplefilter("always")
# vasprun = Vasprun(filepath)
# self.assertEqual(len(w), 3)
vasprun = Vasprun(filepath)
estep = vasprun.ionic_steps[0]["electronic_steps"][29]
self.assertTrue(np.isnan(estep["e_wo_entrp"]))
def test_update_potcar(self):
filepath = self.TEST_FILES_DIR / "vasprun.xml"
potcar_path = self.TEST_FILES_DIR / "POTCAR.LiFePO4.gz"
potcar_path2 = self.TEST_FILES_DIR / "POTCAR2.LiFePO4.gz"
vasprun = Vasprun(filepath, parse_potcar_file=False)
self.assertEqual(
vasprun.potcar_spec,
[
{"titel": "PAW_PBE Li 17Jan2003", "hash": None},
{"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
{"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
{"titel": "PAW_PBE P 17Jan2003", "hash": None},
{"titel": "PAW_PBE O 08Apr2002", "hash": None},
],
)
vasprun.update_potcar_spec(potcar_path)
self.assertEqual(
vasprun.potcar_spec,
[
{
"titel": "PAW_PBE Li 17Jan2003",
"hash": "65e83282d1707ec078c1012afbd05be8",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE P 17Jan2003",
"hash": "7dc3393307131ae67785a0cdacb61d5f",
},
{
"titel": "PAW_PBE O 08Apr2002",
"hash": "7a25bc5b9a5393f46600a4939d357982",
},
],
)
vasprun2 = Vasprun(filepath, parse_potcar_file=False)
self.assertRaises(ValueError, vasprun2.update_potcar_spec, potcar_path2)
vasprun = Vasprun(filepath, parse_potcar_file=potcar_path)
self.assertEqual(
vasprun.potcar_spec,
[
{
"titel": "PAW_PBE Li 17Jan2003",
"hash": "65e83282d1707ec078c1012afbd05be8",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE P 17Jan2003",
"hash": "7dc3393307131ae67785a0cdacb61d5f",
},
{
"titel": "PAW_PBE O 08Apr2002",
"hash": "7a25bc5b9a5393f46600a4939d357982",
},
],
)
self.assertRaises(ValueError, Vasprun, filepath, parse_potcar_file=potcar_path2)
def test_search_for_potcar(self):
filepath = self.TEST_FILES_DIR / "vasprun.xml"
vasprun = Vasprun(filepath, parse_potcar_file=True)
self.assertEqual(
vasprun.potcar_spec,
[
{
"titel": "PAW_PBE Li 17Jan2003",
"hash": "65e83282d1707ec078c1012afbd05be8",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE Fe 06Sep2000",
"hash": "9530da8244e4dac17580869b4adab115",
},
{
"titel": "PAW_PBE P 17Jan2003",
"hash": "7dc3393307131ae67785a0cdacb61d5f",
},
{
"titel": "PAW_PBE O 08Apr2002",
"hash": "7a25bc5b9a5393f46600a4939d357982",
},
],
)
def test_potcar_not_found(self):
filepath = self.TEST_FILES_DIR / "vasprun.xml"
# Ensure no potcar is found and nothing is updated
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
vasprun = Vasprun(filepath, parse_potcar_file=".")
self.assertEqual(len(w), 2)
self.assertEqual(
vasprun.potcar_spec,
[
{"titel": "PAW_PBE Li 17Jan2003", "hash": None},
{"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
{"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
{"titel": "PAW_PBE P 17Jan2003", "hash": None},
{"titel": "PAW_PBE O 08Apr2002", "hash": None},
],
)
def test_parsing_chemical_shift_calculations(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
filepath = self.TEST_FILES_DIR / "nmr" / "cs" / "basic" / "vasprun.xml.chemical_shift.scstep"
vasprun = Vasprun(filepath)
nestep = len(vasprun.ionic_steps[-1]["electronic_steps"])
self.assertEqual(nestep, 10)
self.assertTrue(vasprun.converged)
def test_parsing_efg_calcs(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
filepath = self.TEST_FILES_DIR / "nmr" / "efg" / "AlPO4" / "vasprun.xml"
vasprun = Vasprun(filepath)
nestep = len(vasprun.ionic_steps[-1]["electronic_steps"])
self.assertEqual(nestep, 18)
self.assertTrue(vasprun.converged)
def test_charged_structure(self):
vpath = self.TEST_FILES_DIR / "vasprun.charged.xml"
potcar_path = self.TEST_FILES_DIR / "POT_GGA_PAW_PBE" / "POTCAR.Si.gz"
vasprun = Vasprun(vpath, parse_potcar_file=False)
vasprun.update_charge_from_potcar(potcar_path)
self.assertEqual(vasprun.parameters.get("NELECT", 8), 9)
self.assertEqual(vasprun.structures[0].charge, 1)
vpath = self.TEST_FILES_DIR / "vasprun.split.charged.xml"
potcar_path = self.TEST_FILES_DIR / "POTCAR.split.charged.gz"
vasprun = Vasprun(vpath, parse_potcar_file=False)
vasprun.update_charge_from_potcar(potcar_path)
self.assertEqual(vasprun.parameters.get("NELECT", 0), 7)
self.assertEqual(vasprun.structures[-1].charge, 1)
def test_kpointset_electronvelocities(self):
vpath = self.TEST_FILES_DIR / "vasprun.lvel.Si2H.xml"
vasprun = Vasprun(vpath, parse_potcar_file=False)
self.assertEqual(vasprun.eigenvalues[Spin.up].shape[0], len(vasprun.actual_kpoints))
class OutcarTest(PymatgenTest):
_multiprocess_shared_ = True
def test_init(self):
for f in ["OUTCAR", "OUTCAR.gz"]:
filepath = self.TEST_FILES_DIR / f
outcar = Outcar(filepath)
expected_mag = (
{"d": 0.0, "p": 0.003, "s": 0.002, "tot": 0.005},
{"d": 0.798, "p": 0.008, "s": 0.007, "tot": 0.813},
{"d": 0.798, "p": 0.008, "s": 0.007, "tot": 0.813},
{"d": 0.0, "p": -0.117, "s": 0.005, "tot": -0.112},
{"d": 0.0, "p": -0.165, "s": 0.004, "tot": -0.162},
{"d": 0.0, "p": -0.117, "s": 0.005, "tot": -0.112},
{"d": 0.0, "p": -0.165, "s": 0.004, "tot": -0.162},
)
expected_chg = (
{"p": 0.154, "s": 0.078, "d": 0.0, "tot": 0.232},
{"p": 0.707, "s": 0.463, "d": 8.316, "tot": 9.486},
{"p": 0.707, "s": 0.463, "d": 8.316, "tot": 9.486},
{"p": 3.388, "s": 1.576, "d": 0.0, "tot": 4.964},
{"p": 3.365, "s": 1.582, "d": 0.0, "tot": 4.947},
{"p": 3.388, "s": 1.576, "d": 0.0, "tot": 4.964},
{"p": 3.365, "s": 1.582, "d": 0.0, "tot": 4.947},
)
self.assertAlmostEqual(
outcar.magnetization,
expected_mag,
5,
"Wrong magnetization read from Outcar",
)
self.assertAlmostEqual(outcar.charge, expected_chg, 5, "Wrong charge read from Outcar")
self.assertFalse(outcar.is_stopped)
self.assertEqual(
outcar.run_stats,
{
"System time (sec)": 0.938,
"Total CPU time used (sec)": 545.142,
"Elapsed time (sec)": 546.709,
"Maximum memory used (kb)": 0.0,
"Average memory used (kb)": 0.0,
"User time (sec)": 544.204,
"cores": "8",
},
)
self.assertAlmostEqual(outcar.efermi, 2.0112)
self.assertAlmostEqual(outcar.nelect, 44.9999991)
self.assertAlmostEqual(outcar.total_mag, 0.9999998)
self.assertIsNotNone(outcar.as_dict())
self.assertFalse(outcar.lepsilon)
toten = 0
for k in outcar.final_energy_contribs.keys():
toten += outcar.final_energy_contribs[k]
self.assertAlmostEqual(toten, outcar.final_energy, 6)
def test_stopped_old(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.stopped"
outcar = Outcar(filepath)
self.assertTrue(outcar.is_stopped)
for f in ["OUTCAR.lepsilon_old_born", "OUTCAR.lepsilon_old_born.gz"]:
filepath = self.TEST_FILES_DIR / f
outcar = Outcar(filepath)
self.assertTrue(outcar.lepsilon)
self.assertAlmostEqual(outcar.dielectric_tensor[0][0], 3.716432)
self.assertAlmostEqual(outcar.dielectric_tensor[0][1], -0.20464)
self.assertAlmostEqual(outcar.dielectric_tensor[1][2], -0.20464)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][0], 0.001419)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][2], 0.001419)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[2][2], 0.001419)
self.assertAlmostEqual(outcar.piezo_tensor[0][0], 0.52799)
self.assertAlmostEqual(outcar.piezo_tensor[1][3], 0.35998)
self.assertAlmostEqual(outcar.piezo_tensor[2][5], 0.35997)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[0][0], 0.05868)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[1][3], 0.06241)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[2][5], 0.06242)
self.assertAlmostEqual(outcar.born[0][1][2], -0.385)
self.assertAlmostEqual(outcar.born[1][2][0], 0.36465)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][0][0], -572.5437, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][0], 683.2985, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][3], 73.07059, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][0][0], 570.98927, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][1][0], -683.68519, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][2][2], 570.98927, places=4)
def test_stopped(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.stopped"
outcar = Outcar(filepath)
self.assertTrue(outcar.is_stopped)
for f in ["OUTCAR.lepsilon", "OUTCAR.lepsilon.gz"]:
filepath = self.TEST_FILES_DIR / f
outcar = Outcar(filepath)
self.assertTrue(outcar.lepsilon)
self.assertAlmostEqual(outcar.dielectric_tensor[0][0], 3.716432)
self.assertAlmostEqual(outcar.dielectric_tensor[0][1], -0.20464)
self.assertAlmostEqual(outcar.dielectric_tensor[1][2], -0.20464)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][0], 0.001419)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][2], 0.001419)
self.assertAlmostEqual(outcar.dielectric_ionic_tensor[2][2], 0.001419)
self.assertAlmostEqual(outcar.piezo_tensor[0][0], 0.52799)
self.assertAlmostEqual(outcar.piezo_tensor[1][3], 0.35998)
self.assertAlmostEqual(outcar.piezo_tensor[2][5], 0.35997)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[0][0], 0.05868)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[1][3], 0.06241)
self.assertAlmostEqual(outcar.piezo_ionic_tensor[2][5], 0.06242)
self.assertAlmostEqual(outcar.born[0][1][2], -0.385)
self.assertAlmostEqual(outcar.born[1][2][0], 0.36465)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][0][0], -572.5437, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][0], 683.2985, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][3], 73.07059, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][0][0], 570.98927, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][1][0], -683.68519, places=4)
self.assertAlmostEqual(outcar.internal_strain_tensor[1][2][2], 570.98927, places=4)
def test_soc(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.NiO_SOC.gz"
outcar = Outcar(filepath)
expected_mag = (
{
"s": Magmom([0.0, 0.0, -0.001]),
"p": Magmom([0.0, 0.0, -0.003]),
"d": Magmom([0.0, 0.0, 1.674]),
"tot": Magmom([0.0, 0.0, 1.671]),
},
{
"s": Magmom([0.0, 0.0, 0.001]),
"p": Magmom([0.0, 0.0, 0.003]),
"d": Magmom([0.0, 0.0, -1.674]),
"tot": Magmom([0.0, 0.0, -1.671]),
},
{
"s": Magmom([0.0, 0.0, 0.0]),
"p": Magmom([0.0, 0.0, 0.0]),
"d": Magmom([0.0, 0.0, 0.0]),
"tot": Magmom([0.0, 0.0, 0.0]),
},
{
"s": Magmom([0.0, 0.0, 0.0]),
"p": Magmom([0.0, 0.0, 0.0]),
"d": Magmom([0.0, 0.0, 0.0]),
"tot": Magmom([0.0, 0.0, 0.0]),
},
)
# test note: Magmom class uses np.allclose() when testing for equality
# so fine to use assertEqual here
self.assertEqual(
outcar.magnetization,
expected_mag,
"Wrong vector magnetization read from Outcar for SOC calculation",
)
def test_polarization(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.BaTiO3.polar"
outcar = Outcar(filepath)
self.assertEqual(outcar.spin, True)
self.assertEqual(outcar.noncollinear, False)
self.assertAlmostEqual(outcar.p_ion[0], 0.0)
self.assertAlmostEqual(outcar.p_ion[1], 0.0)
self.assertAlmostEqual(outcar.p_ion[2], -5.56684)
self.assertAlmostEqual(outcar.p_sp1[0], 2.00068)
self.assertAlmostEqual(outcar.p_sp2[0], -2.00044)
self.assertAlmostEqual(outcar.p_elec[0], 0.00024)
self.assertAlmostEqual(outcar.p_elec[1], 0.00019)
self.assertAlmostEqual(outcar.p_elec[2], 3.61674)
def test_pseudo_zval(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.BaTiO3.polar"
outcar = Outcar(filepath)
self.assertDictEqual({"Ba": 10.00, "Ti": 10.00, "O": 6.00}, outcar.zval_dict)
filepath = self.TEST_FILES_DIR / "OUTCAR.LaSnNO2.polar"
outcar = Outcar(filepath)
self.assertDictEqual({"La": 11.0, "N": 5.0, "O": 6.0, "Sn": 14.0}, outcar.zval_dict)
def test_dielectric(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.dielectric"
outcar = Outcar(filepath)
outcar.read_corrections()
self.assertAlmostEqual(outcar.data["dipol_quadrupol_correction"], 0.03565)
self.assertAlmostEqual(outcar.final_energy, -797.46760559)
def test_freq_dielectric(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.LOPTICS"
outcar = Outcar(filepath)
outcar.read_freq_dielectric()
self.assertAlmostEqual(outcar.dielectric_energies[0], 0)
self.assertAlmostEqual(outcar.dielectric_energies[-1], 39.826101)
self.assertAlmostEqual(outcar.dielectric_tensor_function[0][0, 0], 8.96938800)
self.assertAlmostEqual(
outcar.dielectric_tensor_function[-1][0, 0],
7.36167000e-01 + 1.53800000e-03j,
)
self.assertEqual(len(outcar.dielectric_energies), len(outcar.dielectric_tensor_function))
np.testing.assert_array_equal(
outcar.dielectric_tensor_function[0],
outcar.dielectric_tensor_function[0].transpose(),
)
plasma_freq = outcar.plasma_frequencies
self.assertArrayAlmostEqual(plasma_freq["intraband"], np.zeros((3, 3)))
self.assertArrayAlmostEqual(
plasma_freq["interband"],
[
[367.49, 63.939, 11.976],
[63.939, 381.155, -24.461],
[11.976, -24.461, 297.844],
],
)
def test_freq_dielectric_vasp544(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.LOPTICS.vasp544"
outcar = Outcar(filepath)
outcar.read_freq_dielectric()
self.assertAlmostEqual(outcar.dielectric_energies[0], 0)
self.assertAlmostEqual(outcar.dielectric_energies[-1], 39.63964)
self.assertAlmostEqual(outcar.dielectric_tensor_function[0][0, 0], 12.769435 + 0j)
self.assertAlmostEqual(outcar.dielectric_tensor_function[-1][0, 0], 0.828615 + 0.016594j)
self.assertEqual(len(outcar.dielectric_energies), len(outcar.dielectric_tensor_function))
np.testing.assert_array_equal(
outcar.dielectric_tensor_function[0],
outcar.dielectric_tensor_function[0].transpose(),
)
def test_parse_sci_notation(self):
invalid_pattern = "23535.35 35235.34 325325.3"
valid_pattern1 = " 0.00000E+00 0.00000E+00 0.00000E+00 0.00000E+00 0.00000E+00 0.00000E+00 0.00000E+00"
valid_pattern2 = " 0.62963E+00 0.15467E+02 0.15467E+02 0.15467E+02-0.30654E-16-0.91612E-16 0.52388E-16"
self.assertEqual(Outcar._parse_sci_notation(invalid_pattern), [])
self.assertEqual(Outcar._parse_sci_notation(valid_pattern1), [0, 0, 0, 0, 0, 0, 0])
self.assertEqual(
Outcar._parse_sci_notation(valid_pattern2),
[
0.62963,
0.15467e02,
0.15467e02,
0.15467e02,
-0.30654e-16,
-0.91612e-16,
0.52388e-16,
],
)
def test_read_elastic_tensor(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.total_tensor.Li2O.gz"
outcar = Outcar(filepath)
outcar.read_elastic_tensor()
self.assertAlmostEqual(outcar.data["elastic_tensor"][0][0], 1986.3391)
self.assertAlmostEqual(outcar.data["elastic_tensor"][0][1], 187.8324)
self.assertAlmostEqual(outcar.data["elastic_tensor"][3][3], 586.3034)
def test_read_piezo_tensor(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.lepsilon.gz"
outcar = Outcar(filepath)
outcar.read_piezo_tensor()
self.assertAlmostEqual(outcar.data["piezo_tensor"][0][0], 0.52799)
self.assertAlmostEqual(outcar.data["piezo_tensor"][1][3], 0.35998)
self.assertAlmostEqual(outcar.data["piezo_tensor"][2][5], 0.35997)
def test_core_state_eigen(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.CL"
cl = Outcar(filepath).read_core_state_eigen()
self.assertAlmostEqual(cl[6]["2s"][-1], -174.4779)
filepath = self.TEST_FILES_DIR / "OUTCAR.icorelevel"
outcar = Outcar(filepath)
cl = outcar.read_core_state_eigen()
self.assertAlmostEqual(cl[4]["3d"][-1], -31.4522)
# test serialization
outcar.as_dict()
def test_avg_core_poten(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.lepsilon"
cp = Outcar(filepath).read_avg_core_poten()
self.assertAlmostEqual(cp[-1][1], -90.0487)
filepath = self.TEST_FILES_DIR / "OUTCAR"
cp = Outcar(filepath).read_avg_core_poten()
self.assertAlmostEqual(cp[0][6], -73.1068)
filepath = self.TEST_FILES_DIR / "OUTCAR.bad_core_poten.gz"
cp = Outcar(filepath).read_avg_core_poten()
self.assertAlmostEqual(cp[0][1], -101.5055)
def test_single_atom(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.Al"
outcar = Outcar(filepath)
expected_mag = ({"p": 0.0, "s": 0.0, "d": 0.0, "tot": 0.0},)
expected_chg = ({"p": 0.343, "s": 0.425, "d": 0.0, "tot": 0.768},)
self.assertAlmostEqual(outcar.magnetization, expected_mag)
self.assertAlmostEqual(outcar.charge, expected_chg)
self.assertFalse(outcar.is_stopped)
self.assertEqual(
outcar.run_stats,
{
"System time (sec)": 0.592,
"Total CPU time used (sec)": 50.194,
"Elapsed time (sec)": 52.337,
"Maximum memory used (kb)": 62900.0,
"Average memory used (kb)": 0.0,
"User time (sec)": 49.602,
"cores": "32",
},
)
self.assertAlmostEqual(outcar.efermi, 8.0942)
self.assertAlmostEqual(outcar.nelect, 3)
self.assertAlmostEqual(outcar.total_mag, 8.2e-06)
self.assertIsNotNone(outcar.as_dict())
def test_chemical_shielding(self):
filename = self.TEST_FILES_DIR / "nmr" / "cs" / "core.diff" / "hydromagnesite" / "OUTCAR"
outcar = Outcar(filename)
expected_chemical_shielding = [
[191.9974, 69.5232, 0.6342],
[195.0808, 68.183, 0.833],
[192.0389, 69.5762, 0.6329],
[195.0844, 68.1756, 0.8336],
[192.005, 69.5289, 0.6339],
[195.0913, 68.1859, 0.833],
[192.0237, 69.565, 0.6333],
[195.0788, 68.1733, 0.8337],
]
self.assertAlmostEqual(
len(outcar.data["chemical_shielding"]["valence_only"][20:28]),
len(expected_chemical_shielding),
)
self.assertArrayAlmostEqual(
outcar.data["chemical_shielding"]["valence_and_core"][20:28],
expected_chemical_shielding,
decimal=5,
)
def test_chemical_shielding_with_different_core_contribution(self):
filename = self.TEST_FILES_DIR / "nmr" / "cs" / "core.diff" / "core.diff.chemical.shifts.OUTCAR"
outcar = Outcar(filename)
c_vo = outcar.data["chemical_shielding"]["valence_only"][7]
for x1, x2 in zip(list(c_vo), [198.7009, 73.7484, 1.0000]):
self.assertAlmostEqual(x1, x2)
c_vc = outcar.data["chemical_shielding"]["valence_and_core"][7]
for x1, x2 in zip(list(c_vc), [-1.9406, 73.7484, 1.0000]):
self.assertAlmostEqual(x1, x2)
def test_cs_raw_tensors(self):
filename = self.TEST_FILES_DIR / "nmr" / "cs" / "core.diff" / "core.diff.chemical.shifts.OUTCAR"
outcar = Outcar(filename)
unsym_tensors = outcar.data["unsym_cs_tensor"]
self.assertEqual(
unsym_tensors[0],
[
[-145.814605, -4.263425, 0.000301],
[4.263434, -145.812238, -8.7e-05],
[0.000136, -0.000189, -142.794068],
],
)
self.assertEqual(
unsym_tensors[29],
[
[287.789318, -53.799325, 30.900024],
[-53.799571, 225.668117, -17.839598],
[3.801103, -2.195218, 88.896756],
],
)
def test_cs_g0_contribution(self):
filename = self.TEST_FILES_DIR / "nmr" / "cs" / "core.diff" / "core.diff.chemical.shifts.OUTCAR"
outcar = Outcar(filename)
g0_contrib = outcar.data["cs_g0_contribution"]
self.assertEqual(
g0_contrib,
[
[-8.773535, 9e-06, 1e-06],
[1.7e-05, -8.773536, -0.0792],
[-6e-06, -0.008328, -9.320237],
],
)
def test_cs_core_contribution(self):
filename = self.TEST_FILES_DIR / "nmr" / "cs" / "core.diff" / "core.diff.chemical.shifts.OUTCAR"
outcar = Outcar(filename)
core_contrib = outcar.data["cs_core_contribution"]
self.assertEqual(core_contrib, {"Mg": -412.8248405, "C": -200.5098812, "O": -271.0766979})
def test_nmr_efg(self):
filename = self.TEST_FILES_DIR / "nmr" / "efg" / "AlPO4" / "OUTCAR"
outcar = Outcar(filename)
expected_efg = [
{"eta": 0.465, "nuclear_quadrupole_moment": 146.6, "cq": -5.573},
{"eta": 0.465, "nuclear_quadrupole_moment": 146.6, "cq": -5.573},
{"eta": 0.137, "nuclear_quadrupole_moment": 146.6, "cq": 6.327},
{"eta": 0.137, "nuclear_quadrupole_moment": 146.6, "cq": 6.327},
{"eta": 0.112, "nuclear_quadrupole_moment": 146.6, "cq": -7.453},
{"eta": 0.112, "nuclear_quadrupole_moment": 146.6, "cq": -7.453},
{"eta": 0.42, "nuclear_quadrupole_moment": 146.6, "cq": -5.58},
{"eta": 0.42, "nuclear_quadrupole_moment": 146.6, "cq": -5.58},
]
self.assertEqual(len(outcar.data["efg"][2:10]), len(expected_efg))
for e1, e2 in zip(outcar.data["efg"][2:10], expected_efg):
for k in e1.keys():
self.assertAlmostEqual(e1[k], e2[k], places=5)
exepected_tensors = [
[[11.11, 1.371, 2.652], [1.371, 3.635, -3.572], [2.652, -3.572, -14.746]],
[[11.11, -1.371, 2.652], [-1.371, 3.635, 3.572], [2.652, 3.572, -14.746]],
[[-3.098, 6.511, 7.732], [6.511, 1.419, 11.445], [7.732, 11.445, 1.678]],
[
[-3.098, -6.511, 7.732],
[-6.511, 1.419, -11.445],
[7.732, -11.445, 1.678],
],
[
[2.344, -10.775, -7.006],
[-10.775, -7.152, -11.309],
[-7.006, -11.309, 4.808],
],
[
[2.344, 10.775, -7.006],
[10.775, -7.152, 11.309],
[-7.006, 11.309, 4.808],
],
[[2.404, -0.588, -6.83], [-0.588, 10.435, 3.159], [-6.83, 3.159, -12.839]],
[[2.404, 0.588, -6.83], [0.588, 10.435, -3.159], [-6.83, -3.159, -12.839]],
]
self.assertEqual(len(outcar.data["unsym_efg_tensor"][2:10]), len(exepected_tensors))
for e1, e2 in zip(outcar.data["unsym_efg_tensor"][2:10], exepected_tensors):
self.assertArrayAlmostEqual(e1, e2)
def test_read_fermi_contact_shift(self):
filepath = self.TEST_FILES_DIR / "OUTCAR_fc"
outcar = Outcar(filepath)
outcar.read_fermi_contact_shift()
self.assertAlmostEqual(outcar.data["fermi_contact_shift"]["fch"][0][0], -0.002)
self.assertAlmostEqual(outcar.data["fermi_contact_shift"]["th"][0][0], -0.052)
self.assertAlmostEqual(outcar.data["fermi_contact_shift"]["dh"][0][0], 0.0)
def test_drift(self):
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR")
self.assertEqual(len(outcar.drift), 5)
self.assertAlmostEqual(np.sum(outcar.drift), 0)
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR.CL")
self.assertEqual(len(outcar.drift), 79)
self.assertAlmostEqual(np.sum(outcar.drift), 0.448010)
def test_electrostatic_potential(self):
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR")
self.assertEqual(outcar.ngf, [54, 30, 54])
self.assertTrue(np.allclose(outcar.sampling_radii, [0.9748, 0.9791, 0.7215]))
self.assertTrue(
np.allclose(
outcar.electrostatic_potential,
[-26.0704, -45.5046, -45.5046, -72.9539, -73.0621, -72.9539, -73.0621],
)
)
def test_mag_electrostatic_error(self):
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR.electrostaticerror.gz")
self.assertEqual(
outcar.electrostatic_potential,
[
-21.1667,
-19.6865,
-22.3983,
-22.3307,
-20.5213,
-20.9292,
-21.5063,
-21.3554,
-21.74,
-21.7018,
-20.3422,
-20.6128,
-21.4405,
-21.0022,
-21.975,
-21.915,
-21.0156,
-21.9027,
-22.3712,
-21.5816,
-21.8535,
-20.5061,
-22.2474,
-22.1904,
-22.2203,
-20.1727,
-21.1068,
-20.1669,
-22.1272,
-21.3446,
-82.4717,
-83.035,
-81.8289,
-82.5957,
-81.7813,
-82.5011,
-82.6098,
-82.2885,
-81.606,
-99.1621,
-99.3146,
-99.1742,
-99.4728,
-100.2139,
-99.852,
-99.3575,
-99.4135,
-98.9092,
-99.8867,
-99.3707,
-99.0794,
-98.8376,
-99.3656,
-98.6474,
-99.3264,
-98.844,
-99.074,
-98.9354,
-99.1643,
-99.2412,
-68.7667,
-68.2528,
-66.7326,
-67.7113,
-69.2228,
-67.014,
-69.1456,
-67.3151,
-68.2625,
-67.6156,
-69.8112,
-68.9266,
-67.8286,
-69.3289,
-68.7017,
-67.2834,
-68.4665,
-68.0188,
-67.7083,
-69.7195,
-67.4078,
-67.9646,
-68.584,
-69.2387,
-69.7822,
-67.0701,
-67.8236,
-68.2468,
-68.6533,
-68.3218,
-67.5923,
-69.1266,
-68.4615,
-68.302,
-67.999,
-68.6709,
-68.9973,
-67.4147,
-68.4463,
-68.0899,
-67.665,
-69.6705,
-68.6433,
-68.4288,
-66.9027,
-67.3211,
-68.604,
-69.1299,
-67.5565,
-69.0845,
-67.4289,
-66.6864,
-67.6484,
-67.9783,
-67.7661,
-66.9797,
-67.8007,
-68.3194,
-69.3671,
-67.2708,
],
)
def test_onsite_density_matrix(self):
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR.LinearResponseU.gz")
matrices = outcar.data["onsite_density_matrices"]
self.assertEqual(matrices[0][Spin.up][0][0], 1.0227)
self.assertEqual(len(matrices[0][Spin.up]), 5)
self.assertEqual(len(matrices[0][Spin.up][0]), 5)
self.assertTrue("onsite_density_matrices" in outcar.as_dict())
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR_merged_numbers")
matrices = outcar.data["onsite_density_matrices"]
self.assertEqual(matrices[0][Spin.up][0][-1], 0.0)
self.assertEqual(len(matrices[0][Spin.up]), 7)
self.assertEqual(len(matrices[0][Spin.up][0]), 7)
self.assertTrue("onsite_density_matrices" in outcar.as_dict())
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR_merged_numbers2")
self.assertTrue("onsite_density_matrices" in outcar.as_dict())
def test_nplwvs(self):
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR")
self.assertEqual(outcar.data["nplwv"], [[34560]])
self.assertEqual(
outcar.data["nplwvs_at_kpoints"],
[
1719,
1714,
1722,
1728,
1722,
1726,
1722,
1720,
1717,
1724,
1715,
1724,
1726,
1724,
1728,
1715,
1722,
1715,
1726,
1730,
1730,
1715,
1716,
1729,
1727,
1723,
1721,
1712,
1723,
1719,
1717,
1717,
1724,
1719,
1719,
1727,
1726,
1730,
1719,
1720,
1718,
1717,
1722,
1719,
1709,
1714,
1724,
1726,
1718,
1713,
1720,
1713,
1711,
1713,
1715,
1717,
1728,
1726,
1712,
1722,
1714,
1713,
1717,
1714,
1714,
1717,
1712,
1710,
1721,
1722,
1724,
1720,
1726,
1719,
1722,
1714,
],
)
outcar = Outcar(self.TEST_FILES_DIR / "OUTCAR.CL")
self.assertEqual(outcar.data["nplwv"], [[None]])
self.assertEqual(outcar.data["nplwvs_at_kpoints"], [85687])
def test_vasp620_format(self):
filepath = self.TEST_FILES_DIR / "OUTCAR.vasp.6.2.0"
outcar = Outcar(filepath)
self.assertEqual(outcar.run_stats["Average memory used (kb)"], None)
class BSVasprunTest(PymatgenTest):
    """Tests for the lightweight band-structure-only BSVasprun parser."""

    _multiprocess_shared_ = True

    def test_get_band_structure(self):
        """CBM/VBM data and as_dict output from a Si band-structure run."""
        filepath = self.TEST_FILES_DIR / "vasprun_Si_bands.xml"
        vasprun = BSVasprun(filepath, parse_potcar_file=False)
        bs = vasprun.get_band_structure(kpoints_filename=self.TEST_FILES_DIR / "KPOINTS_Si_bands")
        cbm = bs.get_cbm()
        vbm = bs.get_vbm()
        self.assertEqual(cbm["kpoint_index"], [13], "wrong cbm kpoint index")
        # Fix: the message must be msg=; a positional third argument to
        # assertAlmostEqual is `places` and would raise TypeError on mismatch.
        self.assertAlmostEqual(cbm["energy"], 6.2301, msg="wrong cbm energy")
        self.assertEqual(cbm["band_index"], {Spin.up: [4], Spin.down: [4]}, "wrong cbm bands")
        self.assertEqual(vbm["kpoint_index"], [0, 63, 64])
        self.assertAlmostEqual(vbm["energy"], 5.6158, msg="wrong vbm energy")
        self.assertEqual(
            vbm["band_index"],
            {Spin.up: [1, 2, 3], Spin.down: [1, 2, 3]},
            "wrong vbm bands",
        )
        self.assertEqual(vbm["kpoint"].label, "\\Gamma", "wrong vbm label")
        self.assertEqual(cbm["kpoint"].label, None, "wrong cbm label")
        d = vasprun.as_dict()
        self.assertIn("eigenvalues", d["output"])
class OszicarTest(PymatgenTest):
    """Tests for the OSZICAR parser."""

    def test_init(self):
        """Step counts and final energy from a relaxation OSZICAR."""
        oszicar = Oszicar(self.TEST_FILES_DIR / "OSZICAR")
        self.assertEqual(len(oszicar.electronic_steps), len(oszicar.ionic_steps))
        self.assertEqual(len(oszicar.all_energies), 60)
        self.assertAlmostEqual(oszicar.final_energy, -526.63928)
class LocpotTest(PymatgenTest):
    """Tests for the LOCPOT parser."""

    def test_init(self):
        """Planar average and axis grids of a LOCPOT."""
        locpot = Locpot.from_file(self.TEST_FILES_DIR / "LOCPOT")
        self.assertAlmostEqual(-217.05226954, sum(locpot.get_average_along_axis(0)))
        # All three axis grids end at the same length for this cell.
        for axis in range(3):
            self.assertAlmostEqual(locpot.get_axis_grid(axis)[-1], 2.87629, 2)
class ChgcarTest(PymatgenTest):
    """Tests for CHGCAR parsing, arithmetic, HDF5/dict round-trips and
    spin-orbit-coupling (SOC) handling."""

    @classmethod
    def setUpClass(cls):
        # Shared read-only fixtures: non-spin, spin-polarized, Fe3O4 and a
        # non-collinear (SOC) NiO calculation.
        filepath = cls.TEST_FILES_DIR / "CHGCAR.nospin"
        cls.chgcar_no_spin = Chgcar.from_file(filepath)
        filepath = cls.TEST_FILES_DIR / "CHGCAR.spin"
        cls.chgcar_spin = Chgcar.from_file(filepath)
        filepath = cls.TEST_FILES_DIR / "CHGCAR.Fe3O4"
        cls.chgcar_fe3o4 = Chgcar.from_file(filepath)
        filepath = cls.TEST_FILES_DIR / "CHGCAR.NiO_SOC.gz"
        cls.chgcar_NiO_SOC = Chgcar.from_file(filepath)

    def test_init(self):
        self.assertAlmostEqual(self.chgcar_no_spin.get_integrated_diff(0, 2)[0, 1], 0)
        self.assertAlmostEqual(self.chgcar_spin.get_integrated_diff(0, 1)[0, 1], -0.0043896932237534022)
        # test sum
        chgcar = self.chgcar_spin + self.chgcar_spin
        self.assertAlmostEqual(chgcar.get_integrated_diff(0, 1)[0, 1], -0.0043896932237534022 * 2)
        # Subtracting a CHGCAR from itself must give zero integrated diff.
        chgcar = self.chgcar_spin - self.chgcar_spin
        self.assertAlmostEqual(chgcar.get_integrated_diff(0, 1)[0, 1], 0)
        ans = [1.56472768, 3.25985108, 3.49205728, 3.66275028, 3.8045896, 5.10813352]
        myans = self.chgcar_fe3o4.get_integrated_diff(0, 3, 6)
        self.assertTrue(np.allclose(myans[:, 1], ans))

    def test_write(self):
        # Write-out round trip: spot-check the augmentation-occupancy header
        # lines at two known line numbers of the regenerated file.
        self.chgcar_spin.write_file("CHGCAR_pmg")
        with open("CHGCAR_pmg") as f:
            for i, line in enumerate(f):
                if i == 22130:
                    self.assertEqual("augmentation occupancies 1 15\n", line)
                if i == 44255:
                    self.assertEqual("augmentation occupancies 1 15\n", line)
        os.remove("CHGCAR_pmg")

    def test_soc_chgcar(self):
        # SOC files expose a Cartesian magnetization density (diff_x/y/z)
        # in addition to total and diff.
        self.assertEqual(
            set(self.chgcar_NiO_SOC.data.keys()),
            {"total", "diff_x", "diff_y", "diff_z", "diff"},
        )
        self.assertTrue(self.chgcar_NiO_SOC.is_soc)
        self.assertEqual(
            self.chgcar_NiO_SOC.data["diff"].shape,
            self.chgcar_NiO_SOC.data["diff_y"].shape,
        )
        # check our construction of chg.data['diff'] makes sense
        # this has been checked visually too and seems reasonable
        self.assertEqual(
            abs(self.chgcar_NiO_SOC.data["diff"][0][0][0]),
            np.linalg.norm(
                [
                    self.chgcar_NiO_SOC.data["diff_x"][0][0][0],
                    self.chgcar_NiO_SOC.data["diff_y"][0][0][0],
                    self.chgcar_NiO_SOC.data["diff_z"][0][0][0],
                ]
            ),
        )
        # and that the net magnetization is about zero
        # note: we get ~ 0.08 here, seems a little high compared to
        # vasp output, but might be due to chgcar limitations?
        self.assertAlmostEqual(self.chgcar_NiO_SOC.net_magnetization, 0.0, places=0)
        # Writing and re-reading must preserve the SOC flag.
        self.chgcar_NiO_SOC.write_file("CHGCAR_pmg_soc")
        chg_from_file = Chgcar.from_file("CHGCAR_pmg_soc")
        self.assertTrue(chg_from_file.is_soc)
        os.remove("CHGCAR_pmg_soc")

    def test_hdf5(self):
        # HDF5 round-trip preserves volumetric data, lattice and species.
        chgcar = Chgcar.from_file(self.TEST_FILES_DIR / "CHGCAR.NiO_SOC.gz")
        chgcar.to_hdf5("chgcar_test.hdf5")
        import h5py

        with h5py.File("chgcar_test.hdf5", "r") as f:
            self.assertArrayAlmostEqual(np.array(f["vdata"]["total"]), chgcar.data["total"])
            self.assertArrayAlmostEqual(np.array(f["vdata"]["diff"]), chgcar.data["diff"])
            self.assertArrayAlmostEqual(np.array(f["lattice"]), chgcar.structure.lattice.matrix)
            self.assertArrayAlmostEqual(np.array(f["fcoords"]), chgcar.structure.frac_coords)
            for z in f["Z"]:
                self.assertIn(z, [Element.Ni.Z, Element.O.Z])
            for sp in f["species"]:
                self.assertIn(sp, ["Ni", "O"])
        chgcar2 = Chgcar.from_hdf5("chgcar_test.hdf5")
        self.assertArrayAlmostEqual(chgcar2.data["total"], chgcar.data["total"])
        os.remove("chgcar_test.hdf5")

    def test_spin_data(self):
        # Each spin channel is returned on the full 48^3 FFT grid.
        d = self.chgcar_spin.spin_data
        for k, v in d.items():
            self.assertEqual(v.shape, (48, 48, 48))

    def test_add(self):
        chgcar_sum = self.chgcar_spin + self.chgcar_spin
        self.assertArrayAlmostEqual(chgcar_sum.data["total"], self.chgcar_spin.data["total"] * 2)
        chgcar_copy = self.chgcar_spin.copy()
        chgcar_copy.structure = self.get_structure("Li2O")
        # Adding CHGCARs with different structures warns but still proceeds.
        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered.
            warnings.simplefilter("always")
            # Trigger a warning.
            chgcar_sum = chgcar_copy + self.chgcar_spin
            # Verify some things
            assert len(w) == 1
            assert "Structures are different. Make sure you know what you are doing..." in str(w[-1].message)
        # Mismatched grids or spin polarization must raise.
        self.assertRaises(ValueError, self.chgcar_spin.__add__, self.chgcar_fe3o4)
        self.assertRaises(ValueError, self.chgcar_spin.__add__, self.chgcar_no_spin)

    def test_as_dict_and_from_dict(self):
        d = self.chgcar_NiO_SOC.as_dict()
        chgcar_from_dict = Chgcar.from_dict(d)
        self.assertArrayAlmostEqual(self.chgcar_NiO_SOC.data["total"], chgcar_from_dict.data["total"])
        self.assertArrayAlmostEqual(
            self.chgcar_NiO_SOC.structure.lattice.matrix,
            chgcar_from_dict.structure.lattice.matrix,
        )
class ElfcarTest(PymatgenTest):
    """Tests for the ELFCAR parser and derived quantities."""

    def test_init(self):
        elfcar = Elfcar.from_file(self.TEST_FILES_DIR / "ELFCAR.gz")
        self.assertAlmostEqual(0.19076207645194002, np.mean(elfcar.data["total"]))
        self.assertAlmostEqual(0.19076046677910055, np.mean(elfcar.data["diff"]))
        # Round-trip through the dict representation.
        rebuilt = Elfcar.from_dict(elfcar.as_dict())
        self.assertEqual(elfcar.data, rebuilt.data)
        self.assertEqual(elfcar.poscar.structure, rebuilt.poscar.structure)

    def test_alpha(self):
        elfcar = Elfcar.from_file(self.TEST_FILES_DIR / "ELFCAR.gz")
        self.assertAlmostEqual(2.936678808979031, np.median(elfcar.get_alpha().data["total"]))

    def test_interpolation(self):
        elfcar = Elfcar.from_file(self.TEST_FILES_DIR / "ELFCAR.gz")
        self.assertAlmostEqual(0.0918471, elfcar.value_at(0.4, 0.5, 0.6))
        self.assertEqual(100, len(elfcar.linear_slice([0.0, 0.0, 0.0], [1.0, 1.0, 1.0])))
class ProcarTest(PymatgenTest):
    """Tests for the PROCAR projected-occupation parser."""

    _multiprocess_shared_ = True

    def test_init(self):
        filepath = self.TEST_FILES_DIR / "PROCAR.simple"
        p = Procar(filepath)
        self.assertAlmostEqual(p.get_occupation(0, "d")[Spin.up], 0)
        self.assertAlmostEqual(p.get_occupation(0, "s")[Spin.up], 0.35381249999999997)
        self.assertAlmostEqual(p.get_occupation(0, "p")[Spin.up], 1.19540625)
        # Unknown orbital labels are rejected.
        self.assertRaises(ValueError, p.get_occupation, 1, "m")
        self.assertEqual(p.nbands, 10)
        self.assertEqual(p.nkpoints, 10)
        self.assertEqual(p.nions, 3)
        lat = Lattice.cubic(3.0)
        s = Structure(
            lat,
            ["Li", "Na", "K"],
            [[0.0, 0.0, 0.0], [0.25, 0.25, 0.25], [0.75, 0.75, 0.75]],
        )
        d = p.get_projection_on_elements(s)
        # NOTE(review): assertAlmostEqual on two dicts only passes through the
        # exact-equality fast path; assertEqual would express the intent better.
        self.assertAlmostEqual(d[Spin.up][2][2], {"Na": 0.042, "K": 0.646, "Li": 0.042})
        filepath = self.TEST_FILES_DIR / "PROCAR"
        p = Procar(filepath)
        self.assertAlmostEqual(p.get_occupation(0, "dxy")[Spin.up], 0.96214813853000025)
        self.assertAlmostEqual(p.get_occupation(0, "dxy")[Spin.down], 0.85796295426000124)

    def test_phase_factors(self):
        filepath = self.TEST_FILES_DIR / "PROCAR.phase"
        p = Procar(filepath)
        self.assertAlmostEqual(p.phase_factors[Spin.up][0, 0, 0, 0], -0.746 + 0.099j)
        self.assertAlmostEqual(p.phase_factors[Spin.down][0, 0, 0, 0], 0.372 - 0.654j)
        # Two Li should have same phase factor.
        self.assertAlmostEqual(p.phase_factors[Spin.up][0, 0, 0, 0], p.phase_factors[Spin.up][0, 0, 1, 0])
        self.assertAlmostEqual(p.phase_factors[Spin.up][0, 0, 2, 0], -0.053 + 0.007j)
        self.assertAlmostEqual(p.phase_factors[Spin.down][0, 0, 2, 0], 0.027 - 0.047j)
        # new style phase factors (VASP 5.4.4+)
        filepath = self.TEST_FILES_DIR / "PROCAR.new_format_5.4.4"
        p = Procar(filepath)
        self.assertAlmostEqual(p.phase_factors[Spin.up][0, 0, 0, 0], -0.13 + 0.199j)
class XdatcarTest(PymatgenTest):
    """Tests for parsing XDATCAR trajectory files."""

    def test_init(self):
        traj = Xdatcar(self.TEST_FILES_DIR / "XDATCAR_4")
        self.assertEqual(len(traj.structures), 4)
        for frame in traj.structures:
            self.assertEqual(frame.formula, "Li2 O1")

        traj = Xdatcar(self.TEST_FILES_DIR / "XDATCAR_5")
        self.assertEqual(len(traj.structures), 4)
        for frame in traj.structures:
            self.assertEqual(frame.formula, "Li2 O1")

        # Appending another 4-frame file doubles the trajectory length.
        traj.concatenate(self.TEST_FILES_DIR / "XDATCAR_4")
        self.assertEqual(len(traj.structures), 8)
        self.assertIsNotNone(traj.get_string())

        # Variable-cell file: the lattice changes between first and last frame.
        traj = Xdatcar(self.TEST_FILES_DIR / "XDATCAR_6")
        self.assertNotEqual(traj.structures[0].lattice, traj.structures[-1].lattice)
class DynmatTest(PymatgenTest):
    """Tests for the DYNMAT dynamical-matrix parser."""

    def test_init(self):
        # nosetests pymatgen/io/vasp/tests/test_outputs.py:DynmatTest.test_init
        dyn = Dynmat(self.TEST_FILES_DIR / "DYNMAT")
        self.assertEqual(dyn.nspecs, 2)
        self.assertEqual(dyn.natoms, 6)
        self.assertEqual(dyn.ndisps, 3)
        self.assertTrue(np.allclose(dyn.masses, [63.546, 196.966]))
        # Atom 4, displacement 2 must be present with its vector and row.
        self.assertIn(4, dyn.data)
        self.assertIn(2, dyn.data[4])
        self.assertTrue(np.allclose(dyn.data[4][2]["dispvec"], [0.0, 0.05, 0.0]))
        self.assertTrue(np.allclose(dyn.data[4][2]["dynmat"][3], [0.055046, -0.298080, 0.0]))
        # TODO: test get_phonon_frequencies once cross-checked
class WavecarTest(PymatgenTest):
    """Tests for the WAVECAR reader: header parsing, G-point generation,
    wavefunction evaluation, FFT meshes, PARCHG generation and UNK export."""

    _multiprocess_shared_ = True

    def setUp(self):
        # Cubic 10 A cell matching the WAVECAR.N2 reference calculation.
        a = np.array([[10.0, 0.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 10.0]])
        self.vol = np.dot(a[0, :], np.cross(a[1, :], a[2, :]))
        # Reciprocal lattice rows: b_i = 2*pi * (a_j x a_k) / V.
        b = np.array(
            [
                np.cross(a[1, :], a[2, :]),
                np.cross(a[2, :], a[0, :]),
                np.cross(a[0, :], a[1, :]),
            ]
        )
        self.b = 2 * np.pi * b / self.vol
        self.a = a
        self.w = Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2")
        self.wH2 = Wavecar(self.TEST_FILES_DIR / "WAVECAR.H2_low_symm")
        self.wH2_gamma = Wavecar(self.TEST_FILES_DIR / "WAVECAR.H2_low_symm.gamma")
        self.w_ncl = Wavecar(self.TEST_FILES_DIR / "WAVECAR.H2.ncl")

    def test_standard(self):
        # Header data and array shapes of a standard (non-gamma) WAVECAR.
        w = self.w
        a = np.array([[10.0, 0.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 10.0]])
        vol = np.dot(a[0, :], np.cross(a[1, :], a[2, :]))
        b = np.array(
            [
                np.cross(a[1, :], a[2, :]),
                np.cross(a[2, :], a[0, :]),
                np.cross(a[0, :], a[1, :]),
            ]
        )
        b = 2 * np.pi * b / vol
        self.assertEqual(w.filename, self.TEST_FILES_DIR / "WAVECAR.N2")
        self.assertAlmostEqual(w.efermi, -5.7232, places=4)
        self.assertEqual(w.encut, 25)
        self.assertEqual(w.nb, 9)
        self.assertEqual(w.nk, 1)
        self.assertTrue(np.allclose(w.a, a))
        self.assertTrue(np.allclose(w.b, b))
        self.assertAlmostEqual(w.vol, vol)
        self.assertEqual(len(w.kpoints), w.nk)
        self.assertEqual(len(w.coeffs), w.nk)
        self.assertEqual(len(w.coeffs[0]), w.nb)
        self.assertEqual(len(w.band_energy), w.nk)
        self.assertEqual(w.band_energy[0].shape, (w.nb, 3))
        self.assertLessEqual(len(w.Gpoints[0]), 257)
        # One coefficient per retained G-point, for every band and k-point.
        for k in range(w.nk):
            for b in range(w.nb):
                self.assertEqual(len(w.coeffs[k][b]), len(w.Gpoints[k]))
        # Malformed files and invalid vasp_type values must be rejected.
        with self.assertRaises(ValueError):
            Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2.malformed")
        with self.assertRaises(ValueError):
            Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2", vasp_type="poop")
        with self.assertRaises(ValueError):
            Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2", vasp_type="g")
        with self.assertRaises(ValueError):
            Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2", vasp_type="n")
        # verbose=True should print something to stdout.
        import sys
        from io import StringIO

        saved_stdout = sys.stdout
        try:
            out = StringIO()
            sys.stdout = out
            Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2", verbose=True)
            self.assertNotEqual(out.getvalue().strip(), "")
        finally:
            sys.stdout = saved_stdout

    def test_n2_45210(self):
        # Same N2 system written by a different VASP version (45210 header).
        w = Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2.45210")
        self.assertEqual(w.filename, self.TEST_FILES_DIR / "WAVECAR.N2.45210")
        self.assertAlmostEqual(w.efermi, -5.7232, places=4)
        self.assertEqual(w.encut, 25)
        self.assertEqual(w.nb, 9)
        self.assertEqual(w.nk, 1)
        self.assertTrue(np.allclose(w.a, self.a))
        self.assertTrue(np.allclose(w.b, self.b))
        self.assertAlmostEqual(w.vol, self.vol)
        self.assertEqual(len(w.kpoints), w.nk)
        self.assertEqual(len(w.coeffs), w.nk)
        self.assertEqual(len(w.coeffs[0]), w.nb)
        self.assertEqual(len(w.band_energy), w.nk)
        self.assertEqual(w.band_energy[0].shape, (w.nb, 3))
        self.assertLessEqual(len(w.Gpoints[0]), 257)

    def test_n2_spin(self):
        # Spin-polarized file: one coefficient/band-energy set per spin.
        w = Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2.spin")
        self.assertEqual(len(w.coeffs), 2)
        self.assertEqual(len(w.band_energy), 2)
        self.assertEqual(len(w.kpoints), w.nk)
        self.assertEqual(len(w.Gpoints), w.nk)
        self.assertEqual(len(w.coeffs[0][0]), w.nb)
        self.assertEqual(len(w.band_energy[0]), w.nk)
        # If no G-points can be generated the constructor must fail; patch
        # the class method and restore it in the finally block.
        temp_ggp = Wavecar._generate_G_points
        try:
            Wavecar._generate_G_points = lambda x, y, gamma: []
            with self.assertRaises(ValueError):
                Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2")
        finally:
            Wavecar._generate_G_points = temp_ggp

    def test__generate_nbmax(self):
        self.w._generate_nbmax()
        self.assertEqual(self.w._nbmax.tolist(), [5, 5, 5])

    def test__generate_G_points(self):
        # At this cutoff no k-point keeps more than 257 plane waves.
        for k in range(self.w.nk):
            kp = self.w.kpoints[k]
            self.assertLessEqual(len(self.w._generate_G_points(kp)), 257)

    def test_evaluate_wavefunc(self):
        # Append a fake single-coefficient "band" at G = 0 so the expected
        # value is analytic: psi(r) = c / sqrt(V).
        self.w.Gpoints.append(np.array([0, 0, 0]))
        self.w.kpoints.append(np.array([0, 0, 0]))
        self.w.coeffs.append([[1 + 1j]])
        self.assertAlmostEqual(
            self.w.evaluate_wavefunc(-1, -1, [0, 0, 0]),
            (1 + 1j) / np.sqrt(self.vol),
            places=4,
        )
        # At r = 0 every plane wave is 1, so psi(0) = sum(coeffs)/sqrt(V).
        self.assertAlmostEqual(
            self.w.evaluate_wavefunc(0, 0, [0, 0, 0]),
            np.sum(self.w.coeffs[0][0]) / np.sqrt(self.vol),
            places=4,
        )
        # Same check for the spin-polarized layout (coeffs[spin][kpoint][band]).
        w = Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2.spin")
        w.Gpoints.append(np.array([0, 0, 0]))
        w.kpoints.append(np.array([0, 0, 0]))
        w.coeffs[0].append([[1 + 1j]])
        self.assertAlmostEqual(
            w.evaluate_wavefunc(-1, -1, [0, 0, 0]),
            (1 + 1j) / np.sqrt(self.vol),
            places=4,
        )

    def test_fft_mesh_basic(self):
        # shift=True: argmax location and zeroed mid-grid element.
        mesh = self.w.fft_mesh(0, 5)
        ind = np.argmax(np.abs(mesh))
        self.assertEqual(np.unravel_index(ind, mesh.shape), (14, 1, 1))
        self.assertEqual(mesh[tuple((self.w.ng / 2).astype(np.int_))], 0j)
        # shift=False: argmax moves, and the origin element is zero.
        mesh = self.w.fft_mesh(0, 5, shift=False)
        ind = np.argmax(np.abs(mesh))
        self.assertEqual(np.unravel_index(ind, mesh.shape), (6, 8, 8))
        self.assertEqual(mesh[0, 0, 0], 0j)

    def test_fft_mesh_advanced(self):
        # Compare standard, gamma-only and non-collinear meshes for the same
        # H2 system (equal up to a constant phase).
        ik = 0
        ib = 0
        mesh = self.wH2.fft_mesh(ik, ib)
        mesh_gamma = self.wH2_gamma.fft_mesh(ik, ib)
        mesh_ncl = self.w_ncl.fft_mesh(ik, ib)
        # check equality of plane-wave coefficients
        ind_max = np.unravel_index(np.argmax(np.abs(mesh)), mesh.shape)
        phase = mesh[ind_max] / mesh_gamma[ind_max]
        self.assertLessEqual(np.max(np.abs(mesh - phase * mesh_gamma)), 1.0e-6)
        # transform to real space for further checking
        mesh = np.fft.ifftn(mesh)
        mesh_gamma = np.fft.ifftn(mesh_gamma)
        mesh_ncl = np.fft.ifftn(mesh_ncl)
        # check equality in real space for regular vs. gamma only
        ind_max = np.unravel_index(np.argmax(np.abs(mesh)), mesh.shape)
        phase = mesh[ind_max] / mesh_gamma[ind_max]
        self.assertLessEqual(np.max(np.abs(mesh - phase * mesh_gamma)), 1.0e-6)
        # spot check some points in real space
        p1 = (
            int(mesh.shape[0] / 2),
            int(mesh.shape[1] / 2) - 1,
            int(mesh.shape[2] / 2) - 2,
        )
        p2 = (p1[0] + 1, p1[1], p1[2])
        c = np.array([[5, 0, 0], [0, 4, 0], [0, 0, 6]])  # this needs to match POSCAR, which we don't have
        r1 = np.dot(np.array(p1) / mesh.shape, c)
        r2 = np.dot(np.array(p2) / mesh.shape, c)
        # check equality of FFT and slow FT for regular mesh (ratio, to account for normalization)
        v1 = self.wH2.evaluate_wavefunc(ik, ib, r1)
        v2 = self.wH2.evaluate_wavefunc(ik, ib, r2)
        self.assertAlmostEqual(np.abs(mesh[p1]) / np.abs(mesh[p2]), np.abs(v1) / np.abs(v2), places=6)
        # spot check one value that we happen to know from reference run
        self.assertAlmostEqual(v1, -0.01947068011502887 + 0.23340228099620275j, places=8)
        # check equality of FFT and slow FT for gamma-only mesh (ratio again)
        v1_gamma = self.wH2_gamma.evaluate_wavefunc(ik, ib, r1)
        v2_gamma = self.wH2_gamma.evaluate_wavefunc(ik, ib, r2)
        self.assertAlmostEqual(
            np.abs(mesh_gamma[p1]) / np.abs(mesh_gamma[p2]),
            np.abs(v1_gamma) / np.abs(v2_gamma),
            places=6,
        )
        # check equality of FFT and slow FT for ncl mesh (ratio again)
        v1_ncl = self.w_ncl.evaluate_wavefunc(ik, ib, r1)
        v2_ncl = self.w_ncl.evaluate_wavefunc(ik, ib, r2)
        self.assertAlmostEqual(
            np.abs(mesh_ncl[p1]) / np.abs(mesh_ncl[p2]),
            np.abs(v1_ncl) / np.abs(v2_ncl),
            places=6,
        )

    def test_get_parchg(self):
        poscar = Poscar.from_file(self.TEST_FILES_DIR / "POSCAR")
        # Non-spin file: phase=False gives strictly positive density,
        # phase=True produces signed values.
        w = self.w
        c = w.get_parchg(poscar, 0, 0, spin=0, phase=False)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertTrue(np.all(c.data["total"] > 0.0))
        c = w.get_parchg(poscar, 0, 0, spin=0, phase=True)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertFalse(np.all(c.data["total"] > 0.0))
        # Spin-polarized file: with no spin= argument both "total" and
        # "diff" are present; with spin=0 only "total" is returned.
        w = Wavecar(self.TEST_FILES_DIR / "WAVECAR.N2.spin")
        c = w.get_parchg(poscar, 0, 0, phase=False, scale=1)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng))
        self.assertTrue(np.all(c.data["total"] > 0.0))
        self.assertFalse(np.all(c.data["diff"] > 0.0))
        c = w.get_parchg(poscar, 0, 0, spin=0, phase=False)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertTrue(np.all(c.data["total"] > 0.0))
        c = w.get_parchg(poscar, 0, 0, spin=0, phase=True)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertFalse(np.all(c.data["total"] > 0.0))
        # Non-collinear file: append a fake two-spinor band and exercise
        # spinor=None / 0 / 1 selection.
        w = self.w_ncl
        w.coeffs.append([np.ones((2, 100))])
        c = w.get_parchg(poscar, -1, 0, phase=False, spinor=None)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertFalse(np.all(c.data["total"] > 0.0))
        c = w.get_parchg(poscar, -1, 0, phase=True, spinor=0)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertFalse(np.all(c.data["total"] > 0.0))
        # Zero coefficients in spinor 1 must yield an all-zero density.
        w.coeffs[-1] = [np.zeros((2, 100))]
        c = w.get_parchg(poscar, -1, 0, phase=False, spinor=1)
        self.assertTrue("total" in c.data)
        self.assertTrue("diff" not in c.data)
        self.assertEqual(np.prod(c.data["total"].shape), np.prod(w.ng * 2))
        self.assertTrue(np.allclose(c.data["total"], 0.0))

    def test_write_unks(self):
        unk_std = Unk.from_file(self.TEST_FILES_DIR / "UNK.N2.std")
        unk_ncl = Unk.from_file(self.TEST_FILES_DIR / "UNK.H2.ncl")
        # Target must be a directory path, not an existing file.
        with self.assertRaises(ValueError):
            self.w.write_unks(self.TEST_FILES_DIR / "UNK.N2.std")
        # different grids
        with ScratchDir("."):
            self.w.write_unks("./unk_dir")
            self.assertEqual(len(list(Path("./unk_dir").glob("UNK*"))), 1)
            unk = Unk.from_file("./unk_dir/UNK00001.1")
            self.assertNotEqual(unk, unk_std)
        # correct grid
        self.w.ng = np.array([12, 12, 12])
        with ScratchDir("."):
            self.w.write_unks(".")
            unk = Unk.from_file("UNK00001.1")
            self.assertEqual(unk, unk_std)
        # ncl test
        with ScratchDir("."):
            self.w_ncl.write_unks(".")
            unk = Unk.from_file("UNK00001.NC")
            self.assertEqual(unk, unk_ncl)
class EigenvalTest(PymatgenTest):
    """Tests for the EIGENVAL file parser."""

    _multiprocess_shared_ = True

    def test_init(self):
        eig = Eigenval(self.TEST_FILES_DIR / "EIGENVAL.gz")
        up = eig.eigenvalues[Spin.up]
        self.assertEqual(eig.ispin, 1)
        self.assertEqual(eig.nkpt, len(eig.kpoints))
        self.assertEqual(eig.nkpt, len(eig.kpoints_weights))
        self.assertEqual(eig.nkpt, up.shape[0])
        self.assertEqual(eig.nelect, 16)
        self.assertEqual(eig.nbands, up.shape[1])
        # Eigenvalues straddle zero.
        self.assertTrue(np.max(up) > 0)
        self.assertTrue(np.min(up) < 0)

    def test_ispin2(self):
        eig = Eigenval(self.TEST_FILES_DIR / "EIGENVAL.ispin2.gz")
        self.assertEqual(eig.ispin, 2)
        # Both spin channels share the same (nkpt, nbands) shape.
        for spin in (Spin.up, Spin.down):
            self.assertEqual(eig.nkpt, eig.eigenvalues[spin].shape[0])
            self.assertEqual(eig.nbands, eig.eigenvalues[spin].shape[1])

    def test_eigenvalue_band_properties(self):
        eig = Eigenval(self.TEST_FILES_DIR / "EIGENVAL.gz")
        props = eig.eigenvalue_band_properties
        self.assertAlmostEqual(props[0], 6.4153, places=4)
        self.assertAlmostEqual(props[1], 7.5587, places=4)
        self.assertAlmostEqual(props[2], 1.1434, places=4)
        self.assertEqual(props[3], False)
class WavederTest(PymatgenTest):
    """Tests for the WAVEDER orbital-derivative reader."""

    _multiprocess_shared_ = True

    def setUp(self):
        # NOTE(review): this setUp performs assertions and does not keep
        # `wder` on self, so it effectively runs as an extra test before
        # every test method; consider moving it into its own test.
        wder = Waveder(self.TEST_FILES_DIR / "WAVEDER", gamma_only=True)
        self.assertEqual(wder.nbands, 36)
        self.assertEqual(wder.nkpoints, 56)
        self.assertEqual(wder.nelect, 8)
        band_i = 0
        band_j = 0
        kp_index = 0
        spin_index = 0
        cart_dir_index = 0
        cder = wder.get_orbital_derivative_between_states(band_i, band_j, kp_index, spin_index, cart_dir_index)
        self.assertAlmostEqual(cder, -1.33639226092e-103, places=114)

    def test_consistency(self):
        # Cross-check the binary WAVEDER against the formatted WAVEDERF dump.
        wder = Waveder(self.TEST_FILES_DIR / "WAVEDER.Si")
        wderf = np.loadtxt(self.TEST_FILES_DIR / "WAVEDERF.Si", skiprows=1)
        with open(self.TEST_FILES_DIR / "WAVEDERF.Si", "r") as f:
            first_line = [int(a) for a in f.readline().split()]
        self.assertEqual(wder.nkpoints, first_line[1])
        self.assertEqual(wder.nbands, first_line[2])
        # Columns 6..11 of WAVEDERF hold the x/y/z real and imaginary parts.
        for i in range(10):
            self.assertAlmostEqual(
                first=wder.get_orbital_derivative_between_states(0, i, 0, 0, 0).real,
                second=wderf[i, 6],
                places=10,
            )
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 0].real, wderf[i, 6], places=10)
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 0].imag, wderf[i, 7], places=10)
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 1].real, wderf[i, 8], places=10)
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 1].imag, wderf[i, 9], places=10)
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 2].real, wderf[i, 10], places=10)
            self.assertAlmostEqual(wder.cder_data[0, i, 0, 0, 2].imag, wderf[i, 11], places=10)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| mit |
damiansoriano/odoo | addons/mail/mail_message.py | 12 | 47075 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
from openerp import tools
from email.header import decode_header
from openerp import SUPERUSER_ID, api
from openerp.osv import osv, orm, fields
from openerp.tools import html_email_clean
from openerp.tools.translate import _
from HTMLParser import HTMLParser
_logger = logging.getLogger(__name__)
# Mako is optional; warn (do not crash) when it is missing.
try:
    from mako.template import Template as MakoTemplate
except ImportError:
    # Fix: the warning referenced "payment_acquirer", copy-pasted from
    # another module; this is the mail module.
    _logger.warning("mail: mako templates not available, template rendering will not work!")
""" Some tools for parsing / creating email fields """
def decode(text):
    """Decode an RFC 2047 encoded SMTP header into a unicode string.

    Returns None when `text` is empty/falsy, mirroring the input."""
    if not text:
        return None
    # decode_header returns (bytes, charset) pairs; ustr joins them safely.
    fragments = decode_header(text.replace('\r', ''))
    return ''.join(tools.ustr(fragment, charset) for fragment, charset in fragments)
class MLStripper(HTMLParser):
    """HTMLParser subclass that keeps only text (data) nodes.

    Used by strip_tags() to remove markup from an HTML snippet."""

    def __init__(self):
        # Fix: initialize through the base class instead of calling
        # self.reset() directly; HTMLParser.__init__ itself calls reset()
        # and sets up the full parser state.
        HTMLParser.__init__(self)
        self.fed = []  # accumulated text fragments, joined in get_data()

    def handle_data(self, d):
        self.fed.append(d)

    def get_data(self):
        return ''.join(self.fed)
def strip_tags(html):
    """Return the plain-text content of an HTML snippet, tags removed."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
class mail_message(osv.Model):
    """ Messages model: system notification (replacing res.log notifications),
        comments (OpenChatter discussion) and incoming emails. """
    _name = 'mail.message'
    _description = 'Message'
    _inherit = ['ir.needaction_mixin']
    _order = 'id desc'
    _rec_name = 'record_name'

    # Tuning constants for message fetching in the web client.
    _message_read_limit = 30  # messages fetched per batch
    _message_read_fields = ['id', 'parent_id', 'model', 'res_id', 'body', 'subject', 'date', 'to_read', 'email_from',
                            'type', 'vote_user_ids', 'attachment_ids', 'author_id', 'partner_ids', 'record_name']
    _message_record_name_length = 18  # truncation length for record_name display
    _message_read_more_limit = 1024
    def default_get(self, cr, uid, fields, context=None):
        """Return default values, dropping any `default_type` leaked from a
        menu/action context that is not a valid selection value for the
        `type` column."""
        # protection for `default_type` values leaking from menu action context (e.g. for invoices)
        if context and context.get('default_type') and context.get('default_type') not in [
                val[0] for val in self._columns['type'].selection]:
            context = dict(context, default_type=None)
        return super(mail_message, self).default_get(cr, uid, fields, context=context)
    def _get_to_read(self, cr, uid, ids, name, arg, context=None):
        """ Compute if the message is unread by the current user. """
        # Default every requested message to "already read".
        res = dict((id, False) for id in ids)
        partner_id = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
        notif_obj = self.pool.get('mail.notification')
        # Unread notifications targeting the current user's partner.
        notif_ids = notif_obj.search(cr, uid, [
            ('partner_id', 'in', [partner_id]),
            ('message_id', 'in', ids),
            ('is_read', '=', False),
        ], context=context)
        for notif in notif_obj.browse(cr, uid, notif_ids, context=context):
            res[notif.message_id.id] = True
        return res
    def _search_to_read(self, cr, uid, obj, name, domain, context=None):
        """ Search for messages to read by the current user. Condition is
            inversed because we search unread message on a is_read column. """
        # domain[0] is the ('to_read', '=', value) leaf; is_read is its negation.
        return ['&', ('notification_ids.partner_id.user_ids', 'in', [uid]), ('notification_ids.is_read', '=', not domain[0][2])]
    def _get_starred(self, cr, uid, ids, name, arg, context=None):
        """ Compute if the message is starred by the current user. """
        # Default every requested message to "not starred".
        res = dict((id, False) for id in ids)
        partner_id = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
        notif_obj = self.pool.get('mail.notification')
        # Starred notifications targeting the current user's partner.
        notif_ids = notif_obj.search(cr, uid, [
            ('partner_id', 'in', [partner_id]),
            ('message_id', 'in', ids),
            ('starred', '=', True),
        ], context=context)
        for notif in notif_obj.browse(cr, uid, notif_ids, context=context):
            res[notif.message_id.id] = True
        return res
    def _search_starred(self, cr, uid, obj, name, domain, context=None):
        """ Search for starred messages by the current user."""
        # domain[0][2] is the requested boolean value of 'starred'.
        return ['&', ('notification_ids.partner_id.user_ids', 'in', [uid]), ('notification_ids.starred', '=', domain[0][2])]
_columns = {
'type': fields.selection([
('email', 'Email'),
('comment', 'Comment'),
('notification', 'System notification'),
], 'Type', size=12,
help="Message type: email for email message, notification for system "\
"message, comment for other messages such as user replies"),
'email_from': fields.char('From',
help="Email address of the sender. This field is set when no matching partner is found for incoming emails."),
'reply_to': fields.char('Reply-To',
help='Reply email address. Setting the reply_to bypasses the automatic thread creation.'),
'same_thread': fields.boolean('Same thread',
help='Redirect answers to the same discussion thread.'),
'author_id': fields.many2one('res.partner', 'Author', select=1,
ondelete='set null',
help="Author of the message. If not set, email_from may hold an email address that did not match any partner."),
'author_avatar': fields.related('author_id', 'image_small', type="binary", string="Author's Avatar"),
'partner_ids': fields.many2many('res.partner', string='Recipients'),
'notified_partner_ids': fields.many2many('res.partner', 'mail_notification',
'message_id', 'partner_id', 'Notified partners',
help='Partners that have a notification pushing this message in their mailboxes'),
'attachment_ids': fields.many2many('ir.attachment', 'message_attachment_rel',
'message_id', 'attachment_id', 'Attachments'),
'parent_id': fields.many2one('mail.message', 'Parent Message', select=True,
ondelete='set null', help="Initial thread message."),
'child_ids': fields.one2many('mail.message', 'parent_id', 'Child Messages'),
'model': fields.char('Related Document Model', size=128, select=1),
'res_id': fields.integer('Related Document ID', select=1),
'record_name': fields.char('Message Record Name', help="Name get of the related document."),
'notification_ids': fields.one2many('mail.notification', 'message_id',
string='Notifications', auto_join=True,
help='Technical field holding the message notifications. Use notified_partner_ids to access notified partners.'),
'subject': fields.char('Subject'),
'date': fields.datetime('Date'),
'message_id': fields.char('Message-Id', help='Message unique identifier', select=1, readonly=1, copy=False),
'body': fields.html('Contents', help='Automatically sanitized HTML contents'),
'to_read': fields.function(_get_to_read, fnct_search=_search_to_read,
type='boolean', string='To read',
help='Current user has an unread notification linked to this message'),
'starred': fields.function(_get_starred, fnct_search=_search_starred,
type='boolean', string='Starred',
help='Current user has a starred notification linked to this message'),
'subtype_id': fields.many2one('mail.message.subtype', 'Subtype',
ondelete='set null', select=1,),
'vote_user_ids': fields.many2many('res.users', 'mail_vote',
'message_id', 'user_id', string='Votes',
help='Users that voted for this message'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
}
    def _needaction_domain_get(self, cr, uid, context=None):
        # Needaction counter: messages with an unread notification for uid.
        return [('to_read', '=', True)]
    def _get_default_from(self, cr, uid, context=None):
        """Return the default From header for the current user: the alias
        address when configured, otherwise the user's own email address;
        raise if neither is available."""
        this = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
        if this.alias_name and this.alias_domain:
            return '%s <%s@%s>' % (this.name, this.alias_name, this.alias_domain)
        elif this.email:
            return '%s <%s>' % (this.name, this.email)
        raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
    def _get_default_author(self, cr, uid, context=None):
        # Default author is the partner record attached to the current user.
        return self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
_defaults = {
'type': 'email',
'date': fields.datetime.now,
'author_id': lambda self, cr, uid, ctx=None: self._get_default_author(cr, uid, ctx),
'body': '',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
'same_thread': True,
}
#------------------------------------------------------
# Vote/Like
#------------------------------------------------------
def vote_toggle(self, cr, uid, ids, context=None):
''' Toggles vote. Performed using read to avoid access rights issues.
Done as SUPERUSER_ID because uid may vote for a message he cannot modify. '''
for message in self.read(cr, uid, ids, ['vote_user_ids'], context=context):
new_has_voted = not (uid in message.get('vote_user_ids'))
if new_has_voted:
self.write(cr, SUPERUSER_ID, message.get('id'), {'vote_user_ids': [(4, uid)]}, context=context)
else:
self.write(cr, SUPERUSER_ID, message.get('id'), {'vote_user_ids': [(3, uid)]}, context=context)
return new_has_voted or False
#------------------------------------------------------
# download an attachment
#------------------------------------------------------
    def download_attachment(self, cr, uid, id_message, attachment_id, context=None):
        """ Return the content of linked attachments.

        Returns a dict {'base64': data, 'filename': name} when the attachment
        belongs to the message and has data, otherwise False."""
        # this will fail if you cannot read the message
        message_values = self.read(cr, uid, [id_message], ['attachment_ids'], context=context)[0]
        if attachment_id in message_values['attachment_ids']:
            # Attachment read as superuser: access was already checked by
            # the message read above.
            attachment = self.pool.get('ir.attachment').browse(cr, SUPERUSER_ID, attachment_id, context=context)
            if attachment.datas and attachment.datas_fname:
                return {
                    'base64': attachment.datas,
                    'filename': attachment.datas_fname,
                }
        return False
#------------------------------------------------------
# Notification API
#------------------------------------------------------
    @api.cr_uid_ids_context
    def set_message_read(self, cr, uid, msg_ids, read, create_missing=True, context=None):
        """ Set messages as (un)read. Technically, the notifications related
            to uid are set to (un)read. If for some msg_ids there are missing
            notifications (i.e. due to load more or thread parent fetching),
            they are created.

            :param bool read: set notification as (un)read
            :param bool create_missing: create notifications for missing entries
                (i.e. when acting on displayed messages not notified)

            :return number of message mark as read
        """
        notification_obj = self.pool.get('mail.notification')
        user_pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
        domain = [('partner_id', '=', user_pid), ('message_id', 'in', msg_ids)]
        if not create_missing:
            # Only touch notifications whose read flag actually changes.
            domain += [('is_read', '=', not read)]
        notif_ids = notification_obj.search(cr, uid, domain, context=context)

        # all message have notifications: already set them as (un)read
        if len(notif_ids) == len(msg_ids) or not create_missing:
            notification_obj.write(cr, uid, notif_ids, {'is_read': read}, context=context)
            return len(notif_ids)

        # some messages do not have notifications: find which one, create notification, update read status
        notified_msg_ids = [notification.message_id.id for notification in notification_obj.browse(cr, uid, notif_ids, context=context)]
        to_create_msg_ids = list(set(msg_ids) - set(notified_msg_ids))
        for msg_id in to_create_msg_ids:
            notification_obj.create(cr, uid, {'partner_id': user_pid, 'is_read': read, 'message_id': msg_id}, context=context)
        notification_obj.write(cr, uid, notif_ids, {'is_read': read}, context=context)
        return len(notif_ids)
@api.cr_uid_ids_context
def set_message_starred(self, cr, uid, msg_ids, starred, create_missing=True, context=None):
""" Set messages as (un)starred. Technically, the notifications related
to uid are set to (un)starred.
:param bool starred: set notification as (un)starred
:param bool create_missing: create notifications for missing entries
(i.e. when acting on displayed messages not notified)
"""
notification_obj = self.pool.get('mail.notification')
user_pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
domain = [('partner_id', '=', user_pid), ('message_id', 'in', msg_ids)]
if not create_missing:
domain += [('starred', '=', not starred)]
values = {
'starred': starred
}
if starred:
values['is_read'] = False
notif_ids = notification_obj.search(cr, uid, domain, context=context)
# all message have notifications: already set them as (un)starred
if len(notif_ids) == len(msg_ids) or not create_missing:
notification_obj.write(cr, uid, notif_ids, values, context=context)
return starred
# some messages do not have notifications: find which one, create notification, update starred status
notified_msg_ids = [notification.message_id.id for notification in notification_obj.browse(cr, uid, notif_ids, context=context)]
to_create_msg_ids = list(set(msg_ids) - set(notified_msg_ids))
for msg_id in to_create_msg_ids:
notification_obj.create(cr, uid, dict(values, partner_id=user_pid, message_id=msg_id), context=context)
notification_obj.write(cr, uid, notif_ids, values, context=context)
return starred
#------------------------------------------------------
# Message loading for web interface
#------------------------------------------------------
    def _message_read_dict_postprocess(self, cr, uid, messages, message_tree, context=None):
        """ Post-processing on values given by message_read. This method will
            handle partners in batch to avoid doing numerous queries.

            :param list messages: list of message, as get_dict result
            :param dict message_tree: {[msg.id]: msg browse record}
            :return bool: True
        """
        res_partner_obj = self.pool.get('res.partner')
        ir_attachment_obj = self.pool.get('ir.attachment')
        pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id

        # 1. Aggregate partners (author_id and partner_ids) and attachments
        partner_ids = set()
        attachment_ids = set()
        for key, message in message_tree.iteritems():
            if message.author_id:
                partner_ids |= set([message.author_id.id])
            if message.subtype_id and message.notified_partner_ids:  # take notified people of message with a subtype
                partner_ids |= set([partner.id for partner in message.notified_partner_ids])
            elif not message.subtype_id and message.partner_ids:  # take specified people of message without a subtype (log)
                partner_ids |= set([partner.id for partner in message.partner_ids])
            if message.attachment_ids:
                attachment_ids |= set([attachment.id for attachment in message.attachment_ids])
        # Read partners as SUPERUSER -> display the names like classic m2o even if no access
        partners = res_partner_obj.name_get(cr, SUPERUSER_ID, list(partner_ids), context=context)
        # map partner id -> (id, display_name) tuple for fast lookup below
        partner_tree = dict((partner[0], partner) for partner in partners)

        # 2. Attachments as SUPERUSER, because could receive msg and attachments for doc uid cannot see
        attachments = ir_attachment_obj.read(cr, SUPERUSER_ID, list(attachment_ids), ['id', 'datas_fname', 'name', 'file_type_icon'], context=context)
        attachments_tree = dict((attachment['id'], {
            'id': attachment['id'],
            'filename': attachment['datas_fname'],
            'name': attachment['name'],
            'file_type_icon': attachment['file_type_icon'],
        }) for attachment in attachments)

        # 3. Update message dictionaries
        for message_dict in messages:
            message_id = message_dict.get('id')
            message = message_tree[message_id]
            if message.author_id:
                author = partner_tree[message.author_id.id]
            else:
                # author is not a known partner: fall back on the raw email
                author = (0, message.email_from)
            partner_ids = []
            if message.subtype_id:
                partner_ids = [partner_tree[partner.id] for partner in message.notified_partner_ids
                               if partner.id in partner_tree]
            else:
                partner_ids = [partner_tree[partner.id] for partner in message.partner_ids
                               if partner.id in partner_tree]
            attachment_ids = []
            for attachment in message.attachment_ids:
                if attachment.id in attachments_tree:
                    attachment_ids.append(attachments_tree[attachment.id])
            message_dict.update({
                'is_author': pid == author[0],
                'author_id': author,
                'partner_ids': partner_ids,
                'attachment_ids': attachment_ids,
                'user_pid': pid
            })
        return True
def _message_read_dict(self, cr, uid, message, parent_id=False, context=None):
""" Return a dict representation of the message. This representation is
used in the JS client code, to display the messages. Partners and
attachments related stuff will be done in post-processing in batch.
:param dict message: mail.message browse record
"""
# private message: no model, no res_id
is_private = False
if not message.model or not message.res_id:
is_private = True
# votes and favorites: res.users ids, no prefetching should be done
vote_nb = len(message.vote_user_ids)
has_voted = uid in [user.id for user in message.vote_user_ids]
try:
if parent_id:
max_length = 300
else:
max_length = 100
body_short = html_email_clean(message.body, remove=False, shorten=True, max_length=max_length)
except Exception:
body_short = '<p><b>Encoding Error : </b><br/>Unable to convert this message (id: %s).</p>' % message.id
_logger.exception(Exception)
return {'id': message.id,
'type': message.type,
'subtype': message.subtype_id.name if message.subtype_id else False,
'body': message.body,
'body_short': body_short,
'model': message.model,
'res_id': message.res_id,
'record_name': message.record_name,
'subject': message.subject,
'date': message.date,
'to_read': message.to_read,
'parent_id': parent_id,
'is_private': is_private,
'author_id': False,
'author_avatar': message.author_avatar,
'is_author': False,
'partner_ids': [],
'vote_nb': vote_nb,
'has_voted': has_voted,
'is_favorite': message.starred,
'attachment_ids': [],
}
    def _message_read_add_expandables(self, cr, uid, messages, message_tree, parent_tree,
                                      message_unload_ids=[], thread_level=0, domain=[], parent_id=False, context=None):
        """ Create expandables for message_read, to load new messages.
            1. get the expandable for new threads
                if display is flat (thread_level == 0):
                    fetch message_ids < min(already displayed ids), because we
                    want a flat display, ordered by id
                else:
                    fetch message_ids that are not childs of already displayed
                    messages
            2. get the expandables for new messages inside threads if display
               is not flat
                for each thread header, search for its childs
                for each hole in the child list based on message displayed,
                create an expandable

            :param list messages: list of message structure for the Chatter
                widget to which expandables are added
            :param dict message_tree: dict [id]: browse record of this message
            :param dict parent_tree: dict [parent_id]: [child_ids]
            :param list message_unload_ids: list of message_ids we do not want
                to load
            :return bool: True
        """
        def _get_expandable(domain, message_nb, parent_id, max_limit):
            # pseudo-message dict rendered by the JS Chatter as a 'load more' link
            return {
                'domain': domain,
                'nb_messages': message_nb,
                'type': 'expandable',
                'parent_id': parent_id,
                'max_limit': max_limit,
            }

        if not messages:
            return True
        message_ids = sorted(message_tree.keys())

        # 1. get the expandable for new threads
        if thread_level == 0:
            exp_domain = domain + [('id', '<', min(message_unload_ids + message_ids))]
        else:
            exp_domain = domain + ['!', ('id', 'child_of', message_unload_ids + parent_tree.keys())]
        # limit=1: we only need to know whether at least one more message exists
        ids = self.search(cr, uid, exp_domain, context=context, limit=1)
        if ids:
            # inside a thread: prepend
            if parent_id:
                messages.insert(0, _get_expandable(exp_domain, -1, parent_id, True))
            # new threads: append
            else:
                messages.append(_get_expandable(exp_domain, -1, parent_id, True))

        # 2. get the expandables for new messages inside threads if display is not flat
        if thread_level == 0:
            return True
        for message_id in message_ids:
            message = message_tree[message_id]
            # generate only for thread header messages (TDE note: parent_id may be False is uid cannot see parent_id, seems ok)
            if message.parent_id:
                continue
            # check there are message for expandable
            child_ids = set([child.id for child in message.child_ids]) - set(message_unload_ids)
            child_ids = sorted(list(child_ids), reverse=True)
            if not child_ids:
                continue
            # make groups of unread messages
            # walk children newest-first, accumulating runs of non-displayed
            # ids ("holes"); each hole becomes one expandable
            id_min, id_max, nb = max(child_ids), 0, 0
            for child_id in child_ids:
                if not child_id in message_ids:
                    # child not displayed: grow the current hole
                    nb += 1
                    if id_min > child_id:
                        id_min = child_id
                    if id_max < child_id:
                        id_max = child_id
                # displayed child that closes a hole: insert the expandable just after it
                elif nb > 0:
                    exp_domain = [('id', '>=', id_min), ('id', '<=', id_max), ('id', 'child_of', message_id)]
                    idx = [msg.get('id') for msg in messages].index(child_id) + 1
                    # messages.append(_get_expandable(exp_domain, nb, message_id, False))
                    messages.insert(idx, _get_expandable(exp_domain, nb, message_id, False))
                    id_min, id_max, nb = max(child_ids), 0, 0
                else:
                    id_min, id_max, nb = max(child_ids), 0, 0
            # trailing hole: insert the expandable right after the thread header
            if nb > 0:
                exp_domain = [('id', '>=', id_min), ('id', '<=', id_max), ('id', 'child_of', message_id)]
                idx = [msg.get('id') for msg in messages].index(message_id) + 1
                # messages.append(_get_expandable(exp_domain, nb, message_id, id_min))
                messages.insert(idx, _get_expandable(exp_domain, nb, message_id, False))
        return True
    @api.cr_uid_context
    def message_read(self, cr, uid, ids=None, domain=None, message_unload_ids=None,
                     thread_level=0, context=None, parent_id=False, limit=None):
        """ Read messages from mail.message, and get back a list of structured
            messages to be displayed as discussion threads. If IDs is set,
            fetch these records. Otherwise use the domain to fetch messages.
            After having fetch messages, their ancestors will be added to obtain
            well formed threads, if uid has access to them.

            After reading the messages, expandable messages are added in the
            message list (see ``_message_read_add_expandables``). It consists
            in messages holding the 'read more' data: number of messages to
            read, domain to apply.

            :param list ids: optional IDs to fetch
            :param list domain: optional domain for searching ids if ids not set
            :param list message_unload_ids: optional ids we do not want to fetch,
                because i.e. they are already displayed somewhere
            :param int parent_id: context of parent_id
                - if parent_id reached when adding ancestors, stop going further
                  in the ancestor search
                - if set in flat mode, ancestor_id is set to parent_id
            :param int limit: number of messages to fetch, before adding the
                ancestors and expandables
            :return list: list of message structure for the Chatter widget
        """
        assert thread_level in [0, 1], 'message_read() thread_level should be 0 (flat) or 1 (1 level of thread); given %s.' % thread_level
        domain = domain if domain is not None else []
        message_unload_ids = message_unload_ids if message_unload_ids is not None else []
        if message_unload_ids:
            domain += [('id', 'not in', message_unload_ids)]
        limit = limit or self._message_read_limit
        message_tree = {}
        message_list = []
        parent_tree = {}

        # no specific IDS given: fetch messages according to the domain, add their parents if uid has access to
        if ids is None:
            ids = self.search(cr, uid, domain, context=context, limit=limit)

        # fetch parent if threaded, sort messages
        for message in self.browse(cr, uid, ids, context=context):
            message_id = message.id
            if message_id in message_tree:
                continue
            message_tree[message_id] = message
            # find parent_id
            if thread_level == 0:
                tree_parent_id = parent_id
            else:
                # walk up the ancestor chain until the thread root (or until
                # the requested parent_id is reached)
                tree_parent_id = message_id
                parent = message
                while parent.parent_id and parent.parent_id.id != parent_id:
                    parent = parent.parent_id
                    tree_parent_id = parent.id
                if not parent.id in message_tree:
                    message_tree[parent.id] = parent
            # newest messages first
            parent_tree.setdefault(tree_parent_id, [])
            if tree_parent_id != message_id:
                parent_tree[tree_parent_id].append(self._message_read_dict(cr, uid, message_tree[message_id], parent_id=tree_parent_id, context=context))

        if thread_level:
            # threaded display: sort children by id, prepend the thread header
            for key, message_id_list in parent_tree.iteritems():
                message_id_list.sort(key=lambda item: item['id'])
                message_id_list.insert(0, self._message_read_dict(cr, uid, message_tree[key], context=context))

        # create final ordered message_list based on parent_tree
        # threads ordered by their newest message id, newest thread first
        parent_list = parent_tree.items()
        parent_list = sorted(parent_list, key=lambda item: max([msg.get('id') for msg in item[1]]) if item[1] else item[0], reverse=True)
        message_list = [message for (key, msg_list) in parent_list for message in msg_list]

        # get the child expandable messages for the tree
        self._message_read_dict_postprocess(cr, uid, message_list, message_tree, context=context)
        self._message_read_add_expandables(cr, uid, message_list, message_tree, parent_tree,
                                           thread_level=thread_level, message_unload_ids=message_unload_ids, domain=domain, parent_id=parent_id, context=context)
        return message_list
#------------------------------------------------------
# mail_message internals
#------------------------------------------------------
def init(self, cr):
cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'mail_message_model_res_id_idx'""")
if not cr.fetchone():
cr.execute("""CREATE INDEX mail_message_model_res_id_idx ON mail_message (model, res_id)""")
def _find_allowed_model_wise(self, cr, uid, doc_model, doc_dict, context=None):
doc_ids = doc_dict.keys()
allowed_doc_ids = self.pool[doc_model].search(cr, uid, [('id', 'in', doc_ids)], context=context)
return set([message_id for allowed_doc_id in allowed_doc_ids for message_id in doc_dict[allowed_doc_id]])
def _find_allowed_doc_ids(self, cr, uid, model_ids, context=None):
model_access_obj = self.pool.get('ir.model.access')
allowed_ids = set()
for doc_model, doc_dict in model_ids.iteritems():
if not model_access_obj.check(cr, uid, doc_model, 'read', False):
continue
allowed_ids |= self._find_allowed_model_wise(cr, uid, doc_model, doc_dict, context=context)
return allowed_ids
    def _search(self, cr, uid, args, offset=0, limit=None, order=None,
                context=None, count=False, access_rights_uid=None):
        """ Override that adds specific access rights of mail.message, to remove
            ids uid could not see according to our custom rules. Please refer
            to check_access_rule for more details about those rules.

            After having received ids of a classic search, keep only:
            - if author_id == pid, uid is the author, OR
            - a notification (id, pid) exists, uid has been notified, OR
            - uid have read access to the related document is model, res_id
            - otherwise: remove the id
        """
        # Rules do not apply to administrator
        if uid == SUPERUSER_ID:
            return super(mail_message, self)._search(cr, uid, args, offset=offset, limit=limit, order=order,
                                                     context=context, count=count, access_rights_uid=access_rights_uid)
        # Perform a super with count as False, to have the ids, not a counter
        ids = super(mail_message, self)._search(cr, uid, args, offset=offset, limit=limit, order=order,
                                                context=context, count=False, access_rights_uid=access_rights_uid)
        if not ids and count:
            return 0
        elif not ids:
            return ids

        pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id
        author_ids, partner_ids, allowed_ids = set([]), set([]), set([])
        model_ids = {}

        # classify each candidate message: authored / notified / document-related
        messages = super(mail_message, self).read(cr, uid, ids, ['author_id', 'model', 'res_id', 'notified_partner_ids'], context=context)
        for message in messages:
            if message.get('author_id') and message.get('author_id')[0] == pid:
                author_ids.add(message.get('id'))
            elif pid in message.get('notified_partner_ids'):
                partner_ids.add(message.get('id'))
            elif message.get('model') and message.get('res_id'):
                # group by (model, res_id) for batched document-access checks
                model_ids.setdefault(message.get('model'), {}).setdefault(message.get('res_id'), set()).add(message.get('id'))

        allowed_ids = self._find_allowed_doc_ids(cr, uid, model_ids, context=context)
        final_ids = author_ids | partner_ids | allowed_ids

        if count:
            return len(final_ids)
        else:
            # re-construct a list based on ids, because set did not keep the original order
            id_list = [id for id in ids if id in final_ids]
            return id_list
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """ Access rules of mail.message:
            - read: if
                - author_id == pid, uid is the author, OR
                - mail_notification (id, pid) exists, uid has been notified, OR
                - uid have read access to the related document if model, res_id
                - otherwise: raise
            - create: if
                - no model, no res_id, I create a private message OR
                - pid in message_follower_ids if model, res_id OR
                - mail_notification (parent_id.id, pid) exists, uid has been notified of the parent, OR
                - uid have write or create access on the related document if model, res_id, OR
                - otherwise: raise
            - write: if
                - author_id == pid, uid is the author, OR
                - uid has write or create access on the related document if model, res_id
                - otherwise: raise
            - unlink: if
                - uid has write or create access on the related document if model, res_id
                - otherwise: raise
        """
        def _generate_model_record_ids(msg_val, msg_ids):
            """ Group message ids by their related document.

                :param msg_val: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}}
                :param msg_ids: message ids to group
                :return: {'model': set(res_id, ...), ... }
            """
            model_record_ids = {}
            for id in msg_ids:
                vals = msg_val.get(id, {})
                if vals.get('model') and vals.get('res_id'):
                    model_record_ids.setdefault(vals['model'], set()).add(vals['res_id'])
            return model_record_ids

        if uid == SUPERUSER_ID:
            return
        if isinstance(ids, (int, long)):
            ids = [ids]
        not_obj = self.pool.get('mail.notification')
        fol_obj = self.pool.get('mail.followers')
        partner_id = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=None).partner_id.id

        # Read mail_message.ids to have their values
        message_values = dict.fromkeys(ids, {})
        cr.execute('SELECT DISTINCT id, model, res_id, author_id, parent_id FROM "%s" WHERE id = ANY (%%s)' % self._table, (ids,))
        for id, rmod, rid, author_id, parent_id in cr.fetchall():
            message_values[id] = {'model': rmod, 'res_id': rid, 'author_id': author_id, 'parent_id': parent_id}

        # Author condition (READ, WRITE, CREATE (private)) -> could become an ir.rule ?
        author_ids = []
        if operation == 'read' or operation == 'write':
            author_ids = [mid for mid, message in message_values.iteritems()
                          if message.get('author_id') and message.get('author_id') == partner_id]
        elif operation == 'create':
            # private messages (no related document) are always creatable
            author_ids = [mid for mid, message in message_values.iteritems()
                          if not message.get('model') and not message.get('res_id')]

        # Parent condition, for create (check for received notifications for the created message parent)
        notified_ids = []
        if operation == 'create':
            parent_ids = [message.get('parent_id') for mid, message in message_values.iteritems()
                          if message.get('parent_id')]
            not_ids = not_obj.search(cr, SUPERUSER_ID, [('message_id.id', 'in', parent_ids), ('partner_id', '=', partner_id)], context=context)
            not_parent_ids = [notif.message_id.id for notif in not_obj.browse(cr, SUPERUSER_ID, not_ids, context=context)]
            notified_ids += [mid for mid, message in message_values.iteritems()
                             if message.get('parent_id') in not_parent_ids]

        # Notification condition, for read (check for received notifications and create (in message_follower_ids)) -> could become an ir.rule, but not till we do not have a many2one variable field
        # only the ids not yet cleared by the author/parent conditions remain
        other_ids = set(ids).difference(set(author_ids), set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        if operation == 'read':
            not_ids = not_obj.search(cr, SUPERUSER_ID, [
                ('partner_id', '=', partner_id),
                ('message_id', 'in', ids),
            ], context=context)
            notified_ids = [notification.message_id.id for notification in not_obj.browse(cr, SUPERUSER_ID, not_ids, context=context)]
        elif operation == 'create':
            # follower of the related document may post on it
            for doc_model, doc_ids in model_record_ids.items():
                fol_ids = fol_obj.search(cr, SUPERUSER_ID, [
                    ('res_model', '=', doc_model),
                    ('res_id', 'in', list(doc_ids)),
                    ('partner_id', '=', partner_id),
                ], context=context)
                fol_mids = [follower.res_id for follower in fol_obj.browse(cr, SUPERUSER_ID, fol_ids, context=context)]
                notified_ids += [mid for mid, message in message_values.iteritems()
                                 if message.get('model') == doc_model and message.get('res_id') in fol_mids]

        # CRUD: Access rights related to the document
        other_ids = other_ids.difference(set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        document_related_ids = []
        for model, doc_ids in model_record_ids.items():
            model_obj = self.pool[model]
            mids = model_obj.exists(cr, uid, list(doc_ids))
            # document models may define their own message access check
            if hasattr(model_obj, 'check_mail_message_access'):
                model_obj.check_mail_message_access(cr, uid, mids, operation, context=context)
            else:
                self.pool['mail.thread'].check_mail_message_access(cr, uid, mids, operation, model_obj=model_obj, context=context)
            document_related_ids += [mid for mid, message in message_values.iteritems()
                                     if message.get('model') == model and message.get('res_id') in mids]

        # Calculate remaining ids: if not void, raise an error
        other_ids = other_ids.difference(set(document_related_ids))
        if not other_ids:
            return
        raise orm.except_orm(_('Access Denied'),
                             _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                             (self._description, operation))
def _get_record_name(self, cr, uid, values, context=None):
""" Return the related document name, using name_get. It is done using
SUPERUSER_ID, to be sure to have the record name correctly stored. """
if not values.get('model') or not values.get('res_id') or values['model'] not in self.pool:
return False
return self.pool[values['model']].name_get(cr, SUPERUSER_ID, [values['res_id']], context=context)[0][1]
def _get_reply_to(self, cr, uid, values, context=None):
""" Return a specific reply_to: alias of the document through message_get_reply_to
or take the email_from
"""
model, res_id, email_from = values.get('model'), values.get('res_id'), values.get('email_from')
ctx = dict(context, thread_model=model)
return self.pool['mail.thread'].message_get_reply_to(cr, uid, [res_id], default=email_from, context=ctx)[res_id]
def _get_message_id(self, cr, uid, values, context=None):
if values.get('same_thread', True) is False:
message_id = tools.generate_tracking_message_id('reply_to')
elif values.get('res_id') and values.get('model'):
message_id = tools.generate_tracking_message_id('%(res_id)s-%(model)s' % values)
else:
message_id = tools.generate_tracking_message_id('private')
return message_id
    def create(self, cr, uid, values, context=None):
        """ Override to compute missing default values (email_from, message_id,
            reply_to, record_name) before creation -- the computation order
            matters, as reply_to depends on email_from -- then trigger the
            notification of followers / recipients of the new message.

            :param dict values: field values of the new mail.message
            :return int: id of the created message
        """
        context = dict(context or {})
        default_starred = context.pop('default_starred', False)

        if 'email_from' not in values:  # needed to compute reply_to
            values['email_from'] = self._get_default_from(cr, uid, context=context)
        if 'message_id' not in values:
            values['message_id'] = self._get_message_id(cr, uid, values, context=context)
        if 'reply_to' not in values:
            values['reply_to'] = self._get_reply_to(cr, uid, values, context=context)
        if 'record_name' not in values and 'default_record_name' not in context:
            values['record_name'] = self._get_record_name(cr, uid, values, context=context)

        newid = super(mail_message, self).create(cr, uid, values, context)
        self._notify(cr, uid, newid, context=context,
                     force_send=context.get('mail_notify_force_send', True),
                     user_signature=context.get('mail_notify_user_signature', True))
        # TDE FIXME: handle default_starred. Why not setting an inv on starred ?
        # Because starred will call set_message_starred, that looks for notifications.
        # When creating a new mail_message, it will create a notification to a message
        # that does not exist, leading to an error (key not existing). Also this
        # this means unread notifications will be created, yet we can not assure
        # this is what we want.
        if default_starred:
            self.set_message_starred(cr, uid, [newid], True, context=context)
        return newid
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
""" Override to explicitely call check_access_rule, that is not called
by the ORM. It instead directly fetches ir.rules and apply them. """
self.check_access_rule(cr, uid, ids, 'read', context=context)
res = super(mail_message, self).read(cr, uid, ids, fields=fields, context=context, load=load)
return res
def unlink(self, cr, uid, ids, context=None):
# cascade-delete attachments that are directly attached to the message (should only happen
# for mail.messages that act as parent for a standalone mail.mail record).
self.check_access_rule(cr, uid, ids, 'unlink', context=context)
attachments_to_delete = []
for message in self.browse(cr, uid, ids, context=context):
for attach in message.attachment_ids:
if attach.res_model == self._name and (attach.res_id == message.id or attach.res_id == 0):
attachments_to_delete.append(attach.id)
if attachments_to_delete:
self.pool.get('ir.attachment').unlink(cr, uid, attachments_to_delete, context=context)
return super(mail_message, self).unlink(cr, uid, ids, context=context)
#------------------------------------------------------
# Messaging API
#------------------------------------------------------
    def _notify(self, cr, uid, newid, context=None, force_send=False, user_signature=True):
        """ Add the related record followers to the destination partner_ids if is not a private message.
            Call mail_notification.notify to manage the email sending

            :param int newid: id of the freshly created mail.message
            :param bool force_send: send outgoing emails immediately instead
                of queuing them
            :param bool user_signature: append the author's signature to the
                notification emails
        """
        notification_obj = self.pool.get('mail.notification')
        message = self.browse(cr, uid, newid, context=context)
        partners_to_notify = set([])

        # all followers of the mail.message document have to be added as partners and notified if a subtype is defined (otherwise: log message)
        if message.subtype_id and message.model and message.res_id:
            fol_obj = self.pool.get("mail.followers")
            # browse as SUPERUSER because rules could restrict the search results
            fol_ids = fol_obj.search(
                cr, SUPERUSER_ID, [
                    ('res_model', '=', message.model),
                    ('res_id', '=', message.res_id),
                ], context=context)
            # keep only followers subscribed to this message's subtype
            partners_to_notify |= set(
                fo.partner_id.id for fo in fol_obj.browse(cr, SUPERUSER_ID, fol_ids, context=context)
                if message.subtype_id.id in [st.id for st in fo.subtype_ids]
            )
        # remove me from notified partners, unless the message is written on my own wall
        if message.subtype_id and message.author_id and message.model == "res.partner" and message.res_id == message.author_id.id:
            partners_to_notify |= set([message.author_id.id])
        elif message.author_id:
            partners_to_notify -= set([message.author_id.id])

        # all partner_ids of the mail.message have to be notified regardless of the above (even the author if explicitly added!)
        if message.partner_ids:
            partners_to_notify |= set([p.id for p in message.partner_ids])

        # notify
        notification_obj._notify(
            cr, uid, newid, partners_to_notify=list(partners_to_notify), context=context,
            force_send=force_send, user_signature=user_signature
        )
        # refresh: _notify created notifications the browse record must see
        message.refresh()

        # An error appear when a user receive a notification without notifying
        # the parent message -> add a read notification for the parent
        if message.parent_id:
            # all notified_partner_ids of the mail.message have to be notified for the parented messages
            partners_to_parent_notify = set(message.notified_partner_ids).difference(message.parent_id.notified_partner_ids)
            for partner in partners_to_parent_notify:
                notification_obj.create(cr, uid, {
                    'message_id': message.parent_id.id,
                    'partner_id': partner.id,
                    'is_read': True,
                }, context=context)
| agpl-3.0 |
squirrelo/qiime | qiime/remote.py | 15 | 13012 | #!/usr/bin/env python
from __future__ import division
__author__ = "Jai Ram Rideout"
__copyright__ = "Copyright 2012, The QIIME project"
__credits__ = ["Jai Ram Rideout"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Jai Ram Rideout"
__email__ = "jai.rideout@gmail.com"
"""Contains functionality to interact with remote services."""
from collections import defaultdict
from csv import writer
from re import sub
from socket import gaierror
from StringIO import StringIO
from burrito.util import ApplicationNotFoundError
def raise_gdata_not_found_error(*args, **kwargs):
    """Stand-in bound to the gdata names when gdata is not importable.

    Always raises, regardless of arguments, so any attempt to use the
    remote-mapping-file features fails with an actionable message.
    """
    message = ("gdata cannot be found.\nIs it installed? "
               "Is it in your $PYTHONPATH?\nThis is an optional QIIME "
               "dependency, but is required if you plan to use QIIME's remote "
               "mapping file features. For more information, please see "
               "http://qiime.org/install/install.html.")
    raise ApplicationNotFoundError(message)
# Load gdata if it's available. If it's not, skip it but set up to raise errors
# if the user tries to use it.
try:
from gdata.spreadsheet import SpreadsheetsCellsFeedFromString
from gdata.spreadsheet.service import CellQuery
from gdata.spreadsheet.service import SpreadsheetsService
except ImportError:
# Set functions which cannot be imported to raise_gdata_not_found_error.
SpreadsheetsCellsFeedFromString = CellQuery = SpreadsheetsService = \
raise_gdata_not_found_error
class GoogleSpreadsheetError(Exception):
    """Raised when a Google Spreadsheet or its contents cannot be used."""
    pass
class GoogleSpreadsheetConnectionError(Exception):
    """Raised when a connection to the Google Spreadsheet service fails."""
    pass
def load_google_spreadsheet(spreadsheet_key, worksheet_name=None):
    """Downloads and exports a Google Spreadsheet in TSV format.

    Returns a string containing the spreadsheet contents in TSV format (e.g.
    for writing out to a file or parsing).

    The first line is assumed to be the spreadsheet header (i.e. containing
    column names), which can optionally be followed by one or more comment
    lines (starting with '#'). Only the first cell of a comment line will be
    parsed (to keep exported spreadsheets consistent with QIIME mapping files'
    comments). The (optional) comments section is then followed by the
    spreadsheet data.

    Some of this code is based on the following websites, as well as the
    gdata.spreadsheet.text_db module:
        http://www.payne.org/index.php/Reading_Google_Spreadsheets_in_Python
        http://stackoverflow.com/a/12031835

    Raises GoogleSpreadsheetConnectionError if the service is unreachable and
    GoogleSpreadsheetError if the spreadsheet/worksheet cannot be used.

    Arguments:
        spreadsheet_key - the key used to identify the spreadsheet (a string).
            Can either be a key or a URL containing the key
        worksheet_name - the name of the worksheet to load data from (a
            string). If not supplied, will use first worksheet in the
            spreadsheet
    """
    spreadsheet_key = _extract_spreadsheet_key_from_url(spreadsheet_key)
    # anonymous client: the spreadsheet must be publicly visible
    gd_client = SpreadsheetsService()
    try:
        worksheets_feed = gd_client.GetWorksheetsFeed(spreadsheet_key,
                                                      visibility='public',
                                                      projection='basic')
    except gaierror:
        raise GoogleSpreadsheetConnectionError("Could not establish "
                                               "connection with server. Do "
                                               "you have an active Internet "
                                               "connection?")
    if len(worksheets_feed.entry) < 1:
        raise GoogleSpreadsheetError("The Google Spreadsheet with key '%s' "
                                     "does not have any worksheets associated "
                                     "with it." % spreadsheet_key)

    # Find worksheet that will be exported. If a name has not been provided,
    # use the first worksheet.
    worksheet = None
    if worksheet_name is not None:
        for sheet in worksheets_feed.entry:
            if sheet.title.text == worksheet_name:
                worksheet = sheet

        if worksheet is None:
            raise GoogleSpreadsheetError("The worksheet name '%s' could not "
                                         "be found in the Google Spreadsheet "
                                         "with key '%s'."
                                         % (worksheet_name, spreadsheet_key))
    else:
        # Choose the first one.
        worksheet = worksheets_feed.entry[0]

    # Extract the ID of the worksheet.
    worksheet_id = worksheet.id.text.split('/')[-1]

    # Now that we have a spreadsheet key and worksheet ID, we can read the
    # data. First get the headers (first row). We need this in order to grab
    # the rest of the actual data in the correct order (it is returned
    # unordered).
    headers = _get_spreadsheet_headers(gd_client, spreadsheet_key,
                                       worksheet_id)
    if len(headers) < 1:
        raise GoogleSpreadsheetError("Could not load spreadsheet header (it "
                                     "appears to be empty). Is your Google "
                                     "Spreadsheet with key '%s' empty?"
                                     % spreadsheet_key)

    # Loop through the rest of the rows and build up a list of data (in the
    # same row/col order found in the spreadsheet).
    spreadsheet_lines = _export_spreadsheet(gd_client, spreadsheet_key,
                                            worksheet_id, headers)

    # serialize the rows to an in-memory TSV document
    out_lines = StringIO()
    tsv_writer = writer(out_lines, delimiter='\t', lineterminator='\n')
    tsv_writer.writerows(spreadsheet_lines)
    return out_lines.getvalue()
def _extract_spreadsheet_key_from_url(url):
"""Extracts a key from a URL in the form '...key=some_key&foo=42...
If the URL doesn't look valid, assumes the URL is the key and returns it
unmodified.
"""
result = url
if 'key=' in url:
result = url.split('key=')[-1].split('#')[0].split('&')[0]
return result
def _get_spreadsheet_headers(client, spreadsheet_key, worksheet_id):
    """Return the first row of the worksheet as a list of header strings.

    Headers come back in the order they appear in the spreadsheet.
    """
    # Restrict the cells feed to row 1 only.
    query = CellQuery()
    query.min_row = '1'
    query.max_row = '1'
    feed = client.GetCellsFeed(spreadsheet_key, worksheet_id, query=query,
                               visibility='public', projection='values')
    headers = []
    # The feed may be paginated; keep following 'next' links until exhausted.
    while feed is not None:
        headers.extend(cell.content.text for cell in feed.entry)
        next_link = feed.GetNextLink()
        if next_link:
            feed = client.Get(next_link.href,
                              converter=SpreadsheetsCellsFeedFromString)
        else:
            feed = None
    return headers
def _export_spreadsheet(client, spreadsheet_key, worksheet_id, headers):
    """Returns a list of lists containing the entire spreadsheet.

    This will include the header, any comment lines, and the spreadsheet data.
    Blank cells are represented as None. Data will only be read up to the first
    blank line that is encountered (this is a limitation of the Google
    Spreadsheet API).

    Comments are only supported after the header and before any real data is
    encountered. The lines must start with [optional whitespace] '#' and only
    the first cell is kept in that case (to avoid many empty cells after the
    comment cell, which mimics QIIME's mapping file format).

    Only cell data that falls under the supplied headers will be included.
    """
    # Convert the headers into Google's internal "cleaned" representation.
    # These will be used as lookups to pull out cell data.
    cleaned_headers = _get_cleaned_headers(headers)

    # List feed skips header and returns rows in the order they appear in the
    # spreadsheet.
    spreadsheet_lines = [headers]
    rows_feed = client.GetListFeed(spreadsheet_key, worksheet_id,
                                   visibility='public', projection='values')
    while True:
        # NOTE(review): found_data is reset for every page of the paginated
        # feed, so a '#'-prefixed row at the start of a later page would be
        # truncated to its first cell even after real data — confirm intended.
        found_data = False

        for row in rows_feed.entry:
            line = []

            # Loop through our headers and use the cleaned version to look up
            # the cell data. In certain cases (if the original header was blank
            # or only contained special characters) we will not be able to map
            # our header, so the best we can do is tell the user to change the
            # name of their header to be something simple/alphanumeric.
            for header_idx, (header, cleaned_header) in \
                    enumerate(zip(headers, cleaned_headers)):
                try:
                    cell_data = row.custom[cleaned_header].text
                except KeyError:
                    raise GoogleSpreadsheetError("Could not map header '%s' "
                            "to Google Spreadsheet's internal representation "
                            "of the header. We suggest changing the name of "
                            "the header in your Google Spreadsheet to be "
                            "alphanumeric if possible, as this will likely "
                            "solve the issue. Note that the name isn't "
                            "*required* to be alphanumeric, but it may fix "
                            "issues with converting to Google Spreadsheet's "
                            "internal format in some cases." % header)

                # Special handling of comments (if it's a comment, only keep
                # that cell to avoid several blank cells following it).
                # NOTE(review): assumes a comment cell is non-None; a None
                # first cell would raise AttributeError on lstrip() — confirm.
                if not found_data and header_idx == 0 and \
                        cell_data.lstrip().startswith('#'):
                    line.append(cell_data)
                    break
                else:
                    line.append(cell_data)
                    found_data = True
            spreadsheet_lines.append(line)

        # Get the next set of rows if necessary.
        next_link = rows_feed.GetNextLink()
        if next_link:
            rows_feed = client.Get(next_link.href,
                                   converter=SpreadsheetsListFeedFromString)
        else:
            break
    return spreadsheet_lines
def _get_cleaned_headers(headers):
"""Creates a list of "cleaned" headers which spreadsheets accept.
A Google Spreadsheet converts the header names into a "cleaned" internal
representation, which must be used to reference a cell at a particular
header/column. They are all lower case and contain no spaces or special
characters. If two columns have the same name after being sanitized, the
columns further to the right have _2, _3 _4, etc. appended to them.
If there are column names which consist of all special characters, or if
the column header is blank, an obfuscated value will be used for a column
name. This method does not handle blank column names or column names with
only special characters.
Taken from gdata.spreadsheet.text_db.ConvertStringsToColumnHeaders and
modified to handle headers with pound signs or that start with numbers, as
well as correctly handle duplicate cleaned headers.
"""
cleaned_headers = []
for header in headers:
# Google strips special characters, whitespace, and underscores first,
# and then strips any *leading* digits. This order is extremely
# important!
sanitized = sub(r'^\d+', '', sub(r'[\W_]', '', header.lower()))
if len(sanitized) > 0:
cleaned_headers.append(sanitized)
else:
raise GoogleSpreadsheetError("Encountered a header '%s' that was "
"either blank or consisted only of special characters. "
"Could not map the header to the internal representation "
"used by the Google Spreadsheet. Please change the header "
"to consist of at least one alphanumeric character."
% header)
# When the same sanitized header appears multiple times in the first row
# of a spreadsheet, _n is appended to the name to make it unique.
header_count = defaultdict(int)
results = []
for header, cleaned_header in zip(headers, cleaned_headers):
new_header = cleaned_header
if header_count[cleaned_header] > 0:
# Google's numbering starts from _2, hence the +1.
new_header = '%s_%d' % (cleaned_header,
header_count[cleaned_header] + 1)
header_count[cleaned_header] += 1
results.append(new_header)
return results
| gpl-2.0 |
djnugent/ardupilot-solo | Tools/autotest/pysim/aircraft.py | 164 | 4001 | import math, util, rotmat, time
import random
from rotmat import Vector3, Matrix3
class Aircraft(object):
    '''a basic aircraft class

    Base class for simulated vehicles: holds position/attitude state in a
    local NED frame relative to a geodetic home location, plus frame-time
    bookkeeping used to pace the simulation against the wall clock.
    '''
    def __init__(self):
        # Geodetic home (reference) location; `position` below is NED metres
        # relative to this point.
        self.home_latitude = 0
        self.home_longitude = 0
        self.home_altitude = 0
        self.ground_level = 0
        self.frame_height = 0.0

        self.latitude = self.home_latitude
        self.longitude = self.home_longitude
        self.altitude = self.home_altitude

        # Body-to-earth rotation matrix (attitude).
        self.dcm = Matrix3()

        # rotation rate in body frame
        self.gyro = Vector3(0,0,0) # rad/s

        self.velocity = Vector3(0, 0, 0) # m/s, North, East, Down
        self.position = Vector3(0, 0, 0) # m North, East, Down
        self.mass = 0.0
        self.update_frequency = 50 # in Hz
        self.gravity = 9.80665 # m/s/s
        self.accelerometer = Vector3(0, 0, -self.gravity)

        # NOTE(review): self.accel_body is read by update_position() and
        # add_noise() but never initialized here — presumably set by
        # subclasses before those methods are called; confirm.
        self.wind = util.Wind('0,0,0')
        self.time_base = time.time()
        self.time_now = self.time_base + 100*1.0e-6

        # Sensor noise magnitudes scaled by throttle in add_noise().
        self.gyro_noise = math.radians(0.1)
        self.accel_noise = 0.3

    def on_ground(self, position=None):
        '''return true if we are on the ground'''
        if position is None:
            position = self.position
        # -position.z converts NED "down" back into height above home.
        return (-position.z) + self.home_altitude <= self.ground_level + self.frame_height

    def update_position(self):
        '''update lat/lon/alt from position'''

        bearing = math.degrees(math.atan2(self.position.y, self.position.x))
        distance = math.sqrt(self.position.x**2 + self.position.y**2)

        (self.latitude, self.longitude) = util.gps_newpos(self.home_latitude, self.home_longitude,
                                                          bearing, distance)

        self.altitude = self.home_altitude - self.position.z

        # NOTE(review): velocity_body is computed but never used — possibly
        # leftover from an earlier implementation; confirm before removing.
        velocity_body = self.dcm.transposed() * self.velocity

        self.accelerometer = self.accel_body.copy()

    def set_yaw_degrees(self, yaw_degrees):
        '''rotate to the given yaw'''
        (roll, pitch, yaw) = self.dcm.to_euler()
        yaw = math.radians(yaw_degrees)
        self.dcm.from_euler(roll, pitch, yaw)

    def time_advance(self, deltat):
        '''advance time by deltat in seconds'''
        self.time_now += deltat

    def setup_frame_time(self, rate, speedup):
        '''setup frame_time calculation'''
        self.rate = rate
        self.speedup = speedup
        self.frame_time = 1.0/rate
        # Wall-clock duration of one frame once speedup is applied.
        self.scaled_frame_time = self.frame_time/speedup
        self.last_wall_time = time.time()
        self.achieved_rate = rate

    def adjust_frame_time(self, rate):
        '''adjust frame_time calculation'''
        self.rate = rate
        self.frame_time = 1.0/rate
        self.scaled_frame_time = self.frame_time/self.speedup

    def sync_frame_time(self):
        '''try to synchronise simulation time with wall clock time, taking
        into account desired speedup'''
        now = time.time()
        # Sleep off any time remaining in the current frame.
        if now < self.last_wall_time + self.scaled_frame_time:
            time.sleep(self.last_wall_time+self.scaled_frame_time - now)
            now = time.time()

        if now > self.last_wall_time and now - self.last_wall_time < 0.1:
            rate = 1.0/(now - self.last_wall_time)
            # Low-pass filter the achieved rate, then nudge the frame time
            # towards the target rate*speedup.
            self.achieved_rate = (0.98*self.achieved_rate) + (0.02*rate)
            if self.achieved_rate < self.rate*self.speedup:
                self.scaled_frame_time *= 0.999
            else:
                self.scaled_frame_time *= 1.001

        self.last_wall_time = now

    def add_noise(self, throttle):
        '''add noise based on throttle level (from 0..1)'''
        self.gyro += Vector3(random.gauss(0, 1),
                             random.gauss(0, 1),
                             random.gauss(0, 1)) * throttle * self.gyro_noise
        self.accel_body += Vector3(random.gauss(0, 1),
                                   random.gauss(0, 1),
                                   random.gauss(0, 1)) * throttle * self.accel_noise
| gpl-3.0 |
s3nk4s/flaskTutorials | FlaskApp/FlaskApp/venv/local/lib/python2.7/site-packages/pip/backwardcompat/__init__.py | 394 | 3756 | """Stuff that differs in different Python versions and platform
distributions."""
import os
import imp
import sys
import site
__all__ = ['WindowsError']
uses_pycache = hasattr(imp, 'cache_from_source')
class NeverUsedException(Exception):
    """this exception should never be raised"""

# Alias WindowsError so callers can catch it unconditionally; on platforms
# where the builtin does not exist, bind it to an exception type that is
# never raised.
try:
    WindowsError = WindowsError
except NameError:
    WindowsError = NeverUsedException

try:
    #new in Python 3.3
    PermissionError = PermissionError
except NameError:
    PermissionError = NeverUsedException

# Encoding of the real stdout, captured before any wrapping/redirection.
console_encoding = sys.__stdout__.encoding

# Python 3 / Python 2 compatibility layer: import renamed stdlib modules
# under the Python 2 names and define small helpers (b/u/console_to_str/...)
# with a uniform interface for the rest of pip.
if sys.version_info >= (3,):
    from io import StringIO, BytesIO
    from functools import reduce
    from urllib.error import URLError, HTTPError
    from queue import Queue, Empty
    from urllib.request import url2pathname, urlretrieve, pathname2url
    from email import message as emailmessage
    import urllib.parse as urllib
    import urllib.request as urllib2
    import configparser as ConfigParser
    import xmlrpc.client as xmlrpclib
    import urllib.parse as urlparse
    import http.client as httplib

    def cmp(a, b):
        # The cmp() builtin was removed in Python 3.
        return (a > b) - (a < b)

    def b(s):
        # str -> bytes
        return s.encode('utf-8')

    def u(s):
        # bytes -> str
        return s.decode('utf-8')

    def console_to_str(s):
        # Decode subprocess output with the console encoding, falling back
        # to UTF-8 when that fails.
        try:
            return s.decode(console_encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def get_http_message_param(http_message, param, default_value):
        return http_message.get_param(param, default_value)

    bytes = bytes
    string_types = (str,)
    raw_input = input
else:
    from cStringIO import StringIO
    from urllib2 import URLError, HTTPError
    from Queue import Queue, Empty
    from urllib import url2pathname, urlretrieve, pathname2url
    from email import Message as emailmessage
    import urllib
    import urllib2
    import urlparse
    import ConfigParser
    import xmlrpclib
    import httplib

    def b(s):
        # On Python 2, str is already a byte string.
        return s

    def u(s):
        return s

    def console_to_str(s):
        return s

    def get_http_message_param(http_message, param, default_value):
        result = http_message.getparam(param)
        return result or default_value

    bytes = str
    string_types = (basestring,)
    reduce = reduce
    cmp = cmp
    raw_input = raw_input
    BytesIO = StringIO

from distutils.sysconfig import get_python_lib, get_python_version

#site.USER_SITE was created in py2.6
user_site = getattr(site, 'USER_SITE', None)
def product(*args, **kwds):
    """Yield the cartesian product of the input iterables as tuples.

    Pure-Python equivalent of itertools.product:
    product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
    """
    repeat = kwds.get('repeat', 1)
    pools = [tuple(arg) for arg in args] * repeat
    # Build up partial combinations one pool at a time.
    partials = [[]]
    for pool in pools:
        partials = [combo + [item] for combo in partials for item in pool]
    for combo in partials:
        yield tuple(combo)
def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in backwardcompat due to differences on AIX and Jython,
    that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        try:
            file_uid = os.fstat(fd).st_uid
        finally:
            # Always release the descriptor, even if fstat raises.
            os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError("%s is a symlink; Will not return uid for symlinks" % path)
    return file_uid
| mit |
ichenjin/webgl-fundamentals | obj2.py | 13 | 2818 | # Copyright 2012, Gregg Tavares.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Gregg Tavares. nor the names of his
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import markdown
import glob
import os
import re
import sys
class ObjParser(object):
    """Parses a Wavefront .obj file and prints de-indexed vertex data.

    Collects 'v' (position), 'vn' (normal) and 'vt' (texcoord) records,
    then for each 'f' (face) line resolves the 1-based v/vt/vn indices and
    prints the expanded positions, texcoords and normals as comma-separated
    literals (suitable for pasting into a WebGL vertex buffer).
    """
    def __init__(self, filename):
        # Raw attribute lists in file order; faces index these 1-based.
        self.positions = []
        self.normals = []
        self.texcoords = []
        # Per-face-vertex (de-indexed) output lists.
        self.out_positions = []
        self.out_normals = []
        self.out_texcoords = []

        file = open(filename, "r")
        lines = file.readlines()
        file.close()
        for line in lines:
            # NOTE(review): a blank line would make parts[0] raise
            # IndexError — assumes the input contains none; confirm.
            parts = line.split()
            if parts[0] == "v":
                self.positions.append([parts[1], parts[2], parts[3]])
            elif parts[0] == 'vn':
                self.normals.append([parts[1], parts[2], parts[3]])
            elif parts[0] == 'vt':
                self.texcoords.append([parts[1], parts[2]])
            elif parts[0] == 'f':
                # Only the first 3 vertices of each face are used; faces are
                # assumed to be triangulated and in full v/vt/vn form.
                for v in parts[1:4]:
                    f = v.split("/")
                    # .obj indices are 1-based, hence the -1.
                    self.out_positions.append(self.positions[int(f[0]) - 1])
                    self.out_texcoords.append(self.texcoords[int(f[1]) - 1])
                    self.out_normals.append(self.normals[int(f[2]) - 1])

        # Emit everything to stdout (Python 2 print statements).
        print "// positions"
        self.dump(self.out_positions)
        print "// texcoords"
        self.dump(self.out_texcoords)
        print "// normals"
        self.dump(self.out_normals)

    def dump(self, array):
        # Print one comma-terminated line per element, components joined
        # with ", ".
        for e in array:
            print ", ".join(e) + ","
def main(argv):
    """Entry point: parse the .obj file named by the first CLI argument.

    The parser does all of its output as a side effect of construction, so
    no reference to the instance is kept (the original bound it to an unused
    local).
    """
    ObjParser(argv[0])

if __name__ == '__main__':
    main(sys.argv[1:])
| bsd-3-clause |
Matt-Deacalion/django | tests/gis_tests/relatedapp/tests.py | 199 | 15937 | from __future__ import unicode_literals
from django.contrib.gis.db.models import F, Collect, Count, Extent, Union
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.geos import GEOSGeometry, MultiPoint, Point
from django.db import connection
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango110Warning
from ..utils import no_oracle
from .models import (
Article, Author, Book, City, DirectoryEntry, Event, Location, Parcel,
)
@skipUnlessDBFeature("gis_enabled")
class RelatedGeoModelTest(TestCase):
    """Tests of GeoDjango ORM behavior across model relations.

    Covers select_related, transform, geographic aggregates
    (Extent/Union/Collect/Count), F() expressions, values()/values_list(),
    defer()/only(), and queryset combination, all against the fixtures'
    City/Location/Parcel/Author/Book models.
    """
    fixtures = ['initial']

    def test02_select_related(self):
        "Testing `select_related` on geographic models (see #7126)."
        qs1 = City.objects.order_by('id')
        qs2 = City.objects.order_by('id').select_related()
        qs3 = City.objects.order_by('id').select_related('location')

        # Reference data for what's in the fixtures.
        cities = (
            ('Aurora', 'TX', -97.516111, 33.058333),
            ('Roswell', 'NM', -104.528056, 33.387222),
            ('Kecksburg', 'PA', -79.460734, 40.18476),
        )

        for qs in (qs1, qs2, qs3):
            for ref, c in zip(cities, qs):
                nm, st, lon, lat = ref
                self.assertEqual(nm, c.name)
                self.assertEqual(st, c.state)
                self.assertEqual(Point(lon, lat), c.location.point)

    @skipUnlessDBFeature("has_transform_method")
    def test03_transform_related(self):
        "Testing the `transform` GeoQuerySet method on related geographic models."
        # All the transformations are to state plane coordinate systems using
        # US Survey Feet (thus a tolerance of 0 implies error w/in 1 survey foot).
        tol = 0

        def check_pnt(ref, pnt):
            self.assertAlmostEqual(ref.x, pnt.x, tol)
            self.assertAlmostEqual(ref.y, pnt.y, tol)
            self.assertEqual(ref.srid, pnt.srid)

        # Each city transformed to the SRID of their state plane coordinate system.
        transformed = (('Kecksburg', 2272, 'POINT(1490553.98959621 314792.131023984)'),
                       ('Roswell', 2257, 'POINT(481902.189077221 868477.766629735)'),
                       ('Aurora', 2276, 'POINT(2269923.2484839 7069381.28722222)'),
                       )

        for name, srid, wkt in transformed:
            # Doing this implicitly sets `select_related` select the location.
            # TODO: Fix why this breaks on Oracle.
            qs = list(City.objects.filter(name=name).transform(srid, field_name='location__point'))
            check_pnt(GEOSGeometry(wkt, srid), qs[0].location.point)

    @skipUnlessDBFeature("supports_extent_aggr")
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_related_extent_aggregate(self):
        "Testing the `extent` GeoQuerySet aggregates on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Extent('location__point'))

        # One for all locations, one that excludes New Mexico (Roswell).
        all_extent = (-104.528056, 29.763374, -79.460734, 40.18476)
        txpa_extent = (-97.516111, 29.763374, -79.460734, 40.18476)
        e1 = City.objects.extent(field_name='location__point')
        e2 = City.objects.exclude(state='NM').extent(field_name='location__point')
        e3 = aggs['location__point__extent']

        # The tolerance value is to four decimal places because of differences
        # between the Oracle and PostGIS spatial backends on the extent calculation.
        tol = 4
        for ref, e in [(all_extent, e1), (txpa_extent, e2), (all_extent, e3)]:
            for ref_val, e_val in zip(ref, e):
                self.assertAlmostEqual(ref_val, e_val, tol)

    @skipUnlessDBFeature("supports_extent_aggr")
    def test_related_extent_annotate(self):
        """
        Test annotation with Extent GeoAggregate.
        """
        cities = City.objects.annotate(points_extent=Extent('location__point')).order_by('name')
        tol = 4
        self.assertAlmostEqual(
            cities[0].points_extent,
            (-97.516111, 33.058333, -97.516111, 33.058333),
            tol
        )

    @skipUnlessDBFeature("has_unionagg_method")
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_related_union_aggregate(self):
        "Testing the `unionagg` GeoQuerySet aggregates on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Union('location__point'))

        # These are the points that are components of the aggregate geographic
        # union that is returned.  Each point # corresponds to City PK.
        p1 = Point(-104.528056, 33.387222)
        p2 = Point(-97.516111, 33.058333)
        p3 = Point(-79.460734, 40.18476)
        p4 = Point(-96.801611, 32.782057)
        p5 = Point(-95.363151, 29.763374)

        # The second union aggregate is for a union
        # query that includes limiting information in the WHERE clause (in other
        # words a `.filter()` precedes the call to `.unionagg()`).
        ref_u1 = MultiPoint(p1, p2, p4, p5, p3, srid=4326)
        ref_u2 = MultiPoint(p2, p3, srid=4326)
        u1 = City.objects.unionagg(field_name='location__point')
        u2 = City.objects.exclude(
            name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth'),
        ).unionagg(field_name='location__point')
        u3 = aggs['location__point__union']
        self.assertEqual(type(u1), MultiPoint)
        self.assertEqual(type(u3), MultiPoint)

        # Ordering of points in the result of the union is not defined and
        # implementation-dependent (DB backend, GEOS version)
        self.assertSetEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u1})
        self.assertSetEqual({p.ewkt for p in ref_u2}, {p.ewkt for p in u2})
        self.assertSetEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u3})

    def test05_select_related_fk_to_subclass(self):
        "Testing that calling select_related on a query over a model with an FK to a model subclass works"
        # Regression test for #9752.
        list(DirectoryEntry.objects.all().select_related())

    def test06_f_expressions(self):
        "Testing F() expressions on GeometryFields."
        # Constructing a dummy parcel border and getting the City instance for
        # assigning the FK.
        b1 = GEOSGeometry(
            'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
            '-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
            srid=4326
        )
        pcity = City.objects.get(name='Aurora')

        # First parcel has incorrect center point that is equal to the City;
        # it also has a second border that is different from the first as a
        # 100ft buffer around the City.
        c1 = pcity.location.point
        c2 = c1.transform(2276, clone=True)
        b2 = c2.buffer(100)
        Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)

        # Now creating a second Parcel where the borders are the same, just
        # in different coordinate systems. The center points are also the
        # same (but in different coordinate systems), and this time they
        # actually correspond to the centroid of the border.
        c1 = b1.centroid
        c2 = c1.transform(2276, clone=True)
        Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b1)

        # Should return the second Parcel, which has the center within the
        # border.
        qs = Parcel.objects.filter(center1__within=F('border1'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P2', qs[0].name)

        if connection.features.supports_transform:
            # This time center2 is in a different coordinate system and needs
            # to be wrapped in transformation SQL.
            qs = Parcel.objects.filter(center2__within=F('border1'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P2', qs[0].name)

        # Should return the first Parcel, which has the center point equal
        # to the point in the City ForeignKey.
        qs = Parcel.objects.filter(center1=F('city__location__point'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P1', qs[0].name)

        if connection.features.supports_transform:
            # This time the city column should be wrapped in transformation SQL.
            qs = Parcel.objects.filter(border2__contains=F('city__location__point'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P1', qs[0].name)

    def test07_values(self):
        "Testing values() and values_list() and GeoQuerySets."
        gqs = Location.objects.all()
        gvqs = Location.objects.values()
        gvlqs = Location.objects.values_list()

        # Incrementing through each of the models, dictionaries, and tuples
        # returned by the different types of GeoQuerySets.
        for m, d, t in zip(gqs, gvqs, gvlqs):
            # The values should be Geometry objects and not raw strings returned
            # by the spatial database.
            self.assertIsInstance(d['point'], Geometry)
            self.assertIsInstance(t[1], Geometry)
            self.assertEqual(m.point, d['point'])
            self.assertEqual(m.point, t[1])

    @override_settings(USE_TZ=True)
    def test_07b_values(self):
        "Testing values() and values_list() with aware datetime. See #21565."
        Event.objects.create(name="foo", when=timezone.now())
        list(Event.objects.values_list('when'))

    def test08_defer_only(self):
        "Testing defer() and only() on Geographic models."
        qs = Location.objects.all()
        def_qs = Location.objects.defer('point')
        for loc, def_loc in zip(qs, def_qs):
            self.assertEqual(loc.point, def_loc.point)

    def test09_pk_relations(self):
        "Ensuring correct primary key column is selected across relations. See #10757."
        # The expected ID values -- notice the last two location IDs
        # are out of order.  Dallas and Houston have location IDs that differ
        # from their PKs -- this is done to ensure that the related location
        # ID column is selected instead of ID column for the city.
        city_ids = (1, 2, 3, 4, 5)
        loc_ids = (1, 2, 3, 5, 4)
        ids_qs = City.objects.order_by('id').values('id', 'location__id')
        for val_dict, c_id, l_id in zip(ids_qs, city_ids, loc_ids):
            self.assertEqual(val_dict['id'], c_id)
            self.assertEqual(val_dict['location__id'], l_id)

    # TODO: fix on Oracle -- qs2 returns an empty result for an unknown reason
    @no_oracle
    def test10_combine(self):
        "Testing the combination of two GeoQuerySets.  See #10807."
        buf1 = City.objects.get(name='Aurora').location.point.buffer(0.1)
        buf2 = City.objects.get(name='Kecksburg').location.point.buffer(0.1)
        qs1 = City.objects.filter(location__point__within=buf1)
        qs2 = City.objects.filter(location__point__within=buf2)
        combined = qs1 | qs2
        names = [c.name for c in combined]
        self.assertEqual(2, len(names))
        self.assertIn('Aurora', names)
        self.assertIn('Kecksburg', names)

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test12a_count(self):
        "Testing `Count` aggregate use with the `GeoManager` on geo-fields."
        # The City, 'Fort Worth' uses the same location as Dallas.
        dallas = City.objects.get(name='Dallas')

        # Count annotation should be 2 for the Dallas location now.
        loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id)
        self.assertEqual(2, loc.num_cities)

    def test12b_count(self):
        "Testing `Count` aggregate use with the `GeoManager` on non geo-fields. See #11087."
        # Should only be one author (Trevor Paglen) returned by this query, and
        # the annotation should have 3 for the number of books, see #11087.
        # Also testing with a values(), see #11489.
        qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
        vqs = Author.objects.values('name').annotate(num_books=Count('books')).filter(num_books__gt=1)
        self.assertEqual(1, len(qs))
        self.assertEqual(3, qs[0].num_books)
        self.assertEqual(1, len(vqs))
        self.assertEqual(3, vqs[0]['num_books'])

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test13c_count(self):
        "Testing `Count` aggregate with `.values()`.  See #15305."
        qs = Location.objects.filter(id=5).annotate(num_cities=Count('city')).values('id', 'point', 'num_cities')
        self.assertEqual(1, len(qs))
        self.assertEqual(2, qs[0]['num_cities'])
        self.assertIsInstance(qs[0]['point'], GEOSGeometry)

    # TODO: The phantom model does appear on Oracle.
    @no_oracle
    def test13_select_related_null_fk(self):
        "Testing `select_related` on a nullable ForeignKey via `GeoManager`. See #11381."
        Book.objects.create(title='Without Author')
        b = Book.objects.select_related('author').get(title='Without Author')
        # Should be `None`, and not a 'dummy' model.
        self.assertIsNone(b.author)

    @skipUnlessDBFeature("supports_collect_aggr")
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_collect(self):
        """
        Testing the (deprecated) `collect` GeoQuerySet method and `Collect`
        aggregate.
        """
        # Reference query:
        # SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
        #    "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
        #    WHERE "relatedapp_city"."state" = 'TX';
        ref_geom = GEOSGeometry(
            'MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,'
            '-95.363151 29.763374,-96.801611 32.782057)'
        )

        c1 = City.objects.filter(state='TX').collect(field_name='location__point')
        c2 = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']

        for coll in (c1, c2):
            # Even though Dallas and Ft. Worth share same point, Collect doesn't
            # consolidate -- that's why 4 points in MultiPoint.
            self.assertEqual(4, len(coll))
            self.assertTrue(ref_geom.equals(coll))

    def test15_invalid_select_related(self):
        "Testing doing select_related on the related name manager of a unique FK. See #13934."
        qs = Article.objects.select_related('author__article')
        # This triggers TypeError when `get_default_columns` has no `local_only`
        # keyword. The TypeError is swallowed if QuerySet is actually
        # evaluated as list generation swallows TypeError in CPython.
        str(qs.query)

    def test16_annotated_date_queryset(self):
        "Ensure annotated date querysets work if spatial backend is used.  See #14648."
        birth_years = [dt.year for dt in
                       list(Author.objects.annotate(num_books=Count('books')).dates('dob', 'year'))]
        birth_years.sort()
        self.assertEqual([1950, 1974], birth_years)

    # TODO: Related tests for KML, GML, and distance lookups.
| bsd-3-clause |
splbio/openobject-server | openerp/addons/base/res/res_currency.py | 34 | 12824 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
import time
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools import float_round, float_is_zero, float_compare
from openerp.tools.translate import _
CURRENCY_DISPLAY_PATTERN = re.compile(r'(\w+)\s*(?:\((.*)\))?')
class res_currency(osv.osv):
def _current_rate(self, cr, uid, ids, name, arg, context=None):
return self._current_rate_computation(cr, uid, ids, name, arg, True, context=context)
def _current_rate_silent(self, cr, uid, ids, name, arg, context=None):
return self._current_rate_computation(cr, uid, ids, name, arg, False, context=context)
    def _current_rate_computation(self, cr, uid, ids, name, arg, raise_on_no_rate, context=None):
        """Shared implementation behind the 'rate' function fields.

        Returns {currency_id: rate} using, for each currency, the most
        recent res_currency_rate row whose date is <= context['date']
        (defaulting to today). If no rate exists, either raises
        (raise_on_no_rate=True) or maps the currency to 0.
        """
        if context is None:
            context = {}
        res = {}
        if 'date' in context:
            date = context['date']
        else:
            date = time.strftime('%Y-%m-%d')
        date = date or time.strftime('%Y-%m-%d')
        # Convert False values to None ...
        currency_rate_type = context.get('currency_rate_type_id') or None
        # ... and use 'is NULL' instead of '= some-id'.
        operator = '=' if currency_rate_type else 'is'
        for id in ids:
            # Safe despite string concatenation: `operator` is only ever the
            # internal literal '=' or 'is'; all values go through parameters.
            cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %s AND name <= %s AND currency_rate_type_id " + operator +" %s ORDER BY name desc LIMIT 1" ,(id, date, currency_rate_type))
            if cr.rowcount:
                id, rate = cr.fetchall()[0]
                res[id] = rate
            elif not raise_on_no_rate:
                res[id] = 0
            else:
                # NOTE(review): %-formatting happens *inside* _(), so the
                # already-formatted string is looked up for translation and
                # will never match a catalog entry — confirm whether
                # _("...") % id was intended.
                raise osv.except_osv(_('Error!'),_("No currency rate associated for currency %d for the given period" % (id)))
        return res
_name = "res.currency"
_description = "Currency"
_columns = {
# Note: 'code' column was removed as of v6.0, the 'name' should now hold the ISO code.
'name': fields.char('Currency', size=32, required=True, help="Currency Code (ISO 4217)"),
'symbol': fields.char('Symbol', size=4, help="Currency sign, to be used when printing amounts."),
'rate': fields.function(_current_rate, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1.'),
# Do not use for computation ! Same as rate field with silent failing
'rate_silent': fields.function(_current_rate_silent, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1 (0 if no rate defined).'),
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
'accuracy': fields.integer('Computational Accuracy'),
'rounding': fields.float('Rounding Factor', digits=(12,6)),
'active': fields.boolean('Active'),
'company_id':fields.many2one('res.company', 'Company'),
'date': fields.date('Date'),
'base': fields.boolean('Base'),
'position': fields.selection([('after','After Amount'),('before','Before Amount')], 'Symbol Position', help="Determines where the currency symbol should be placed after or before the amount.")
}
_defaults = {
'active': 1,
'position' : 'after',
'rounding': 0.01,
'accuracy': 4,
'company_id': False,
}
_sql_constraints = [
# this constraint does not cover all cases due to SQL NULL handling for company_id,
# so it is complemented with a unique index (see below). The constraint and index
# share the same prefix so that IntegrityError triggered by the index will be caught
# and reported to the user with the constraint's error message.
('unique_name_company_id', 'unique (name, company_id)', 'The currency code must be unique per company!'),
]
_order = "name"
    def init(self, cr):
        """Create the NULL-safe unique index on (name, company_id), once.

        Called by the ORM with a raw DB cursor when the module is
        (re)initialized; idempotent thanks to the pg_indexes lookup.
        """
        # CONSTRAINT/UNIQUE INDEX on (name,company_id)
        # /!\ The unique constraint 'unique_name_company_id' is not sufficient, because SQL92
        # only support field names in constraint definitions, and we need a function here:
        # we need to special-case company_id to treat all NULL company_id as equal, otherwise
        # we would allow duplicate "global" currencies (all having company_id == NULL)
        cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'res_currency_unique_name_company_id_idx'""")
        if not cr.fetchone():
            # COALESCE maps NULL company_id to -1 so two "global" currencies collide
            cr.execute("""CREATE UNIQUE INDEX res_currency_unique_name_company_id_idx
                          ON res_currency
                          (name, (COALESCE(company_id,-1)))""")
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
res = super(res_currency, self).read(cr, user, ids, fields, context, load)
currency_rate_obj = self.pool.get('res.currency.rate')
values = res
if not isinstance(values, list):
values = [values]
for r in values:
if r.__contains__('rate_ids'):
rates=r['rate_ids']
if rates:
currency_date = currency_rate_obj.read(cr, user, rates[0], ['name'])['name']
r['date'] = currency_date
return res
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
results = super(res_currency,self)\
.name_search(cr, user, name, args, operator=operator, context=context, limit=limit)
if not results:
name_match = CURRENCY_DISPLAY_PATTERN.match(name)
if name_match:
results = super(res_currency,self)\
.name_search(cr, user, name_match.group(1), args, operator=operator, context=context, limit=limit)
return results
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','symbol'], context=context, load='_classic_write')
return [(x['id'], tools.ustr(x['name'])) for x in reads]
def round(self, cr, uid, currency, amount):
"""Return ``amount`` rounded according to ``currency``'s
rounding rules.
:param browse_record currency: currency for which we are rounding
:param float amount: the amount to round
:return: rounded float
"""
return float_round(amount, precision_rounding=currency.rounding)
def compare_amounts(self, cr, uid, currency, amount1, amount2):
"""Compare ``amount1`` and ``amount2`` after rounding them according to the
given currency's precision..
An amount is considered lower/greater than another amount if their rounded
value is different. This is not the same as having a non-zero difference!
For example 1.432 and 1.431 are equal at 2 digits precision,
so this method would return 0.
However 0.006 and 0.002 are considered different (returns 1) because
they respectively round to 0.01 and 0.0, even though
0.006-0.002 = 0.004 which would be considered zero at 2 digits precision.
:param browse_record currency: currency for which we are rounding
:param float amount1: first amount to compare
:param float amount2: second amount to compare
:return: (resp.) -1, 0 or 1, if ``amount1`` is (resp.) lower than,
equal to, or greater than ``amount2``, according to
``currency``'s rounding.
"""
return float_compare(amount1, amount2, precision_rounding=currency.rounding)
def is_zero(self, cr, uid, currency, amount):
"""Returns true if ``amount`` is small enough to be treated as
zero according to ``currency``'s rounding rules.
Warning: ``is_zero(amount1-amount2)`` is not always equivalent to
``compare_amounts(amount1,amount2) == 0``, as the former will round after
computing the difference, while the latter will round before, giving
different results for e.g. 0.006 and 0.002 at 2 digits precision.
:param browse_record currency: currency for which we are rounding
:param float amount: amount to compare with currency's zero
"""
return float_is_zero(amount, precision_rounding=currency.rounding)
    def _get_conversion_rate(self, cr, uid, from_currency, to_currency, context=None):
        """Return the factor converting amounts of ``from_currency`` into
        ``to_currency``, honoring the optional rate types carried in context
        under 'currency_rate_type_from' / 'currency_rate_type_to'.

        :raises osv.except_osv: when either currency has no rate defined
            (its computed ``rate`` field is 0).
        """
        if context is None:
            context = {}
        ctx = context.copy()
        # re-browse each currency so its computed 'rate' field is evaluated
        # under the requested rate type
        ctx.update({'currency_rate_type_id': ctx.get('currency_rate_type_from')})
        from_currency = self.browse(cr, uid, from_currency.id, context=ctx)
        ctx.update({'currency_rate_type_id': ctx.get('currency_rate_type_to')})
        to_currency = self.browse(cr, uid, to_currency.id, context=ctx)
        if from_currency.rate == 0 or to_currency.rate == 0:
            date = context.get('date', time.strftime('%Y-%m-%d'))
            # report whichever currency is missing its rate
            if from_currency.rate == 0:
                currency_symbol = from_currency.symbol
            else:
                currency_symbol = to_currency.symbol
            raise osv.except_osv(_('Error'), _('No rate found \n' \
                    'for the currency: %s \n' \
                    'at the date: %s') % (currency_symbol, date))
        return to_currency.rate/from_currency.rate
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount,
round=True, currency_rate_type_from=False, currency_rate_type_to=False, context=None):
if not context:
context = {}
if not from_currency_id:
from_currency_id = to_currency_id
if not to_currency_id:
to_currency_id = from_currency_id
xc = self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
if (to_currency_id == from_currency_id) and (currency_rate_type_from == currency_rate_type_to):
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
context.update({'currency_rate_type_from': currency_rate_type_from, 'currency_rate_type_to': currency_rate_type_to})
rate = self._get_conversion_rate(cr, uid, from_currency, to_currency, context=context)
if round:
return self.round(cr, uid, to_currency, from_amount * rate)
else:
return from_amount * rate
res_currency()  # instantiating the osv class registers the model (pre-v7 OpenERP convention)
class res_currency_rate_type(osv.osv):
    """User-definable category for exchange rates (e.g. 'Average',
    'Year to Date'); referenced by res.currency.rate."""
    _name = "res.currency.rate.type"
    _description = "Currency Rate Type"
    _columns = {
        'name': fields.char('Name', size=64, required=True, translate=True),
    }
res_currency_rate_type()  # instantiation registers the model with the ORM pool
class res_currency_rate(osv.osv):
    """One exchange-rate quotation of a currency at a given date.

    Note: the 'name' field holds the rate's DATE (ORM display-name
    convention); ordering "name desc" puts the newest rate first, which
    res.currency relies on when picking the current rate.
    """
    _name = "res.currency.rate"
    _description = "Currency Rate"
    _columns = {
        'name': fields.date('Date', required=True, select=True),
        'rate': fields.float('Rate', digits=(12,6), help='The rate of the currency to the currency of rate 1'),
        'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
        'currency_rate_type_id': fields.many2one('res.currency.rate.type', 'Currency Rate Type', help="Allow you to define your own currency rate types, like 'Average' or 'Year to Date'. Leave empty if you simply want to use the normal 'spot' rate type"),
    }
    _defaults = {
        # default the rate's date to today
        'name': lambda *a: time.strftime('%Y-%m-%d'),
    }
    _order = "name desc"
res_currency_rate()  # instantiation registers the model with the ORM pool
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fener06/pyload | module/web/cnl_app.py | 35 | 4421 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import join
import re
from urllib import unquote
from base64 import standard_b64decode
from binascii import unhexlify
from bottle import route, request, HTTPError
from webinterface import PYLOAD, DL_ROOT, JS
try:
from Crypto.Cipher import AES
except:
pass
def local_check(function):
    """Decorator restricting a route to locally-originated requests.

    The wrapped handler only runs when the client address is
    127.0.0.1/localhost or the request is addressed to 127.0.0.1:9666;
    anything else is answered with HTTP 403.
    """
    def _view(*args, **kwargs):
        remote = request.environ.get('REMOTE_ADDR', "0")
        host = request.environ.get('HTTP_HOST', '0')
        if remote in ('127.0.0.1', 'localhost') or host == '127.0.0.1:9666':
            return function(*args, **kwargs)
        return HTTPError(403, "Forbidden")
    return _view
@route("/flash")
@route("/flash/:id")
@route("/flash", method="POST")
@local_check
def flash(id="0"):
    """Click'n'Load handshake: identify this endpoint as a JDownloader
    instance so browser extensions will send us links."""
    banner = "JDownloader\r\n"
    return banner
@route("/flash/add", method="POST")
@local_check
def add():
    """Queue plain-text URLs posted by Click'n'Load.

    The optional 'referer' form field names the package; without it the
    links are auto-packaged.

    Bug fix: the handler previously declared a ``request`` parameter, but
    bottle invokes route callbacks without positional arguments, so every
    call raised TypeError and also shadowed bottle's thread-local
    ``request``. Use the module-level ``request`` instead.
    """
    package = request.POST.get('referer', None)
    # drop empty lines from the posted URL list
    urls = filter(lambda x: x != "", request.POST['urls'].split("\n"))
    if package:
        PYLOAD.addPackage(package, urls, 0)
    else:
        PYLOAD.generateAndAddPackages(urls, 0)
    return ""
@route("/flash/addcrypted", method="POST")
@local_check
def addcrypted():
    """Persist a Click'n'Load DLC container to disk and queue it.

    Improvements: the file is now written via a ``with`` block so the
    handle is closed even if the write fails, and the bare ``except`` is
    narrowed to ``Exception`` so KeyboardInterrupt/SystemExit propagate.
    """
    package = request.forms.get('referer', 'ClickAndLoad Package')
    # form decoding turns '+' into ' '; undo that before saving
    dlc = request.forms['crypted'].replace(" ", "+")
    # strip path-hostile characters from the package name before using it as a filename
    dlc_path = join(DL_ROOT, package.replace("/", "").replace("\\", "").replace(":", "") + ".dlc")
    with open(dlc_path, "wb") as dlc_file:
        dlc_file.write(dlc)
    try:
        PYLOAD.addPackage(package, [dlc_path], 0)
    except Exception:
        return HTTPError()
    else:
        return "success\r\n"
@route("/flash/addcrypted2", method="POST")
@local_check
def addcrypted2():
    """Decrypt a Click'n'Load v2 package and queue the contained links.

    The payload is AES-CBC encrypted; the key is produced by evaluating the
    posted 'jk' javascript function (via the bundled JS engine when
    available, otherwise by pattern-matching known function shapes).
    """
    package = request.forms.get("source", None)
    crypted = request.forms["crypted"]
    jk = request.forms["jk"]
    # '+' is mangled to ' ' by form decoding; restore it before base64-decoding
    crypted = standard_b64decode(unquote(crypted.replace(" ", "+")))
    if JS:
        # evaluate the key-producing javascript function
        jk = "%s f()" % jk
        jk = JS.eval(jk)
    else:
        try:
            # no JS engine: scrape a literal "return '...'" statement
            jk = re.findall(r"return ('|\")(.+)('|\")", jk)[0][1]
        except:
            ## Test for some known js functions to decode
            if jk.find("dec") > -1 and jk.find("org") > -1:
                # known obfuscation: key is the reversed 'org' variable
                org = re.findall(r"var org = ('|\")([^\"']+)", jk)[0][1]
                jk = list(org)
                jk.reverse()
                jk = "".join(jk)
            else:
                print "Could not decrypt key, please install py-spidermonkey or ossp-js"
    try:
        Key = unhexlify(jk)
    except:
        print "Could not decrypt key, please install py-spidermonkey or ossp-js"
        return "failed"
    # Click'n'Load v2 uses the key itself as the CBC initialization vector
    IV = Key
    obj = AES.new(Key, AES.MODE_CBC, IV)
    # strip padding NULs and CRs, one URL per line
    result = obj.decrypt(crypted).replace("\x00", "").replace("\r","").split("\n")
    result = filter(lambda x: x != "", result)
    try:
        if package:
            PYLOAD.addPackage(package, result, 0)
        else:
            PYLOAD.generateAndAddPackages(result, 0)
    except:
        return "failed can't add"
    else:
        return "success\r\n"
@route("/flashgot_pyload")
@route("/flashgot_pyload", method="POST")
@route("/flashgot")
@route("/flashgot", method="POST")
@local_check
def flashgot():
    """FlashGot browser-extension entry point: queue the posted URLs.

    Robustness fix: a request without a Referer header used to raise
    KeyError; a missing header is now refused the same way as a wrong one.
    """
    referer = request.environ.get('HTTP_REFERER', "")
    if referer not in ("http://localhost:9666/flashgot",
                       "http://127.0.0.1:9666/flashgot"):
        return HTTPError()

    autostart = int(request.forms.get('autostart', 0))
    package = request.forms.get('package', None)
    # drop empty lines from the posted URL list
    urls = filter(lambda x: x != "", request.forms['urls'].split("\n"))
    folder = request.forms.get('dir', None)

    if package:
        PYLOAD.addPackage(package, urls, autostart)
    else:
        PYLOAD.generateAndAddPackages(urls, autostart)
    return ""
@route("/crossdomain.xml")
@local_check
def crossdomain():
    """Serve the Flash cross-domain policy permitting access from anywhere."""
    policy = (
        "<?xml version=\"1.0\"?>\n"
        "<!DOCTYPE cross-domain-policy SYSTEM \"http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd\">\n"
        "<cross-domain-policy>\n"
        "<allow-access-from domain=\"*\" />\n"
        "</cross-domain-policy>"
    )
    return policy
@route("/flash/checkSupportForUrl")
@local_check
def checksupport():
    """Answer 'true'/'false' depending on whether pyLoad handles the URL."""
    url = request.GET.get("url")
    check_result = PYLOAD.checkURLs([url])
    # a non-None plugin name in the (url, plugin) pair means "supported"
    is_supported = check_result[0][1] is not None
    return str(is_supported).lower()
@route("/jdcheck.js")
@local_check
def jdcheck():
    """Serve the probe script JDownloader-aware sites request."""
    return "jdownloader=true;\n" + "var version='9.581;'"
| gpl-3.0 |
jealousrobot/PlexArt | lib/cherrypy/__init__.py | 6 | 11843 | """CherryPy is a pythonic, object-oriented HTTP framework.
CherryPy consists of not one, but four separate API layers.
The APPLICATION LAYER is the simplest. CherryPy applications are written as
a tree of classes and methods, where each branch in the tree corresponds to
a branch in the URL path. Each method is a 'page handler', which receives
GET and POST params as keyword arguments, and returns or yields the (HTML)
body of the response. The special method name 'index' is used for paths
that end in a slash, and the special method name 'default' is used to
handle multiple paths via a single handler. This layer also includes:
* the 'exposed' attribute (and cherrypy.expose)
* cherrypy.quickstart()
* _cp_config attributes
* cherrypy.tools (including cherrypy.session)
* cherrypy.url()
The ENVIRONMENT LAYER is used by developers at all levels. It provides
information about the current request and response, plus the application
and server environment, via a (default) set of top-level objects:
* cherrypy.request
* cherrypy.response
* cherrypy.engine
* cherrypy.server
* cherrypy.tree
* cherrypy.config
* cherrypy.thread_data
* cherrypy.log
* cherrypy.HTTPError, NotFound, and HTTPRedirect
* cherrypy.lib
The EXTENSION LAYER allows advanced users to construct and share their own
plugins. It consists of:
* Hook API
* Tool API
* Toolbox API
* Dispatch API
* Config Namespace API
Finally, there is the CORE LAYER, which uses the core API's to construct
the default components which are available at higher layers. You can think
of the default components as the 'reference implementation' for CherryPy.
Megaframeworks (and advanced users) may replace the default components
with customized or extended components. The core API's are:
* Application API
* Engine API
* Request API
* Server API
* WSGI API
These API's are described in the `CherryPy specification <https://bitbucket.org/cherrypy/cherrypy/wiki/CherryPySpec>`_.
"""
try:
import pkg_resources
except ImportError:
pass
from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect
from cherrypy._cperror import NotFound, CherryPyException, TimeoutError
from cherrypy import _cpdispatch as dispatch
from cherrypy import _cptools
tools = _cptools.default_toolbox
Tool = _cptools.Tool
from cherrypy import _cprequest
from cherrypy.lib import httputil as _httputil
from cherrypy import _cptree
tree = _cptree.Tree()
from cherrypy._cptree import Application
from cherrypy import _cpwsgi as wsgi
from cherrypy import process
try:
from cherrypy.process import win32
engine = win32.Win32Bus()
engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
del win32
except ImportError:
engine = process.bus
try:
__version__ = pkg_resources.require('cherrypy')[0].version
except Exception:
__version__ = 'unknown'
# Timeout monitor. We add two channels to the engine
# to which cherrypy.Application will publish.
engine.listeners['before_request'] = set()
engine.listeners['after_request'] = set()


class _TimeoutMonitor(process.plugins.Monitor):
    """Engine plugin that periodically checks in-flight responses for timeout."""

    def __init__(self, bus):
        # (request, response) pairs currently being served
        self.servings = []
        process.plugins.Monitor.__init__(self, bus, self.run)

    def before_request(self):
        # register the current thread's request/response pair
        self.servings.append((serving.request, serving.response))

    def after_request(self):
        try:
            self.servings.remove((serving.request, serving.response))
        except ValueError:
            # pair was never registered or already removed; nothing to do
            pass

    def run(self):
        """Check timeout on all responses. (Internal)"""
        for req, resp in self.servings:
            resp.check_timeout()


engine.timeout_monitor = _TimeoutMonitor(engine)
engine.timeout_monitor.subscribe()

# restart the process when any loaded module changes on disk
engine.autoreload = process.plugins.Autoreloader(engine)
engine.autoreload.subscribe()

engine.thread_manager = process.plugins.ThreadManager(engine)
engine.thread_manager.subscribe()

engine.signal_handler = process.plugins.SignalHandler(engine)
class _HandleSignalsPlugin(object):
"""Handle signals from other processes based on the configured
platform handlers above."""
def __init__(self, bus):
self.bus = bus
def subscribe(self):
"""Add the handlers based on the platform"""
if hasattr(self.bus, "signal_handler"):
self.bus.signal_handler.subscribe()
if hasattr(self.bus, "console_control_handler"):
self.bus.console_control_handler.subscribe()
# expose the platform signal wiring as engine.signals
engine.signals = _HandleSignalsPlugin(engine)

from cherrypy import _cpserver
# the default HTTP server; subscribing ties its start/stop to the engine
server = _cpserver.Server()
server.subscribe()
def quickstart(root=None, script_name="", config=None):
    """Mount the given root, start the builtin server (and engine), then block.

    root: an instance of a "controller class" (a collection of page handler
        methods) which represents the root of the application.
    script_name: a string containing the "mount point" of the application.
        This should start with a slash, and be the path portion of the URL
        at which to mount the given root. For example, if root.index() will
        handle requests to "http://www.example.com:8080/dept/app1/", then
        the script_name argument would be "/dept/app1".

        It MUST NOT end in a slash. If the script_name refers to the root
        of the URI, it MUST be an empty string (not "/").
    config: a file or dict containing application config. If this contains
        a [global] section, those entries will be used in the global
        (site-wide) config.
    """
    if config:
        # apply any [global] entries site-wide before mounting
        _global_conf_alias.update(config)

    tree.mount(root, script_name, config)

    engine.signals.subscribe()
    engine.start()
    engine.block()  # park the main thread until the engine stops
from cherrypy._cpcompat import threadlocal as _local
class _Serving(_local):
    """An interface for registering request and response objects.

    Rather than have a separate "thread local" object for the request and
    the response, this class works as a single threadlocal container for
    both objects (and any others which developers wish to define). In this
    way, we can easily dump those objects when we stop/start a new HTTP
    conversation, yet still refer to them as module-level globals in a
    thread-safe way.
    """

    # class-level placeholder instances; each serving thread overwrites
    # these via load() with its own request/response pair
    request = _cprequest.Request(_httputil.Host("127.0.0.1", 80),
                                 _httputil.Host("127.0.0.1", 1111))
    """
    The request object for the current thread. In the main thread,
    and any threads which are not receiving HTTP requests, this is None."""

    response = _cprequest.Response()
    """
    The response object for the current thread. In the main thread,
    and any threads which are not receiving HTTP requests, this is None."""

    def load(self, request, response):
        # bind the per-thread pair for the current HTTP conversation
        self.request = request
        self.response = response

    def clear(self):
        """Remove all attributes of self."""
        self.__dict__.clear()
class _ThreadLocalProxy(object):
    """Delegate all attribute and item access to the object stored under
    ``__attrname__`` on the thread-local ``serving`` container, so that
    module-level globals like ``cherrypy.request`` resolve to per-thread
    objects."""

    __slots__ = ['__attrname__', '__dict__']

    def __init__(self, attrname):
        self.__attrname__ = attrname

    def __getattr__(self, name):
        child = getattr(serving, self.__attrname__)
        return getattr(child, name)

    def __setattr__(self, name, value):
        if name in ("__attrname__", ):
            # the proxy's own bookkeeping attribute lives on the proxy itself
            object.__setattr__(self, name, value)
        else:
            child = getattr(serving, self.__attrname__)
            setattr(child, name, value)

    def __delattr__(self, name):
        child = getattr(serving, self.__attrname__)
        delattr(child, name)

    def _get_dict(self):
        # present a merged view of the proxied object's class and instance dicts
        child = getattr(serving, self.__attrname__)
        d = child.__class__.__dict__.copy()
        d.update(child.__dict__)
        return d
    __dict__ = property(_get_dict)

    def __getitem__(self, key):
        child = getattr(serving, self.__attrname__)
        return child[key]

    def __setitem__(self, key, value):
        child = getattr(serving, self.__attrname__)
        child[key] = value

    def __delitem__(self, key):
        child = getattr(serving, self.__attrname__)
        del child[key]

    def __contains__(self, key):
        child = getattr(serving, self.__attrname__)
        return key in child

    def __len__(self):
        child = getattr(serving, self.__attrname__)
        return len(child)

    def __nonzero__(self):
        child = getattr(serving, self.__attrname__)
        return bool(child)
    # Python 3
    __bool__ = __nonzero__
# Create request and response object (the same objects will be used
# throughout the entire life of the webserver, but will redirect
# to the "serving" object)
request = _ThreadLocalProxy('request')
response = _ThreadLocalProxy('response')

# Create thread_data object as a thread-specific all-purpose storage
class _ThreadData(_local):
    """A container for thread-specific data."""
thread_data = _ThreadData()
# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
# The only other way would be to change what is returned from type(request)
# and that's not possible in pure Python (you'd have to fake ob_type).
def _cherrypy_pydoc_resolve(thing, forceload=0):
    """Given an object or a path to an object, get the object and its name."""
    if isinstance(thing, _ThreadLocalProxy):
        # unwrap the proxy so pydoc documents the real per-thread object
        thing = getattr(serving, thing.__attrname__)
    return _pydoc._builtin_resolve(thing, forceload)
try:
import pydoc as _pydoc
_pydoc._builtin_resolve = _pydoc.resolve
_pydoc.resolve = _cherrypy_pydoc_resolve
except ImportError:
pass
from cherrypy import _cplogging
class _GlobalLogManager(_cplogging.LogManager):
    """A site-wide LogManager; routes to app.log or global log as appropriate.

    This :class:`LogManager<cherrypy._cplogging.LogManager>` implements
    cherrypy.log() and cherrypy.log.access(). If either
    function is called during a request, the message will be sent to the
    logger for the current Application. If they are called outside of a
    request, the message will be sent to the site-wide logger.
    """

    def __call__(self, *args, **kwargs):
        """Log the given message to the app.log or global log as appropriate.
        """
        # Do NOT use try/except here. See
        # https://github.com/cherrypy/cherrypy/issues/945
        if hasattr(request, 'app') and hasattr(request.app, 'log'):
            # inside a request: prefer the mounted application's logger
            log = request.app.log
        else:
            log = self
        return log.error(*args, **kwargs)

    def access(self):
        """Log an access message to the app.log or global log as appropriate.
        """
        try:
            return request.app.log.access()
        except AttributeError:
            # no request/app bound to this thread: fall back to the global log
            return _cplogging.LogManager.access(self)
# the site-wide log instance exposed as cherrypy.log
log = _GlobalLogManager()
# Set a default screen handler on the global log.
log.screen = True
log.error_file = ''
# Using an access file makes CP about 10% slower. Leave off by default.
log.access_file = ''


def _buslog(msg, level):
    # forward engine/bus messages into the global error log
    log.error(msg, 'ENGINE', severity=level)
engine.subscribe('log', _buslog)
from cherrypy._helper import expose, popargs, url
# import _cpconfig last so it can reference other top-level objects
from cherrypy import _cpconfig
# Use _global_conf_alias so quickstart can use 'config' as an arg
# without shadowing cherrypy.config.
config = _global_conf_alias = _cpconfig.Config()
config.defaults = {
    'tools.log_tracebacks.on': True,
    'tools.log_headers.on': True,
    'tools.trailing_slash.on': True,
    'tools.encode.on': True
}
# custom config namespaces: "log.*" keys configure the global log above,
# "checker.*" keys configure the app checker created below
config.namespaces["log"] = lambda k, v: setattr(log, k, v)
config.namespaces["checker"] = lambda k, v: setattr(checker, k, v)
# Must reset to get our defaults applied.
config.reset()

from cherrypy import _cpchecker
checker = _cpchecker.Checker()
# run configuration sanity checks whenever the engine starts
engine.subscribe('start', checker)
| gpl-3.0 |
JakeBrand/CMPUT410-E4 | lab4/lib/python2.7/site-packages/distribute-0.6.24-py2.7.egg/setuptools/dist.py | 64 | 30309 | __all__ = ['Distribution']
import re
from distutils.core import Distribution as _Distribution
from setuptools.depends import Require
from setuptools.command.install import install
from setuptools.command.sdist import sdist
from setuptools.command.install_lib import install_lib
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
import os, distutils.log
def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded
Also ensures that no other distutils extension monkeypatched the distutils
first.
"""
while cls.__module__.startswith('setuptools'):
cls, = cls.__bases__
if not cls.__module__.startswith('distutils'):
raise AssertionError(
"distutils has already been patched by %r" % cls
)
return cls
# rebind to the pristine distutils class so we subclass the real thing
_Distribution = _get_unpatched(_Distribution)

# types accepted wherever a "sequence" of values is expected below
sequence = tuple, list
def check_importable(dist, attr, value):
    """Verify that *value* is an importable 'module:attrs' style string
    (an entry-point target with no extras); raise DistutilsSetupError
    otherwise."""
    try:
        # parse as a dummy entry point; extras are not allowed here
        ep = pkg_resources.EntryPoint.parse('x='+value)
        assert not ep.extras
    except (TypeError,ValueError,AttributeError,AssertionError):
        raise DistutilsSetupError(
            "%r must be importable 'module:attrs' string (got %r)"
            % (attr,value)
        )
def assert_string_list(dist, attr, value):
    """Verify that value is a string list or None.

    Fix: the original performed its check with an ``assert`` statement,
    which is stripped under ``python -O`` so bad values were silently
    accepted in optimized mode; the check is now an explicit comparison.
    """
    try:
        # joining a list/tuple of strings yields a new string that never
        # equals the container; a plain string joins back to itself
        valid = ''.join(value) != value
    except (TypeError, ValueError, AttributeError):
        valid = False
    if not valid:
        raise DistutilsSetupError(
            "%r must be a list of strings (got %r)" % (attr, value)
        )
def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    # must at least be a list of strings
    assert_string_list(dist,attr,value)
    for nsp in value:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        if '.' in nsp:
            # a nested namespace package requires its parent namespace to be
            # declared as well; warn (don't fail) when it is missing
            parent = '.'.join(nsp.split('.')[:-1])
            if parent not in value:
                distutils.log.warn(
                    "%r is declared as a package namespace, but %r is not:"
                    " please correct this in setup.py", nsp, parent
                )
def check_extras(dist, attr, value):
    """Verify that extras_require mapping is valid"""
    try:
        # every value must parse as a requirements list
        for k,v in value.items():
            list(pkg_resources.parse_requirements(v))
    except (TypeError,ValueError,AttributeError):
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        )
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    # True/1 and False/0 compare equal to their bool() form; anything else
    # (2, "", [], ...) does not and is rejected
    if bool(value) == value:
        return
    raise DistutilsSetupError(
        "%r must be a boolean value (got %r)" % (attr, value)
    )
def check_requirements(dist, attr, value):
    """Verify that install_requires is a valid requirements list"""
    try:
        # parse_requirements is a generator; list() forces full validation
        list(pkg_resources.parse_requirements(value))
    except (TypeError,ValueError):
        raise DistutilsSetupError(
            "%r must be a string or list of strings "
            "containing valid project/version requirement specifiers" % (attr,)
        )
def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError, e:
        # re-raise the parse error text as a setup error (Python 2 syntax)
        raise DistutilsSetupError(e)
def check_test_suite(dist, attr, value):
    """Verify that test_suite is given as a (dotted-name) string."""
    if not isinstance(value,basestring):
        raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
    """Verify that value is a dictionary of package names to glob lists"""
    def _is_valid(mapping):
        # every key must be a str package name, every value an iterable
        # of wildcard patterns
        if not isinstance(mapping, dict):
            return False
        for package_name, patterns in mapping.items():
            if not isinstance(package_name, str):
                return False
            try:
                iter(patterns)
            except TypeError:
                return False
        return True

    if _is_valid(value):
        return
    raise DistutilsSetupError(
        attr+" must be a dictionary mapping package names to lists of "
        "wildcard patterns"
    )
class Distribution(_Distribution):
"""Distribution with support for features, tests, and package data
This is an enhanced version of 'distutils.dist.Distribution' that
effectively adds the following new optional keyword arguments to 'setup()':
'install_requires' -- a string or sequence of strings specifying project
versions that the distribution requires when installed, in the format
used by 'pkg_resources.require()'. They will be installed
automatically when the package is installed. If you wish to use
packages that are not available in PyPI, or want to give your users an
alternate download location, you can add a 'find_links' option to the
'[easy_install]' section of your project's 'setup.cfg' file, and then
setuptools will scan the listed web pages for links that satisfy the
requirements.
'extras_require' -- a dictionary mapping names of optional "extras" to the
additional requirement(s) that using those extras incurs. For example,
this::
extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
indicates that the distribution can optionally provide an extra
capability called "reST", but it can only be used if docutils and
reSTedit are installed. If the user installs your package using
EasyInstall and requests one of your extras, the corresponding
additional requirements will be installed if needed.
'features' -- a dictionary mapping option names to 'setuptools.Feature'
objects. Features are a portion of the distribution that can be
included or excluded based on user options, inter-feature dependencies,
and availability on the current system. Excluded features are omitted
from all setup commands, including source and binary distributions, so
you can create multiple distributions from the same source tree.
Feature names should be valid Python identifiers, except that they may
contain the '-' (minus) sign. Features can be included or excluded
via the command line options '--with-X' and '--without-X', where 'X' is
the name of the feature. Whether a feature is included by default, and
whether you are allowed to control this from the command line, is
determined by the Feature object. See the 'Feature' class for more
information.
'test_suite' -- the name of a test suite to run for the 'test' command.
If the user runs 'python setup.py test', the package will be installed,
and the named test suite will be run. The format is the same as
would be used on a 'unittest.py' command line. That is, it is the
dotted name of an object to import and call to generate a test suite.
'package_data' -- a dictionary mapping package names to lists of filenames
or globs to use to find data files contained in the named packages.
If the dictionary has filenames or globs listed under '""' (the empty
string), those names will be searched for in every package, in addition
to any names for the specific package. Data files found using these
names/globs will be installed along with the package, in the same
location as the package. Note that globs are allowed to reference
the contents of non-package subdirectories, as long as you use '/' as
a path separator. (Globs are automatically converted to
platform-specific paths at runtime.)
In addition to these new keywords, this class also has several new methods
for manipulating the distribution's contents. For example, the 'include()'
and 'exclude()' methods can be thought of as in-place add and subtract
commands that add or remove packages, modules, extensions, and so on from
the distribution. They are used by the feature subsystem to configure the
distribution for the included and excluded features.
"""
    # the working-set Distribution we version-patched, if any (see below)
    _patched_dist = None

    def patch_missing_pkg_info(self, attrs):
        """If this project is already in the working set but its PKG-INFO
        has not been generated yet (fresh checkout), patch the installed
        Distribution's version from the setup() attrs."""
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        key = pkg_resources.safe_name(str(attrs['name'])).lower()
        dist = pkg_resources.working_set.by_key.get(key)
        if dist is not None and not dist.has_metadata('PKG-INFO'):
            dist._version = pkg_resources.safe_version(str(attrs['version']))
            self._patched_dist = dist
    def __init__(self, attrs=None):
        """Construct the distribution, consuming setuptools-only keywords
        (src_root, dependency_links, setup_requires and any registered
        'distutils.setup_keywords' entry points) before delegating to
        distutils' Distribution.__init__."""
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        self.require_features = []
        self.features = {}
        self.dist_files = []
        # pop src_root so distutils never sees it
        self.src_root = attrs and attrs.pop("src_root", None)
        self.patch_missing_pkg_info(attrs)
        # Make sure we have any eggs needed to interpret 'attrs'
        if attrs is not None:
            self.dependency_links = attrs.pop('dependency_links', [])
            assert_string_list(self,'dependency_links',self.dependency_links)
        if attrs and 'setup_requires' in attrs:
            # download/resolve build-time requirements right away
            self.fetch_build_eggs(attrs.pop('setup_requires'))
        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            # pre-seed every registered setup() keyword so getattr() works later
            if not hasattr(self,ep.name):
                setattr(self,ep.name,None)
        _Distribution.__init__(self,attrs)
        if isinstance(self.metadata.version, (int,long,float)):
            # Some people apparently take "version number" too literally :)
            self.metadata.version = str(self.metadata.version)
    def parse_command_line(self):
        """Process features after parsing command line options"""
        result = _Distribution.parse_command_line(self)
        if self.features:
            # apply --with-X/--without-X flags collected during parsing
            self._finalize_features()
        return result
def _feature_attrname(self,name):
"""Convert feature name to corresponding option attribute name"""
return 'with_'+name.replace('-','_')
    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        from pkg_resources import working_set, parse_requirements
        # resolve (downloading via fetch_build_egg as needed) everything
        # listed in setup_requires, then make the dists importable now
        for dist in working_set.resolve(
            parse_requirements(requires), installer=self.fetch_build_egg
        ):
            working_set.add(dist)
    def finalize_options(self):
        """Finalize distutils options, expose feature flags, and run the
        validators registered under 'distutils.setup_keywords'."""
        _Distribution.finalize_options(self)
        if self.features:
            self._set_global_opts_from_features()

        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            value = getattr(self,ep.name,None)
            if value is not None:
                # load (possibly downloading) the validator only for
                # keywords that were actually passed to setup()
                ep.require(installer=self.fetch_build_egg)
                ep.load()(self, ep.name, value)
        if getattr(self, 'convert_2to3_doctests', None):
            # XXX may convert to set here when we can rely on set being builtin
            self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
        else:
            self.convert_2to3_doctests = []
    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""
        try:
            # reuse the lazily-created easy_install command between calls
            cmd = self._egg_fetcher
            cmd.package_index.to_scan = []
        except AttributeError:
            from setuptools.command.easy_install import easy_install
            # run easy_install against a throwaway Distribution so this
            # project's own setup options cannot interfere
            dist = self.__class__({'script_args':['easy_install']})
            dist.parse_config_files()
            opts = dist.get_option_dict('easy_install')
            # only these easy_install settings are honored for build eggs
            keep = (
                'find_links', 'site_dirs', 'index_url', 'optimize',
                'site_dirs', 'allow_hosts'
            )
            for key in opts.keys():
                if key not in keep:
                    del opts[key]   # don't use any other settings
            if self.dependency_links:
                links = self.dependency_links[:]
                if 'find_links' in opts:
                    # prepend any configured find_links to ours
                    links = opts['find_links'][1].split() + links
                opts['find_links'] = ('setup', links)
            cmd = easy_install(
                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
                always_copy=False, build_directory=None, editable=False,
                upgrade=False, multi_version=True, no_report=True
            )
            cmd.ensure_finalized()
            self._egg_fetcher = cmd
        return cmd.easy_install(req)
def _set_global_opts_from_features(self):
"""Add --with-X/--without-X options based on optional features"""
go = []
no = self.negative_opt.copy()
for name,feature in self.features.items():
self._set_feature(name,None)
feature.validate(self)
if feature.optional:
descr = feature.description
incdef = ' (default)'
excdef=''
if not feature.include_by_default():
excdef, incdef = incdef, excdef
go.append(('with-'+name, None, 'include '+descr+incdef))
go.append(('without-'+name, None, 'exclude '+descr+excdef))
no['without-'+name] = 'with-'+name
self.global_options = self.feature_options = go + self.global_options
self.negative_opt = self.feature_negopt = no
def _finalize_features(self):
"""Add/remove features and resolve dependencies between them"""
# First, flag all the enabled items (and thus their dependencies)
for name,feature in self.features.items():
enabled = self.feature_is_included(name)
if enabled or (enabled is None and feature.include_by_default()):
feature.include_in(self)
self._set_feature(name,1)
# Then disable the rest, so that off-by-default features don't
# get flagged as errors when they're required by an enabled feature
for name,feature in self.features.items():
if not self.feature_is_included(name):
feature.exclude_from(self)
self._set_feature(name,0)
def get_command_class(self, command):
    """Pluggable version of get_command_class()"""
    try:
        return self.cmdclass[command]
    except KeyError:
        pass
    # The first matching plugin entry point wins and is cached.
    for ep in pkg_resources.iter_entry_points('distutils.commands', command):
        ep.require(installer=self.fetch_build_egg)
        cmdclass = ep.load()
        self.cmdclass[command] = cmdclass
        return cmdclass
    # No plugin provides the command -- defer to plain distutils.
    return _Distribution.get_command_class(self, command)
def print_commands(self):
    """List commands, picking up plugin-provided ones as well."""
    for ep in pkg_resources.iter_entry_points('distutils.commands'):
        if ep.name in self.cmdclass:
            continue
        # don't require extras, we're not running the command
        self.cmdclass[ep.name] = ep.load(False)
    return _Distribution.print_commands(self)
def _set_feature(self,name,status):
"""Set feature's inclusion status"""
setattr(self,self._feature_attrname(name),status)
def feature_is_included(self, name):
    """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
    attr = self._feature_attrname(name)
    return getattr(self, attr)
def include_feature(self, name):
    """Request inclusion of feature named 'name'"""
    if self.feature_is_included(name) == 0:
        # The feature was explicitly excluded earlier, which conflicts
        # with something now requiring it.
        descr = self.features[name].description
        raise DistutilsOptionError(
            descr + " is required, but was excluded or is not available"
        )
    self.features[name].include_in(self)
    self._set_feature(name, 1)
def include(self, **attrs):
    """Add the named items to the distribution.

    Each keyword names a distribution attribute; e.g.
    'dist.include(py_modules=["x"])' adds 'x' to the distribution's
    'py_modules' attribute if it is not already there.

    Only list/tuple attributes are supported out of the box.  To support
    other attributes in this or a subclass, define an '_include_X' method,
    where 'X' is the attribute's name; 'dist.include(foo={"bar":"baz"})'
    will then call 'dist._include_foo({"bar":"baz"})', which can implement
    whatever special inclusion logic is needed.
    """
    for name, value in attrs.items():
        handler = getattr(self, '_include_' + name, None)
        if handler is not None:
            handler(value)
        else:
            self._include_misc(name, value)
def exclude_package(self, package):
    """Remove packages, modules, and extensions in named package"""
    prefix = package + '.'

    def _keep(name):
        # Keep anything that is neither the package itself nor inside it.
        return name != package and not name.startswith(prefix)

    if self.packages:
        self.packages = [p for p in self.packages if _keep(p)]
    if self.py_modules:
        self.py_modules = [m for m in self.py_modules if _keep(m)]
    if self.ext_modules:
        self.ext_modules = [e for e in self.ext_modules if _keep(e.name)]
def has_contents_for(self, package):
    """Return true if 'exclude_package(package)' would do something"""
    prefix = package + '.'
    for name in self.iter_distribution_names():
        if name == package or name.startswith(prefix):
            return True
    # Falls through -> implicit None (falsy), matching historical behavior.
def _exclude_misc(self, name, value):
    """Handle 'exclude()' for list/tuple attrs without a special handler"""
    if not isinstance(value, sequence):
        raise DistutilsSetupError(
            "%s: setting must be a list or tuple (%r)" % (name, value)
        )
    try:
        old = getattr(self, name)
    except AttributeError:
        raise DistutilsSetupError(
            "%s: No such distribution setting" % name
        )
    if old is not None and not isinstance(old, sequence):
        raise DistutilsSetupError(
            name + ": this setting cannot be changed via include/exclude"
        )
    elif old:
        # Keep only the entries that were not asked to be excluded.
        remaining = [item for item in old if item not in value]
        setattr(self, name, remaining)
def _include_misc(self, name, value):
    """Handle 'include()' for list/tuple attrs without a special handler"""
    if not isinstance(value, sequence):
        raise DistutilsSetupError(
            "%s: setting must be a list (%r)" % (name, value)
        )
    try:
        old = getattr(self, name)
    except AttributeError:
        raise DistutilsSetupError(
            "%s: No such distribution setting" % name
        )
    if old is None:
        setattr(self, name, value)
    elif not isinstance(old, sequence):
        raise DistutilsSetupError(
            name + ": this setting cannot be changed via include/exclude"
        )
    else:
        # Append only the genuinely new entries, preserving order.
        additions = [item for item in value if item not in old]
        setattr(self, name, old + additions)
def exclude(self, **attrs):
    """Remove the named items from the distribution.

    Each keyword names a distribution attribute; e.g.
    'dist.exclude(py_modules=["x"])' removes 'x' from 'py_modules'.
    Excluding packages goes through 'exclude_package()', so a package's
    contained packages, modules, and extensions are removed as well.

    Only list/tuple attributes are supported out of the box.  To support
    other attributes in this or a subclass, define an '_exclude_X' method,
    where 'X' is the attribute's name; 'dist.exclude(foo={"bar":"baz"})'
    will then call 'dist._exclude_foo({"bar":"baz"})', which can implement
    whatever special exclusion logic is needed.
    """
    for name, value in attrs.items():
        handler = getattr(self, '_exclude_' + name, None)
        if handler is not None:
            handler(value)
        else:
            self._exclude_misc(name, value)
def _exclude_packages(self, packages):
    """Exclude every package named in `packages` (and its subpackages).

    `packages` must be a list or tuple of package-name strings; anything
    else raises DistutilsSetupError.
    """
    if not isinstance(packages, sequence):
        raise DistutilsSetupError(
            "packages: setting must be a list or tuple (%r)" % (packages,)
        )
    # Bug fix: the original used map(self.exclude_package, packages); under
    # Python 3 map() is lazy, so the exclusions would silently never run.
    # An explicit loop is correct on both Python 2 and 3.
    for package in packages:
        self.exclude_package(package)
def _parse_command_opts(self, parser, args):
    """Parse options for one command, honoring setuptools extensions.

    Resets the feature-generated --with-X/--without-X global options,
    expands command aliases, and supports commands that declare
    ``command_consumes_arguments``.
    """
    # Remove --with-X/--without-X options when processing command args
    self.global_options = self.__class__.global_options
    self.negative_opt = self.__class__.negative_opt

    # First, expand any aliases
    command = args[0]
    aliases = self.get_option_dict('aliases')
    while command in aliases:
        src, alias = aliases[command]
        del aliases[command]    # ensure each alias can expand only once!
        import shlex
        # comments=True so an alias value may carry a trailing comment
        args[:1] = shlex.split(alias, True)
        command = args[0]

    nargs = _Distribution._parse_command_opts(self, parser, args)

    # Handle commands that want to consume all remaining arguments
    cmd_class = self.get_command_class(command)
    if getattr(cmd_class, 'command_consumes_arguments', None):
        # Stash the leftover args as the command's 'args' option and report
        # no remaining arguments to the caller.
        self.get_option_dict(command)['args'] = ("command line", nargs)
        if nargs is not None:
            return []

    return nargs
def get_cmdline_options(self):
    """Return a '{cmd: {opt:val}}' map of all command-line options

    Option names are all long, but do not include the leading '--', and
    contain dashes rather than underscores.  If the option doesn't take
    an argument (e.g. '--quiet'), the 'val' is 'None'.

    Note that options provided by config files are intentionally excluded.
    """
    d = {}
    for cmd, opts in self.command_options.items():
        for opt, (src, val) in opts.items():
            if src != "command line":
                continue
            opt = opt.replace('_', '-')
            if val == 0:
                # A 0 value means a negative option (e.g. --no-X) was used;
                # map it back to the negating option's name.
                cmdobj = self.get_command_obj(cmd)
                neg_opt = self.negative_opt.copy()
                neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                for neg, pos in neg_opt.items():
                    if pos == opt:
                        opt = neg
                        val = None
                        break
                else:
                    # val==0 can only come from a negative option, so one
                    # must exist in the maps we just merged.
                    raise AssertionError("Shouldn't be able to get here")
            elif val == 1:
                # Flag option given positively: represent as 'no argument'.
                val = None
            d.setdefault(cmd, {})[opt] = val
    return d
def iter_distribution_names(self):
    """Yield all packages, modules, and extension names in distribution"""
    for pkg in self.packages or ():
        yield pkg
    for module in self.py_modules or ():
        yield module
    for ext in self.ext_modules or ():
        if isinstance(ext, tuple):
            # old-style (name, build_info) pair
            name, _buildinfo = ext
        else:
            name = ext.name
        # Strip a trailing 'module' suffix from extension names.
        yield name[:-6] if name.endswith('module') else name
# Install it throughout the distutils
# Monkey-patch every distutils module that exposes Distribution, so code
# that reaches for distutils.dist/core/cmd.Distribution gets this subclass.
for module in distutils.dist, distutils.core, distutils.cmd:
    module.Distribution = Distribution
class Feature:
    """A subset of the distribution that can be excluded if unneeded/unwanted

    Features are created using these keyword arguments:

      'description' -- a short, human readable description of the feature, to
         be used in error messages, and option help messages.

      'standard' -- if true, the feature is included by default if it is
         available on the current system.  Otherwise, the feature is only
         included if requested via a command line '--with-X' option, or if
         another included feature requires it.  The default setting is 'False'.

      'available' -- if true, the feature is available for installation on the
         current system.  The default setting is 'True'.

      'optional' -- if true, the feature's inclusion can be controlled from the
         command line, using the '--with-X' or '--without-X' options.  If
         false, the feature's inclusion status is determined automatically,
         based on 'available', 'standard', and whether any other feature
         requires it.  The default setting is 'True'.

      'require_features' -- a string or sequence of strings naming features
         that should also be included if this feature is included.  Defaults to
         empty list.  May also contain 'Require' objects that should be
         added/removed from the distribution.

      'remove' -- a string or list of strings naming packages to be removed
         from the distribution if this feature is *not* included.  If the
         feature *is* included, this argument is ignored.  This argument exists
         to support removing features that "crosscut" a distribution, such as
         defining a 'tests' feature that removes all the 'tests' subpackages
         provided by other features.  The default for this argument is an empty
         list.  (Note: the named package(s) or modules must exist in the base
         distribution when the 'setup()' function is initially called.)

      other keywords -- any other keyword arguments are saved, and passed to
         the distribution's 'include()' and 'exclude()' methods when the
         feature is included or excluded, respectively.  So, for example, you
         could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
         added or removed from the distribution as appropriate.

    A feature must include at least one 'requires', 'remove', or other
    keyword argument.  Otherwise, it can't affect the distribution in any way.
    Note also that you can subclass 'Feature' to create your own specialized
    feature types that modify the distribution in other ways when included or
    excluded.  See the docstrings for the various methods here for more detail.
    Aside from the methods, the only feature attributes that distributions look
    at are 'description' and 'optional'.
    """

    def __init__(self, description, standard=False, available=True,
            optional=True, require_features=(), remove=(), **extras):
        self.description = description
        self.standard = standard
        self.available = available
        self.optional = optional
        # Accept a bare string/Require as shorthand for a 1-tuple.
        if isinstance(require_features, (str, Require)):
            require_features = require_features,
        # Plain feature names are kept here; Require objects are passed on
        # to include()/exclude() via the extras mapping instead.
        self.require_features = [
            r for r in require_features if isinstance(r, str)
        ]
        er = [r for r in require_features if not isinstance(r, str)]
        if er:
            extras['require_features'] = er
        if isinstance(remove, str):
            remove = remove,
        self.remove = remove
        self.extras = extras
        if not remove and not require_features and not extras:
            # Bug fix: the original message contained a bare '%s' that was
            # never interpolated; include the feature's description.
            raise DistutilsSetupError(
                "Feature %s: must define 'require_features', 'remove', or at least one"
                " of 'packages', 'py_modules', etc." % (description,)
            )

    def include_by_default(self):
        """Should this feature be included by default?"""
        return self.available and self.standard

    def include_in(self, dist):
        """Ensure feature and its requirements are included in distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  Note that this method may be called more than once
        per feature, and so should be idempotent.
        """
        if not self.available:
            # Bug fix: the two adjacent string literals used to concatenate
            # into "is required,but is not available..." (missing space).
            raise DistutilsPlatformError(
                self.description + " is required, "
                "but is not available on this platform"
            )
        dist.include(**self.extras)
        for f in self.require_features:
            dist.include_feature(f)

    def exclude_from(self, dist):
        """Ensure feature is excluded from distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  This method will be called at most once per
        feature, and only after all included features have been asked to
        include themselves.
        """
        dist.exclude(**self.extras)
        if self.remove:
            for item in self.remove:
                dist.exclude_package(item)

    def validate(self, dist):
        """Verify that feature makes sense in context of distribution

        This method is called by the distribution just before it parses its
        command line.  It checks to ensure that the 'remove' attribute, if any,
        contains only valid package/module names that are present in the base
        distribution when 'setup()' is called.  You may override it in a
        subclass to perform any other required validation of the feature
        against a target distribution.
        """
        for item in self.remove:
            if not dist.has_contents_for(item):
                raise DistutilsSetupError(
                    "%s wants to be able to remove %s, but the distribution"
                    " doesn't contain any packages or modules under %s"
                    % (self.description, item, item)
                )
def check_packages(dist, attr, value):
    """Warn about entries in the 'packages' keyword that are not valid
    dotted package names.

    Never raises: an odd-but-working layout should not abort the build,
    so invalid names only produce a distutils warning.
    """
    for pkgname in value:
        # Bug fix: without the \Z anchor, 'my-pkg' passed the check because
        # re.match only needs the leading 'my' to match the pattern.
        if not re.match(r'\w+(\.\w+)*\Z', pkgname):
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only"
                ".-separated package names in setup.py", pkgname
            )
| apache-2.0 |
NeuralEnsemble/neuroConstruct | lib/jython/Lib/test/test_shutil.py | 14 | 29246 | # Copyright (C) 2003 Python Software Foundation
import unittest
import shutil
import tempfile
import sys
import stat
import os
import os.path
from os.path import splitdrive
from distutils.spawn import find_executable, spawn
from shutil import (_make_tarball, _make_zipfile, make_archive,
register_archive_format, unregister_archive_format,
get_archive_formats)
import tarfile
import warnings
from test import test_support
from test.test_support import TESTFN, check_warnings, captured_stdout
TESTFN2 = TESTFN + "2"   # second scratch name, used as a copy/move target

# Probe for POSIX user/group database support (absent on Windows/Jython-nt).
try:
    import grp
    import pwd
    UID_GID_SUPPORT = True
except ImportError:
    UID_GID_SUPPORT = False

# zlib is optional; compression-dependent tests are skipped when it is None.
try:
    import zlib
except ImportError:
    zlib = None

# Zip support: the zipfile module, or failing that an external 'zip' binary.
try:
    import zipfile
    ZIP_SUPPORT = True
except ImportError:
    ZIP_SUPPORT = find_executable('zip')
class TestShutil(unittest.TestCase):
    """Exercise shutil's tree copy/removal and archive-creation helpers."""

    def setUp(self):
        super(TestShutil, self).setUp()
        self.tempdirs = []

    def tearDown(self):
        super(TestShutil, self).tearDown()
        while self.tempdirs:
            d = self.tempdirs.pop()
            # On nt/cygwin, ignore rmtree errors during cleanup.
            shutil.rmtree(d, os.name in ('nt', 'cygwin'))

    def write_file(self, path, content='xxx'):
        """Writes a file in the given path.

        path can be a string or a sequence.
        """
        if isinstance(path, (list, tuple)):
            path = os.path.join(*path)
        f = open(path, 'w')
        try:
            f.write(content)
        finally:
            f.close()

    def mkdtemp(self):
        """Create a temporary directory that will be cleaned up.

        Returns the path of the directory.
        """
        d = tempfile.mkdtemp()
        self.tempdirs.append(d)
        return d

    def test_rmtree_errors(self):
        # filename is guaranteed not to exist
        filename = tempfile.mktemp()
        self.assertRaises(OSError, shutil.rmtree, filename)

    # See bug #1071513 for why we don't run this on cygwin
    # and bug #1076467 for why we don't run this as root.
    # XXX: Fails on Jython because Java resets the S_IREAD permission
    # when removing the file
    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
        and not (hasattr(os, 'geteuid') and os.geteuid() == 0)
        and (not test_support.is_jython or os._name != 'nt')):
        def test_on_error(self):
            self.errorState = 0
            os.mkdir(TESTFN)
            self.childpath = os.path.join(TESTFN, 'a')
            f = open(self.childpath, 'w')
            f.close()
            old_dir_mode = os.stat(TESTFN).st_mode
            old_child_mode = os.stat(self.childpath).st_mode
            # Make unwritable.
            os.chmod(self.childpath, stat.S_IREAD)
            os.chmod(TESTFN, stat.S_IREAD)

            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
            # Test whether onerror has actually been called.
            self.assertEqual(self.errorState, 2,
                             "Expected call to onerror function did not happen.")

            # Make writable again.
            os.chmod(TESTFN, old_dir_mode)
            os.chmod(self.childpath, old_child_mode)

            # Clean up.
            shutil.rmtree(TESTFN)

    def check_args_to_onerror(self, func, arg, exc):
        # test_rmtree_errors deliberately runs rmtree
        # on a directory that is chmod 400, which will fail.
        # This function is run when shutil.rmtree fails.
        # 99.9% of the time it initially fails to remove
        # a file in the directory, so the first time through
        # func is os.remove.
        # However, some Linux machines running ZFS on
        # FUSE experienced a failure earlier in the process
        # at os.listdir.  The first failure may legally
        # be either.
        if self.errorState == 0:
            if func is os.remove:
                self.assertEqual(arg, self.childpath)
            else:
                self.assertIs(func, os.listdir,
                              "func must be either os.remove or os.listdir")
                self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 1
        else:
            self.assertEqual(func, os.rmdir)
            self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 2

    def test_rmtree_dont_delete_file(self):
        # When called on a file instead of a directory, don't delete it.
        handle, path = tempfile.mkstemp()
        os.fdopen(handle).close()
        self.assertRaises(OSError, shutil.rmtree, path)
        os.remove(path)

    def test_copytree_simple(self):
        def write_data(path, data):
            f = open(path, "w")
            f.write(data)
            f.close()

        def read_data(path):
            f = open(path)
            data = f.read()
            f.close()
            return data

        src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
        write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        try:
            shutil.copytree(src_dir, dst_dir)
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
                                                        'test.txt')))
            actual = read_data(os.path.join(dst_dir, 'test.txt'))
            self.assertEqual(actual, '123')
            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
            self.assertEqual(actual, '456')
        finally:
            for path in (
                    os.path.join(src_dir, 'test.txt'),
                    os.path.join(dst_dir, 'test.txt'),
                    os.path.join(src_dir, 'test_dir', 'test.txt'),
                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
                    ):
                if os.path.exists(path):
                    os.remove(path)
            for path in (src_dir,
                    os.path.dirname(dst_dir)
                    ):
                if os.path.exists(path):
                    shutil.rmtree(path)

    def test_copytree_with_exclude(self):
        def write_data(path, data):
            f = open(path, "w")
            f.write(data)
            f.close()

        def read_data(path):
            f = open(path)
            data = f.read()
            f.close()
            return data

        # creating data
        join = os.path.join
        exists = os.path.exists
        src_dir = tempfile.mkdtemp()
        try:
            dst_dir = join(tempfile.mkdtemp(), 'destination')
            write_data(join(src_dir, 'test.txt'), '123')
            write_data(join(src_dir, 'test.tmp'), '123')
            os.mkdir(join(src_dir, 'test_dir'))
            write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2'))
            write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
            write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456')
            write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456')

            # testing glob-like patterns
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(exists(join(dst_dir, 'test.txt')))
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)

            # testing callable-style
            try:
                def _filter(src, names):
                    res = []
                    for name in names:
                        path = os.path.join(src, name)

                        # NOTE(review): path.split() splits on whitespace,
                        # not path separators -- presumably os.path.split()
                        # was intended; confirm against upstream CPython.
                        if (os.path.isdir(path) and
                            path.split()[-1] == 'subdir'):
                            res.append(name)
                        # NOTE(review): ('.py') is a plain string, not a
                        # tuple, so this is a substring test; it behaves
                        # like == '.py' here, but verify the intent.
                        elif os.path.splitext(path)[-1] in ('.py'):
                            res.append(name)
                    return res

                shutil.copytree(src_dir, dst_dir, ignore=_filter)

                # checking the result: some elements should not be copied
                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
                                                'test.py')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(os.path.dirname(dst_dir))

    if hasattr(os, "symlink"):
        def test_dont_copy_file_onto_link_to_itself(self):
            # bug 851123.
            os.mkdir(TESTFN)
            src = os.path.join(TESTFN, 'cheese')
            dst = os.path.join(TESTFN, 'shop')
            try:
                f = open(src, 'w')
                f.write('cheddar')
                f.close()

                os.link(src, dst)
                self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
                with open(src, 'r') as f:
                    self.assertEqual(f.read(), 'cheddar')
                os.remove(dst)

                # Using `src` here would mean we end up with a symlink pointing
                # to TESTFN/TESTFN/cheese, while it should point at
                # TESTFN/cheese.
                os.symlink('cheese', dst)
                self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
                with open(src, 'r') as f:
                    self.assertEqual(f.read(), 'cheddar')
                os.remove(dst)
            finally:
                try:
                    shutil.rmtree(TESTFN)
                except OSError:
                    pass

        def test_rmtree_on_symlink(self):
            # bug 1669.
            os.mkdir(TESTFN)
            try:
                src = os.path.join(TESTFN, 'cheese')
                dst = os.path.join(TESTFN, 'shop')
                os.mkdir(src)
                os.symlink(src, dst)
                self.assertRaises(OSError, shutil.rmtree, dst)
            finally:
                shutil.rmtree(TESTFN, ignore_errors=True)

    if hasattr(os, "mkfifo"):
        # Issue #3002: copyfile and copytree block indefinitely on named pipes
        def test_copyfile_named_pipe(self):
            os.mkfifo(TESTFN)
            try:
                self.assertRaises(shutil.SpecialFileError,
                                  shutil.copyfile, TESTFN, TESTFN2)
                self.assertRaises(shutil.SpecialFileError,
                                  shutil.copyfile, __file__, TESTFN)
            finally:
                os.remove(TESTFN)

        def test_copytree_named_pipe(self):
            os.mkdir(TESTFN)
            try:
                subdir = os.path.join(TESTFN, "subdir")
                os.mkdir(subdir)
                pipe = os.path.join(subdir, "mypipe")
                os.mkfifo(pipe)
                try:
                    shutil.copytree(TESTFN, TESTFN2)
                except shutil.Error as e:
                    errors = e.args[0]
                    self.assertEqual(len(errors), 1)
                    src, dst, error_msg = errors[0]
                    self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
                else:
                    self.fail("shutil.Error should have been raised")
            finally:
                shutil.rmtree(TESTFN, ignore_errors=True)
                shutil.rmtree(TESTFN2, ignore_errors=True)

    @unittest.skipUnless(zlib, "requires zlib")
    def test_make_tarball(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')
        os.mkdir(os.path.join(tmpdir, 'sub'))
        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

        tmpdir2 = self.mkdtemp()
        # NOTE(review): this builds a skip decorator and discards its result,
        # so the same-drive precondition is never actually enforced --
        # presumably self.skipTest(...) was intended; confirm upstream.
        unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
                            "source and target should be on same drive")

        base_name = os.path.join(tmpdir2, 'archive')

        # working with relative paths to avoid tar warnings
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(splitdrive(base_name)[1], '.')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    def _tarinfo(self, path):
        # Return the sorted member names of the tar archive at `path`.
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return tuple(names)
        finally:
            tar.close()

    def _create_files(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        return tmpdir, tmpdir2, base_name

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
                         'Need the tar command to run')
    def test_tarfile_vs_tar(self):
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with captured_stdout() as s:
                spawn(tar_cmd)
                spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)
        self.assertTrue(os.path.exists(tarball2))

        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')

        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        _make_zipfile(base_name, tmpdir)

        # check if the compressed tarball was created
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))

    def test_make_archive(self):
        # An unknown archive format must be rejected.
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        self.assertRaises(ValueError, make_archive, base_name, 'xxx')

    @unittest.skipUnless(zlib, "Requires zlib")
    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's not gid/uid support
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir, root_dir, base_name = self._create_files()
        base_name = os.path.join(self.mkdtemp() , 'archive')
        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner=owner, group=group)
        self.assertTrue(os.path.exists(res))

        # Unknown owner/group names must not break archive creation.
        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner='kjhkjhkjg', group='oihohoh')
        self.assertTrue(os.path.exists(res))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
    def test_tarfile_root_owner(self):
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
        try:
            archive_name = _make_tarball(base_name, 'dist', compress=None,
                                         owner=owner, group=group)
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        self.assertTrue(os.path.exists(archive_name))

        # now checks the rights
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                self.assertEqual(member.uid, 0)
                self.assertEqual(member.gid, 0)
        finally:
            archive.close()

    def test_make_archive_cwd(self):
        # make_archive must restore the cwd even when the archiver fails.
        current_dir = os.getcwd()
        def _breaks(*args, **kw):
            raise RuntimeError()
        register_archive_format('xxx', _breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except Exception:
                pass
            self.assertEqual(os.getcwd(), current_dir)
        finally:
            unregister_archive_format('xxx')

    def test_register_archive_format(self):
        # function must be callable; extra_args must be a list of 2-tuples.
        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                          1)
        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                          [(1, 2), (1, 2, 3)])
        register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file')
        formats = [name for name, params in get_archive_formats()]
        self.assertIn('xxx', formats)

        unregister_archive_format('xxx')
        formats = [name for name, params in get_archive_formats()]
        self.assertNotIn('xxx', formats)
class TestMove(unittest.TestCase):
def setUp(self):
filename = "foo"
self.src_dir = tempfile.mkdtemp()
self.dst_dir = tempfile.mkdtemp()
self.src_file = os.path.join(self.src_dir, filename)
self.dst_file = os.path.join(self.dst_dir, filename)
# Try to create a dir in the current directory, hoping that it is
# not located on the same filesystem as the system tmp dir.
try:
self.dir_other_fs = tempfile.mkdtemp(
dir=os.path.dirname(__file__))
self.file_other_fs = os.path.join(self.dir_other_fs,
filename)
except OSError:
self.dir_other_fs = None
with open(self.src_file, "wb") as f:
f.write("spam")
def tearDown(self):
for d in (self.src_dir, self.dst_dir, self.dir_other_fs):
try:
if d:
shutil.rmtree(d)
except:
pass
def _check_move_file(self, src, dst, real_dst):
with open(src, "rb") as f:
contents = f.read()
shutil.move(src, dst)
with open(real_dst, "rb") as f:
self.assertEqual(contents, f.read())
self.assertFalse(os.path.exists(src))
def _check_move_dir(self, src, dst, real_dst):
contents = sorted(os.listdir(src))
shutil.move(src, dst)
self.assertEqual(contents, sorted(os.listdir(real_dst)))
self.assertFalse(os.path.exists(src))
def test_move_file(self):
# Move a file to another location on the same filesystem.
self._check_move_file(self.src_file, self.dst_file, self.dst_file)
def test_move_file_to_dir(self):
# Move a file inside an existing dir on the same filesystem.
self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
def test_move_file_other_fs(self):
# Move a file to an existing dir on another filesystem.
if not self.dir_other_fs:
# skip
return
self._check_move_file(self.src_file, self.file_other_fs,
self.file_other_fs)
def test_move_file_to_dir_other_fs(self):
# Move a file to another location on another filesystem.
if not self.dir_other_fs:
# skip
return
self._check_move_file(self.src_file, self.dir_other_fs,
self.file_other_fs)
def test_move_dir(self):
# Move a dir to another location on the same filesystem.
dst_dir = tempfile.mktemp()
try:
self._check_move_dir(self.src_dir, dst_dir, dst_dir)
finally:
try:
shutil.rmtree(dst_dir)
except:
pass
def test_move_dir_other_fs(self):
# Move a dir to another location on another filesystem.
if not self.dir_other_fs:
# skip
return
dst_dir = tempfile.mktemp(dir=self.dir_other_fs)
try:
self._check_move_dir(self.src_dir, dst_dir, dst_dir)
finally:
try:
shutil.rmtree(dst_dir)
except:
pass
def test_move_dir_to_dir(self):
# Move a dir inside an existing dir on the same filesystem.
self._check_move_dir(self.src_dir, self.dst_dir,
os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
def test_move_dir_to_dir_other_fs(self):
# Move a dir inside an existing dir on another filesystem.
if not self.dir_other_fs:
# skip
return
self._check_move_dir(self.src_dir, self.dir_other_fs,
os.path.join(self.dir_other_fs, os.path.basename(self.src_dir)))
def test_existing_file_inside_dest_dir(self):
    # A file with the same name inside the destination dir already exists:
    # shutil.move() must refuse to overwrite it and raise shutil.Error.
    with open(self.dst_file, "wb"):
        pass
    self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)

def test_dont_move_dir_in_itself(self):
    # Moving a dir inside itself raises an Error.
    dst = os.path.join(self.src_dir, "bar")
    self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
def test_destinsrc_false_negative(self):
    """A destination nested inside the source must be detected as such."""
    os.mkdir(TESTFN)
    try:
        cases = [('srcdir', 'srcdir/dest')]
        for src_name, dst_name in cases:
            src = os.path.join(TESTFN, src_name)
            dst = os.path.join(TESTFN, dst_name)
            err = '_destinsrc() wrongly concluded that dst (%s) is not in src (%s)' % (dst, src)
            self.assertTrue(shutil._destinsrc(src, dst), msg=err)
    finally:
        shutil.rmtree(TESTFN, ignore_errors=True)

def test_destinsrc_false_positive(self):
    """Siblings and prefix-named paths must NOT be reported as nested."""
    os.mkdir(TESTFN)
    try:
        cases = [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]
        for src_name, dst_name in cases:
            src = os.path.join(TESTFN, src_name)
            dst = os.path.join(TESTFN, dst_name)
            err = '_destinsrc() wrongly concluded that dst (%s) is in src (%s)' % (dst, src)
            self.assertFalse(shutil._destinsrc(src, dst), msg=err)
    finally:
        shutil.rmtree(TESTFN, ignore_errors=True)
class TestCopyFile(unittest.TestCase):
    """Exercise shutil.copyfile() failure modes with fake file objects.

    shutil.open is monkey-patched with a factory returning Faux objects so
    that open/close failures can be simulated without touching the disk.
    """

    # True once shutil.open has been monkey-patched, so tearDown()
    # knows it must remove the patch.
    _delete = False

    class Faux(object):
        """A fake file object that records how it is used as a context manager."""
        _entered = False
        _exited_with = None
        _raised = False

        def __init__(self, raise_in_exit=False, suppress_at_exit=True):
            self._raise_in_exit = raise_in_exit
            self._suppress_at_exit = suppress_at_exit

        def read(self, *args):
            # Report immediate EOF so the copy loop terminates at once.
            return ''

        def __enter__(self):
            self._entered = True

        def __exit__(self, exc_type, exc_val, exc_tb):
            # Remember the exception info the with-block exited with,
            # for later assertions.
            self._exited_with = exc_type, exc_val, exc_tb
            if self._raise_in_exit:
                self._raised = True
                raise IOError("Cannot close")
            return self._suppress_at_exit

    def tearDown(self):
        # Undo the monkey-patch installed by _set_shutil_open().
        if self._delete:
            del shutil.open

    def _set_shutil_open(self, func):
        # Replace the open() used inside the shutil module with *func*.
        shutil.open = func
        self._delete = True

    def test_w_source_open_fails(self):
        # A failure to open the source must propagate to the caller.
        def _open(filename, mode='r'):
            if filename == 'srcfile':
                raise IOError('Cannot open "srcfile"')
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')

    def test_w_dest_open_fails(self):
        # A failure to open the destination is seen by the source's
        # __exit__() and suppressed by Faux (suppress_at_exit=True).
        srcfile = self.Faux()

        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                raise IOError('Cannot open "destfile"')
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot open "destfile"',))

    def test_w_dest_close_fails(self):
        # An IOError raised while closing the destination reaches the
        # source's __exit__() and is suppressed there.
        srcfile = self.Faux()
        destfile = self.Faux(True)

        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertTrue(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot close',))

    def test_w_source_close_fails(self):
        # An IOError raised while closing the source propagates to the
        # caller; the source exited its own with-block cleanly first.
        srcfile = self.Faux(True)
        destfile = self.Faux()

        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        self.assertRaises(IOError,
                          shutil.copyfile, 'srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertFalse(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is None)
        self.assertTrue(srcfile._raised)
def test_main():
    # Run the shutil test cases under the regrtest driver (Python 2's
    # test.test_support helper module).
    test_support.run_unittest(TestShutil, TestMove, TestCopyFile)


if __name__ == '__main__':
    test_main()
| gpl-2.0 |
sestrella/ansible | lib/ansible/modules/network/check_point/cp_mgmt_application_site_group_facts.py | 20 | 4493 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Standard Ansible metadata describing the module's support status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

# YAML documentation consumed by ansible-doc; the string content is data
# and is preserved verbatim.
DOCUMENTATION = """
---
module: cp_mgmt_application_site_group_facts
short_description: Get application-site-group objects facts on Check Point over Web Services API
description:
- Get application-site-group objects facts on Check Point devices.
- All operations are performed over Web Services API.
- This module handles both operations, get a specific object and get several objects,
For getting a specific object use the parameter 'name'.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
name:
description:
- Object name.
This parameter is relevant only for getting a specific object.
type: str
details_level:
description:
- The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
representation of the object.
type: str
choices: ['uid', 'standard', 'full']
limit:
description:
- No more than that many results will be returned.
This parameter is relevant only for getting few objects.
type: int
offset:
description:
- Skip that many results before beginning to return them.
This parameter is relevant only for getting few objects.
type: int
order:
description:
- Sorts results by the given field. By default the results are sorted in the ascending order by name.
This parameter is relevant only for getting few objects.
type: list
suboptions:
ASC:
description:
- Sorts results by the given field in ascending order.
type: str
choices: ['name']
DESC:
description:
- Sorts results by the given field in descending order.
type: str
choices: ['name']
dereference_group_members:
description:
- Indicates whether to dereference "members" field by details level for every object in reply.
type: bool
show_membership:
description:
- Indicates whether to calculate and show "groups" field for every object in reply.
type: bool
extends_documentation_fragment: checkpoint_facts
"""

# Usage examples shown by ansible-doc; preserved verbatim.
EXAMPLES = """
- name: show-application-site-group
cp_mgmt_application_site_group_facts:
name: New Application Site Group 1
- name: show-application-site-groups
cp_mgmt_application_site_group_facts:
details_level: standard
limit: 50
offset: 0
"""

# Return-value documentation shown by ansible-doc; preserved verbatim.
RETURN = """
ansible_facts:
description: The checkpoint object facts.
returned: always.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_facts, api_call_facts
def main():
    """Build the module argument spec and run the facts API call."""
    # Suboptions for the 'order' list: sort ascending or descending by name.
    order_suboptions = dict(
        ASC=dict(type='str', choices=['name']),
        DESC=dict(type='str', choices=['name']),
    )
    argument_spec = dict(
        name=dict(type='str'),
        details_level=dict(type='str', choices=['uid', 'standard', 'full']),
        limit=dict(type='int'),
        offset=dict(type='int'),
        order=dict(type='list', options=order_suboptions),
        dereference_group_members=dict(type='bool'),
        show_membership=dict(type='bool'),
    )
    # Merge the shared Check Point facts arguments (connection, etc.).
    argument_spec.update(checkpoint_argument_spec_for_facts)

    module = AnsibleModule(argument_spec=argument_spec)

    api_call_object = "application-site-group"
    api_call_object_plural_version = "application-site-groups"

    result = api_call_facts(module, api_call_object, api_call_object_plural_version)
    module.exit_json(ansible_facts=result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
QuLogic/specfem3d | CUBIT_GEOCUBIT/geocubitlib/absorbing_boundary.py | 4 | 12965 | #!python
# Retrieving absorbing boundaries.
# P. Galvez (ETH-Zurich, 10.09.2011):
# This function is based on Emmanuele Cassarotti , boundary_definition.py routine.
#
# It returns absorbing_surf,absorbing_surf_xmin,absorbing_surf_xmax,
# absorbing_surf_ymin,absorbing_surf_ymax,absorbing_surf_bottom,topo_surf
# where absorbing_surf is the list of all the absorbing boundary surf
# absorbing_surf_xmin is the list of the absorbing boundary surfaces that correnspond to x=xmin
# ...
# absorbing_surf_bottom is the list of the absorbing boundary surfaces that correspond to z=zmin
class abs_surface:
    """Groups the absorbing-boundary surface lists of the four lateral sides."""

    def __init__(self, xmin, xmax, ymin, ymax):
        # Store each side's surfaces under its axis-limit name.
        sides = ('xmin', 'xmax', 'ymin', 'ymax')
        for attr, surfaces in zip(sides, (xmin, xmax, ymin, ymax)):
            setattr(self, attr, surfaces)
class abs_surface_topo:
    """Groups boundary surfaces: four lateral sides plus bottom and topography."""

    def __init__(self, xmin, xmax, ymin, ymax, bottom, topo):
        # Store each boundary's surfaces under its side name.
        sides = ('xmin', 'xmax', 'ymin', 'ymax', 'bottom', 'topo')
        for attr, surfaces in zip(sides, (xmin, xmax, ymin, ymax, bottom, topo)):
            setattr(self, attr, surfaces)
# Emmanuele Cassarotti function for Parallel absorbing boundaries.
# WARNING : absorbing.surf deleted due to CUBIT 13.0 does not allow elements beloging to diferent blocks.
def define_parallel_absorbing_surf():
    """
    define the absorbing surfaces for a layered topological box where boundary are surfaces parallel to the axis.
    it returns absorbing_surf,absorbing_surf_xmin,absorbing_surf_xmax,absorbing_surf_ymin,absorbing_surf_ymax,absorbing_surf_bottom,topo_surf
    where
    absorbing_surf is the list of all the absorbing boundary surf
    absorbing_surf_xmin is the list of the absorbing boundary surfaces that correnspond to x=xmin
    ...
    absorbing_surf_bottom is the list of the absorbing boundary surfaces that correspond to z=zmin
    """
    # Ensure a live CUBIT session exists; initialise one if needed.
    try:
        cubit.cmd('comment')
    except:
        try:
            import cubit
            cubit.init([""])
        except:
            print 'error importing cubit'
            import sys
            sys.exit()
    absorbing_surf_xmin=[]
    absorbing_surf_xmax=[]
    absorbing_surf_ymin=[]
    absorbing_surf_ymax=[]
    absorbing_surf_bottom=[]
    top_surf=[]
    list_vol=cubit.parse_cubit_list("volume","all")
    init_n_vol=len(list_vol)
    # Bounding box of all volumes; see index comment below for the layout.
    zmax_box=cubit.get_total_bounding_box("volume",list_vol)[7]
    zmin_box=cubit.get_total_bounding_box("volume",list_vol)[6] #it is the z_min of the box ... box= xmin,xmax,d,ymin,ymax,d,zmin...
    xmin_box=cubit.get_total_bounding_box("volume",list_vol)[0]
    xmax_box=cubit.get_total_bounding_box("volume",list_vol)[1]
    ymin_box=cubit.get_total_bounding_box("volume",list_vol)[3]
    ymax_box=cubit.get_total_bounding_box("volume",list_vol)[4]
    list_surf=cubit.parse_cubit_list("surface","all")
    print '##boundary box: '
    print '## x min: ' + str(xmin_box)
    print '## y min: ' + str(ymin_box)
    print '## z min: ' + str(zmin_box)
    print '## x max: ' + str(xmax_box)
    print '## y max: ' + str(ymax_box)
    print '## z max: ' + str(zmax_box)
    #box lengths
    x_len = abs( xmax_box - xmin_box)
    y_len = abs( ymax_box - ymin_box)
    z_len = abs( zmax_box - zmin_box)
    print '##boundary box: '
    print '## x length: ' + str(x_len)
    print '## y length: ' + str(y_len)
    print '## z length: ' + str(z_len)
    # tolerance parameters (relative to the box side lengths)
    absorbing_surface_distance_tolerance=0.005
    topographic_surface_distance_tolerance=0.001
    topographic_surface_normal_tolerance=0.2
    # Classify every surface by how close its centre lies to a box face.
    for k in list_surf:
        center_point = cubit.get_center_point("surface", k)
        if abs((center_point[0] - xmin_box)/x_len) <= absorbing_surface_distance_tolerance:
            absorbing_surf_xmin.append(k)
        elif abs((center_point[0] - xmax_box)/x_len) <= absorbing_surface_distance_tolerance:
            absorbing_surf_xmax.append(k)
        elif abs((center_point[1] - ymin_box)/y_len) <= absorbing_surface_distance_tolerance:
            absorbing_surf_ymin.append(k)
        elif abs((center_point[1] - ymax_box)/y_len) <= absorbing_surface_distance_tolerance:
            absorbing_surf_ymax.append(k)
        elif abs((center_point[2] - zmin_box)/z_len) <= absorbing_surface_distance_tolerance:
            print 'center_point[2]' + str(center_point[2])
            print 'kz:' + str(k)
            absorbing_surf_bottom.append(k)
        else:
            # Not on a lateral/bottom face: a surface close to z=zmax with an
            # (almost) upward normal is taken as topography.
            sbox=cubit.get_bounding_box('surface',k)
            dz=abs((sbox[7] - zmax_box)/z_len)
            normal=cubit.get_surface_normal(k)
            zn=normal[2]
            dn=abs(zn-1)
            if dz <= topographic_surface_distance_tolerance and dn < topographic_surface_normal_tolerance:
                top_surf.append(k)
    return absorbing_surf_xmin,absorbing_surf_xmax,absorbing_surf_ymin,absorbing_surf_ymax,absorbing_surf_bottom,top_surf
def define_top_bottom_absorbing_surf(zmin_box,zmax_box):
    """
    Classify only the bottom (z=zmin_box) and topographic (near z=zmax_box)
    surfaces; the z limits are supplied by the caller.

    absorbing_surf_bottom is the list of the absorbing boundary surfaces that correspond to z=zmin
    """
    # Ensure a live CUBIT session exists; initialise one if needed.
    try:
        cubit.cmd('comment')
    except:
        try:
            import cubit
            cubit.init([""])
        except:
            print 'error importing cubit'
            import sys
            sys.exit()
    absorbing_surf_bottom=[]
    top_surf = []
    list_vol=cubit.parse_cubit_list("volume","all")
    init_n_vol=len(list_vol)
    # TO DO : Make zmin_box work properly.
    # zmax_box=cubit.get_total_bounding_box("volume",list_vol)[7]
    # zmin_box=cubit.get_total_bounding_box("volume",list_vol)[6] #it is the z_min of the box ... box= xmin,xmax,d,ymin,ymax,d,zmin...
    xmin_box=cubit.get_total_bounding_box("volume",list_vol)[0]
    xmax_box=cubit.get_total_bounding_box("volume",list_vol)[1]
    ymin_box=cubit.get_total_bounding_box("volume",list_vol)[3]
    ymax_box=cubit.get_total_bounding_box("volume",list_vol)[4]
    list_surf=cubit.parse_cubit_list("surface","all")
    print '##boundary box: '
    print '## x min: ' + str(xmin_box)
    print '## y min: ' + str(ymin_box)
    print '## z min: ' + str(zmin_box)
    print '## x max: ' + str(xmax_box)
    print '## y max: ' + str(ymax_box)
    print '## z max: ' + str(zmax_box)
    #box lengths
    x_len = abs( xmax_box - xmin_box)
    y_len = abs( ymax_box - ymin_box)
    z_len = abs( zmax_box - zmin_box)
    print '##boundary box: '
    print '## x length: ' + str(x_len)
    print '## y length: ' + str(y_len)
    print '## z length: ' + str(z_len)
    # tolerance parameters (relative to the box side lengths)
    absorbing_surface_distance_tolerance=0.005
    topographic_surface_distance_tolerance=0.001
    topographic_surface_normal_tolerance=0.2
    # Bottom: surface centre close to z=zmin_box. Topography: surface whose
    # bounding-box top is near z=zmax_box and whose normal points (almost) up.
    for k in list_surf:
        center_point = cubit.get_center_point("surface", k)
        if abs((center_point[2] - zmin_box)/z_len) <= absorbing_surface_distance_tolerance:
            print 'center_point[2]' + str(center_point[2])
            print 'kz:' + str(k)
            absorbing_surf_bottom.append(k)
        else:
            sbox=cubit.get_bounding_box('surface',k)
            dz=abs((sbox[7] - zmax_box)/z_len)
            normal=cubit.get_surface_normal(k)
            zn=normal[2]
            dn=abs(zn-1)
            if dz <= topographic_surface_distance_tolerance and dn < topographic_surface_normal_tolerance:
                top_surf.append(k)
    return absorbing_surf_bottom,top_surf
def build_block(vol_list,name):
from sets import Set
try:
cubit.cmd('comment')
except:
try:
import cubit
cubit.init([""])
except:
print 'error importing cubit'
import sys
sys.exit()
block_list=cubit.get_block_id_list()
if len(block_list) > 0:
id_block=max(block_list)
else:
id_block=0
for v,n in zip(vol_list,name):
id_block+=1
v_other=Set(vol_list)-Set([v])
command= 'block '+str(id_block)+' hex in vol '+str(v)
command = command.replace("["," ").replace("]"," ")
cubit.cmd(command)
command = "block "+str(id_block)+" name '"+n+"'"
cubit.cmd(command)
def define_block():
    """
    Renumbering number of volumes from 1 to NVOLUMES.

    Returns (list_vol, list_name): the CUBIT volume ids and a matching
    list of names of the form 'vol<id>'.
    """
    # Ensure a live CUBIT session exists; initialise one if needed.
    try:
        cubit.cmd('comment')
    except:
        try:
            import cubit
            cubit.init([""])
        except:
            print 'error importing cubit'
            import sys
            sys.exit()
    list_vol=cubit.parse_cubit_list("volume","all")
    init_n_vol=len(list_vol)
    # Name each volume 'vol<N>' after its CUBIT id.
    list_name=map(lambda x: 'vol'+x,map(str,list_vol))
    return list_vol,list_name
def build_block_side(surf_list,name,obj='surface'):
    """Create a block (or nodeset for obj='node') named *name* from the
    hexes/faces/edges/nodes attached to the surfaces in *surf_list*.

    Supported obj values: 'hex', 'node', 'face', 'edge'; anything else
    only prints a warning and issues no CUBIT command.
    """
    # Ensure a live CUBIT session exists; initialise one if needed.
    try:
        cubit.cmd('comment')
    except:
        try:
            import cubit
            cubit.init([""])
        except:
            print 'error importing cubit'
            import sys
            sys.exit()
    id_nodeset=cubit.get_next_nodeset_id()
    id_block=cubit.get_next_block_id()
    # Build the entity-selection command (txt1) and the naming command (txt2).
    if obj == 'hex':
        txt='hex in node in surface'
        txt1='block '+str(id_block)+ ' '+ txt +' '+str(list(surf_list))
        txt2="block "+str(id_block)+" name '"+name+"'"
        txt1=txt1.replace("["," ").replace("]"," ")
    elif obj == 'node':
        txt=obj+' in surface'
        txt1= 'nodeset '+str(id_nodeset)+ ' '+ txt +' '+str(list(surf_list))
        txt1 = txt1.replace("["," ").replace("]"," ")
        txt2 = "nodeset "+str(id_nodeset)+" name '"+name+"'"
    elif obj == 'face' or obj == 'edge':
        txt=obj+' in surface'
        txt1= 'block '+str(id_block)+ ' '+ txt +' '+str(list(surf_list))
        txt1 = txt1.replace("["," ").replace("]"," ")
        txt2 = "block "+str(id_block)+" name '"+name+"'"
    else:
        txt1=''
        # do not execute: block id might be wrong
        print "##block "+str(id_block)+" name '"+name+"_notsupported (only hex,face,edge,node)'"
        txt2=''
    cubit.cmd(txt1)
    cubit.cmd(txt2)
def define_bc(entities,zmin,zmax,self):
    """Build boundary blocks for each entity kind in *entities*.

    NOTE: *self* is NOT a method receiver here -- it is an abs_surface
    instance passed as the last positional argument, providing the four
    lateral surface lists.
    """
    # Temporal : Variable zmin should be obtained automatically.
    xmin = self.xmin
    xmax = self.xmax
    ymin = self.ymin
    ymax = self.ymax
    bottom,topo=define_top_bottom_absorbing_surf(zmin,zmax)
    v_list,name_list=define_block()
    build_block(v_list,name_list)
    print entities
    for entity in entities:
        print "##entity: "+str(entity)
        build_block_side(xmin,entity+'_abs_xmin',obj=entity)
        build_block_side(xmax,entity+'_abs_xmax',obj=entity)
        build_block_side(ymin,entity+'_abs_ymin',obj=entity)
        build_block_side(ymax,entity+'_abs_ymax',obj=entity)
        build_block_side(bottom,entity+'_abs_bottom',obj=entity)
        build_block_side(topo,entity+'_topo',obj=entity)
def define_parallel_bc(entities):
xmax = []
ymin = []
ymax = []
zmin = []
zmax = []
#Extracting parallel surfaces.
xmin,xmax,ymin,ymax,bottom,topo=define_parallel_absorbing_surf()
v_list,name_list=define_block()
build_block(v_list,name_list)
print entities
for entity in entities:
print "##entity: "+str(entity)
build_block_side(xmin,entity+'_abs_xmin',obj=entity)
build_block_side(xmax,entity+'_abs_xmax',obj=entity)
build_block_side(ymin,entity+'_abs_ymin',obj=entity)
build_block_side(ymax,entity+'_abs_ymax',obj=entity)
build_block_side(bottom,entity+'_abs_bottom',obj=entity)
build_block_side(topo,entity+'_topo',obj=entity)
def define_boundaries(entities,xmin,xmax,ymin,ymax,zmin,zmax):
    """Build one block per boundary side for each entity kind, using the
    caller-supplied surface lists (zmin = bottom surfaces, zmax = topography).
    """
    bottom=zmin
    topo=zmax
    v_list,name_list=define_block()
    build_block(v_list,name_list)
    print entities
    for entity in entities:
        print "##entity: "+str(entity)
        build_block_side(xmin,entity+'_abs_xmin',obj=entity)
        build_block_side(xmax,entity+'_abs_xmax',obj=entity)
        build_block_side(ymin,entity+'_abs_ymin',obj=entity)
        build_block_side(ymax,entity+'_abs_ymax',obj=entity)
        build_block_side(bottom,entity+'_abs_bottom',obj=entity)
        build_block_side(topo,entity+'_topo',obj=entity)
def define_bc_topo(entities,self):
    """Build boundary blocks from an abs_surface_topo instance.

    NOTE: *self* is NOT a method receiver here -- it is an
    abs_surface_topo instance passed as the last positional argument.
    """
    # Temporal : Variable zmin should be obtained automatically.
    xmin = self.xmin
    xmax = self.xmax
    ymin = self.ymin
    ymax = self.ymax
    bottom = self.bottom
    topo = self.topo
    v_list,name_list=define_block()
    build_block(v_list,name_list)
    print entities
    for entity in entities:
        print "##entity: "+str(entity)
        build_block_side(xmin,entity+'_abs_xmin',obj=entity)
        build_block_side(xmax,entity+'_abs_xmax',obj=entity)
        build_block_side(ymin,entity+'_abs_ymin',obj=entity)
        build_block_side(ymax,entity+'_abs_ymax',obj=entity)
        build_block_side(bottom,entity+'_abs_bottom',obj=entity)
        build_block_side(topo,entity+'_topo',obj=entity)
| gpl-2.0 |
azureplus/chromium_depot_tools | third_party/pylint/gui.py | 56 | 19674 | # Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Tkinker gui for pylint"""
from __future__ import print_function
import os
import sys
import re
from threading import Thread
import six
from six.moves.tkinter import (
Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
Checkbutton, Radiobutton, IntVar, StringVar, PanedWindow,
TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
HORIZONTAL, DISABLED, NORMAL, W,
)
from six.moves.tkinter_tkfiledialog import (
askopenfilename, askdirectory,
)
import pylint.lint
from pylint.reporters.guireporter import GUIReporter
# Per-user file in which the recently-linted module history is stored.
HOME = os.path.expanduser('~/')
HISTORY = '.pylint-gui-history'
# Foreground colours used for each pylint message-category prefix.
COLORS = {'(I)':'lightblue',
          '(C)':'blue', '(R)':'darkblue',
          '(W)':'black', '(E)':'darkred',
          '(F)':'red'}
def convert_to_string(msg):
    """Render a pylint message as '(<category>) <module>[.<obj>] [<line>]: <text>'."""
    location = "%s.%s" % (msg.module, msg.obj) if msg.obj else msg.module
    return "(%s) %s [%d]: %s" % (msg.C, location, msg.line, msg.msg)
class BasicStream(object):
    '''
    used in gui reporter instead of writing to stdout, it is written to
    this stream and saved in contents
    '''
    def __init__(self, gui):
        """init"""
        self.curline = ""
        self.gui = gui
        # Flat list of every output line received so far.
        self.contents = []
        # Maps a section title to the list of lines belonging to it.
        self.outdict = {}
        # Title of the section currently being filled (key into outdict).
        self.currout = None
        # Most recent non-blank line; becomes the title of the next section
        # when an underline ('---'/'===') is seen.
        self.next_title = None

    def write(self, text):
        """write text to the stream"""
        # A full line of '-' or '=' is an underline marking a section
        # boundary: the previously remembered line becomes the new section.
        if re.match('^--+$', text.strip()) or re.match('^==+$', text.strip()):
            if self.currout:
                # The title and the trailing blank were appended to the old
                # section as ordinary lines; remove them again.
                self.outdict[self.currout].remove(self.next_title)
                self.outdict[self.currout].pop()
            self.currout = self.next_title
            self.outdict[self.currout] = ['']
        if text.strip():
            self.next_title = text.strip()
        # A leading newline starts a fresh accumulation line.
        if text.startswith(os.linesep):
            self.contents.append('')
            if self.currout:
                self.outdict[self.currout].append('')
        self.contents[-1] += text.strip(os.linesep)
        if self.currout:
            self.outdict[self.currout][-1] += text.strip(os.linesep)
        # A trailing newline on non-blank text also terminates the line.
        if text.endswith(os.linesep) and text.strip():
            self.contents.append('')
            if self.currout:
                self.outdict[self.currout].append('')

    def fix_contents(self):
        """finalize what the contents of the dict should look like before output"""
        for item in self.outdict:
            # Drop every blank line, then the leading (title-echo) line.
            num_empty = self.outdict[item].count('')
            for _ in range(num_empty):
                self.outdict[item].remove('')
            if self.outdict[item]:
                self.outdict[item].pop(0)

    def output_contents(self):
        """output contents of dict to the gui, and set the rating"""
        self.fix_contents()
        self.gui.tabs = self.outdict
        try:
            self.gui.rating.set(self.outdict['Global evaluation'][0])
        except KeyError:
            self.gui.rating.set('Error')
        self.gui.refresh_results_window()
        #reset stream variables for next run
        self.contents = []
        self.outdict = {}
        self.currout = None
        self.next_title = None
class LintGui(object):
    """Build and control a window to interact with pylint"""

    def __init__(self, root=None):
        """init"""
        self.root = root or Tk()
        self.root.title('Pylint')
        #reporter
        self.reporter = None
        #message queue for output from reporter
        self.msg_queue = six.moves.queue.Queue()
        self.msgs = []
        self.visible_msgs = []
        self.filenames = []
        self.rating = StringVar()
        self.tabs = {}
        self.report_stream = BasicStream(self)
        #gui objects (created in init_gui)
        self.lb_messages = None
        self.showhistory = None
        self.results = None
        self.btnRun = None
        self.information_box = None
        self.convention_box = None
        self.refactor_box = None
        self.warning_box = None
        self.error_box = None
        self.fatal_box = None
        self.txtModule = None
        self.status = None
        self.msg_type_dict = None
        self.init_gui()

    def init_gui(self):
        """init helper"""
        window = PanedWindow(self.root, orient="vertical")
        window.pack(side=TOP, fill=BOTH, expand=True)

        top_pane = Frame(window)
        window.add(top_pane)
        mid_pane = Frame(window)
        window.add(mid_pane)
        bottom_pane = Frame(window)
        window.add(bottom_pane)
        #setting up frames
        top_frame = Frame(top_pane)
        mid_frame = Frame(top_pane)
        history_frame = Frame(top_pane)
        radio_frame = Frame(mid_pane)
        rating_frame = Frame(mid_pane)
        res_frame = Frame(mid_pane)
        check_frame = Frame(bottom_pane)
        msg_frame = Frame(bottom_pane)
        btn_frame = Frame(bottom_pane)
        top_frame.pack(side=TOP, fill=X)
        mid_frame.pack(side=TOP, fill=X)
        history_frame.pack(side=TOP, fill=BOTH, expand=True)
        radio_frame.pack(side=TOP, fill=X)
        rating_frame.pack(side=TOP, fill=X)
        res_frame.pack(side=TOP, fill=BOTH, expand=True)
        check_frame.pack(side=TOP, fill=X)
        msg_frame.pack(side=TOP, fill=BOTH, expand=True)
        btn_frame.pack(side=TOP, fill=X)

        # Binding F5 application-wide to run lint
        self.root.bind('<F5>', self.run_lint)

        #Message ListBox
        rightscrollbar = Scrollbar(msg_frame)
        rightscrollbar.pack(side=RIGHT, fill=Y)
        bottomscrollbar = Scrollbar(msg_frame, orient=HORIZONTAL)
        bottomscrollbar.pack(side=BOTTOM, fill=X)
        self.lb_messages = Listbox(
            msg_frame,
            yscrollcommand=rightscrollbar.set,
            xscrollcommand=bottomscrollbar.set,
            bg="white")
        self.lb_messages.bind("<Double-Button-1>", self.show_sourcefile)
        self.lb_messages.pack(expand=True, fill=BOTH)
        rightscrollbar.config(command=self.lb_messages.yview)
        bottomscrollbar.config(command=self.lb_messages.xview)

        #History ListBoxes
        rightscrollbar2 = Scrollbar(history_frame)
        rightscrollbar2.pack(side=RIGHT, fill=Y)
        bottomscrollbar2 = Scrollbar(history_frame, orient=HORIZONTAL)
        bottomscrollbar2.pack(side=BOTTOM, fill=X)
        self.showhistory = Listbox(
            history_frame,
            yscrollcommand=rightscrollbar2.set,
            xscrollcommand=bottomscrollbar2.set,
            bg="white")
        self.showhistory.pack(expand=True, fill=BOTH)
        rightscrollbar2.config(command=self.showhistory.yview)
        bottomscrollbar2.config(command=self.showhistory.xview)
        self.showhistory.bind('<Double-Button-1>', self.select_recent_file)
        self.set_history_window()

        #status bar
        self.status = Label(self.root, text="", bd=1, relief=SUNKEN, anchor=W)
        self.status.pack(side=BOTTOM, fill=X)

        #labelbl_ratingls
        lbl_rating_label = Label(rating_frame, text='Rating:')
        lbl_rating_label.pack(side=LEFT)
        lbl_rating = Label(rating_frame, textvariable=self.rating)
        lbl_rating.pack(side=LEFT)
        Label(mid_frame, text='Recently Used:').pack(side=LEFT)
        Label(top_frame, text='Module or package').pack(side=LEFT)

        #file textbox
        self.txt_module = Entry(top_frame, background='white')
        self.txt_module.bind('<Return>', self.run_lint)
        self.txt_module.pack(side=LEFT, expand=True, fill=X)

        #results box
        rightscrollbar = Scrollbar(res_frame)
        rightscrollbar.pack(side=RIGHT, fill=Y)
        bottomscrollbar = Scrollbar(res_frame, orient=HORIZONTAL)
        bottomscrollbar.pack(side=BOTTOM, fill=X)
        self.results = Listbox(
            res_frame,
            yscrollcommand=rightscrollbar.set,
            xscrollcommand=bottomscrollbar.set,
            bg="white", font="Courier")
        self.results.pack(expand=True, fill=BOTH, side=BOTTOM)
        rightscrollbar.config(command=self.results.yview)
        bottomscrollbar.config(command=self.results.xview)

        #buttons
        Button(top_frame, text='Open', command=self.file_open).pack(side=LEFT)
        Button(top_frame, text='Open Package',
               command=(lambda: self.file_open(package=True))).pack(side=LEFT)

        self.btnRun = Button(top_frame, text='Run', command=self.run_lint)
        self.btnRun.pack(side=LEFT)
        Button(btn_frame, text='Quit', command=self.quit).pack(side=BOTTOM)

        #radio buttons (one checkbox per message category, all on by default)
        self.information_box = IntVar()
        self.convention_box = IntVar()
        self.refactor_box = IntVar()
        self.warning_box = IntVar()
        self.error_box = IntVar()
        self.fatal_box = IntVar()
        i = Checkbutton(check_frame, text="Information", fg=COLORS['(I)'],
                        variable=self.information_box, command=self.refresh_msg_window)
        c = Checkbutton(check_frame, text="Convention", fg=COLORS['(C)'],
                        variable=self.convention_box, command=self.refresh_msg_window)
        r = Checkbutton(check_frame, text="Refactor", fg=COLORS['(R)'],
                        variable=self.refactor_box, command=self.refresh_msg_window)
        w = Checkbutton(check_frame, text="Warning", fg=COLORS['(W)'],
                        variable=self.warning_box, command=self.refresh_msg_window)
        e = Checkbutton(check_frame, text="Error", fg=COLORS['(E)'],
                        variable=self.error_box, command=self.refresh_msg_window)
        f = Checkbutton(check_frame, text="Fatal", fg=COLORS['(F)'],
                        variable=self.fatal_box, command=self.refresh_msg_window)
        i.select()
        c.select()
        r.select()
        w.select()
        e.select()
        f.select()
        i.pack(side=LEFT)
        c.pack(side=LEFT)
        r.pack(side=LEFT)
        w.pack(side=LEFT)
        e.pack(side=LEFT)
        f.pack(side=LEFT)

        #check boxes (report-tab selector)
        self.box = StringVar()
        # XXX should be generated
        report = Radiobutton(
            radio_frame, text="Report", variable=self.box,
            value="Report", command=self.refresh_results_window)
        raw_met = Radiobutton(
            radio_frame, text="Raw metrics", variable=self.box,
            value="Raw metrics", command=self.refresh_results_window)
        dup = Radiobutton(
            radio_frame, text="Duplication", variable=self.box,
            value="Duplication", command=self.refresh_results_window)
        ext = Radiobutton(
            radio_frame, text="External dependencies",
            variable=self.box, value="External dependencies",
            command=self.refresh_results_window)
        stat = Radiobutton(
            radio_frame, text="Statistics by type",
            variable=self.box, value="Statistics by type",
            command=self.refresh_results_window)
        msg_cat = Radiobutton(
            radio_frame, text="Messages by category",
            variable=self.box, value="Messages by category",
            command=self.refresh_results_window)
        msg = Radiobutton(
            radio_frame, text="Messages", variable=self.box,
            value="Messages", command=self.refresh_results_window)
        source_file = Radiobutton(
            radio_frame, text="Source File", variable=self.box,
            value="Source File", command=self.refresh_results_window)
        report.select()
        report.grid(column=0, row=0, sticky=W)
        raw_met.grid(column=1, row=0, sticky=W)
        dup.grid(column=2, row=0, sticky=W)
        msg.grid(column=3, row=0, sticky=W)
        stat.grid(column=0, row=1, sticky=W)
        msg_cat.grid(column=1, row=1, sticky=W)
        ext.grid(column=2, row=1, sticky=W)
        source_file.grid(column=3, row=1, sticky=W)

        #dictionary for check boxes and associated error term
        self.msg_type_dict = {
            'I': lambda: self.information_box.get() == 1,
            'C': lambda: self.convention_box.get() == 1,
            'R': lambda: self.refactor_box.get() == 1,
            'E': lambda: self.error_box.get() == 1,
            'W': lambda: self.warning_box.get() == 1,
            'F': lambda: self.fatal_box.get() == 1
        }
        self.txt_module.focus_set()

    def select_recent_file(self, event):  # pylint: disable=unused-argument
        """adds the selected file in the history listbox to the Module box"""
        if not self.showhistory.size():
            return

        selected = self.showhistory.curselection()
        item = self.showhistory.get(selected)
        #update module
        self.txt_module.delete(0, END)
        self.txt_module.insert(0, item)

    def refresh_msg_window(self):
        """refresh the message window with current output"""
        #clear the window
        self.lb_messages.delete(0, END)
        self.visible_msgs = []
        # Re-filter all stored messages against the category checkboxes.
        for msg in self.msgs:
            if self.msg_type_dict.get(msg.C)():
                self.visible_msgs.append(msg)
                msg_str = convert_to_string(msg)
                self.lb_messages.insert(END, msg_str)
                fg_color = COLORS.get(msg_str[:3], 'black')
                self.lb_messages.itemconfigure(END, fg=fg_color)

    def refresh_results_window(self):
        """refresh the results window with current output"""
        #clear the window
        self.results.delete(0, END)
        try:
            for res in self.tabs[self.box.get()]:
                self.results.insert(END, res)
        except KeyError:
            # No data for the selected tab yet; leave the window empty.
            pass

    def process_incoming(self):
        """process the incoming messages from running pylint"""
        while self.msg_queue.qsize():
            try:
                msg = self.msg_queue.get(0)
                if msg == "DONE":
                    self.report_stream.output_contents()
                    return False

                #adding message to list of msgs
                self.msgs.append(msg)

                #displaying msg if message type is selected in check box
                if self.msg_type_dict.get(msg.C)():
                    self.visible_msgs.append(msg)
                    msg_str = convert_to_string(msg)
                    self.lb_messages.insert(END, msg_str)
                    fg_color = COLORS.get(msg_str[:3], 'black')
                    self.lb_messages.itemconfigure(END, fg=fg_color)

            except six.moves.queue.Empty:
                pass
        return True

    def periodic_call(self):
        """determine when to unlock the run button"""
        # Poll the queue every 100 ms until the "DONE" sentinel arrives.
        if self.process_incoming():
            self.root.after(100, self.periodic_call)
        else:
            #enabling button so it can be run again
            self.btnRun.config(state=NORMAL)

    def mainloop(self):
        """launch the mainloop of the application"""
        self.root.mainloop()

    def quit(self, _=None):
        """quit the application"""
        self.root.quit()

    def halt(self):  # pylint: disable=no-self-use
        """program halt placeholder"""
        return

    def file_open(self, package=False, _=None):
        """launch a file browser"""
        if not package:
            filename = askopenfilename(parent=self.root,
                                       filetypes=[('pythonfiles', '*.py'),
                                                  ('allfiles', '*')],
                                       title='Select Module')
        else:
            filename = askdirectory(title="Select A Folder", mustexist=1)

        if filename == ():
            return

        self.txt_module.delete(0, END)
        self.txt_module.insert(0, filename)

    def update_filenames(self):
        """update the list of recent filenames"""
        filename = self.txt_module.get()
        if not filename:
            filename = os.getcwd()
        if filename+'\n' in self.filenames:
            index = self.filenames.index(filename+'\n')
            self.filenames.pop(index)

        #ensure only 10 most recent are stored
        if len(self.filenames) == 10:
            self.filenames.pop()
        self.filenames.insert(0, filename+'\n')

    def set_history_window(self):
        """update the history window with info from the history file"""
        #clear the window
        self.showhistory.delete(0, END)
        # keep the last 10 most recent files
        try:
            view_history = open(HOME+HISTORY, 'r')
            for hist in view_history.readlines():
                if not hist in self.filenames:
                    self.filenames.append(hist)
                self.showhistory.insert(END, hist.split('\n')[0])
            view_history.close()
        except IOError:
            # do nothing since history file will be created later
            return

    def run_lint(self, _=None):
        """launches pylint"""
        self.update_filenames()
        self.root.configure(cursor='watch')
        self.reporter = GUIReporter(self, output=self.report_stream)
        module = self.txt_module.get()
        if not module:
            module = os.getcwd()

        #cleaning up msgs and windows
        self.msgs = []
        self.visible_msgs = []
        self.lb_messages.delete(0, END)
        self.tabs = {}
        self.results.delete(0, END)
        self.btnRun.config(state=DISABLED)

        #setting up a worker thread to run pylint
        worker = Thread(target=lint_thread, args=(module, self.reporter, self,))
        self.periodic_call()
        worker.start()

        # Overwrite the .pylint-gui-history file with all the new recently added files
        # in order from filenames but only save last 10 files
        write_history = open(HOME+HISTORY, 'w')
        write_history.writelines(self.filenames)
        write_history.close()
        self.set_history_window()

        self.root.configure(cursor='')

    def show_sourcefile(self, event=None):  # pylint: disable=unused-argument
        # Open the file of the double-clicked message in the results pane
        # and scroll to (and select) the offending line.
        selected = self.lb_messages.curselection()
        if not selected:
            return

        msg = self.visible_msgs[int(selected[0])]
        scroll = msg.line - 3
        if scroll < 0:
            scroll = 0

        self.tabs["Source File"] = open(msg.path, "r").readlines()
        self.box.set("Source File")
        self.refresh_results_window()
        self.results.yview(scroll)
        self.results.select_set(msg.line - 1)
def lint_thread(module, reporter, gui):
    """thread for pylint

    Worker-thread entry point: runs pylint on *module* through *reporter*
    and posts the "DONE" sentinel to the GUI queue when finished so
    process_incoming stops polling.
    """
    gui.status.text = "processing module(s)"
    pylint.lint.Run(args=[module], reporter=reporter, exit=False)
    gui.msg_queue.put("DONE")
def Run(args):
    """launch pylint gui from args

    Any extra command-line argument is an error: print usage and exit 1.
    Otherwise start the GUI and exit 0 when its main loop returns.
    """
    if args:
        usage = 'USAGE: pylint-gui\n launch a simple pylint gui using Tk'
        print(usage)
        sys.exit(1)
    LintGui().mainloop()
    sys.exit(0)
if __name__ == '__main__':
    # command-line entry point: pass along everything after the script name
    Run(sys.argv[1:])
| bsd-3-clause |
camilortte/RecomendadorUD | apps/externals/recommends/similarities.py | 4 | 1351 | from math import sqrt
@staticmethod
def sim_distance(p1, p2):
    """Return a Euclidean-distance-based similarity score for p1 and p2.

    The score is 1 / (1 + distance) over the items both have rated,
    or 0 when they share no items.
    """
    # Items rated by both parties.
    shared = [key for key in p1 if key in p2]
    if not shared:
        return 0
    # Sum of squared rating differences over the shared items.
    total = sum(pow(p1[key] - p2[key], 2) for key in shared)
    return 1 / (1 + sqrt(total))
@staticmethod
def sim_pearson(p1, p2):
    """Return the Pearson correlation coefficient for p1 and p2.

    Computed over the items both have rated; 0 when there are no shared
    items or when either side has zero variance.
    """
    # Items rated by both parties.
    shared = [key for key in p1 if key in p2]
    n = len(shared)
    if n == 0:
        # no ratings in common
        return 0
    # Sums of the preferences.
    sum1 = sum([p1[key] for key in shared])
    sum2 = sum([p2[key] for key in shared])
    # Sums of the squared preferences.
    sum1_sq = sum([pow(p1[key], 2) for key in shared])
    sum2_sq = sum([pow(p2[key], 2) for key in shared])
    # Sum of the pairwise products.
    product_sum = sum([p1[key] * p2[key] for key in shared])
    # Pearson score: covariance over the product of standard deviations.
    num = product_sum - (sum1 * sum2 / n)
    den = sqrt((sum1_sq - pow(sum1, 2) / n) * (sum2_sq - pow(sum2, 2) / n))
    if den == 0:
        return 0
    return num / den
| mit |
kevinmel2000/sl4a | python/src/Lib/test/test_undocumented_details.py | 56 | 1137 | from test.test_support import run_unittest, have_unicode
import unittest
import sys
class TestImplementationComparisons(unittest.TestCase):
    """CPython 2 implementation-detail comparison behaviour.

    NOTE(review): relies on Python-2-only features (``cmp``,
    ``func_closure``, ordering between unrelated types); this test is not
    portable to Python 3.
    """
    def test_type_comparisons(self):
        # Unrelated types have an arbitrary but consistent ordering in Py2.
        self.assertTrue(str < int or str > int)
        self.assertTrue(int <= str or int >= str)
        self.assertTrue(cmp(int, str) != 0)
        self.assertTrue(int is int)
        self.assertTrue(str == str)
        self.assertTrue(int != str)
    def test_cell_comparisons(self):
        # Closure cells (func_closure) compare by their contained values;
        # g captures x (0), h captures y (undefined when x is falsy).
        def f(x):
            if x:
                y = 1
            def g():
                return x
            def h():
                return y
            return g, h
        g, h = f(0)
        g_cell, = g.func_closure
        h_cell, = h.func_closure
        self.assertTrue(h_cell < g_cell)
        self.assertTrue(g_cell >= h_cell)
        self.assertEqual(cmp(g_cell, h_cell), 1)
        self.assertTrue(g_cell is g_cell)
        self.assertTrue(g_cell == g_cell)
        self.assertTrue(h_cell == h_cell)
        self.assertTrue(g_cell != h_cell)
def test_main():
    """Run this module's test case through the regrtest driver."""
    run_unittest(TestImplementationComparisons)
if __name__ == '__main__':
    # allow running the file directly as well as via regrtest
    test_main()
| apache-2.0 |
damienmg/bazel | combine_distfiles.py | 18 | 1682 | # pylint: disable=g-bad-file-header
# pylint: disable=g-direct-third-party-import
#
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates the Bazel source distribution archive."""
import contextlib
import os.path
import sys
import zipfile
from src.create_embedded_tools_lib import copy_tar_to_zip
from src.create_embedded_tools_lib import copy_zip_to_zip
def main():
  """Merge the given tar/zip archives into one deflated output zip.

  argv[1] is the output zip path (relative to the cwd); the remaining
  arguments are the input archives, processed in sorted order.
  """
  out_path = os.path.join(os.getcwd(), sys.argv[1])
  input_files = sorted(sys.argv[2:])

  def _normalize(path):
    # Strip a leading "./" so entries are rooted consistently.
    return path[2:] if path.startswith("./") else path

  # Copy all the input_files into the output zip.
  # Adding contextlib.closing to be python 2.6 (for centos 6.7) compatible
  with contextlib.closing(
      zipfile.ZipFile(out_path, "w", zipfile.ZIP_DEFLATED)) as output_zip:
    for input_file in input_files:
      if input_file.endswith(".tar"):
        copy_tar_to_zip(output_zip, input_file, _normalize)
      elif input_file.endswith(".zip"):
        copy_zip_to_zip(output_zip, input_file, _normalize)
      else:
        raise Exception("unknown archive type \"%s\"" % input_file)
if __name__ == "__main__":
  # script entry point (invoked by the Bazel build)
  main()
| apache-2.0 |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/debian/headphones/apps/headphones/lib/unidecode/x0af.py | 253 | 5012 | data = (
'ggyeols', # 0x00
'ggyeolt', # 0x01
'ggyeolp', # 0x02
'ggyeolh', # 0x03
'ggyeom', # 0x04
'ggyeob', # 0x05
'ggyeobs', # 0x06
'ggyeos', # 0x07
'ggyeoss', # 0x08
'ggyeong', # 0x09
'ggyeoj', # 0x0a
'ggyeoc', # 0x0b
'ggyeok', # 0x0c
'ggyeot', # 0x0d
'ggyeop', # 0x0e
'ggyeoh', # 0x0f
'ggye', # 0x10
'ggyeg', # 0x11
'ggyegg', # 0x12
'ggyegs', # 0x13
'ggyen', # 0x14
'ggyenj', # 0x15
'ggyenh', # 0x16
'ggyed', # 0x17
'ggyel', # 0x18
'ggyelg', # 0x19
'ggyelm', # 0x1a
'ggyelb', # 0x1b
'ggyels', # 0x1c
'ggyelt', # 0x1d
'ggyelp', # 0x1e
'ggyelh', # 0x1f
'ggyem', # 0x20
'ggyeb', # 0x21
'ggyebs', # 0x22
'ggyes', # 0x23
'ggyess', # 0x24
'ggyeng', # 0x25
'ggyej', # 0x26
'ggyec', # 0x27
'ggyek', # 0x28
'ggyet', # 0x29
'ggyep', # 0x2a
'ggyeh', # 0x2b
'ggo', # 0x2c
'ggog', # 0x2d
'ggogg', # 0x2e
'ggogs', # 0x2f
'ggon', # 0x30
'ggonj', # 0x31
'ggonh', # 0x32
'ggod', # 0x33
'ggol', # 0x34
'ggolg', # 0x35
'ggolm', # 0x36
'ggolb', # 0x37
'ggols', # 0x38
'ggolt', # 0x39
'ggolp', # 0x3a
'ggolh', # 0x3b
'ggom', # 0x3c
'ggob', # 0x3d
'ggobs', # 0x3e
'ggos', # 0x3f
'ggoss', # 0x40
'ggong', # 0x41
'ggoj', # 0x42
'ggoc', # 0x43
'ggok', # 0x44
'ggot', # 0x45
'ggop', # 0x46
'ggoh', # 0x47
'ggwa', # 0x48
'ggwag', # 0x49
'ggwagg', # 0x4a
'ggwags', # 0x4b
'ggwan', # 0x4c
'ggwanj', # 0x4d
'ggwanh', # 0x4e
'ggwad', # 0x4f
'ggwal', # 0x50
'ggwalg', # 0x51
'ggwalm', # 0x52
'ggwalb', # 0x53
'ggwals', # 0x54
'ggwalt', # 0x55
'ggwalp', # 0x56
'ggwalh', # 0x57
'ggwam', # 0x58
'ggwab', # 0x59
'ggwabs', # 0x5a
'ggwas', # 0x5b
'ggwass', # 0x5c
'ggwang', # 0x5d
'ggwaj', # 0x5e
'ggwac', # 0x5f
'ggwak', # 0x60
'ggwat', # 0x61
'ggwap', # 0x62
'ggwah', # 0x63
'ggwae', # 0x64
'ggwaeg', # 0x65
'ggwaegg', # 0x66
'ggwaegs', # 0x67
'ggwaen', # 0x68
'ggwaenj', # 0x69
'ggwaenh', # 0x6a
'ggwaed', # 0x6b
'ggwael', # 0x6c
'ggwaelg', # 0x6d
'ggwaelm', # 0x6e
'ggwaelb', # 0x6f
'ggwaels', # 0x70
'ggwaelt', # 0x71
'ggwaelp', # 0x72
'ggwaelh', # 0x73
'ggwaem', # 0x74
'ggwaeb', # 0x75
'ggwaebs', # 0x76
'ggwaes', # 0x77
'ggwaess', # 0x78
'ggwaeng', # 0x79
'ggwaej', # 0x7a
'ggwaec', # 0x7b
'ggwaek', # 0x7c
'ggwaet', # 0x7d
'ggwaep', # 0x7e
'ggwaeh', # 0x7f
'ggoe', # 0x80
'ggoeg', # 0x81
'ggoegg', # 0x82
'ggoegs', # 0x83
'ggoen', # 0x84
'ggoenj', # 0x85
'ggoenh', # 0x86
'ggoed', # 0x87
'ggoel', # 0x88
'ggoelg', # 0x89
'ggoelm', # 0x8a
'ggoelb', # 0x8b
'ggoels', # 0x8c
'ggoelt', # 0x8d
'ggoelp', # 0x8e
'ggoelh', # 0x8f
'ggoem', # 0x90
'ggoeb', # 0x91
'ggoebs', # 0x92
'ggoes', # 0x93
'ggoess', # 0x94
'ggoeng', # 0x95
'ggoej', # 0x96
'ggoec', # 0x97
'ggoek', # 0x98
'ggoet', # 0x99
'ggoep', # 0x9a
'ggoeh', # 0x9b
'ggyo', # 0x9c
'ggyog', # 0x9d
'ggyogg', # 0x9e
'ggyogs', # 0x9f
'ggyon', # 0xa0
'ggyonj', # 0xa1
'ggyonh', # 0xa2
'ggyod', # 0xa3
'ggyol', # 0xa4
'ggyolg', # 0xa5
'ggyolm', # 0xa6
'ggyolb', # 0xa7
'ggyols', # 0xa8
'ggyolt', # 0xa9
'ggyolp', # 0xaa
'ggyolh', # 0xab
'ggyom', # 0xac
'ggyob', # 0xad
'ggyobs', # 0xae
'ggyos', # 0xaf
'ggyoss', # 0xb0
'ggyong', # 0xb1
'ggyoj', # 0xb2
'ggyoc', # 0xb3
'ggyok', # 0xb4
'ggyot', # 0xb5
'ggyop', # 0xb6
'ggyoh', # 0xb7
'ggu', # 0xb8
'ggug', # 0xb9
'ggugg', # 0xba
'ggugs', # 0xbb
'ggun', # 0xbc
'ggunj', # 0xbd
'ggunh', # 0xbe
'ggud', # 0xbf
'ggul', # 0xc0
'ggulg', # 0xc1
'ggulm', # 0xc2
'ggulb', # 0xc3
'gguls', # 0xc4
'ggult', # 0xc5
'ggulp', # 0xc6
'ggulh', # 0xc7
'ggum', # 0xc8
'ggub', # 0xc9
'ggubs', # 0xca
'ggus', # 0xcb
'gguss', # 0xcc
'ggung', # 0xcd
'gguj', # 0xce
'gguc', # 0xcf
'gguk', # 0xd0
'ggut', # 0xd1
'ggup', # 0xd2
'gguh', # 0xd3
'ggweo', # 0xd4
'ggweog', # 0xd5
'ggweogg', # 0xd6
'ggweogs', # 0xd7
'ggweon', # 0xd8
'ggweonj', # 0xd9
'ggweonh', # 0xda
'ggweod', # 0xdb
'ggweol', # 0xdc
'ggweolg', # 0xdd
'ggweolm', # 0xde
'ggweolb', # 0xdf
'ggweols', # 0xe0
'ggweolt', # 0xe1
'ggweolp', # 0xe2
'ggweolh', # 0xe3
'ggweom', # 0xe4
'ggweob', # 0xe5
'ggweobs', # 0xe6
'ggweos', # 0xe7
'ggweoss', # 0xe8
'ggweong', # 0xe9
'ggweoj', # 0xea
'ggweoc', # 0xeb
'ggweok', # 0xec
'ggweot', # 0xed
'ggweop', # 0xee
'ggweoh', # 0xef
'ggwe', # 0xf0
'ggweg', # 0xf1
'ggwegg', # 0xf2
'ggwegs', # 0xf3
'ggwen', # 0xf4
'ggwenj', # 0xf5
'ggwenh', # 0xf6
'ggwed', # 0xf7
'ggwel', # 0xf8
'ggwelg', # 0xf9
'ggwelm', # 0xfa
'ggwelb', # 0xfb
'ggwels', # 0xfc
'ggwelt', # 0xfd
'ggwelp', # 0xfe
'ggwelh', # 0xff
)
| gpl-2.0 |
ecrespo/django_kanban-agile | kanban/lib/python2.7/site-packages/django/template/utils.py | 41 | 4662 | import os
import warnings
from collections import Counter, OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils import lru_cache
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class InvalidTemplateEngineError(ImproperlyConfigured):
    """Raised when no template engine is configured under a requested alias."""
    pass
class EngineHandler(object):
    """Lazy container mapping template-engine aliases to engine instances."""
    def __init__(self, templates=None):
        """
        templates is an optional list of template engine definitions
        (structured like settings.TEMPLATES).
        """
        self._templates = templates
        self._engines = {}
    @cached_property
    def templates(self):
        """Return an OrderedDict of normalized engine configs keyed by NAME."""
        if self._templates is None:
            self._templates = settings.TEMPLATES
        if not self._templates:
            warnings.warn(
                "You haven't defined a TEMPLATES setting. You must do so "
                "before upgrading to Django 2.0. Otherwise Django will be "
                "unable to load templates.", RemovedInDjango20Warning)
            # Legacy fallback: synthesize a config from the old-style
            # TEMPLATE_* settings.
            self._templates = [
                {
                    'BACKEND': 'django.template.backends.django.DjangoTemplates',
                    'DIRS': settings.TEMPLATE_DIRS,
                    'OPTIONS': {
                        'allowed_include_roots': settings.ALLOWED_INCLUDE_ROOTS,
                        'context_processors': settings.TEMPLATE_CONTEXT_PROCESSORS,
                        'debug': settings.TEMPLATE_DEBUG,
                        'loaders': settings.TEMPLATE_LOADERS,
                        'string_if_invalid': settings.TEMPLATE_STRING_IF_INVALID,
                    },
                },
            ]
        templates = OrderedDict()
        backend_names = []
        for tpl in self._templates:
            tpl = tpl.copy()
            try:
                # This will raise an exception if 'BACKEND' doesn't exist or
                # isn't a string containing at least one dot.
                default_name = tpl['BACKEND'].rsplit('.', 2)[-2]
            except Exception:
                invalid_backend = tpl.get('BACKEND', '<not defined>')
                raise ImproperlyConfigured(
                    "Invalid BACKEND for a template engine: {}. Check "
                    "your TEMPLATES setting.".format(invalid_backend))
            tpl.setdefault('NAME', default_name)
            tpl.setdefault('DIRS', [])
            tpl.setdefault('APP_DIRS', False)
            tpl.setdefault('OPTIONS', {})
            templates[tpl['NAME']] = tpl
            backend_names.append(tpl['NAME'])
        # BUGFIX: count aliases from the raw list, not the OrderedDict.
        # Duplicate NAMEs silently overwrite each other on dict insertion,
        # so counting the dict's keys could never report a duplicate.
        counts = Counter(backend_names)
        duplicates = [alias for alias, count in counts.most_common() if count > 1]
        if duplicates:
            raise ImproperlyConfigured(
                "Template engine aliases aren't unique, duplicates: {}. "
                "Set a unique NAME for each engine in settings.TEMPLATES."
                .format(", ".join(duplicates)))
        return templates
    def __getitem__(self, alias):
        """Return (instantiating and caching on first use) the engine for alias."""
        try:
            return self._engines[alias]
        except KeyError:
            try:
                params = self.templates[alias]
            except KeyError:
                raise InvalidTemplateEngineError(
                    "Could not find config for '{}' "
                    "in settings.TEMPLATES".format(alias))
            # If importing or initializing the backend raises an exception,
            # self._engines[alias] isn't set and this code may get executed
            # again, so we must preserve the original params. See #24265.
            params = params.copy()
            backend = params.pop('BACKEND')
            engine_cls = import_string(backend)
            engine = engine_cls(params)
            self._engines[alias] = engine
            return engine
    def __iter__(self):
        return iter(self.templates)
    def all(self):
        """Return all configured engine instances."""
        return [self[alias] for alias in self]
@lru_cache.lru_cache()
def get_app_template_dirs(dirname):
    """
    Return an iterable of paths of directories to load app templates from.

    dirname is the name of the subdirectory containing templates inside
    installed applications.
    """
    template_dirs = []
    for app_config in apps.get_app_configs():
        if not app_config.path:
            continue
        candidate = os.path.join(app_config.path, dirname)
        if os.path.isdir(candidate):
            template_dirs.append(upath(candidate))
    # Immutable return value because it will be cached and shared by callers.
    return tuple(template_dirs)
| mit |
IMCG/lmctfy | gmock/gtest/test/gtest_help_test.py | 2968 | 5856 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import gtest_test_utils
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
IS_WINDOWS = os.name == 'nt'
# Path of the gtest_help_test_ binary under test.
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
FLAG_PREFIX = '--gtest_'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
# Malformed spellings of --gtest_list_tests that should still show help.
INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
                           re.sub('^--', '/', LIST_TESTS_FLAG),
                           re.sub('_', '-', LIST_TESTS_FLAG)]
INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'
# Probe the binary once to see whether death tests were compiled in.
SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
    [PROGRAM_PATH, LIST_TESTS_FLAG]).output
# The help message must match this regex.
HELP_REGEX = re.compile(
    FLAG_PREFIX + r'list_tests.*' +
    FLAG_PREFIX + r'filter=.*' +
    FLAG_PREFIX + r'also_run_disabled_tests.*' +
    FLAG_PREFIX + r'repeat=.*' +
    FLAG_PREFIX + r'shuffle.*' +
    FLAG_PREFIX + r'random_seed=.*' +
    FLAG_PREFIX + r'color=.*' +
    FLAG_PREFIX + r'print_time.*' +
    FLAG_PREFIX + r'output=.*' +
    FLAG_PREFIX + r'break_on_failure.*' +
    FLAG_PREFIX + r'throw_on_failure.*' +
    FLAG_PREFIX + r'catch_exceptions=0.*',
    re.DOTALL)
def RunWithFlag(flag):
  """Runs gtest_help_test_ with the given flag.

  Returns:
    the exit code and the text output as a tuple.

  Args:
    flag: the command-line flag to pass to gtest_help_test_, or None.
  """
  # Build argv: just the binary, plus the flag when one is given.
  command = [PROGRAM_PATH] if flag is None else [PROGRAM_PATH, flag]
  child = gtest_test_utils.Subprocess(command)
  return child.exit_code, child.output
class GTestHelpTest(gtest_test_utils.TestCase):
  """Tests the --help flag and its equivalent forms."""
  def TestHelpFlag(self, flag):
    """Verifies correct behavior when help flag is specified.

    The right message must be printed and the tests must be
    skipped when the given flag is specified.

    Args:
      flag: A flag to pass to the binary or None.
    """
    exit_code, output = RunWithFlag(flag)
    self.assertEquals(0, exit_code)
    self.assert_(HELP_REGEX.search(output), output)
    # stream_result_to is only offered on Linux builds.
    if IS_LINUX:
      self.assert_(STREAM_RESULT_TO_FLAG in output, output)
    else:
      self.assert_(STREAM_RESULT_TO_FLAG not in output, output)
    # death_test_style only appears when death tests are compiled in
    # (and never on Windows).
    if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
      self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
    else:
      self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)
  def TestNonHelpFlag(self, flag):
    """Verifies correct behavior when no help flag is specified.

    Verifies that when no help flag is specified, the tests are run
    and the help message is not printed.

    Args:
      flag: A flag to pass to the binary or None.
    """
    exit_code, output = RunWithFlag(flag)
    self.assert_(exit_code != 0)
    self.assert_(not HELP_REGEX.search(output), output)
  def testPrintsHelpWithFullFlag(self):
    self.TestHelpFlag('--help')
  def testPrintsHelpWithShortFlag(self):
    self.TestHelpFlag('-h')
  def testPrintsHelpWithQuestionFlag(self):
    self.TestHelpFlag('-?')
  def testPrintsHelpWithWindowsStyleQuestionFlag(self):
    self.TestHelpFlag('/?')
  def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
    self.TestHelpFlag(UNKNOWN_FLAG)
  def testPrintsHelpWithIncorrectFlagStyle(self):
    for incorrect_flag in INCORRECT_FLAG_VARIANTS:
      self.TestHelpFlag(incorrect_flag)
  def testRunsTestsWithoutHelpFlag(self):
    """Verifies that when no help flag is specified, the tests are run
    and the help message is not printed."""
    self.TestNonHelpFlag(None)
  def testRunsTestsWithGtestInternalFlag(self):
    """Verifies that the tests are run and no help message is printed when
    a flag starting with Google Test prefix and 'internal_' is supplied."""
    self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
if __name__ == '__main__':
  # run via the shared gtest python test driver
  gtest_test_utils.Main()
| apache-2.0 |
jalanb/co.py.cat | copycat/coderackPressure.py | 1 | 4369 | import logging
from formulas import Temperature
from slipnet import slipnet
class CoderackPressure(object):
    """Tracks the codelets and the history of values for one pressure type."""
    def __init__(self, name):
        self.name = name
        # Initialize the value/codelet lists immediately so the attributes
        # always exist, even before the owning CoderackPressures calls
        # reset() (the original raised AttributeError if accessed earlier).
        self.reset()
    def reset(self):
        """Clear accumulated values and queued codelets."""
        self.unmodifedValues = []
        self.values = []
        self.codelets = []
def _codelet_index(codelet):
    """Map a codelet to the index of its pressure category, or -1 for none.

    Entries in the table are either a plain index, or a dict keyed by the
    slipnet node found in the codelet's first argument (the None key is
    the fallback for that codelet name).
    """
    name_indices = {
        'bottom-up-bond-scout': 0,
        'top-down-bond-scout--category': {
            slipnet.successor: 1,
            slipnet.predecessor: 2,
            None: 3
        },
        'top-down-bond-scout--direction': {
            slipnet.left: 4,
            slipnet.right: 5,
            None: 3,
        },
        'top-down-group-scout--category': {
            slipnet.successorGroup: 6,
            slipnet.predecessorGroup: 7,
            None: 8,
        },
        'top-down-group-scout--direction': {
            slipnet.left: 9,
            slipnet.right: 10,
            None: -1,
        },
        'group-scout--whole-string': 11,
        'replacement-finder': 12,
        'rule-scout': 13,
        'rule-translator': 14,
        'bottom-up-correspondence-scout': 15,
        'important-object-correspondence-scout': 16,
        'breaker': 17,
    }
    i = name_indices.get(codelet.name, -1)
    try:
        # plain index (or -1 fallback)
        return int(i)
    except (TypeError, ValueError):
        # i is a per-node dict: dispatch on the codelet's first argument,
        # falling back to the dict's None entry.
        try:
            node = codelet.arguments[0]
            return i[node]
        except KeyError:
            return i[None]
class CoderackPressures(object):
    """Aggregates all CoderackPressure instances and their statistics."""
    def __init__(self):
        self.initialisePressures()
        self.reset()
    def initialisePressures(self):
        """Create the fixed list of pressures, in _codelet_index() order."""
        self.pressures = []
        self.pressures += [CoderackPressure('Bottom Up Bonds')]
        self.pressures += [CoderackPressure('Top Down Successor Bonds')]
        self.pressures += [CoderackPressure('Top Down Predecessor Bonds')]
        self.pressures += [CoderackPressure('Top Down Sameness Bonds')]
        self.pressures += [CoderackPressure('Top Down Left Bonds')]
        self.pressures += [CoderackPressure('Top Down Right Bonds')]
        self.pressures += [CoderackPressure('Top Down Successor Group')]
        self.pressures += [CoderackPressure('Top Down Predecessor Group')]
        self.pressures += [CoderackPressure('Top Down Sameness Group')]
        self.pressures += [CoderackPressure('Top Down Left Group')]
        self.pressures += [CoderackPressure('Top Down Right Group')]
        self.pressures += [CoderackPressure('Bottom Up Whole Group')]
        self.pressures += [CoderackPressure('Replacement Finder')]
        self.pressures += [CoderackPressure('Rule Codelets')]
        self.pressures += [CoderackPressure('Rule Translator')]
        self.pressures += [CoderackPressure('Bottom Up Correspondences')]
        self.pressures += [CoderackPressure(
            'Important Object Correspondences')]
        self.pressures += [CoderackPressure('Breakers')]
    def calculatePressures(self):
        """Recompute each pressure's relative strength.

        Codelet urgencies are raised to a temperature-dependent exponent
        and normalised so that the recorded values sum to 100.  Pending
        codelet removals are also flushed here.
        """
        scale = (100.0 - Temperature + 10.0) / 15.0
        values = []
        for pressure in self.pressures:
            value = sum(c.urgency ** scale for c in pressure.codelets)
            values += [value]
        totalValue = sum(values)
        if not totalValue:
            totalValue = 1.0
        values = [value / totalValue for value in values]
        self.maxValue = max(values)
        for pressure, value in zip(self.pressures, values):
            pressure.values += [value * 100.0]
        for codelet in self.removedCodelets:
            if codelet.pressure:
                # BUGFIX: Python lists have no removeElement() method (a
                # Java-ism that raised AttributeError here); use
                # list.remove() and tolerate already-removed codelets.
                if codelet in codelet.pressure.codelets:
                    codelet.pressure.codelets.remove(codelet)
        self.removedCodelets = []
    def reset(self):
        """Reset every pressure and drop pending removals."""
        self.maxValue = 0.001
        for pressure in self.pressures:
            pressure.reset()
        self.removedCodelets = []
    def addCodelet(self, codelet):
        """File a codelet under its pressure category, if it maps to one."""
        i = _codelet_index(codelet)
        if i >= 0:
            self.pressures[i].codelets += [codelet]
        if codelet.pressure:
            codelet.pressure.codelets += [codelet]
        if i >= 0:
            codelet.pressure = self.pressures[i]
        logging.info('Add %s: %d', codelet.name, i)
    def removeCodelet(self, codelet):
        """Defer removal until the next calculatePressures() pass."""
        self.removedCodelets += [codelet]
    def numberOfPressures(self):
        return len(self.pressures)
| mit |
beernarrd/gramps | gramps/test/test/test_util_test.py | 1 | 7338 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# test/test/test_util_test.py
"""unittest (test_util_test.py) for test_util.py"""
import sys
import os
import tempfile
import unittest as U
from gramps.gen.constfunc import get_env_var
# Help text printed when test_util cannot be imported: running these tests
# requires the Gramps source root on PYTHONPATH.
# (typo "runing" is inside the runtime string and is preserved here)
usage_note="""
**************************************************************
Testing (and runing) Gramps requires that PYTHONPATH include
the path to the top Gramps directory (where gramps.py resides).
For example, in bash, a shell export would look like
export PYTHONPATH=/.../src
with the ... filled in appropriately.
**************************************************************
"""
# **************************************************************
#
# Since this module is used by other test modules, it is
# strongly advised to test this module to 100% coverage,
# and in all calling variations, eg:
# run directly, from this dir with and without ./ prefix
# run from other dirs (with path prefix)
# run from within regrtest.py
# run from regrtest.py with other test modules present
# which use the test_util module itself
#
# **************************************************************
# Import the shared test helpers; bail out with PYTHONPATH help when the
# package layout is not importable.
try:
    from gramps.test import test_util as tu
    ##here = tu.absdir()
except ImportError:
    print("Cannot import 'test_util' from package 'test'" + usage_note)
    exit(1)
# grouping into multiple TestCases (classes) is not required,
# but may be useful for various reasons, such as collecting
# tests that share a setUp/tearDown mechanism or that share
# some test data, or just because they're related.
#
# The test function name should not have docstrings, but should
# have names which add to the value of failure reporting, and
# which make it easy to find them within the source.
# some enabling infrastructure features
class Test1(U.TestCase):
    """Exercises test_util's TestError exception and msg() formatter."""
    def test1a_custom_exception(self):
        tmsg = "testing"
        try:
            err = None
            raise tu.TestError(tmsg)
        except tu.TestError as e:
            emsg = e.value
        self.assertEqual(emsg, tmsg,
            "raising TestError: g=%r e=%r" % (emsg, tmsg))
    def test1b_msg_reporting_utility(self):
        # expected output with and without the optional prefix
        g,e = "got this", "expected that"
        m,p = "your message here", "pfx"
        tmsg0 = m + "\n .....got:'" + g + \
            "'\n expected:'" + e +"'"
        tmsg1 = p + ": " + tmsg0
        self.assertEqual(tu.msg(g,e,m), tmsg0, "non-prefix message")
        self.assertEqual(tu.msg(g,e,m,p), tmsg1, "prefix message")
# path-related features (note use of tu.msg tested above)
class Test2(U.TestCase):
    """Path-related helpers: caller context, absdir, path_append_parent."""
    def test2a_context_via_traceback(self):
        # NOTE(review): rstrip(".co") strips any trailing '.', 'c', 'o'
        # characters (a char-set, not a suffix) -- fine for .py[co] names
        # but would mangle basenames ending in those characters.
        e = os.path.basename(__file__).rstrip(".co") # eg in *.py[co]
        g = os.path.basename(tu._caller_context()[0]).rstrip('co')
        self.assertEqual(g,e, tu.msg(g,e, "_caller_context"))
    def test2b_absdir(self):
        here = tu.absdir();
        g=tu.absdir(__file__)
        self.assertEqual(g, here, tu.msg(g, here, "absdir"))
    def test2c_path_append_parent(self):
        # remove the parent dir from sys.path, verify it gets appended
        # (exactly once), then restore the entry state
        here = tu.absdir();
        par = os.path.dirname(here)
        was_there = par in sys.path
        if was_there:
            while par in sys.path:
                sys.path.remove(par)
        np = len(sys.path)
        for p in (None, __file__):
            self.assertFalse(par in sys.path, "par not in initial path")
            if not p:
                g = tu.path_append_parent()
            else:
                g = tu.path_append_parent(p)
            self.assertEqual(g,par, tu.msg(g,par, "path_append_parent return"))
            self.assertTrue(par in sys.path, "actually appends")
            sys.path.remove(par)
            l= len(sys.path)
            self.assertEqual(l, np, tu.msg(l, np,"numpaths"))
        if was_there:
            # restore entry state (but no multiples needed!)
            sys.path.append(par)
# make and remove test dirs
class Test3(U.TestCase):
    """make_subdir / delete_tree helpers, with cleanup in setUp/tearDown."""
    # candidate parent dirs: the test dir itself and the system temp dir
    here = tu.absdir()
    bases = (here, tempfile.gettempdir())
    asubs = [os.path.join(b,"test_sub") for b in bases]
    home = get_env_var("HOME")
    if home:
        home_junk = os.path.join(home,"test_junk")
    def _rmsubs(self):
        # remove any leftover test_sub trees from a previous run
        import shutil
        for sub in self.asubs:
            if os.path.isdir(sub):
                shutil.rmtree(sub)
    def setUp(self):
        self._rmsubs()
        if self.home and not os.path.isdir(self.home_junk):
            os.mkdir(self.home_junk)
    def tearDown(self):
        self._rmsubs()
        if self.home and os.path.isdir(self.home_junk):
            os.rmdir(self.home_junk)
    def test3a_subdir(self):
        for sub in self.asubs:
            self.assertFalse(os.path.isdir(sub), "init: no dir %r" % sub)
            b,d = os.path.dirname(sub), os.path.basename(sub)
            md = tu.make_subdir(d, b)
            self.assertTrue(os.path.isdir(sub), "made dir %r" % sub)
            self.assertEqual(md,sub, tu.msg(md,sub,
                "make_subdir returns path"))
            s2 = os.path.join(sub,"sub2")
            tu.make_subdir("sub2", sub)
            self.assertTrue(os.path.isdir(s2), "made dir %r" % s2)
            f = os.path.join(s2,"test_file")
            # NOTE(review): file handle is never closed explicitly; relies
            # on CPython refcounting to flush before the isfile check.
            open(f,"w").write("testing..")
            self.assertTrue(os.path.isfile(f), "file %r exists" % f)
            tu.delete_tree(sub)
            self.assertFalse(os.path.isdir(sub),
                "delete_tree removes subdir %r" % sub )
    def test3b_delete_tree_constraint(self):
        # delete_tree must refuse to operate on a dir under $HOME
        if self.home:
            err = None
            try:
                tu.delete_tree(self.home_junk)
            except tu.TestError as e:
                err = e.value
            self.assertFalse(err is None,
                "deltree on %r raises TestError" % (self.home_junk))
        else:
            self.fail("Skip deltree constraint test, no '$HOME' var")
# logging (& misc?)
class Test4(U.TestCase):
    """TestLogger: warnings and errors are captured to the log file.

    NOTE(review): assertEquals is a deprecated alias of assertEqual, and
    the print(repr(ll)) looks like leftover debug output.
    """
    logf = "/tmp/__tu__log__"
    def test4a(self):
        wmsg = "a warning message"
        emsg = "an error message"
        import logging
        # file logging helps with file capture of log-messages
        tl = tu.TestLogger()
        for i in (1,2):
            # 2 passes to test clearing old file
            tl.logfile_init(self.logf)
            logging.warn(wmsg)
            # info is below the captured level, so only 2 lines expected
            logging.info("nada")
            logging.error(emsg)
            ll = tl.logfile_getlines()
            nl = len(ll)
            print(repr(ll))
            self.assertEquals(nl,2,
                tu.msg(nl,2, "pass %d: expected line count" % i))
        #del tl
if __name__ == "__main__":
    # run this module's test cases directly
    U.main()
#===eof===
| gpl-2.0 |
OsirisSPS/osiris-sps | client/share/plugins/AF9A4C281070FDB0F34CF417CDB168AB38C8A388/lib/_pyio.py | 76 | 68418 | """
Python implementation of the io module.
"""
from __future__ import (print_function, unicode_literals)
import os
import abc
import codecs
import warnings
# Import thread instead of threading to reduce startup cost
try:
from thread import allocate_lock as Lock
except ImportError:
from dummy_thread import allocate_lock as Lock
import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
from errno import EINTR
# Python 2 compatibility: make every class in this module new-style.
__metaclass__ = type
# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.
class BlockingIOError(IOError):
    """Exception raised when I/O would block on a non-blocking I/O stream."""
    def __init__(self, errno, strerror, characters_written=0):
        # NOTE(review): super(IOError, self) skips IOError.__init__ and
        # dispatches to its base class -- presumably deliberate; confirm
        # before changing.
        super(IOError, self).__init__(errno, strerror)
        # Python 2 only: `long` does not exist on Python 3.
        if not isinstance(characters_written, (int, long)):
            raise TypeError("characters_written must be a integer")
        # How many characters were written before the stream blocked.
        self.characters_written = characters_written
def open(file, mode="r", buffering=-1,
         encoding=None, errors=None,
         newline=None, closefd=True):
    r"""Open file and return a stream. Raise IOError upon failure.
    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)
    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:
    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r'       open for reading (default)
    'w'       open for writing, truncating the file first
    'a'       open for writing, appending to the end of the file if it exists
    'b'       binary mode
    't'       text mode (default)
    '+'       open a disk file for updating (reading and writing)
    'U'       universal newline mode (for backwards compatibility; unneeded
              for new code)
    ========= ===============================================================
    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.
    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.
    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer. When no buffering argument is
    given, the default buffering policy works as follows:
    * Binary files are buffered in fixed-size chunks; the size of the buffer
      is chosen using a heuristic trying to determine the underlying device's
      "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
      On many systems, the buffer will typically be 4096 or 8192 bytes long.
    * "Interactive" text files (files for which isatty() returns True)
      use line buffering. Other text files use the policy described above
      for binary files.
    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.
    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.
    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:
    * On input, if newline is None, universal newlines mode is
      enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
      these are translated into '\n' before being returned to the
      caller. If it is '', universal newline mode is enabled, but line
      endings are returned to the caller untranslated. If it has any of
      the other legal values, input lines are only terminated by the given
      string, and the line ending is returned to the caller untranslated.
    * On output, if newline is None, any '\n' characters written are
      translated to the system default line separator, os.linesep. If
      newline is '', no translation takes place. If newline is any of the
      other legal values, any '\n' characters written are translated to
      the given string.
    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.
    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.
    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    # Validate argument types up front, before touching the filesystem.
    if not isinstance(file, (basestring, int, long)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, basestring):
        raise TypeError("invalid mode: %r" % mode)
    if not isinstance(buffering, (int, long)):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, basestring):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, basestring):
        raise TypeError("invalid errors: %r" % errors)
    # Decompose the mode string; a repeated or unknown letter is an error.
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        # 'U' implies reading.
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    # Layer 1: the raw, unbuffered file object.
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    # Pick a buffer size. A negative value means "use the default":
    # the device block size when it can be determined, otherwise
    # DEFAULT_BUFFER_SIZE. buffering == 1 (or a tty) means line buffering.
    line_buffering = False
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    # Layer 2: the buffered wrapper appropriate for the mode.
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    # Layer 3 (text mode only): decode through a TextIOWrapper.
    # NOTE: this rebinds the local 'text' (previously the 't' mode flag).
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text
class DocDescriptor:
    """Descriptor supplying builtins.open.__doc__."""

    # Canonical signature line shown before the real open() docstring.
    _SIGNATURE = ("open(file, mode='r', buffering=-1, encoding=None, "
                  "errors=None, newline=None, closefd=True)\n\n")

    def __get__(self, obj, typ):
        return self._SIGNATURE + open.__doc__
class OpenWrapper:
    """Wrapper for builtins.open
    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).
    See initstdio() in Python/pythonrun.c.
    """
    # A descriptor so that help(OpenWrapper) shows the signature plus
    # the real open() documentation rather than the docstring above.
    __doc__ = DocDescriptor()
    def __new__(cls, *args, **kwargs):
        # "Instantiating" the wrapper simply forwards to module-level open().
        return open(*args, **kwargs)
class UnsupportedOperation(ValueError, IOError):
    """Raised when an IO object is asked to perform an operation it does
    not support; subclasses both ValueError and IOError so callers may
    catch either.
    """
    pass
class IOBase:
    __metaclass__ = abc.ABCMeta
    # NOTE: because the __metaclass__ assignment precedes it, the string
    # below is a plain expression statement for readers; it is not the
    # first statement of the class body and so is not bound to __doc__.
    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.
    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.
    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise a IOError when operations they do not support are called.
    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.
    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.
    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.
    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:
    with open('spam.txt', 'r') as fp:
        fp.write('Spam and eggs!')
    """
    ### Internal ###
    def _unsupported(self, name):
        """Internal: raise an exception for unsupported operations."""
        raise UnsupportedOperation("%s.%s() not supported" %
                                   (self.__class__.__name__, name))
    ### Positioning ###
    def seek(self, pos, whence=0):
        """Change stream position.
        Change the stream position to byte offset offset. offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:
        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative
        Return the new absolute position.
        """
        self._unsupported("seek")
    def tell(self):
        """Return current stream position."""
        return self.seek(0, 1)
    def truncate(self, pos=None):
        """Truncate file to size bytes.
        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")
    ### Flush and close ###
    def flush(self):
        """Flush write buffers, if applicable.
        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???
    # Name-mangled to _IOBase__closed so subclasses cannot clobber it by
    # accident; exposed read-only via the 'closed' property below.
    __closed = False
    def close(self):
        """Flush and close the IO object.
        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True
    def __del__(self):
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass
    ### Inquiries ###
    def seekable(self):
        """Return whether object supports random access.
        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False
    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)
    def readable(self):
        """Return whether object was opened for reading.
        If False, read() will raise IOError.
        """
        return False
    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)
    def writable(self):
        """Return whether object was opened for writing.
        If False, write() and truncate() will raise IOError.
        """
        return False
    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)
    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.
        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed
    def _checkClosed(self, msg=None):
        """Internal: raise an ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)
    ### Context manager ###
    def __enter__(self):
        """Context management protocol. Returns self."""
        self._checkClosed()
        return self
    def __exit__(self, *args):
        """Context management protocol. Calls close()"""
        self.close()
    ### Lower-level APIs ###
    # XXX Should these be present even if unimplemented?
    def fileno(self):
        """Returns underlying file descriptor if one exists.
        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")
    def isatty(self):
        """Return whether this is an 'interactive' stream.
        Return False if it can't be determined.
        """
        self._checkClosed()
        return False
    ### Readline[s] and writelines ###
    def readline(self, limit=-1):
        r"""Read and return a line from the stream.
        If limit is specified, at most limit bytes will be read.
        The line terminator is always b'\n' for binary files; for text
        files, the newlines argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            # peek() lets us look ahead for the newline so we can fetch
            # up to (and including) it in a single read() call.
            def nreadahead():
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            # No peek(): fall back to reading one byte at a time.
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, (int, long)):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)
    def __iter__(self):
        self._checkClosed()
        return self
    def next(self):
        # Python 2 iterator protocol: yield successive lines until EOF.
        line = self.readline()
        if not line:
            raise StopIteration
        return line
    def readlines(self, hint=None):
        """Return a list of lines from the stream.
        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is not None and not isinstance(hint, (int, long)):
            raise TypeError("integer or None expected")
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines
    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)
io.IOBase.register(IOBase)
class RawIOBase(IOBase):
    """Base class for raw binary I/O."""

    # read() is layered on readinto(): subclasses only have to supply
    # readinto() as the primitive operation. The reverse default is not
    # provided because it could recurse endlessly if a subclass
    # implemented neither method.

    def read(self, n=-1):
        """Read and return up to n bytes.

        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        buf = bytearray(n.__index__())
        count = self.readinto(buf)
        if count is None:
            # Non-blocking stream with nothing available.
            return None
        del buf[count:]
        return bytes(buf)

    def readall(self):
        """Read until EOF, using multiple read() call."""
        pieces = bytearray()
        while True:
            chunk = self.read(DEFAULT_BUFFER_SIZE)
            if not chunk:
                # chunk is b'' (EOF) or None (would block); propagate it
                # unchanged when nothing was accumulated.
                return bytes(pieces) if pieces else chunk
            pieces += chunk

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")

    def write(self, b):
        """Write the given buffer to the IO stream.

        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")
io.RawIOBase.register(RawIOBase)
# FileIO is implemented in C (_io); register it as a virtual subclass so
# isinstance checks against this module's RawIOBase succeed for it too.
from _io import FileIO
RawIOBase.register(FileIO)
class BufferedIOBase(IOBase):
    """Base class for buffered IO objects.

    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().

    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.

    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """

    def read(self, n=None):
        """Read and return up to n bytes.

        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.

        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.

        Returns an empty bytes array on EOF.

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")

    def read1(self, n=None):
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")

    def readinto(self, b):
        """Read up to len(b) bytes into b.

        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.

        Returns the number of bytes read (0 for EOF).

        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        count = len(data)
        try:
            b[:count] = data
        except TypeError as err:
            import array
            if isinstance(b, array.array):
                # array.array slice assignment only accepts another
                # array; convert the bytes first.
                b[:count] = array.array(b'b', data)
            else:
                raise err
        return count

    def write(self, b):
        """Write the given buffer to the IO stream.

        Return the number of bytes written, which is never less than
        len(b).

        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")

    def detach(self):
        """
        Separate the underlying raw stream from the buffer and return it.

        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")

io.BufferedIOBase.register(BufferedIOBase)
class _BufferedIOMixin(BufferedIOBase):
    """A mixin implementation of BufferedIOBase with an underlying raw stream.

    Most requests are forwarded verbatim to the wrapped raw stream; the
    mixin deliberately provides no read(), readinto() or write().
    """

    def __init__(self, raw):
        self._raw = raw

    ### Positioning ###

    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position >= 0:
            return new_position
        raise IOError("seek() returned an invalid position")

    def tell(self):
        position = self.raw.tell()
        if position >= 0:
            return position
        raise IOError("tell() returned an invalid position")

    def truncate(self, pos=None):
        # We're mixing buffered I/O with lower-level I/O, so flush first
        # to make both views of the current file state agree.
        self.flush()
        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX  directly to truncate?
        return self.raw.truncate(pos)

    ### Flush and close ###

    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()

    def close(self):
        # Guard clause: nothing to do once detached or already closed.
        if self.raw is None or self.closed:
            return
        self.flush()
        self.raw.close()

    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        detached, self._raw = self._raw, None
        return detached

    ### Inquiries ###

    def seekable(self):
        return self.raw.seekable()

    def readable(self):
        return self.raw.readable()

    def writable(self):
        return self.raw.writable()

    @property
    def raw(self):
        return self._raw

    @property
    def closed(self):
        return self.raw.closed

    @property
    def name(self):
        return self.raw.name

    @property
    def mode(self):
        return self.raw.mode

    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            # Raw stream has no name (e.g. wrapping a BytesIO).
            return "<_pyio.{0}>".format(clsname)
        return "<_pyio.{0} name={1!r}>".format(clsname, name)

    ### Lower-level APIs ###

    def fileno(self):
        return self.raw.fileno()

    def isatty(self):
        return self.raw.isatty()
class BytesIO(BufferedIOBase):
    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        # Backing store is a bytearray; _pos is the current offset.
        buf = bytearray()
        if initial_bytes is not None:
            # extend() (rather than the bytearray constructor) so that an
            # int argument is rejected instead of being treated as a size.
            buf.extend(initial_bytes)
        self._buffer = buf
        self._pos = 0

    def __getstate__(self):
        if self.closed:
            raise ValueError("__getstate__ on closed file")
        return self.__dict__.copy()

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        size = -1 if n is None else n
        if not isinstance(size, (int, long)):
            raise TypeError("integer argument expected, got {0!r}".format(
                type(size)))
        buflen = len(self._buffer)
        if size < 0:
            # Negative size means "read to the end".
            size = buflen
        if self._pos >= buflen:
            return b""
        stop = min(buflen, self._pos + size)
        chunk = self._buffer[self._pos:stop]
        self._pos = stop
        return bytes(chunk)

    def read1(self, n):
        """This is the same as read.
        """
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        count = len(b)
        if not count:
            return 0
        start = self._pos
        gap = start - len(self._buffer)
        if gap > 0:
            # A seek past the end followed by a write pads the hole
            # with null bytes.
            self._buffer += b'\x00' * gap
        self._buffer[start:start + count] = b
        self._pos = start + count
        return count

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            new_pos = pos
        elif whence == 1:
            new_pos = max(0, self._pos + pos)
        elif whence == 2:
            new_pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        self._pos = new_pos
        return new_pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            # Default: truncate at the current position.
            pos = self._pos
        else:
            try:
                pos.__index__
            except AttributeError:
                raise TypeError("an integer is required")
            if pos < 0:
                raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True
class BufferedReader(_BufferedIOMixin):
    """BufferedReader(raw[, buffer_size])
    A buffer for a readable, sequential BaseRawIO object.
    The constructor creates a BufferedReader for the given readable raw
    stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
    is used.
    """
    def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
        """Create a new buffered reader using the given readable raw IO object.
        """
        if not raw.readable():
            raise IOError('"raw" argument must be readable.')
        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        self.buffer_size = buffer_size
        self._reset_read_buf()
        # Serializes all accesses to the read buffer across threads.
        self._read_lock = Lock()
    def _reset_read_buf(self):
        # _read_buf holds data already fetched from the raw stream;
        # _read_pos is how much of it the caller has consumed so far.
        self._read_buf = b""
        self._read_pos = 0
    def read(self, n=None):
        """Read n bytes.
        Returns exactly n bytes of data unless the underlying raw IO
        stream reaches EOF or if the call would block in non-blocking
        mode. If n is negative, read until EOF or until read() would
        block.
        """
        if n is not None and n < -1:
            raise ValueError("invalid number of bytes to read")
        with self._read_lock:
            return self._read_unlocked(n)
    def _read_unlocked(self, n=None):
        # Caller must hold _read_lock.
        nodata_val = b""
        empty_values = (b"", None)
        buf = self._read_buf
        pos = self._read_pos
        # Special case for when the number of bytes to read is unspecified.
        if n is None or n == -1:
            self._reset_read_buf()
            chunks = [buf[pos:]] # Strip the consumed bytes.
            current_size = 0
            while True:
                # Read until EOF or until read() would block.
                try:
                    chunk = self.raw.read()
                except IOError as e:
                    # Retry raw reads interrupted by a signal (EINTR).
                    if e.errno != EINTR:
                        raise
                    continue
                if chunk in empty_values:
                    nodata_val = chunk
                    break
                current_size += len(chunk)
                chunks.append(chunk)
            return b"".join(chunks) or nodata_val
        # The number of bytes to read is specified, return at most n bytes.
        avail = len(buf) - pos # Length of the available buffered data.
        if n <= avail:
            # Fast path: the data to read is fully buffered.
            self._read_pos += n
            return buf[pos:pos+n]
        # Slow path: read from the stream until enough bytes are read,
        # or until an EOF occurs or until read() would block.
        chunks = [buf[pos:]]
        wanted = max(self.buffer_size, n)
        while avail < n:
            try:
                chunk = self.raw.read(wanted)
            except IOError as e:
                # Retry raw reads interrupted by a signal (EINTR).
                if e.errno != EINTR:
                    raise
                continue
            if chunk in empty_values:
                nodata_val = chunk
                break
            avail += len(chunk)
            chunks.append(chunk)
        # n is more then avail only when an EOF occurred or when
        # read() would have blocked.
        n = min(n, avail)
        out = b"".join(chunks)
        self._read_buf = out[n:] # Save the extra data in the buffer.
        self._read_pos = 0
        return out[:n] if out else nodata_val
    def peek(self, n=0):
        """Returns buffered bytes without advancing the position.
        The argument indicates a desired minimal number of bytes; we
        do at most one raw read to satisfy it. We never return more
        than self.buffer_size.
        """
        with self._read_lock:
            return self._peek_unlocked(n)
    def _peek_unlocked(self, n=0):
        # Caller must hold _read_lock.
        want = min(n, self.buffer_size)
        have = len(self._read_buf) - self._read_pos
        if have < want or have <= 0:
            to_read = self.buffer_size - have
            while True:
                try:
                    current = self.raw.read(to_read)
                except IOError as e:
                    # Retry raw reads interrupted by a signal (EINTR).
                    if e.errno != EINTR:
                        raise
                    continue
                break
            if current:
                self._read_buf = self._read_buf[self._read_pos:] + current
                self._read_pos = 0
        return self._read_buf[self._read_pos:]
    def read1(self, n):
        """Reads up to n bytes, with at most one read() system call."""
        # Returns up to n bytes. If at least one byte is buffered, we
        # only return buffered bytes. Otherwise, we do one raw read.
        if n < 0:
            raise ValueError("number of bytes to read must be positive")
        if n == 0:
            return b""
        with self._read_lock:
            self._peek_unlocked(1)
            return self._read_unlocked(
                min(n, len(self._read_buf) - self._read_pos))
    def tell(self):
        # The raw stream is ahead of the logical position by however much
        # read-ahead data is still unconsumed in the buffer.
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                # Relative seeks must account for unconsumed read-ahead.
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            self._reset_read_buf()
            return pos
class BufferedWriter(_BufferedIOMixin):
    """A buffer for a writeable sequential RawIO object.
    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """
    # Stack depth for the max_buffer_size DeprecationWarning; subclasses
    # that add a constructor frame (BufferedRandom) override this.
    _warning_stack_offset = 2
    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')
        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()
    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a partial
                        # write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                    raise BlockingIOError(e.errno, e.strerror, written)
            return written
    def truncate(self, pos=None):
        with self._write_lock:
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)
    def flush(self):
        with self._write_lock:
            self._flush_unlocked()
    def _flush_unlocked(self):
        # Caller must hold _write_lock.
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                try:
                    n = self.raw.write(self._write_buf)
                except IOError as e:
                    # Retry raw writes interrupted by a signal (EINTR).
                    if e.errno != EINTR:
                        raise
                    continue
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            # Account for what made it out before the stream blocked,
            # then re-raise with the corrected byte count.
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)
    def tell(self):
        # The logical position is ahead of the raw stream by the amount
        # of data still sitting in the write buffer.
        return _BufferedIOMixin.tell(self) + len(self._write_buf)
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)
class BufferedRWPair(BufferedIOBase):
    """A buffered reader and writer object together.

    Combines a buffered reader and a buffered writer into a single
    sequential IO object that can both read and write; the typical use
    case is a socket or a two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """

    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.

    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.

        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
        if not reader.readable():
            raise IOError('"reader" argument must be readable.')
        if not writer.writable():
            raise IOError('"writer" argument must be writable.')
        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)

    # Reads are served by the reader half...

    def read(self, n=None):
        return self.reader.read(-1 if n is None else n)

    def readinto(self, b):
        return self.reader.readinto(b)

    def peek(self, n=0):
        return self.reader.peek(n)

    def read1(self, n):
        return self.reader.read1(n)

    def readable(self):
        return self.reader.readable()

    # ...writes by the writer half.

    def write(self, b):
        return self.writer.write(b)

    def writable(self):
        return self.writer.writable()

    def flush(self):
        return self.writer.flush()

    def close(self):
        # Close the writer first so buffered output is flushed.
        self.writer.close()
        self.reader.close()

    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()

    @property
    def closed(self):
        return self.writer.closed
class BufferedRandom(BufferedWriter, BufferedReader):
    """A buffered interface to random access streams.
    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """
    # One extra frame (our __init__) sits between the caller and
    # BufferedWriter.__init__'s DeprecationWarning.
    _warning_stack_offset = 3
    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos
    def tell(self):
        # Whichever buffer currently holds pending data determines the
        # logical stream position.
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)
    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # BufferedWriter.truncate() flushes pending writes first.
        return BufferedWriter.truncate(self, pos)
    def read(self, n=None):
        if n is None:
            n = -1
        # Flush pending writes so the read observes them.
        self.flush()
        return BufferedReader.read(self, n)
    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)
    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)
    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)
    def write(self, b):
        if self._read_buf:
            # Undo readahead
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)
class TextIOBase(IOBase):

    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """

    # All methods below are abstract-by-convention: each raises via
    # _unsupported() and must be overridden by concrete subclasses.

    def read(self, n=-1):
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")

    def write(self, s):
        """Write string s to stream."""
        self._unsupported("write")

    def truncate(self, pos=None):
        """Truncate size to pos."""
        self._unsupported("truncate")

    def readline(self):
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")

    def detach(self):
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")

    @property
    def encoding(self):
        """Subclasses should override."""
        return None

    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.

        Subclasses should override.
        """
        return None

    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None
# Register as a virtual subclass so isinstance(x, io.TextIOBase) also
# matches this pure-Python implementation.
io.TextIOBase.register(TextIOBase)
class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Codec used when reading a file in universal newlines mode.  It wraps
    another incremental decoder, translating \r\n and \r into \n.  It also
    records the types of newlines encountered.  When used with
    translate=False, it ensures that the newline sequence is returned in
    one piece.
    """

    # Bitmask values recorded in self.seennl for each newline convention.
    _LF = 1
    _CR = 2
    _CRLF = 4

    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0
        self.pendingcr = False

    def decode(self, input, final=False):
        # Run the wrapped decoder first (or pass the input through verbatim
        # when there is no wrapped decoder).
        if self.decoder is None:
            text = input
        else:
            text = self.decoder.decode(input, final=final)
        # Reattach a '\r' held over from the previous call.
        if self.pendingcr and (text or final):
            text = "\r" + text
            self.pendingcr = False

        # Hold back a trailing '\r' so a following '\n' cannot be split
        # from it; readline() is then guaranteed to see '\r\n' whole.
        if not final and text.endswith("\r"):
            text = text[:-1]
            self.pendingcr = True

        # Tally which newline conventions have been seen so far.
        crlf = text.count('\r\n')
        cr = text.count('\r') - crlf
        lf = text.count('\n') - crlf
        self.seennl |= (lf and self._LF) | (cr and self._CR) \
                    | (crlf and self._CRLF)

        if self.translate:
            if crlf:
                text = text.replace("\r\n", "\n")
            if cr:
                text = text.replace("\r", "\n")

        return text

    def getstate(self):
        if self.decoder is None:
            pending, flags = b"", 0
        else:
            pending, flags = self.decoder.getstate()
        # The low bit of our flags records the held-back '\r'.
        flags = (flags << 1) | (1 if self.pendingcr else 0)
        return pending, flags

    def setstate(self, state):
        pending, flags = state
        self.pendingcr = bool(flags & 1)
        if self.decoder is not None:
            self.decoder.setstate((pending, flags >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    @property
    def newlines(self):
        # Map the seennl bitmask onto the value file.newlines exposes:
        # a single string, or a tuple of every convention encountered.
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n"),
                )[self.seennl]
class TextIOWrapper(TextIOBase):

    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'.  It controls the
    handling of line endings. If it is None, universal newlines is
    enabled.  With this enabled, on input, the lines endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
    """

    # Number of bytes fed to the decoder per _read_chunk() call.
    _CHUNK_SIZE = 2048

    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, basestring):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                import locale
            except ImportError:
                # Importing locale may fail if Python is being built
                encoding = "ascii"
            else:
                encoding = locale.getpreferredencoding()

        if not isinstance(encoding, basestring):
            raise ValueError("invalid encoding: %r" % encoding)

        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, basestring):
                raise ValueError("invalid errors: %r" % errors)

        self._buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        # newline=None or '' both enable universal-newline reading; only
        # None additionally translates them to '\n'.
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()

        if self._seekable and self.writable():
            position = self.buffer.tell()
            if position != 0:
                # Appending/writing mid-file: tell the encoder not to start
                # from its initial state (e.g. so a BOM is not re-emitted).
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass

    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point. We use this to reconstruct decoder states in tell().

    # Naming convention:
    #   - "bytes_..." for integer variables that count input bytes
    #   - "chars_..." for integer variables that count decoded characters

    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding='{0}'>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding='{1}'>".format(
                name, self.encoding)

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def line_buffering(self):
        return self._line_buffering

    @property
    def buffer(self):
        return self._buffer

    def seekable(self):
        return self._seekable

    def readable(self):
        return self.buffer.readable()

    def writable(self):
        return self.buffer.writable()

    def flush(self):
        self.buffer.flush()
        # Flushing re-enables tell() (it is disabled by next(); see next()).
        self._telling = self._seekable

    def close(self):
        if self.buffer is not None and not self.closed:
            self.flush()
            self.buffer.close()

    @property
    def closed(self):
        return self.buffer.closed

    @property
    def name(self):
        return self.buffer.name

    def fileno(self):
        return self.buffer.fileno()

    def isatty(self):
        return self.buffer.isatty()

    def write(self, s):
        """Write string s, applying newline translation, and return the
        number of characters written (counted before translation)."""
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, unicode):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        # Writing invalidates any read-side snapshot/decoder state.
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length

    def _get_encoder(self):
        # Lazily create and cache the incremental encoder.
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder

    def _get_decoder(self):
        # Lazily create and cache the incremental decoder, wrapped for
        # universal newlines when requested.
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder

    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0

    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars

    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n

    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """
        # The return value is True unless EOF was reached. The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value). The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.

        if self._decoder is None:
            raise ValueError("no decoder")

        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.
            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).

        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))

        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)

        return not eof

    def _pack_cookie(self, position, dec_flags=0,
                     bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result. For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
               (chars_to_skip<<192) | bool(need_eof)<<256)

    def _unpack_cookie(self, bigint):
        # Inverse of _pack_cookie: peel off 64-bit fields low to high.
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip

    def tell(self):
        """Return an opaque cookie encoding the current logical position."""
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if not self._telling:
            raise IOError("telling position disabled by next() call")
        self.flush()
        position = self.buffer.tell()
        decoder = self._decoder
        if decoder is None or self._snapshot is None:
            if self._decoded_chars:
                # This should never happen.
                raise AssertionError("pending decoded text")
            return position

        # Skip backward to the snapshot point (see _read_chunk).
        dec_flags, next_input = self._snapshot
        position -= len(next_input)

        # How many decoded characters have been used up since the snapshot?
        chars_to_skip = self._decoded_chars_used
        if chars_to_skip == 0:
            # We haven't moved from the snapshot point.
            return self._pack_cookie(position, dec_flags)

        # Starting from the snapshot position, we will walk the decoder
        # forward until it gives us enough decoded characters.
        saved_state = decoder.getstate()
        try:
            # Note our initial start point.
            decoder.setstate((b'', dec_flags))
            start_pos = position
            start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
            need_eof = 0

            # Feed the decoder one byte at a time.  As we go, note the
            # nearest "safe start point" before the current location
            # (a point where the decoder has nothing buffered, so seek()
            # can safely start from there and advance to this location).
            for next_byte in next_input:
                bytes_fed += 1
                chars_decoded += len(decoder.decode(next_byte))
                dec_buffer, dec_flags = decoder.getstate()
                if not dec_buffer and chars_decoded <= chars_to_skip:
                    # Decoder buffer is empty, so this is a safe start point.
                    start_pos += bytes_fed
                    chars_to_skip -= chars_decoded
                    start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
                if chars_decoded >= chars_to_skip:
                    break
            else:
                # We didn't get enough decoded data; signal EOF to get more.
                chars_decoded += len(decoder.decode(b'', final=True))
                need_eof = 1
                if chars_decoded < chars_to_skip:
                    raise IOError("can't reconstruct logical file position")

            # The returned cookie corresponds to the last safe start point.
            return self._pack_cookie(
                start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
        finally:
            decoder.setstate(saved_state)

    def truncate(self, pos=None):
        self.flush()
        if pos is None:
            pos = self.tell()
        return self.buffer.truncate(pos)

    def detach(self):
        if self.buffer is None:
            raise ValueError("buffer is already detached")
        self.flush()
        buffer = self._buffer
        self._buffer = None
        return buffer

    def seek(self, cookie, whence=0):
        """Seek to a cookie previously returned by tell() (whence=0),
        or to the current position (whence=1, cookie 0 only) or end of
        file (whence=2, cookie 0 only)."""
        if self.closed:
            # NOTE(review): message says "tell" though this is seek();
            # kept as-is since callers may match on the text.
            raise ValueError("tell on closed file")
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if whence == 1:  # seek relative to current position
            if cookie != 0:
                raise IOError("can't do nonzero cur-relative seeks")
            # Seeking to the current position should attempt to
            # sync the underlying buffer with the current position.
            whence = 0
            cookie = self.tell()
        if whence == 2:  # seek relative to end of file
            if cookie != 0:
                raise IOError("can't do nonzero end-relative seeks")
            self.flush()
            position = self.buffer.seek(0, 2)
            self._set_decoded_chars('')
            self._snapshot = None
            if self._decoder:
                self._decoder.reset()
            return position
        if whence != 0:
            raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
                             (whence,))
        if cookie < 0:
            raise ValueError("negative seek position %r" % (cookie,))
        self.flush()

        # The strategy of seek() is to go back to the safe start point
        # and replay the effect of read(chars_to_skip) from there.
        start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
            self._unpack_cookie(cookie)

        # Seek back to the safe start point.
        self.buffer.seek(start_pos)
        self._set_decoded_chars('')
        self._snapshot = None

        # Restore the decoder to its state from the safe start point.
        if cookie == 0 and self._decoder:
            self._decoder.reset()
        elif self._decoder or dec_flags or chars_to_skip:
            self._decoder = self._decoder or self._get_decoder()
            self._decoder.setstate((b'', dec_flags))
            self._snapshot = (dec_flags, b'')

        if chars_to_skip:
            # Just like _read_chunk, feed the decoder and save a snapshot.
            input_chunk = self.buffer.read(bytes_to_feed)
            self._set_decoded_chars(
                self._decoder.decode(input_chunk, need_eof))
            self._snapshot = (dec_flags, input_chunk)

            # Skip chars_to_skip of the decoded characters.
            if len(self._decoded_chars) < chars_to_skip:
                raise IOError("can't restore logical file position")
            self._decoded_chars_used = chars_to_skip

        # Finally, reset the encoder (merely useful for proper BOM handling)
        try:
            encoder = self._encoder or self._get_encoder()
        except LookupError:
            # Sometimes the encoder doesn't exist
            pass
        else:
            if cookie != 0:
                encoder.setstate(0)
            else:
                encoder.reset()
        return cookie

    def read(self, n=None):
        """Read and return at most n characters; read to EOF when n is
        negative, None or omitted."""
        self._checkReadable()
        if n is None:
            n = -1
        decoder = self._decoder or self._get_decoder()
        try:
            n.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if n < 0:
            # Read everything.
            result = (self._get_decoded_chars() +
                      decoder.decode(self.buffer.read(), final=True))
            self._set_decoded_chars('')
            self._snapshot = None
            return result
        else:
            # Keep reading chunks until we have n characters to return.
            eof = False
            result = self._get_decoded_chars(n)
            while len(result) < n and not eof:
                eof = not self._read_chunk()
                result += self._get_decoded_chars(n - len(result))
            return result

    def next(self):
        # Iteration disables tell() until the next flush()/exhaustion,
        # because _read_chunk skips its snapshot bookkeeping meanwhile.
        self._telling = False
        line = self.readline()
        if not line:
            self._snapshot = None
            self._telling = self._seekable
            raise StopIteration
        return line

    def readline(self, limit=None):
        """Read and return one line, keeping the (possibly translated)
        line ending; honor an optional character limit."""
        if self.closed:
            raise ValueError("read from closed file")
        if limit is None:
            limit = -1
        elif not isinstance(limit, (int, long)):
            raise TypeError("limit must be an integer")

        # Grab all the decoded text (we will rewind any extra bits later).
        line = self._get_decoded_chars()

        start = 0
        # Make the decoder if it doesn't already exist.
        if not self._decoder:
            self._get_decoder()

        pos = endpos = None
        while True:
            if self._readtranslate:
                # Newlines are already translated, only search for \n
                pos = line.find('\n', start)
                if pos >= 0:
                    endpos = pos + 1
                    break
                else:
                    start = len(line)

            elif self._readuniversal:
                # Universal newline search. Find any of \r, \r\n, \n
                # The decoder ensures that \r\n are not split in two pieces
                # In C we'd look for these in parallel of course.
                nlpos = line.find("\n", start)
                crpos = line.find("\r", start)
                if crpos == -1:
                    if nlpos == -1:
                        # Nothing found
                        start = len(line)
                    else:
                        # Found \n
                        endpos = nlpos + 1
                        break
                elif nlpos == -1:
                    # Found lone \r
                    endpos = crpos + 1
                    break
                elif nlpos < crpos:
                    # Found \n
                    endpos = nlpos + 1
                    break
                elif nlpos == crpos + 1:
                    # Found \r\n
                    endpos = crpos + 2
                    break
                else:
                    # Found \r
                    endpos = crpos + 1
                    break
            else:
                # non-universal
                pos = line.find(self._readnl)
                if pos >= 0:
                    endpos = pos + len(self._readnl)
                    break

            if limit >= 0 and len(line) >= limit:
                endpos = limit  # reached length limit
                break

            # No line ending seen yet - get more data
            while self._read_chunk():
                if self._decoded_chars:
                    break
            if self._decoded_chars:
                line += self._get_decoded_chars()
            else:
                # end of file
                self._set_decoded_chars('')
                self._snapshot = None
                return line

        if limit >= 0 and endpos > limit:
            endpos = limit  # don't exceed limit

        # Rewind _decoded_chars to just after the line ending we found.
        self._rewind_decoded_chars(len(line) - endpos)
        return line[:endpos]

    @property
    def newlines(self):
        return self._decoder.newlines if self._decoder else None
class StringIO(TextIOWrapper):
    """Text I/O implementation using an in-memory buffer.

    The initial_value argument sets the value of object.  The newline
    argument is like the one of TextIOWrapper's constructor.
    """

    def __init__(self, initial_value="", newline="\n"):
        # Back the text stream with an in-memory bytes buffer; utf-8 can
        # represent any text, so the round trip is lossless.
        super(StringIO, self).__init__(BytesIO(),
                                       encoding="utf-8",
                                       errors="strict",
                                       newline=newline)
        # Issue #5645: make universal newlines semantics the same as in the
        # C version, even under Windows.
        if newline is None:
            self._writetranslate = False
        if initial_value:
            if not isinstance(initial_value, unicode):
                initial_value = unicode(initial_value)
            self.write(initial_value)
            self.seek(0)

    def getvalue(self):
        # Flush pending text, then decode the raw bytes back to text.
        self.flush()
        return self.buffer.getvalue().decode(self._encoding, self._errors)

    def __repr__(self):
        # TextIOWrapper tells the encoding in its repr. In StringIO,
        # that's an implementation detail.
        return object.__repr__(self)

    @property
    def errors(self):
        # Not meaningful for an in-memory text buffer.
        return None

    @property
    def encoding(self):
        # Hidden: the internal utf-8 backing store is an implementation detail.
        return None

    def detach(self):
        # This doesn't make sense on StringIO.
        self._unsupported("detach")
| gpl-3.0 |
xwolf12/django | tests/auth_tests/test_basic.py | 328 | 4643 | from __future__ import unicode_literals
from django.apps import apps
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.auth.tests.custom_user import CustomUser
from django.core.exceptions import ImproperlyConfigured
from django.dispatch import receiver
from django.test import TestCase, override_settings
from django.test.signals import setting_changed
from django.utils import translation
@receiver(setting_changed)
def user_model_swapped(**kwargs):
    """Reset cached user-model state when a test overrides AUTH_USER_MODEL,
    so get_user_model() and the User manager pick up the swapped model."""
    if kwargs['setting'] == 'AUTH_USER_MODEL':
        from django.db.models.manager import ensure_default_manager
        # Reset User manager
        setattr(User, 'objects', User._default_manager)
        ensure_default_manager(User)
        apps.clear_cache()
class BasicTestCase(TestCase):
    """Smoke tests for the auth User model, AnonymousUser, and the
    swappable-user-model machinery."""

    def test_user(self):
        "Check that users can be created and can set their password"
        u = User.objects.create_user('testuser', 'test@example.com', 'testpw')
        self.assertTrue(u.has_usable_password())
        self.assertFalse(u.check_password('bad'))
        self.assertTrue(u.check_password('testpw'))

        # Check we can manually set an unusable password
        u.set_unusable_password()
        u.save()
        self.assertFalse(u.check_password('testpw'))
        self.assertFalse(u.has_usable_password())
        u.set_password('testpw')
        self.assertTrue(u.check_password('testpw'))
        # set_password(None) marks the password unusable again
        u.set_password(None)
        self.assertFalse(u.has_usable_password())

        # Check username getter
        self.assertEqual(u.get_username(), 'testuser')

        # Check authentication/permissions
        self.assertTrue(u.is_authenticated())
        self.assertFalse(u.is_staff)
        self.assertTrue(u.is_active)
        self.assertFalse(u.is_superuser)

        # Check API-based user creation with no password
        u2 = User.objects.create_user('testuser2', 'test2@example.com')
        self.assertFalse(u2.has_usable_password())

    def test_user_no_email(self):
        "Check that users can be created without an email"
        u = User.objects.create_user('testuser1')
        self.assertEqual(u.email, '')

        u2 = User.objects.create_user('testuser2', email='')
        self.assertEqual(u2.email, '')

        u3 = User.objects.create_user('testuser3', email=None)
        self.assertEqual(u3.email, '')

    def test_anonymous_user(self):
        "Check the properties of the anonymous user"
        a = AnonymousUser()
        self.assertEqual(a.pk, None)
        self.assertEqual(a.username, '')
        self.assertEqual(a.get_username(), '')
        self.assertFalse(a.is_authenticated())
        self.assertFalse(a.is_staff)
        self.assertFalse(a.is_active)
        self.assertFalse(a.is_superuser)
        self.assertEqual(a.groups.all().count(), 0)
        self.assertEqual(a.user_permissions.all().count(), 0)

    def test_superuser(self):
        "Check the creation and properties of a superuser"
        # (local name shadows the super() builtin; harmless in this scope)
        super = User.objects.create_superuser('super', 'super@example.com', 'super')
        self.assertTrue(super.is_superuser)
        self.assertTrue(super.is_active)
        self.assertTrue(super.is_staff)

    def test_get_user_model(self):
        "The current user model can be retrieved"
        self.assertEqual(get_user_model(), User)

    @override_settings(AUTH_USER_MODEL='auth.CustomUser')
    def test_swappable_user(self):
        "The current user model can be swapped out for another"
        self.assertEqual(get_user_model(), CustomUser)
        # The default User manager is disabled while the model is swapped out.
        with self.assertRaises(AttributeError):
            User.objects.all()

    @override_settings(AUTH_USER_MODEL='badsetting')
    def test_swappable_user_bad_setting(self):
        "The alternate user setting must point to something in the format app.model"
        with self.assertRaises(ImproperlyConfigured):
            get_user_model()

    @override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
    def test_swappable_user_nonexistent_model(self):
        "The current user model must point to an installed model"
        with self.assertRaises(ImproperlyConfigured):
            get_user_model()

    def test_user_verbose_names_translatable(self):
        "Default User model verbose names are translatable (#19945)"
        with translation.override('en'):
            self.assertEqual(User._meta.verbose_name, 'user')
            self.assertEqual(User._meta.verbose_name_plural, 'users')
        with translation.override('es'):
            self.assertEqual(User._meta.verbose_name, 'usuario')
            self.assertEqual(User._meta.verbose_name_plural, 'usuarios')
| bsd-3-clause |
fernandoacorreia/DjangoWAWSLogging | DjangoWAWSLogging/env/Lib/site-packages/pywin32-218-py2.7-win32.egg/isapi/__init__.py | 41 | 1240 | # The Python ISAPI package.
# Exceptions thrown by the DLL framework.
class ISAPIError(Exception):
    """Base exception for the ISAPI DLL framework.

    The named attributes (errno, strerror, funcname) match IOError and
    friends, so generic error-handling code can treat them uniformly.
    """
    def __init__(self, errno, strerror=None, funcname=None):
        # named attributes match IOError etc.
        self.errno = errno
        self.strerror = strerror
        self.funcname = funcname
        Exception.__init__(self, errno, strerror, funcname)
    def __str__(self):
        if self.strerror is None:
            # Resolve a message for the error code lazily, the first time
            # the exception is rendered; win32api may be unavailable.
            try:
                import win32api
                self.strerror = win32api.FormatMessage(self.errno).strip()
            except Exception:
                # Narrowed from a bare "except:" so KeyboardInterrupt and
                # SystemExit are not silently swallowed.
                self.strerror = "no error message is available"
        # str() looks like a win32api error.
        return str((self.errno, self.strerror, self.funcname))
class FilterError(ISAPIError):
    """Error raised from within an ISAPI filter."""
    pass
class ExtensionError(ISAPIError):
    """Error raised from within an ISAPI extension."""
    pass
# A little development aid - a filter or extension callback function can
# raise one of these exceptions, and the handler module will be reloaded.
# This means you can change your code without restarting IIS.
# After a reload, your filter/extension will have the GetFilterVersion/
# GetExtensionVersion function called, but with None as the first arg.
class InternalReloadException(Exception):
    """Raised by a filter/extension callback to request a module reload."""
    pass
| mit |
ubc/edx-platform | common/lib/xmodule/xmodule/progress.py | 110 | 5066 | '''
Progress class for modules. Represents where a student is in a module.
Useful things to know:
- Use Progress.to_js_status_str() to convert a progress into a simple
status string to pass to js.
- Use Progress.to_js_detail_str() to convert a progress into a more detailed
string to pass to js.
In particular, these functions have a canonical handling of None.
For most subclassing needs, you should only need to reimplement
frac() and __str__().
'''
import numbers
class Progress(object):
    '''Represents a progress of a/b (a out of b done)

    a and b must be numeric, but not necessarily integer, with
    0 <= a <= b and b > 0.

    Progress can only represent Progress for modules where that makes sense.
    Other modules (e.g. html) should return None from get_progress().

    TODO: add tag for module type?  Would allow for smarter merging.
    '''

    def __init__(self, a, b):
        '''Construct a Progress object from numbers a and b.

        a is clamped into [0, b]; b must be > 0 or ValueError is raised.
        '''
        # Validate types up front so every other method can assume numbers.
        if not (isinstance(a, numbers.Number) and
                isinstance(b, numbers.Number)):
            raise TypeError('a and b must be numbers. Passed {0}/{1}'.format(a, b))
        # Clamp the numerator into the legal range rather than raising.
        a = min(a, b)
        a = max(a, 0)
        if b <= 0:
            raise ValueError('fraction a/b = {0}/{1} must have b > 0'.format(a, b))
        self._a = a
        self._b = b

    def frac(self):
        '''Return the progress as a (numerator, denominator) tuple.'''
        return (self._a, self._b)

    def percent(self):
        '''Return progress as a float percentage between 0 and 100.'''
        numer, denom = self.frac()
        return numer * 100.0 / denom

    def started(self):
        '''True once any fractional progress has been made.'''
        return self.frac()[0] > 0

    def inprogress(self):
        '''True when progress is strictly between none and complete.'''
        numer, denom = self.frac()
        return 0 < numer < denom

    def done(self):
        '''True when progress is complete.'''
        numer, denom = self.frac()
        return numer == denom

    def ternary_str(self):
        '''Return "none", "in_progress" or "done".'''
        numer, denom = self.frac()
        if numer == 0:
            return "none"
        elif numer < denom:
            return "in_progress"
        else:
            return "done"

    def __eq__(self, other):
        '''Progress objects compare equal when their fractions match.'''
        if not isinstance(other, Progress):
            return False
        return self.frac() == other.frac()

    def __ne__(self, other):
        '''The opposite of equal.'''
        return not (self == other)

    def __str__(self):
        '''Return "a/b" with each number rounded to at most two decimal
        places and any trailing zeroes stripped.
        '''
        def fmt(number):
            return '{:.2f}'.format(number).rstrip('0').rstrip('.')
        numer, denom = self.frac()
        return "{0}/{1}".format(fmt(numer), fmt(denom))

    @staticmethod
    def add_counts(a, b):
        '''Combine two progress indicators: (a/b) + (c/d) = (a+c)/(b+d).

        If either argument is None, the other is returned unchanged.
        '''
        if a is None:
            return b
        if b is None:
            return a
        numer_a, denom_a = a.frac()
        numer_b, denom_b = b.frac()
        return Progress(numer_a + numer_b, denom_a + denom_b)

    @staticmethod
    def to_js_status_str(progress):
        '''Status-string form suitable for passing to js; None maps to "0".'''
        return "0" if progress is None else progress.ternary_str()

    @staticmethod
    def to_js_detail_str(progress):
        '''Detail-string form suitable for passing to js; None maps to "0".'''
        return "0" if progress is None else str(progress)
| agpl-3.0 |
ericjpj/ns-3-dev | src/visualizer/visualizer/hud.py | 189 | 5462 | import goocanvas
import core
import math
import pango
import gtk
class Axes(object):
    """Draws labelled X/Y axes along the edges of the visualizer canvas
    and keeps them synchronized with scrolling and zooming."""

    def __init__(self, viz):
        self.viz = viz  # the visualizer object (provides canvas, zoom, adjustments)
        self.color = 0x8080C0FF  # RGBA colour used for both lines and labels
        self.hlines = goocanvas.Path(parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color)
        self.hlines.lower(None)
        self.vlines = goocanvas.Path(parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color)
        self.vlines.lower(None)

        self.labels = []

        # Redraw whenever the viewport scrolls or resizes.
        hadj = self.viz.get_hadjustment()
        vadj = self.viz.get_vadjustment()
        def update(adj):
            if self.visible:
                self.update_view()
        hadj.connect("value-changed", update)
        vadj.connect("value-changed", update)
        hadj.connect("changed", update)
        vadj.connect("changed", update)
        self.visible = True
        self.update_view()

    def set_visible(self, visible):
        """Show or hide the axes (lines and tick labels)."""
        self.visible = visible
        if self.visible:
            self.hlines.props.visibility = goocanvas.ITEM_VISIBLE
            self.vlines.props.visibility = goocanvas.ITEM_VISIBLE
        else:
            self.hlines.props.visibility = goocanvas.ITEM_HIDDEN
            self.vlines.props.visibility = goocanvas.ITEM_HIDDEN
            for label in self.labels:
                label.props.visibility = goocanvas.ITEM_HIDDEN

    def _compute_divisions(self, xi, xf):
        """Return (x0, div): the first tick position and the tick spacing
        for an axis spanning [xi, xf], aiming for ~5 "round" divisions."""
        assert xf > xi
        dx = xf - xi
        size = dx
        ndiv = 5
        text_width = dx/ndiv/2

        def rint(x):
            # round-to-nearest helper
            return math.floor(x+0.5)

        dx_over_ndiv = dx / ndiv
        for n in range(5): # iterate 5 times to find optimum division size
            #/* div: length of each division */
            tbe = math.log10(dx_over_ndiv)#; /* looking for approx. 'ndiv' divisions in a length 'dx' */
            div = pow(10, rint(tbe))#; /* div: power of 10 closest to dx/ndiv */
            if math.fabs(div/2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv): #/* test if div/2 is closer to dx/ndiv */
                div /= 2
            elif math.fabs(div*2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv):
                div *= 2 # /* test if div*2 is closer to dx/ndiv */
            x0 = div*math.ceil(xi / div) - div
            if n > 1:
                ndiv = rint(size / text_width)
        return x0, div

    def update_view(self):
        """Recompute and redraw both axes for the current viewport/zoom."""
        if self.viz.zoom is None:
            return

        # Hide last pass's labels; get_label() below recycles them.
        unused_labels = self.labels
        self.labels = []
        for label in unused_labels:
            label.set_property("visibility", goocanvas.ITEM_HIDDEN)
        def get_label():
            # Reuse a hidden label when available, else create a new one.
            try:
                label = unused_labels.pop(0)
            except IndexError:
                label = goocanvas.Text(parent=self.viz.canvas.get_root_item(), stroke_color_rgba=self.color)
            else:
                label.set_property("visibility", goocanvas.ITEM_VISIBLE)
            label.lower(None)
            self.labels.append(label)
            return label

        hadj = self.viz.get_hadjustment()
        vadj = self.viz.get_vadjustment()
        zoom = self.viz.zoom.value
        # Tick length / label offset, kept constant in screen pixels.
        offset = 10/zoom

        # Visible viewport corners in canvas coordinates.
        x1, y1 = self.viz.canvas.convert_from_pixels(hadj.value, vadj.value)
        x2, y2 = self.viz.canvas.convert_from_pixels(hadj.value + hadj.page_size, vadj.value + vadj.page_size)
        line_width = 5.0/self.viz.zoom.value

        # draw the horizontal axis
        self.hlines.set_property("line-width", line_width)
        yc = y2 - line_width/2

        sim_x1 = x1/core.PIXELS_PER_METER
        sim_x2 = x2/core.PIXELS_PER_METER
        x0, xdiv = self._compute_divisions(sim_x1, sim_x2)
        path = ["M %r %r L %r %r" % (x1, yc, x2, yc)]
        x = x0
        while x < sim_x2:
            path.append("M %r %r L %r %r" % (core.PIXELS_PER_METER*x, yc - offset, core.PIXELS_PER_METER*x, yc))
            label = get_label()
            label.set_properties(font=("Sans Serif %f" % int(12/zoom)),
                                 text=("%G" % x),
                                 fill_color_rgba=self.color,
                                 alignment=pango.ALIGN_CENTER,
                                 anchor=gtk.ANCHOR_S,
                                 x=core.PIXELS_PER_METER*x,
                                 y=(yc - offset))
            x += xdiv
        del x
        self.hlines.set_property("data", " ".join(path))

        # draw the vertical axis
        self.vlines.set_property("line-width", line_width)
        xc = x1 + line_width/2

        sim_y1 = y1/core.PIXELS_PER_METER
        sim_y2 = y2/core.PIXELS_PER_METER

        y0, ydiv = self._compute_divisions(sim_y1, sim_y2)
        path = ["M %r %r L %r %r" % (xc, y1, xc, y2)]
        y = y0
        while y < sim_y2:
            path.append("M %r %r L %r %r" % (xc, core.PIXELS_PER_METER*y, xc + offset, core.PIXELS_PER_METER*y))
            label = get_label()
            label.set_properties(font=("Sans Serif %f" % int(12/zoom)),
                                 text=("%G" % y),
                                 fill_color_rgba=self.color,
                                 alignment=pango.ALIGN_LEFT,
                                 anchor=gtk.ANCHOR_W,
                                 x=xc + offset,
                                 y=core.PIXELS_PER_METER*y)
            y += ydiv

        self.vlines.set_property("data", " ".join(path))

        # Keep any leftover (still hidden) labels around for reuse next pass.
        self.labels.extend(unused_labels)
| gpl-2.0 |
bitthunder-toolchain/gdb | gdb/python/lib/gdb/prompt.py | 137 | 4210 | # Extended prompt utilities.
# Copyright (C) 2011-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Extended prompt library functions."""
import gdb
import os
def _prompt_pwd(ignore):
"The current working directory."
return os.getcwdu()
def _prompt_object_attr(func, what, attr, nattr):
"""Internal worker for fetching GDB attributes."""
if attr is None:
attr = nattr
try:
obj = func()
except gdb.error:
return '<no %s>' % what
if hasattr(obj, attr):
result = getattr(obj, attr)
if callable(result):
result = result()
return result
else:
return '<no attribute %s on current %s>' % (attr, what)
def _prompt_frame(attr):
"The selected frame; an argument names a frame parameter."
return _prompt_object_attr(gdb.selected_frame, 'frame', attr, 'name')
def _prompt_thread(attr):
"The selected thread; an argument names a thread parameter."
return _prompt_object_attr(gdb.selected_thread, 'thread', attr, 'num')
def _prompt_version(attr):
"The version of GDB."
return gdb.VERSION
def _prompt_esc(attr):
"The ESC character."
return '\033'
def _prompt_bs(attr):
"A backslash."
return '\\'
def _prompt_n(attr):
"A newline."
return '\n'
def _prompt_r(attr):
"A carriage return."
return '\r'
def _prompt_param(attr):
"A parameter's value; the argument names the parameter."
return gdb.parameter(attr)
def _prompt_noprint_begin(attr):
"Begins a sequence of non-printing characters."
return '\001'
def _prompt_noprint_end(attr):
"Ends a sequence of non-printing characters."
return '\002'
prompt_substitutions = {
'e': _prompt_esc,
'\\': _prompt_bs,
'n': _prompt_n,
'r': _prompt_r,
'v': _prompt_version,
'w': _prompt_pwd,
'f': _prompt_frame,
't': _prompt_thread,
'p': _prompt_param,
'[': _prompt_noprint_begin,
']': _prompt_noprint_end
}
def prompt_help():
    """Generate help dynamically from the __doc__ strings of attribute
    functions."""
    lines = []
    for key in sorted(prompt_substitutions.keys()):
        lines.append(' \\%s\t%s\n' % (key, prompt_substitutions[key].__doc__))
    result = ''.join(lines)
    result += """
A substitution can be used in a simple form, like "\\f".
An argument can also be passed to it, like "\\f{name}".
The meaning of the argument depends on the particular substitution."""
    return result
def substitute_prompt(prompt):
    "Perform substitutions on PROMPT."
    pieces = []
    length = len(prompt)
    i = 0
    while i < length:
        ch = prompt[i]
        if ch != '\\':
            # Ordinary character: copy it through unchanged.
            pieces.append(ch)
            i = i + 1
            continue
        # Escape sequence: inspect the character after the backslash.
        i = i + 1
        if i >= length:
            # Trailing lone backslash is dropped.
            break
        cmdch = prompt[i]
        if cmdch not in prompt_substitutions:
            # Unrecognized escapes are turned into the escaped
            # character itself.
            pieces.append(cmdch)
            i = i + 1
            continue
        cmd = prompt_substitutions[cmdch]
        arg = None
        if i + 1 < length and prompt[i + 1] == '{':
            # Optional "{arg}" suffix: scan for the closing brace.
            j = i + 1
            while j < length and prompt[j] != '}':
                j = j + 1
            # Just ignore formatting errors (unterminated brace -> no arg).
            if j < length and prompt[j] == '}':
                arg = prompt[i + 2 : j]
            i = j
        pieces.append(str(cmd(arg)))
        i = i + 1
    return ''.join(pieces)
| gpl-2.0 |
liduanw/viewfinder | backend/watchdog/scenario.py | 13 | 10120 | # Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Viewfinder watchdog scenario.
"""
__author__ = 'matt@emailscrubbed.com (Matt Tracy)'
import logging
import json
import os
import re
import time
import urllib
from viewfinder.backend.base import util, secrets
from viewfinder.backend.base.environ import ServerEnvironment
from viewfinder.backend.www import www_util
from tornado.ioloop import IOLoop
from tornado import httpclient, options, web
_GOOGLE_OAUTH2_DEVICECODE_URL = 'https://accounts.google.com/o/oauth2/device/code'
_GOOGLE_OAUTH2_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
_GOOGLE_OAUTH2_SCOPES = 'https://www.googleapis.com/auth/userinfo.profile ' \
'https://www.googleapis.com/auth/userinfo.email ' \
'https://accounts.google.com/o/oauth2/auth'
options.define('watchdog_auth_dir', './local/watchdog', help='Storage location for watchdog authorization credentials.')
options.define('watchdog_auth_reset', False, help='If true, resets credentials for watchdog logins.')
class ScenarioLoginError(Exception):
    """Error occurred during the login process for a scenario client."""
class Scenario(object):
    """Class describes a scenario objects. Scenarios are initialized with a handler function, a name and a description.
    The frequency parameter is the frequency in seconds to repeat this scenario.

    The handler method is invoked with a ScenarioDevice client, a logger and a barrier callback each time it is run.
    The handler can indicate its status by using the logger provided to it - a log message of level 'error' will result
    in an immediate alert being sent. A series of too many errors will also result in an alert. The handler is completed
    by calling the provided barrier callback.
    """
    # Friendly messages for the HTTP status codes we expect to see from the
    # viewfinder service; anything else falls back to a generic message.
    _http_error_dict = {404: '404 File not found received the viewfinder service.',
                        500: '500 Internal server error from viewfinder service.',
                        599: '599 Timeout when attempting to reach the viewfinder service.'}

    def __init__(self, name, handler, frequency, description=None):
        self.name = name
        self.handler = handler
        self.frequency = frequency
        self.description = description
        self._timeout = None  # IOLoop timeout handle while the loop is scheduled

    def StartLoop(self, device):
        """Start this scenario. It will run at the configured frequency until StopLoop() is called."""
        logger = logging.LoggerAdapter(logging.getLogger(), {'scenario': self.name})

        def _OnComplete():
            # Schedule the next iteration after the configured interval.
            self._timeout = IOLoop.current().add_timeout(time.time() + self.frequency, _RunIteration)

        def _OnException(typ, val, tb):
            if (typ, val, tb) != (None, None, None):
                if typ is web.HTTPError:
                    message = self._http_error_dict.get(val.status_code,
                                                        'HTTP status %d received from viewfinder: %s' %
                                                        (val.status_code, val.log_message))
                    logger.error(message)
                else:
                    logger.error('Unknown exception in scenario %s', self.name, exc_info=(typ, val, tb))
            # Errors never stop the loop; keep scheduling iterations.
            _OnComplete()

        def _RunIteration():
            with util.Barrier(_OnComplete, _OnException) as barrier:
                self.handler(device, logger, barrier.Callback())

        _RunIteration()

    def StopLoop(self):
        """Stop the loop if it is already running."""
        if self._timeout is not None:
            IOLoop.current().remove_timeout(self._timeout)
class ScenarioDevice(object):
    """Represents a single client.

    Holds the viewfinder service URL and an authorization cookie that is
    persisted to a local file between runs.
    """

    def __init__(self, name):
        self.name = name
        self._svc_url = 'https://%s:%d/' % (ServerEnvironment.GetHost(), options.options.port)
        self._user_cookie = None
        if options.options.watchdog_auth_reset:
            self._ClearAuthentication()
        else:
            self._LoadAuthentication()

    def SendRequest(self, service_path, callback, method='POST', **kwargs):
        """Send an arbitrary service request to the viewfinder service from this client.

        The request is a json request consisting of any additional keyword arguments to
        SendRequest.  Raises ScenarioLoginError if the device has no auth cookie and
        ValueError for an unsupported HTTP method.
        """
        if self._user_cookie is None:
            raise ScenarioLoginError('Client %s can not be used until it is has a valid authorization cookie.'
                                     % self.name)
        http_client = httpclient.AsyncHTTPClient()
        url = self._GetUrl(service_path)
        headers = {'Cookie': 'user=%s;_xsrf=watchdog' % (self._user_cookie),
                   'X-Xsrftoken': 'watchdog'}
        if method == 'GET':
            # GET parameters travel in the query string.
            if len(kwargs) > 0:
                url += '?' + urllib.urlencode(kwargs)
            http_client.fetch(url, method=method, callback=callback, validate_cert=False, headers=headers)
        elif method == 'POST':
            # POST parameters travel as a JSON body.
            headers['Content-Type'] = 'application/json'
            request_body = json.dumps(kwargs)
            http_client.fetch(url, method=method, body=request_body, callback=callback, validate_cert=False,
                              headers=headers)
        else:
            raise ValueError('Invalid method %s: must be one of "GET" or "POST"' % method)

    def IsAuthenticated(self):
        """Return true if this device has a valid authentication cookie from the server."""
        return self._user_cookie is not None

    def GetUserCode(self, callback):
        """Retrieve a user code from google's device login API. The given callback will
        be invoked with the user code and a URL where the user code can be used to
        authenticate a google account.
        """
        def _OnGetDeviceCode(response):
            response_dict = www_util.ParseJSONResponse(response)
            # Remember the device code for the subsequent polling step.
            self._device_code = response_dict.get('device_code')
            callback(response_dict.get('user_code'), response_dict.get('verification_url'))

        # Get a device code from google's API
        request_args = {'client_id': secrets.GetSecret('google_client_mobile_id'),
                        'scope': _GOOGLE_OAUTH2_SCOPES}
        url = _GOOGLE_OAUTH2_DEVICECODE_URL
        http_client = httpclient.AsyncHTTPClient()
        http_client.fetch(url, method='POST',
                          body=urllib.urlencode(request_args), callback=_OnGetDeviceCode)

    def PollForAuthentication(self, callback):
        """Poll the google authorization service to find if the user code generated
        in a previous call to GetUserCode() has been used to authorize a google user account.
        If an account has been authorized, this method will use that authorization to log
        into the viewfinder service, thus retrieving the needed authentication cookie.

        The given callback will be invoked with a boolean parameter to indicate whether
        or not the authentication was successful. If authentication was not successful, then
        this method can be polled again until it is successful.
        """
        if not hasattr(self, '_device_code'):
            raise ScenarioLoginError('Must call GetUserCode() on a device before using the '
                                     'PollForAuthentication() method.')
        http_client = httpclient.AsyncHTTPClient()

        def _OnLogin(response):
            if not response.code in (200, 302):
                raise ScenarioLoginError('Error during login process:%s' % response.error)
            self._user_cookie = self._GetUserCookieFromResponse(response)
            self._SaveAuthentication()
            callback(True)

        def _OnPollTokenEndpoint(response):
            json_response = www_util.ParseJSONResponse(response)
            if 'error' in json_response:
                # User has not completed the authorization yet; caller may poll again.
                callback(False)
            else:
                refresh_token = json_response.get('refresh_token')
                url = 'https://%s:%d/auth/google?refresh_token=%s' % \
                      (ServerEnvironment.GetHost(), options.options.port, refresh_token)
                http_client.fetch(url, method='POST',
                                  callback=_OnLogin,
                                  body=json.dumps({}),
                                  validate_cert=False, follow_redirects=False,
                                  headers={'Content-Type': 'application/json'})

        url = _GOOGLE_OAUTH2_TOKEN_URL
        request_args = {'client_id': secrets.GetSecret('google_client_mobile_id'),
                        'client_secret': secrets.GetSecret('google_client_mobile_secret'),
                        'code': self._device_code,
                        'grant_type': 'http://oauth.net/grant_type/device/1.0'}
        http_client.fetch(url, method='POST',
                          body=urllib.urlencode(request_args),
                          callback=_OnPollTokenEndpoint)

    def _GetUserCookieFromResponse(self, response):
        """Extracts the user cookie from an HTTP response and returns it if
        it exists, or returns None if not."""
        user_cookie_header = [h for h in response.headers.get_list('Set-Cookie') if h.startswith('user=')][-1]
        return re.match(r'user="?([^";]*)', user_cookie_header).group(1)

    def _LoadAuthentication(self):
        """Loads a previous authorization cookie for this client from file."""
        auth_file = self._AuthFilePath()
        if os.path.exists(auth_file):
            try:
                # 'with' guarantees the handle is closed even on error
                # (the previous implementation leaked the file handle).
                with open(auth_file, 'r') as fh:
                    self._user_cookie = fh.read()
            except Exception:
                logging.fatal('Exception loading authorization file %s', auth_file, exc_info=True)
                raise ScenarioLoginError('Error loading auth file for client %s.' % self.name)

    def _SaveAuthentication(self):
        """Save the authorization cookie to a local file."""
        auth_file = self._AuthFilePath()
        try:
            dir_name = os.path.dirname(auth_file)
            if not os.path.exists(dir_name):
                os.makedirs(dir_name)
            # 'with' closes the handle even if the write fails.
            with open(auth_file, 'w') as fh:
                fh.write(self._user_cookie)
        except Exception:
            logging.fatal('Failed to save authorization file %s', auth_file, exc_info=True)
            raise ScenarioLoginError('Error saving auth file for client %s.' % self.name)

    def _ClearAuthentication(self):
        """Clears any existing authorization for this client."""
        auth_file = self._AuthFilePath()
        if os.path.exists(auth_file):
            try:
                os.remove(auth_file)
            except Exception:
                logging.fatal('Could not clear authorization file %s', auth_file, exc_info=True)
                raise ScenarioLoginError('Error clearing auth file for client %s.' % self.name)

    def _AuthFilePath(self):
        # Per-client auth files live under the configured watchdog auth dir.
        return os.path.join(options.options.watchdog_auth_dir, self.name)

    def _GetUrl(self, path):
        return self._svc_url + path
| apache-2.0 |
SickGear/SickGear | lib/html5lib/treebuilders/__init__.py | 156 | 3592 | """A collection of modules for building different kinds of trees from HTML
documents.
To create a treebuilder for a new type of tree, you need to do
implement several things:
1. A set of classes for various types of elements: Document, Doctype, Comment,
Element. These must implement the interface of ``base.treebuilders.Node``
(although comment nodes have a different signature for their constructor,
see ``treebuilders.etree.Comment``) Textual content may also be implemented
as another node type, or not, as your tree implementation requires.
2. A treebuilder object (called ``TreeBuilder`` by convention) that inherits
from ``treebuilders.base.TreeBuilder``. This has 4 required attributes:
* ``documentClass`` - the class to use for the bottommost node of a document
* ``elementClass`` - the class to use for HTML Elements
* ``commentClass`` - the class to use for comments
* ``doctypeClass`` - the class to use for doctypes
It also has one required method:
* ``getDocument`` - Returns the root node of the complete document tree
3. If you wish to run the unit tests, you must also create a ``testSerializer``
method on your treebuilder which accepts a node and returns a string
containing Node and its children serialized according to the format used in
the unittests
"""
from __future__ import absolute_import, division, unicode_literals
from .._utils import default_etree
treeBuilderCache = {}


def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Get a TreeBuilder class for various types of trees with built-in support

    :arg treeType: the name of the tree type required (case-insensitive). Supported
        values are:

        * "dom" - A generic builder for DOM implementations, defaulting to a
          xml.dom.minidom based implementation.
        * "etree" - A generic builder for tree implementations exposing an
          ElementTree-like interface, defaulting to xml.etree.cElementTree if
          available and xml.etree.ElementTree if not.
        * "lxml" - A etree-based builder for lxml.etree, handling limitations
          of lxml's implementation.

    :arg implementation: (Currently applies to the "etree" and "dom" tree
        types). A module implementing the tree type e.g. xml.etree.ElementTree
        or xml.etree.cElementTree.

    :arg kwargs: Any additional options to pass to the TreeBuilder when
        creating it.

    Example:

    >>> from html5lib.treebuilders import getTreeBuilder
    >>> builder = getTreeBuilder('etree')
    """
    treeType = treeType.lower()
    cached = treeBuilderCache.get(treeType)
    if cached is not None:
        return cached
    if treeType == "dom":
        from . import dom
        # Come up with a sane default (pref. from the stdlib)
        if implementation is None:
            from xml.dom import minidom
            implementation = minidom
        # NEVER cache here, caching is done in the dom submodule
        return dom.getDomModule(implementation, **kwargs).TreeBuilder
    if treeType == "lxml":
        from . import etree_lxml
        treeBuilderCache[treeType] = etree_lxml.TreeBuilder
        return treeBuilderCache[treeType]
    if treeType == "etree":
        from . import etree
        if implementation is None:
            implementation = default_etree
        # NEVER cache here, caching is done in the etree submodule
        return etree.getETreeModule(implementation, **kwargs).TreeBuilder
    raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
| gpl-3.0 |
peterfpeterson/mantid | scripts/Muon/GUI/ElementalAnalysis2/auto_widget/ea_match_table_presenter.py | 3 | 1172 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2021 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
class EAMatchTablePresenter(object):
    """Keeps the match-table view in sync with an in-memory list of entries.

    An entry is identified by its first two columns; updating an entry whose
    identifier already exists replaces the old row.
    """

    def __init__(self, view):
        self.view = view
        self.table_entries = []  # model: list of row entries mirrored in the view

    def update_table(self, entry):
        """Add entry, replacing any existing row with the same first two columns."""
        existing = self.find_entry_index(entry)
        if existing != -1:
            self.remove_entry(existing)
        self.table_entries.append(entry)
        self.view.add_entry_to_table(entry)

    def remove_entry(self, row_index):
        """Delete the entry at row_index from both the model and the view."""
        del self.table_entries[row_index]
        self.view.remove_row(row_index)

    def find_entry_index(self, new_entry):
        """
        Finds index of entry using first 2 columns returns -1 if not found
        """
        for position, existing_entry in enumerate(self.table_entries):
            if existing_entry[:2] == new_entry[:2]:
                return position
        return -1

    def clear_table(self):
        """Empty both the model and the view."""
        self.table_entries = []
        self.view.clear_table()
| gpl-3.0 |
parrishmyers/tacmap | ThirdParty/googletest/test/gtest_uninitialized_test.py | 372 | 2482 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test warns the user when not initialized properly."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gtest_test_utils
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
def Assert(condition):
  """Raises AssertionError unless the condition holds."""
  if condition:
    return
  raise AssertionError


def AssertEq(expected, actual):
  """Raises AssertionError, after printing both values, unless they are equal."""
  if expected == actual:
    return
  print('Expected: %s' % (expected,))
  print(' Actual: %s' % (actual,))
  raise AssertionError
def TestExitCodeAndOutput(command):
  """Runs the given command and verifies its exit code and output."""
  # An uninitialized Google Test binary is expected to exit with code 1
  # and to mention InitGoogleTest in its output.
  p = gtest_test_utils.Subprocess(command)
  Assert(p.exited)
  AssertEq(1, p.exit_code)
  Assert('InitGoogleTest' in p.output)


class GTestUninitializedTest(gtest_test_utils.TestCase):
  def testExitCodeAndOutput(self):
    TestExitCodeAndOutput(COMMAND)


if __name__ == '__main__':
  gtest_test_utils.Main()
| mit |
sloria/osf.io | osf/management/commands/purge_test_node.py | 23 | 2311 | from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from osf.models import Node, BaseFileNode, TrashedFileNode
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def remove_logs_and_files(node_guid):
    """Strip a node (and its primary descendants) down to a minimal state.

    For each node in the tree this deletes all but the newest file version,
    every trashed file node, and all but the earliest log entry.
    """
    assert node_guid, 'Expected truthy node_id, got {}'.format(node_guid)
    node = Node.load(node_guid)
    assert node, 'Unable to find node with guid {}'.format(node_guid)
    for descendant in node.node_and_primary_descendants():
        logger.info('{} - Deleting file versions...'.format(descendant._id))
        # Only non-root files carry versions worth pruning.
        for file_node in descendant.files.exclude(parent__isnull=True):
            try:
                newest = file_node.versions.latest('date_created')
                file_node.versions.exclude(id=newest.id).delete()
            except file_node.versions.model.DoesNotExist:
                # No FileVersions, skip
                pass
        logger.info('{} - Deleting trashed file nodes...'.format(descendant._id))
        BaseFileNode.objects.filter(type__in=TrashedFileNode._typedmodels_subtypes, node=descendant).delete()
        logger.info('{} - Deleting logs...'.format(descendant._id))
        descendant.logs.exclude(id=descendant.logs.earliest().id).delete()
class Command(BaseCommand):
    """
    Removes all logs and non-root files from a node.

    For cleaning up after RunScope tests that get out of hand.
    """

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            '--node',
            type=str,
            action='store',
            dest='node_id',
            required=True,
            help='Node guid to purge data from',
        )
        parser.add_argument(
            '--i-am-really-sure-about-this',
            action='store_true',
            dest='really_delete',
            help='Actually delete data'
        )

    def handle(self, *args, **options):
        node_id = options.get('node_id', None)
        really_delete = options.get('really_delete', False)
        if really_delete:
            # Only attach the file logger when we actually intend to commit.
            script_utils.add_file_logger(logger, __file__)
        with transaction.atomic():
            remove_logs_and_files(node_id)
            if not really_delete:
                # Raising inside the atomic block rolls everything back,
                # making the default invocation a safe dry run.
                raise RuntimeError('Not certain enough -- transaction rolled back')
        logger.info('Committing...')
| apache-2.0 |
ravihansa3000/stratos | components/org.apache.stratos.python.cartridge.agent/src/main/python/cartridge.agent/cartridge.agent/modules/util/asyncscheduledtask.py | 5 | 2984 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
from threading import Event, Thread

from log import LogFactory
log = LogFactory().get_log(__name__)
class AbstractAsyncScheduledTask:
    """
    Exposes the contract to follow to implement a scheduled task to be executed by the ScheduledExecutor
    """

    def execute_task(self):
        """
        Override this method and implement the task to be executed by the ScheduledExecutor with a specified
        interval.
        """
        raise NotImplementedError
class ScheduledExecutor(Thread):
    """
    Executes a given task with a given interval until being terminated
    """

    def __init__(self, delay, task):
        """
        Creates a ScheduledExecutor thread to handle interval based repeated execution of a given task of type
        AbstractAsyncScheduledTask

        :param int delay: The interval in seconds to keep between executions
        :param AbstractAsyncScheduledTask task: The task to be implemented
        :return:
        """
        Thread.__init__(self)
        self.delay = delay  # seconds between executions
        self.task = task  # AbstractAsyncScheduledTask to run on each tick
        self.terminated = False  # kept public for callers that poll it
        # Event makes terminate() interrupt an in-progress wait immediately,
        # instead of the loop sleeping out the full delay before noticing.
        self._stop_event = Event()
        self.setName("ScheduledExecutorForTask%s" % self.task.__class__.__name__)
        self.setDaemon(True)
        log.debug("Created a ScheduledExecutor thread for task %s" % self.task.__class__.__name__)

    def run(self):
        """
        Repeatedly spawn a worker thread for the task, pausing 'delay' seconds
        between iterations, until terminate() is called.
        :return:
        """
        while not self.terminated:
            # wait() returns early as soon as terminate() sets the event.
            self._stop_event.wait(self.delay)
            if not self.terminated:
                task_thread = Thread(target=self.task.execute_task)
                task_thread.setName("WorkerThreadForTask%s" % self.task.__class__.__name__)
                task_thread.setDaemon(True)
                log.debug("Starting a worker thread for the Scheduled Executor for task %s" % self.task.__class__.__name__)
                task_thread.start()

    def terminate(self):
        """
        Terminate the scheduled task.  Takes effect immediately, interrupting
        any in-progress delay wait.
        :return: void
        """
        self.terminated = True
        self._stop_event.set()
| apache-2.0 |
minhpqn/chainer | cupy/statistics/order.py | 9 | 1538 | from cupy import reduction
def amin(a, axis=None, out=None, keepdims=False, dtype=None):
    """Returns the minimum of an array or the minimum along an axis.

    Args:
        a (cupy.ndarray): Array to take the minimum.
        axis (int): Along which axis to take the minimum. The flattened array
            is used by default.
        out (cupy.ndarray): Output array.
        keepdims (bool): If True, the axis is remained as an axis of size one.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: The minimum of ``a``, along the axis if specified.

    .. seealso:: :func:`numpy.amin`

    """
    # Thin wrapper: delegate the reduction to the core implementation.
    reduce_args = dict(axis=axis, dtype=dtype, out=out, keepdims=keepdims)
    return reduction.amin(a, **reduce_args)
def amax(a, axis=None, out=None, keepdims=False, dtype=None):
    """Returns the maximum of an array or the maximum along an axis.

    Args:
        a (cupy.ndarray): Array to take the maximum.
        axis (int): Along which axis to take the maximum. The flattened array
            is used by default.
        out (cupy.ndarray): Output array.
        keepdims (bool): If True, the axis is remained as an axis of size one.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: The maximum of ``a``, along the axis if specified.

    .. seealso:: :func:`numpy.amax`

    """
    # Thin wrapper: delegate the reduction to the core implementation.
    reduce_args = dict(axis=axis, dtype=dtype, out=out, keepdims=keepdims)
    return reduction.amax(a, **reduce_args)
# TODO(okuta): Implement nanmin
# TODO(okuta): Implement nanmax
# TODO(okuta): Implement ptp
# TODO(okuta): Implement percentile
| mit |
txemi/ansible | lib/ansible/modules/cloud/centurylink/clc_alert_policy.py | 70 | 18159 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_alert_policy
short_description: Create or Delete Alert Policies at CenturyLink Cloud.
description:
- An Ansible module to Create or Delete Alert Policies at CenturyLink Cloud.
version_added: "2.0"
options:
alias:
description:
- The alias of your CLC Account
required: True
name:
description:
- The name of the alert policy. This is mutually exclusive with id
required: False
default: None
id:
description:
- The alert policy id. This is mutually exclusive with name
required: False
default: None
alert_recipients:
description:
- A list of recipient email ids to notify the alert.
This is required for state 'present'
required: False
default: None
metric:
description:
- The metric on which to measure the condition that will trigger the alert.
This is required for state 'present'
required: False
default: None
choices: ['cpu','memory','disk']
duration:
description:
- The length of time in minutes that the condition must exceed the threshold.
This is required for state 'present'
required: False
default: None
threshold:
description:
- The threshold that will trigger the alert when the metric equals or exceeds it.
This is required for state 'present'
This number represents a percentage and must be a value between 5.0 - 95.0 that is a multiple of 5.0
required: False
default: None
state:
description:
- Whether to create or delete the policy.
required: False
default: present
choices: ['present','absent']
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
---
- name: Create Alert Policy Example
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create an Alert Policy for disk above 80% for 5 minutes
clc_alert_policy:
alias: wfad
name: 'alert for disk > 80%'
alert_recipients:
- test1@centurylink.com
- test2@centurylink.com
metric: 'disk'
duration: '00:05:00'
threshold: 80
state: present
register: policy
- name: debug
debug: var=policy
---
- name: Delete Alert Policy Example
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Delete an Alert Policy
clc_alert_policy:
alias: wfad
name: 'alert for disk > 80%'
state: absent
register: policy
- name: debug
debug: var=policy
'''
RETURN = '''
policy:
description: The alert policy information
returned: success
type: dict
sample:
{
"actions": [
{
"action": "email",
"settings": {
"recipients": [
"user1@domain.com",
"user1@domain.com"
]
}
}
],
"id": "ba54ac54a60d4a4f1ed6d48c1ce240a7",
"links": [
{
"href": "/v2/alertPolicies/alias/ba54ac54a60d4a4fb1d6d48c1ce240a7",
"rel": "self",
"verbs": [
"GET",
"DELETE",
"PUT"
]
}
],
"name": "test_alert",
"triggers": [
{
"duration": "00:05:00",
"metric": "disk",
"threshold": 80.0
}
]
}
'''
__version__ = '${version}'
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import APIFailedResponse
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcAlertPolicy:
    """
    Ansible-facing wrapper that creates, updates and deletes CenturyLink
    Cloud alert policies through the CLC v2 REST API.
    """

    clc = clc_sdk
    module = None

    def __init__(self, module):
        """
        Construct module
        :param module: the AnsibleModule driving this run
        """
        self.module = module
        self.policy_dict = {}
        # Fail early if the optional runtime dependencies are missing.
        if not CLC_FOUND:
            self.module.fail_json(
                msg='clc-python-sdk required for this module')
        if not REQUESTS_FOUND:
            self.module.fail_json(
                msg='requests library is required for this module')
        if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'):
            self.module.fail_json(
                msg='requests library version should be >= 2.5.0')
        self._set_user_agent(self.clc)

    @staticmethod
    def _define_module_argument_spec():
        """
        Define the argument spec for the ansible module
        :return: argument spec dictionary
        """
        argument_spec = dict(
            name=dict(default=None),
            id=dict(default=None),
            alias=dict(required=True, default=None),
            alert_recipients=dict(type='list', default=None),
            metric=dict(
                choices=[
                    'cpu',
                    'memory',
                    'disk'],
                default=None),
            duration=dict(type='str', default=None),
            threshold=dict(type='int', default=None),
            state=dict(default='present', choices=['present', 'absent'])
        )
        mutually_exclusive = [
            ['name', 'id']
        ]
        return {'argument_spec': argument_spec,
                'mutually_exclusive': mutually_exclusive}

    # Module Behavior Goodness
    def process_request(self):
        """
        Process the request - Main Code Path
        :return: Returns with either an exit_json or fail_json
        """
        p = self.module.params
        self._set_clc_credentials_from_env()
        # Cache all existing policies once; the ensure_* helpers look
        # names/ids up in this dict instead of re-querying the API.
        self.policy_dict = self._get_alert_policies(p['alias'])
        if p['state'] == 'present':
            changed, policy = self._ensure_alert_policy_is_present()
        else:
            changed, policy = self._ensure_alert_policy_is_absent()
        self.module.exit_json(changed=changed, policy=policy)

    def _set_clc_credentials_from_env(self):
        """
        Set the CLC Credentials on the sdk by reading environment variables
        :return: none
        """
        env = os.environ
        v2_api_token = env.get('CLC_V2_API_TOKEN', False)
        v2_api_username = env.get('CLC_V2_API_USERNAME', False)
        v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
        clc_alias = env.get('CLC_ACCT_ALIAS', False)
        api_url = env.get('CLC_V2_API_URL', False)
        if api_url:
            self.clc.defaults.ENDPOINT_URL_V2 = api_url
        # A pre-generated token plus account alias takes precedence over
        # username/password credentials.
        if v2_api_token and clc_alias:
            self.clc._LOGIN_TOKEN_V2 = v2_api_token
            self.clc._V2_ENABLED = True
            self.clc.ALIAS = clc_alias
        elif v2_api_username and v2_api_passwd:
            self.clc.v2.SetCredentials(
                api_username=v2_api_username,
                api_passwd=v2_api_passwd)
        else:
            return self.module.fail_json(
                msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
                    "environment variables")

    def _ensure_alert_policy_is_present(self):
        """
        Ensures that the alert policy is present
        :return: (changed, policy)
                 changed: A flag representing if anything is modified
                 policy: the created/updated alert policy
        """
        changed = False
        p = self.module.params
        policy_name = p.get('name')
        if not policy_name:
            # Fixed grammar of the user-facing message
            # (was: 'Policy name is a required').
            self.module.fail_json(msg='Policy name is required')
        policy = self._alert_policy_exists(policy_name)
        if not policy:
            changed = True
            policy = None
            if not self.module.check_mode:
                policy = self._create_alert_policy()
        else:
            changed_u, policy = self._ensure_alert_policy_is_updated(policy)
            if changed_u:
                changed = True
        return changed, policy

    def _ensure_alert_policy_is_absent(self):
        """
        Ensures that the alert policy is absent
        :return: (changed, None)
                 changed: A flag representing if anything is modified
        """
        changed = False
        p = self.module.params
        alert_policy_id = p.get('id')
        alert_policy_name = p.get('name')
        alias = p.get('alias')
        if not alert_policy_id and not alert_policy_name:
            self.module.fail_json(
                msg='Either alert policy id or policy name is required')
        if not alert_policy_id and alert_policy_name:
            alert_policy_id = self._get_alert_policy_id(
                self.module,
                alert_policy_name)
        # Only delete when the policy actually exists for this account.
        if alert_policy_id and alert_policy_id in self.policy_dict:
            changed = True
            if not self.module.check_mode:
                self._delete_alert_policy(alias, alert_policy_id)
        return changed, None

    def _ensure_alert_policy_is_updated(self, alert_policy):
        """
        Ensures the alert policy is updated if anything is changed in the alert policy configuration
        :param alert_policy: the target alert policy
        :return: (changed, policy)
                 changed: A flag representing if anything is modified
                 policy: the updated the alert policy
        """
        changed = False
        p = self.module.params
        alert_policy_id = alert_policy.get('id')
        email_list = p.get('alert_recipients')
        metric = p.get('metric')
        duration = p.get('duration')
        threshold = p.get('threshold')
        policy = alert_policy
        # Only the first trigger/action is compared -- that is all this
        # module ever creates (see _create_alert_policy).
        if (metric and metric != str(alert_policy.get('triggers')[0].get('metric'))) or \
            (duration and duration != str(alert_policy.get('triggers')[0].get('duration'))) or \
                (threshold and float(threshold) != float(alert_policy.get('triggers')[0].get('threshold'))):
            changed = True
        elif email_list:
            t_email_list = list(
                alert_policy.get('actions')[0].get('settings').get('recipients'))
            # Recipient order does not matter; compare as sets.
            if set(email_list) != set(t_email_list):
                changed = True
        if changed and not self.module.check_mode:
            policy = self._update_alert_policy(alert_policy_id)
        return changed, policy

    def _get_alert_policies(self, alias):
        """
        Get the alert policies for account alias by calling the CLC API.
        :param alias: the account alias
        :return: dict mapping policy id -> policy for the account alias
        """
        response = {}
        policies = self.clc.v2.API.Call('GET',
                                        '/v2/alertPolicies/%s'
                                        % alias)
        # Guard against a response without 'items' (would otherwise raise
        # TypeError when iterating None).
        for policy in policies.get('items') or []:
            response[policy.get('id')] = policy
        return response

    def _create_alert_policy(self):
        """
        Create an alert Policy using the CLC API.
        :return: response dictionary from the CLC API.
        """
        p = self.module.params
        alias = p['alias']
        email_list = p['alert_recipients']
        metric = p['metric']
        duration = p['duration']
        threshold = p['threshold']
        policy_name = p['name']
        arguments = json.dumps(
            {
                'name': policy_name,
                'actions': [{
                    'action': 'email',
                    'settings': {
                        'recipients': email_list
                    }
                }],
                'triggers': [{
                    'metric': metric,
                    'duration': duration,
                    'threshold': threshold
                }]
            }
        )
        try:
            result = self.clc.v2.API.Call(
                'POST',
                '/v2/alertPolicies/%s' % alias,
                arguments)
        except APIFailedResponse as e:
            return self.module.fail_json(
                msg='Unable to create alert policy "{0}". {1}'.format(
                    policy_name, str(e.response_text)))
        return result

    def _update_alert_policy(self, alert_policy_id):
        """
        Update alert policy using the CLC API.
        :param alert_policy_id: The clc alert policy id
        :return: response dictionary from the CLC API.
        """
        p = self.module.params
        alias = p['alias']
        email_list = p['alert_recipients']
        metric = p['metric']
        duration = p['duration']
        threshold = p['threshold']
        policy_name = p['name']
        arguments = json.dumps(
            {
                'name': policy_name,
                'actions': [{
                    'action': 'email',
                    'settings': {
                        'recipients': email_list
                    }
                }],
                'triggers': [{
                    'metric': metric,
                    'duration': duration,
                    'threshold': threshold
                }]
            }
        )
        try:
            result = self.clc.v2.API.Call(
                'PUT', '/v2/alertPolicies/%s/%s' %
                (alias, alert_policy_id), arguments)
        except APIFailedResponse as e:
            return self.module.fail_json(
                msg='Unable to update alert policy "{0}". {1}'.format(
                    policy_name, str(e.response_text)))
        return result

    def _delete_alert_policy(self, alias, policy_id):
        """
        Delete an alert policy using the CLC API.
        :param alias : the account alias
        :param policy_id: the alert policy id
        :return: response dictionary from the CLC API.
        """
        try:
            result = self.clc.v2.API.Call(
                'DELETE', '/v2/alertPolicies/%s/%s' %
                (alias, policy_id), None)
        except APIFailedResponse as e:
            return self.module.fail_json(
                msg='Unable to delete alert policy id "{0}". {1}'.format(
                    policy_id, str(e.response_text)))
        return result

    def _alert_policy_exists(self, policy_name):
        """
        Check to see if an alert policy exists
        :param policy_name: name of the alert policy
        :return: the policy dict if a policy with that name exists,
                 otherwise False (docstring previously claimed a boolean)
        """
        result = False
        for policy_id in self.policy_dict:
            if self.policy_dict.get(policy_id).get('name') == policy_name:
                result = self.policy_dict.get(policy_id)
        return result

    def _get_alert_policy_id(self, module, alert_policy_name):
        """
        retrieves the alert policy id of the account based on the name of the policy
        :param module: the AnsibleModule object
        :param alert_policy_name: the alert policy name
        :return: alert_policy_id: The alert policy id
        """
        alert_policy_id = None
        for policy_id in self.policy_dict:
            if self.policy_dict.get(policy_id).get('name') == alert_policy_name:
                if not alert_policy_id:
                    alert_policy_id = policy_id
                else:
                    # Names are not unique server-side; refuse to guess.
                    return module.fail_json(
                        msg='multiple alert policies were found with policy name : %s' % alert_policy_name)
        return alert_policy_id

    @staticmethod
    def _set_user_agent(clc):
        # Tag outgoing SDK requests so CLC can identify Ansible traffic.
        if hasattr(clc, 'SetRequestsSession'):
            agent_string = "ClcAnsibleModule/" + __version__
            ses = requests.Session()
            ses.headers.update({"Api-Client": agent_string})
            ses.headers['User-Agent'] += " " + agent_string
            clc.SetRequestsSession(ses)
def main():
    """
    Module entry point: build the AnsibleModule and process the request.
    :return: none
    """
    module = AnsibleModule(
        supports_check_mode=True,
        **ClcAlertPolicy._define_module_argument_spec())
    ClcAlertPolicy(module).process_request()


from ansible.module_utils.basic import *  # pylint: disable=W0614

if __name__ == '__main__':
    main()
| gpl-3.0 |
hut8labs/diffscuss | diffscuss/support/tests/test_editor.py | 2 | 13301 | """
Test cases for the editor module.
"""
from functools import wraps
from nose.tools import eq_
from diffscuss.support import editor
class patch(object):
    """
    Quick and dirty attribute patching.

    Works both as a decorator and as a context manager: `obj.attr` is set
    to `new` for the duration, then restored (or removed, if it did not
    exist before). Nested patches of the same attribute stack correctly.
    """
    # Sentinel recorded when the attribute was absent before patching.
    PATCH_REMOVE = object()

    def __init__(self, obj, attr, new):
        self.obj = obj
        self.attr = attr
        self.new = new
        self.patch_attr = '_patched_' + attr

    def _patch(self):
        """
        Sets `obj.attr` to `new`, saving the original value of `obj.attr` (if
        there was one) for later unpatching.
        """
        if not hasattr(self.obj, self.patch_attr):
            setattr(self.obj, self.patch_attr, [])
        previous = getattr(self.obj, self.attr, self.PATCH_REMOVE)
        getattr(self.obj, self.patch_attr).append(previous)
        setattr(self.obj, self.attr, self.new)

    def _unpatch(self):
        """
        Unsets `obj.attr`, restoring its original value if there was one.
        """
        assert hasattr(self.obj, self.patch_attr)
        stack = getattr(self.obj, self.patch_attr)
        previous = stack.pop()
        if previous is self.PATCH_REMOVE:
            delattr(self.obj, self.attr)
        else:
            setattr(self.obj, self.attr, previous)
        # Drop the bookkeeping attribute once the last patch is undone.
        if not stack:
            delattr(self.obj, self.patch_attr)

    def __call__(self, func):
        """
        A decorator that patches `obj.attr` to `new` within the decorated
        function.
        """
        @wraps(func)
        def _wrapped(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return _wrapped

    def __enter__(self):
        self._patch()

    def __exit__(self, exc_type, value, traceback):
        self._unpatch()
# Module-wide patch replacing editor.config() with deterministic test
# defaults; applied in setup_module and reverted in teardown_module.
config_patch = patch(editor, 'config',
                     lambda: dict(author='Test', email='test@example.com'))
def setup_module():
    # Patch in a config() function with test defaults.
    config_patch._patch()
def teardown_module():
    # Restore the real editor.config() after this module's tests finish.
    config_patch._unpatch()
class BufferWrapper(list):
    """
    Adapts a Python list to the Vim buffer interface.

    Vim buffers expose ``append(obj, index=None)`` where ``obj`` may be a
    single line or an iterable of lines, and ``index`` (when given) is the
    list position at which the lines are inserted, in order.
    """
    def append(self, obj, index=None):
        """Append (or insert at *index*) one line or an iterable of lines."""
        # A single line is wrapped into a one-element list. The explicit
        # str check keeps whole strings as one line (py2's hasattr check
        # alone no longer suffices on py3, where str has __iter__).
        if isinstance(obj, str) or not hasattr(obj, '__iter__'):
            obj = [obj]
        if index is None:
            for item in obj:
                # Bug fix: append each item, not the wrapping list itself
                # (was `list.append(self, obj)`).
                list.append(self, item)
        else:
            for item in obj:
                list.insert(self, index, item)
                index += 1
TEST_LINE_PROPERTIES = [
('@@ -0,0 +1,2 @@',
dict(is_diff_meta=True, is_diff_range=True,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('diff --git a/some/file',
dict(is_diff_meta=True, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('--- a/some/file',
dict(is_diff_meta=True, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('+++ a/some/file',
dict(is_diff_meta=True, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('index rev1...rev2 100644',
dict(is_diff_meta=True, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('-diff line',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('+diff line',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
(' diff line',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=False, is_diffscuss=False, depth=0)),
('#* Header',
dict(is_diff_meta=False, is_diff_range=False,
is_header=True, is_body=False, is_diffscuss=True, depth=1)),
('#- Body',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=True, is_diffscuss=True, depth=1)),
('#***** Deep header',
dict(is_diff_meta=False, is_diff_range=False,
is_header=True, is_body=False, is_diffscuss=True, depth=5)),
('#----- Deep body',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=True, is_diffscuss=True, depth=5)),
('#*- Strange header',
dict(is_diff_meta=False, is_diff_range=False,
is_header=True, is_body=False, is_diffscuss=True, depth=1)),
('#-* Strange body',
dict(is_diff_meta=False, is_diff_range=False,
is_header=False, is_body=True, is_diffscuss=True, depth=1))
]
def test_line_properties():
    # Nose test generator: one sub-test per (line, expected-attrs) pair
    # from the TEST_LINE_PROPERTIES table above.
    for line, expected_attrs in TEST_LINE_PROPERTIES:
        yield _check_line_properties, line, expected_attrs
def _check_line_properties(line, expected_attrs):
    # Classify the raw line and verify each expected attribute
    # (is_diff_meta, is_header, depth, ...) matches.
    props = editor.LineProperties(line)
    for attr, value in expected_attrs.iteritems():
        eq_(value, getattr(props, attr))
TEST_BUFFER_NONE = """
diff --git a/some/file b/some/file
index rev1..rev2 100644
--- a/some/file
+++ b/some/file
@@ -1,1 +1,2 @@
+diff1
diff2
diff3
""".strip().split('\n')
TEST_BUFFER_FILE = """
#*
#* author: Test
#* email: test@example.com
#* date: 2013-01-01T00:00:00-0500
#*
#- This is a test comment.
#-
#**
#** author: Test
#** email: test@example.com
#** date: 2013-01-01T00:01:00-0500
#**
#-- This is a test reply.
#--
diff --git a/some/file b/some/file
index rev1..rev2 100644
--- a/some/file
+++ b/some/file
@@ -1,1 +1,2 @@
+diff1
diff2
diff3
""".strip().split('\n')
TEST_BUFFER_BODY = """
diff --git a/some/file b/some/file
index rev1..rev2 100644
--- a/some/file
+++ b/some/file
@@ -1,1 +1,2 @@
+diff1
#*
#* author: Test
#* email: test@example.com
#* date: 2013-01-01T00:00:00-0500
#*
#- This is a test comment.
#-
#**
#** author: Test
#** email: test@example.com
#** date: 2013-01-01T00:01:00-0500
#**
#-- This is a test reply.
#--
diff2
diff3
""".strip().split('\n')
TEST_BUFFER_END = """
diff --git a/some/file b/some/file
index rev1..rev2 100644
--- a/some/file
+++ b/some/file
@@ -1,1 +1,2 @@
+diff1
diff2
diff3
#*
#* author: Test
#* email: test@example.com
#* date: 2013-01-01T00:00:00-0500
#*
#- This is a test comment.
#-
""".strip().split('\n')
def test_find_header_start():
    # No diffscuss content: the cursor position is returned unchanged.
    for i in range(1, len(TEST_BUFFER_NONE) + 1):
        result = editor.find_header_start(TEST_BUFFER_NONE, (i, i))
        yield eq_, (i, i), result
    # Thread at the top of the file: positions inside a comment resolve to
    # the first line of that comment's header (1-indexed buffer lines).
    for i in range(1, len(TEST_BUFFER_FILE) + 1):
        result = editor.find_header_start(TEST_BUFFER_FILE, (i, 1))
        if i <= TEST_BUFFER_FILE.index('#**'):
            yield eq_, (1, 1), result
        elif i <= TEST_BUFFER_FILE.index('diff --git a/some/file b/some/file'):
            yield eq_, (7, 1), result
        else:
            yield eq_, (i, 1), result
    # Thread embedded in the middle of the diff body.
    for i in range(1, len(TEST_BUFFER_BODY) + 1):
        result = editor.find_header_start(TEST_BUFFER_BODY, (i, 1))
        if i <= TEST_BUFFER_BODY.index('#*'):
            yield eq_, (i, 1), result
        elif i <= TEST_BUFFER_BODY.index('#**'):
            yield eq_, (6, 1), result
        elif i <= TEST_BUFFER_BODY.index(' diff2'):
            yield eq_, (13, 1), result
        else:
            yield eq_, (i, 1), result
def test_find_body_end():
    # Buffers with no thread (or a thread terminating the buffer) leave
    # the position unchanged.
    for i in range(1, len(TEST_BUFFER_NONE) + 1):
        result = editor.find_body_end(TEST_BUFFER_NONE, (i, i))
        yield eq_, (i, i), result
    for i in range(1, len(TEST_BUFFER_END) + 1):
        result = editor.find_body_end(TEST_BUFFER_END, (i, i))
        yield eq_, (i, i), result
    # Positions inside a comment resolve to the last line of its body.
    for i in range(1, len(TEST_BUFFER_FILE) + 1):
        result = editor.find_body_end(TEST_BUFFER_FILE, (i, 1))
        if i <= TEST_BUFFER_FILE.index('#**'):
            yield eq_, (7, 1), result
        elif i <= TEST_BUFFER_FILE.index('diff --git a/some/file b/some/file'):
            yield eq_, (14, 1), result
        else:
            yield eq_, (i, 1), result
    for i in range(1, len(TEST_BUFFER_BODY) + 1):
        result = editor.find_body_end(TEST_BUFFER_BODY, (i, 1))
        if i <= TEST_BUFFER_BODY.index('#*'):
            yield eq_, (i, 1), result
        elif i <= TEST_BUFFER_BODY.index('#**'):
            yield eq_, (13, 1), result
        elif i <= TEST_BUFFER_BODY.index(' diff2'):
            yield eq_, (20, 1), result
        else:
            yield eq_, (i, 1), result
def test_find_subthread_end():
    # Positions outside any thread are returned unchanged.
    for i in range(1, len(TEST_BUFFER_NONE) + 1):
        result = editor.find_subthread_end(TEST_BUFFER_NONE, (i, i))
        yield eq_, (i, i), result
    for i in range(1, len(TEST_BUFFER_END) + 1):
        result = editor.find_subthread_end(TEST_BUFFER_END, (i, i))
        yield eq_, (i, i), result
    # Inside a thread, the subthread end is the last line of the deepest
    # reply below the position (line 14 in FILE, line 20 in BODY).
    for i in range(1, len(TEST_BUFFER_FILE) + 1):
        result = editor.find_subthread_end(TEST_BUFFER_FILE, (i, 1))
        if i <= TEST_BUFFER_FILE.index('#**'):
            yield eq_, (14, 1), result
        elif i <= TEST_BUFFER_FILE.index('diff --git a/some/file b/some/file'):
            yield eq_, (14, 1), result
        else:
            yield eq_, (i, 1), result
    for i in range(1, len(TEST_BUFFER_BODY) + 1):
        result = editor.find_subthread_end(TEST_BUFFER_BODY, (i, 1))
        if i <= TEST_BUFFER_BODY.index('#*'):
            yield eq_, (i, 1), result
        elif i <= TEST_BUFFER_BODY.index('#**'):
            yield eq_, (20, 1), result
        elif i <= TEST_BUFFER_BODY.index(' diff2'):
            yield eq_, (20, 1), result
        else:
            yield eq_, (i, 1), result
def test_find_thread_end():
    # No thread: position unchanged.
    for i in range(1, len(TEST_BUFFER_NONE) + 1):
        result = editor.find_thread_end(TEST_BUFFER_NONE, (i, i))
        yield eq_, (i, i), result
    # Inside a thread, resolve to the final line of the whole thread
    # (line 14 in FILE, line 20 in BODY).
    for i in range(1, len(TEST_BUFFER_FILE) + 1):
        result = editor.find_thread_end(TEST_BUFFER_FILE, (i, 1))
        if i <= TEST_BUFFER_FILE.index('#**'):
            yield eq_, (14, 1), result
        elif i <= TEST_BUFFER_FILE.index('diff --git a/some/file b/some/file'):
            yield eq_, (14, 1), result
        else:
            yield eq_, (i, 1), result
    for i in range(1, len(TEST_BUFFER_BODY) + 1):
        result = editor.find_thread_end(TEST_BUFFER_BODY, (i, 1))
        if i <= TEST_BUFFER_BODY.index('#*'):
            yield eq_, (i, 1), result
        elif i <= TEST_BUFFER_BODY.index('#**'):
            yield eq_, (20, 1), result
        elif i <= TEST_BUFFER_BODY.index(' diff2'):
            yield eq_, (20, 1), result
        else:
            yield eq_, (i, 1), result
def test_find_range():
    # Positions above the hunk header snap down to the @@ line; positions
    # at or below it are unchanged.
    for buf in [TEST_BUFFER_NONE, TEST_BUFFER_FILE, TEST_BUFFER_BODY]:
        for i in range(1, len(buf) + 1):
            result = editor.find_range(buf, (i, i))
            if i <= buf.index('@@ -1,1 +1,2 @@'):
                yield eq_, (buf.index('@@ -1,1 +1,2 @@') + 1, i), result
            else:
                yield eq_, (i, i), result
@patch(editor.time, 'strftime', lambda arg: '2013-01-01T01:01:01-0500')
def test_make_comment():
    # Depth controls the number of '*'/'-' markers; depth 0 is clamped to
    # 1, and a missing config falls back to Unknown author/email.
    eq_(['#*', '#* author: Test', '#* email: test@example.com',
         '#* date: 2013-01-01T01:01:01-0500', '#*', '#- ', '#-'],
        editor.make_comment(depth=1))
    eq_(['#**', '#** author: Test', '#** email: test@example.com',
         '#** date: 2013-01-01T01:01:01-0500', '#**', '#-- ', '#--'],
        editor.make_comment(depth=2))
    eq_(['#*', '#* author: Test', '#* email: test@example.com',
         '#* date: 2013-01-01T01:01:01-0500', '#*', '#- ', '#-'],
        editor.make_comment(depth=0))
    with patch(editor, 'config', lambda: dict()):
        eq_(['#*', '#* author: Unknown', '#* email: Unknown',
             '#* date: 2013-01-01T01:01:01-0500', '#*', '#- ', '#-'],
            editor.make_comment(depth=1))
@patch(editor.time, 'strftime', lambda arg: '2013-01-01T00:00:00-0500')
def test_inject_comment():
    # Injecting at line 6 starts a new thread after '+diff1'; the returned
    # cursor (12, 3) sits on the empty '#- ' body line ready for typing.
    new_buf = BufferWrapper(list(TEST_BUFFER_NONE))
    result = editor.inject_comment(new_buf, (6, 1))
    eq_((12, 3), result)
    eq_(['diff --git a/some/file b/some/file',
         'index rev1..rev2 100644',
         '--- a/some/file',
         '+++ b/some/file',
         '@@ -1,1 +1,2 @@',
         '+diff1',
         '#*',
         '#* author: Test',
         '#* email: test@example.com',
         '#* date: 2013-01-01T00:00:00-0500',
         '#*',
         '#- ',
         '#-',
         ' diff2',
         ' diff3'], new_buf)
@patch(editor.time, 'strftime', lambda arg: '2013-01-01T00:00:00-0500')
def test_insert_comment():
    # Inserting from inside an existing thread appends a new top-level
    # comment after the whole thread; cursor (26, 3) lands on its body.
    new_buf = BufferWrapper(list(TEST_BUFFER_BODY))
    result = editor.insert_comment(new_buf, (7, 1))
    eq_((26, 3), result)
    eq_(['diff --git a/some/file b/some/file',
         'index rev1..rev2 100644',
         '--- a/some/file',
         '+++ b/some/file',
         '@@ -1,1 +1,2 @@',
         '+diff1',
         '#*',
         '#* author: Test',
         '#* email: test@example.com',
         '#* date: 2013-01-01T00:00:00-0500',
         '#*',
         '#- This is a test comment.',
         '#-',
         '#**',
         '#** author: Test',
         '#** email: test@example.com',
         '#** date: 2013-01-01T00:01:00-0500',
         '#**',
         '#-- This is a test reply.',
         '#--',
         '#*',
         '#* author: Test',
         '#* email: test@example.com',
         '#* date: 2013-01-01T00:00:00-0500',
         '#*',
         '#- ',
         '#-',
         ' diff2',
         ' diff3'], new_buf)
| mit |
miniconfig/home-assistant | homeassistant/components/group.py | 5 | 15082 | """
Provides functionality to group entities.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/group/
"""
import asyncio
import logging
import os
import voluptuous as vol
from homeassistant import config as conf_util, core as ha
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_ICON, CONF_NAME, STATE_CLOSED, STATE_HOME,
STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_LOCKED,
STATE_UNLOCKED, STATE_UNKNOWN, ATTR_ASSUMED_STATE, SERVICE_RELOAD)
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity, async_generate_entity_id
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_change
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async import run_coroutine_threadsafe
DOMAIN = 'group'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
CONF_ENTITIES = 'entities'
CONF_VIEW = 'view'
CONF_CONTROL = 'control'
ATTR_AUTO = 'auto'
ATTR_ORDER = 'order'
ATTR_VIEW = 'view'
ATTR_VISIBLE = 'visible'
ATTR_CONTROL = 'control'
SERVICE_SET_VISIBILITY = 'set_visibility'
SET_VISIBILITY_SERVICE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_VISIBLE): cv.boolean
})
RELOAD_SERVICE_SCHEMA = vol.Schema({})
_LOGGER = logging.getLogger(__name__)
def _conf_preprocess(value):
    """Preprocess alternative configuration formats."""
    # A bare list of entity ids is shorthand for {entities: [...]}.
    return value if isinstance(value, dict) else {CONF_ENTITIES: value}
GROUP_SCHEMA = vol.Schema({
vol.Optional(CONF_ENTITIES): vol.Any(cv.entity_ids, None),
CONF_VIEW: cv.boolean,
CONF_NAME: cv.string,
CONF_ICON: cv.icon,
CONF_CONTROL: cv.string,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: cv.ordered_dict(vol.All(_conf_preprocess, GROUP_SCHEMA))
}, extra=vol.ALLOW_EXTRA)
# List of ON/OFF state tuples for groupable states
_GROUP_TYPES = [(STATE_ON, STATE_OFF), (STATE_HOME, STATE_NOT_HOME),
(STATE_OPEN, STATE_CLOSED), (STATE_LOCKED, STATE_UNLOCKED)]
def _get_group_on_off(state):
    """Determine the group on/off states based on a state."""
    for on_state, off_state in _GROUP_TYPES:
        if state in (on_state, off_state):
            return on_state, off_state
    # Unrecognised state: the group type cannot be determined.
    return None, None
def is_on(hass, entity_id):
    """Test if the group state is in its ON-state."""
    state = hass.states.get(entity_id)
    if not state:
        return False
    group_on, _ = _get_group_on_off(state.state)
    # Only a recognised group type can report being "on".
    return group_on is not None and state.state == group_on
def reload(hass):
    """Reload the group config (thread-safe wrapper around async_reload)."""
    hass.add_job(async_reload, hass)
@asyncio.coroutine
def async_reload(hass):
    """Reload the group config by invoking the group.reload service.

    This method must be run in the event loop.
    """
    yield from hass.services.async_call(DOMAIN, SERVICE_RELOAD)
def set_visibility(hass, entity_id=None, visible=True):
    """Hide or show a group via the set_visibility service."""
    hass.services.call(DOMAIN, SERVICE_SET_VISIBILITY,
                       {ATTR_ENTITY_ID: entity_id, ATTR_VISIBLE: visible})
def expand_entity_ids(hass, entity_ids):
    """Return entity_ids with group entity ids replaced by their members.

    Recurses into nested groups and de-duplicates while preserving order.
    Async friendly.
    """
    found_ids = []
    for entity_id in entity_ids:
        # Silently skip non-string entries.
        if not isinstance(entity_id, str):
            continue
        entity_id = entity_id.lower()
        try:
            # If entity_id points at a group, expand it
            domain, _ = ha.split_entity_id(entity_id)
            if domain == DOMAIN:
                found_ids.extend(
                    ent_id for ent_id
                    in expand_entity_ids(hass, get_entity_ids(hass, entity_id))
                    if ent_id not in found_ids)
            else:
                if entity_id not in found_ids:
                    found_ids.append(entity_id)
        except AttributeError:
            # Raised by split_entity_id if entity_id is not a string
            pass
    return found_ids
def get_entity_ids(hass, entity_id, domain_filter=None):
    """Get members of this group.

    Optionally restrict the result to a single domain.
    Async friendly.
    """
    group_state = hass.states.get(entity_id)
    if not group_state or ATTR_ENTITY_ID not in group_state.attributes:
        # Unknown entity, or not a group: no members.
        return []
    members = group_state.attributes[ATTR_ENTITY_ID]
    if domain_filter:
        prefix = domain_filter.lower() + '.'
        return [member for member in members if member.startswith(prefix)]
    return members
@asyncio.coroutine
def async_setup(hass, config):
    """Set up all groups defined in the configuration.

    Also registers the group.reload and group.set_visibility services.
    """
    component = EntityComponent(_LOGGER, DOMAIN, hass)
    yield from _async_process_config(hass, config, component)
    # Service descriptions are loaded in an executor to avoid blocking
    # the event loop on file I/O.
    descriptions = yield from hass.loop.run_in_executor(
        None, conf_util.load_yaml_config_file, os.path.join(
            os.path.dirname(__file__), 'services.yaml')
    )
    @asyncio.coroutine
    def reload_service_handler(service_call):
        """Remove all groups and load new ones from config."""
        conf = yield from component.async_prepare_reload()
        if conf is None:
            return
        yield from _async_process_config(hass, conf, component)
    @asyncio.coroutine
    def visibility_service_handler(service):
        """Change visibility of a group."""
        visible = service.data.get(ATTR_VISIBLE)
        # expand_group=False: apply to the group entities themselves,
        # not their members.
        tasks = [group.async_set_visible(visible) for group
                 in component.async_extract_from_service(service,
                                                         expand_group=False)]
        yield from asyncio.wait(tasks, loop=hass.loop)
    hass.services.async_register(
        DOMAIN, SERVICE_SET_VISIBILITY, visibility_service_handler,
        descriptions[DOMAIN][SERVICE_SET_VISIBILITY],
        schema=SET_VISIBILITY_SERVICE_SCHEMA)
    hass.services.async_register(
        DOMAIN, SERVICE_RELOAD, reload_service_handler,
        descriptions[DOMAIN][SERVICE_RELOAD], schema=RELOAD_SERVICE_SCHEMA)
    return True
@asyncio.coroutine
def _async_process_config(hass, config, component):
    """Process group configuration and add the resulting Group entities."""
    groups = []
    for object_id, conf in config.get(DOMAIN, {}).items():
        name = conf.get(CONF_NAME, object_id)
        entity_ids = conf.get(CONF_ENTITIES) or []
        icon = conf.get(CONF_ICON)
        view = conf.get(CONF_VIEW)
        control = conf.get(CONF_CONTROL)
        # Don't create tasks and await them all. The order is important as
        # groups get a number based on creation order.
        group = yield from Group.async_create_group(
            hass, name, entity_ids, icon=icon, view=view,
            control=control, object_id=object_id)
        groups.append(group)
    if groups:
        yield from component.async_add_entities(groups)
class Group(Entity):
    """Track a group of entity ids."""
    def __init__(self, hass, name, order=None, user_defined=True, icon=None,
                 view=False, control=None):
        """Initialize a group.

        This Object has factory function for creation.
        """
        self.hass = hass
        self._name = name
        self._state = STATE_UNKNOWN
        self._user_defined = user_defined
        self._order = order
        self._icon = icon
        self._view = view
        # Entity ids this group tracks.
        self.tracking = []
        # ON/OFF state pair for the detected group type; determined lazily
        # from the first member state that matches a known pair.
        self.group_on = None
        self.group_off = None
        self._assumed_state = False
        # Unsubscribe callback for the state-change listener; None while
        # not tracking (doubles as the "removed" marker).
        self._async_unsub_state_changed = None
        self._visible = True
        self._control = control
    @staticmethod
    def create_group(hass, name, entity_ids=None, user_defined=True,
                     icon=None, view=False, control=None, object_id=None):
        """Initialize a group (sync wrapper around async_create_group)."""
        return run_coroutine_threadsafe(
            Group.async_create_group(hass, name, entity_ids, user_defined,
                                     icon, view, control, object_id),
            hass.loop).result()
    @staticmethod
    @asyncio.coroutine
    def async_create_group(hass, name, entity_ids=None, user_defined=True,
                           icon=None, view=False, control=None,
                           object_id=None):
        """Initialize a group.

        This method must be run in the event loop.
        """
        group = Group(
            hass, name,
            # Order = number of existing groups, so groups sort by
            # creation order.
            order=len(hass.states.async_entity_ids(DOMAIN)),
            user_defined=user_defined, icon=icon, view=view,
            control=control)
        group.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, object_id or name, hass=hass)
        # run other async stuff
        if entity_ids is not None:
            yield from group.async_update_tracked_entity_ids(entity_ids)
        else:
            yield from group.async_update_ha_state(True)
        return group
    @property
    def should_poll(self):
        """No need to poll because groups will update themselves."""
        return False
    @property
    def name(self):
        """Return the name of the group."""
        return self._name
    @property
    def state(self):
        """Return the state of the group."""
        return self._state
    @property
    def icon(self):
        """Return the icon of the group."""
        return self._icon
    @asyncio.coroutine
    def async_set_visible(self, visible):
        """Change visibility of the group."""
        if self._visible != visible:
            self._visible = visible
            yield from self.async_update_ha_state()
    @property
    def hidden(self):
        """If group should be hidden or not."""
        # Visibility from set_visibility service overrides
        if self._visible:
            # Auto groups and views are hidden from the default UI card.
            return not self._user_defined or self._view
        return True
    @property
    def state_attributes(self):
        """Return the state attributes for the group."""
        data = {
            ATTR_ENTITY_ID: self.tracking,
            ATTR_ORDER: self._order,
        }
        if not self._user_defined:
            data[ATTR_AUTO] = True
        if self._view:
            data[ATTR_VIEW] = True
        if self._control:
            data[ATTR_CONTROL] = self._control
        return data
    @property
    def assumed_state(self):
        """Test if any member has an assumed state."""
        return self._assumed_state
    def update_tracked_entity_ids(self, entity_ids):
        """Update the member entity IDs (sync wrapper)."""
        run_coroutine_threadsafe(
            self.async_update_tracked_entity_ids(entity_ids), self.hass.loop
        ).result()
    @asyncio.coroutine
    def async_update_tracked_entity_ids(self, entity_ids):
        """Update the member entity IDs.

        This method must be run in the event loop.
        """
        yield from self.async_stop()
        self.tracking = tuple(ent_id.lower() for ent_id in entity_ids)
        # Force re-detection of the group type for the new member set.
        self.group_on, self.group_off = None, None
        yield from self.async_update_ha_state(True)
        self.async_start()
    def start(self):
        """Start tracking members."""
        self.hass.add_job(self.async_start)
    @callback
    def async_start(self):
        """Start tracking members.

        This method must be run in the event loop.
        """
        if self._async_unsub_state_changed is None:
            self._async_unsub_state_changed = async_track_state_change(
                self.hass, self.tracking, self._async_state_changed_listener
            )
    def stop(self):
        """Unregister the group from Home Assistant (sync wrapper)."""
        run_coroutine_threadsafe(self.async_stop(), self.hass.loop).result()
    @asyncio.coroutine
    def async_stop(self):
        """Unregister the group from Home Assistant.

        This method must be run in the event loop.
        """
        yield from self.async_remove()
    @asyncio.coroutine
    def async_update(self):
        """Query all members and determine current group state."""
        # Reset first: if no member matches a known group type the state
        # stays unknown.
        self._state = STATE_UNKNOWN
        self._async_update_group_state()
    def async_remove(self):
        """Remove group from HASS.

        This method must be run in the event loop and returns a coroutine.
        """
        if self._async_unsub_state_changed:
            self._async_unsub_state_changed()
            self._async_unsub_state_changed = None
        return super().async_remove()
    @asyncio.coroutine
    def _async_state_changed_listener(self, entity_id, old_state, new_state):
        """Respond to a member state changing.

        This method must be run in the event loop.
        """
        # removed
        if self._async_unsub_state_changed is None:
            return
        self._async_update_group_state(new_state)
        yield from self.async_update_ha_state()
    @property
    def _tracking_states(self):
        """The states that the group is tracking."""
        states = []
        for entity_id in self.tracking:
            state = self.hass.states.get(entity_id)
            # Members with no recorded state are skipped.
            if state is not None:
                states.append(state)
        return states
    @callback
    def _async_update_group_state(self, tr_state=None):
        """Update group state.

        Optionally you can provide the only state changed since last update
        allowing this method to take shortcuts.

        This method must be run in the event loop.
        """
        # To store current states of group entities. Might not be needed.
        states = None
        gr_state = self._state
        gr_on = self.group_on
        gr_off = self.group_off
        # We have not determined type of group yet
        if gr_on is None:
            if tr_state is None:
                states = self._tracking_states
                # First member whose state matches a known on/off pair
                # decides the group type.
                for state in states:
                    gr_on, gr_off = \
                        _get_group_on_off(state.state)
                    if gr_on is not None:
                        break
            else:
                gr_on, gr_off = _get_group_on_off(tr_state.state)
            if gr_on is not None:
                self.group_on, self.group_off = gr_on, gr_off
        # We cannot determine state of the group
        if gr_on is None:
            return
        # Shortcut: when only tr_state changed and it stays within the
        # on/off pair, the full member scan below can often be skipped.
        if tr_state is None or ((gr_state == gr_on and
                                 tr_state.state == gr_off) or
                                tr_state.state not in (gr_on, gr_off)):
            if states is None:
                states = self._tracking_states
            # Group is ON if any member is ON.
            if any(state.state == gr_on for state in states):
                self._state = gr_on
            else:
                self._state = gr_off
        elif tr_state.state in (gr_on, gr_off):
            self._state = tr_state.state
        # NOTE: `and` binds tighter than `or` here -- a full re-scan of
        # assumed_state happens when tr_state is None, or when the group
        # was assumed and the changed member no longer is.
        if tr_state is None or self._assumed_state and \
                not tr_state.attributes.get(ATTR_ASSUMED_STATE):
            if states is None:
                states = self._tracking_states
            self._assumed_state = any(
                state.attributes.get(ATTR_ASSUMED_STATE) for state
                in states)
        elif tr_state.attributes.get(ATTR_ASSUMED_STATE):
            self._assumed_state = True
| mit |
Ayub-Khan/edx-platform | lms/djangoapps/course_wiki/tests/test_tab.py | 158 | 2454 | """
Tests for wiki views.
"""
from django.conf import settings
from django.test.client import RequestFactory
from courseware.tabs import get_course_tab_list
from student.tests.factories import AdminFactory, UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class WikiTabTestCase(ModuleStoreTestCase):
    """Visibility tests for the course 'Wiki' tab."""

    def setUp(self):
        super(WikiTabTestCase, self).setUp()
        self.course = CourseFactory.create()
        self.instructor = AdminFactory.create()
        self.user = UserFactory()

    def get_wiki_tab(self, user, course):
        """Return the 'Wiki' tab shown to `user` in `course`, or None."""
        request = RequestFactory().request()
        request.user = user
        matches = [tab for tab in get_course_tab_list(request, course)
                   if tab.name == 'Wiki']
        if len(matches) == 1:
            return matches[0]
        return None

    def test_wiki_enabled_and_public(self):
        """
        Test wiki tab when Enabled setting is True and the wiki is open to
        the public.
        """
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = True
        self.assertIsNotNone(self.get_wiki_tab(self.user, self.course))

    def test_wiki_enabled_and_not_public(self):
        """
        Test wiki when it is enabled but not open to the public
        """
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = False
        self.assertIsNone(self.get_wiki_tab(self.user, self.course))
        self.assertIsNotNone(self.get_wiki_tab(self.instructor, self.course))

    def test_wiki_enabled_false(self):
        """Test wiki tab when Enabled setting is False"""
        settings.WIKI_ENABLED = False
        self.assertIsNone(self.get_wiki_tab(self.user, self.course))
        self.assertIsNone(self.get_wiki_tab(self.instructor, self.course))

    def test_wiki_visibility(self):
        """Test toggling of visibility of wiki tab"""
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = True
        wiki_tab = self.get_wiki_tab(self.user, self.course)
        self.assertIsNotNone(wiki_tab)
        self.assertTrue(wiki_tab.is_hideable)
        wiki_tab.is_hidden = True
        self.assertTrue(wiki_tab['is_hidden'])
        wiki_tab['is_hidden'] = False
        self.assertFalse(wiki_tab.is_hidden)
| agpl-3.0 |
zzjkf2009/Midterm_Astar | opencv/samples/python/browse.py | 1 | 1508 | #!/usr/bin/env python
'''
browse.py
=========
Sample shows how to implement a simple hi resolution image navigation
Usage
-----
browse.py [image filename]
'''
# Python 2/3 compatibility
from __future__ import print_function
import sys
PY3 = sys.version_info[0] == 3
if PY3:
    # Python 3 removed xrange; alias it so the loops below run on both.
    xrange = range
import numpy as np
import cv2
# built-in modules
import sys
if __name__ == '__main__':
    print('This sample shows how to implement a simple hi resolution image navigation.')
    print('USAGE: browse.py [image filename]')
    print()
    if len(sys.argv) > 1:
        # Load the image named on the command line.
        fn = sys.argv[1]
        print('loading %s ...' % fn)
        img = cv2.imread(fn)
        if img is None:
            print('Failed to load fn:', fn)
            sys.exit(1)
    else:
        # No filename given: synthesize a 4096x4096 grayscale random walk.
        sz = 4096
        print('generating %dx%d procedural image ...' % (sz, sz))
        img = np.zeros((sz, sz), np.uint8)
        track = np.cumsum(np.random.rand(500000, 2)-0.5, axis=0)
        track = np.int32(track*10 + (sz/2, sz/2))
        cv2.polylines(img, [track], 0, 255, 1, cv2.LINE_AA)
    # Build a small preview by halving the image three times (1/8 scale).
    small = img
    for i in xrange(3):
        small = cv2.pyrDown(small)
    def onmouse(event, x, y, flags, param):
        # Map preview coordinates back to full-resolution coordinates.
        # NOTE(review): both x and y are scaled by the height ratio h/h1;
        # this is correct here because pyrDown halves both axes equally,
        # but it assumes a uniform scale factor — confirm if changed.
        h, _w = img.shape[:2]
        h1, _w1 = small.shape[:2]
        x, y = 1.0*x*h/h1, 1.0*y*h/h1
        # Show an 800x600 full-resolution crop centered on the cursor.
        zoom = cv2.getRectSubPix(img, (800, 600), (x+0.5, y+0.5))
        cv2.imshow('zoom', zoom)
    cv2.imshow('preview', small)
    cv2.setMouseCallback('preview', onmouse)
    cv2.waitKey()
    cv2.destroyAllWindows()
| mit |
KonradBreitsprecher/espresso | testsuite/layered.py | 1 | 1921 |
#
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
from __future__ import print_function
import unittest as ut
import espressomd
import numpy as np
class Layered(ut.TestCase):
    """Checks particle resorting in the layered cell system."""

    # Shared system instance; cheap to reuse because setUp clears particles.
    S = espressomd.System(box_l=[1.0, 1.0, 1.0])

    def setUp(self):
        self.S.part.clear()
        self.S.cell_system.set_layered()

    def test_resort(self):
        """Particles created on node 0 must survive a resort intact."""
        n_part = 2351
        # Add the particles on node 0, so that they have to be
        # resorted
        for i in range(n_part):
            self.S.part.add(id=i, pos=[0, 0, 0], type=1)
        # And now change their positions
        for i in range(n_part):
            # Fixed: dropped the redundant chained assignment
            # (`... = pos = ...`) that left an unused local behind.
            self.S.part[i].pos = np.random.random(3)
        # Distribute the particles on the nodes
        part_dist = self.S.cell_system.resort()
        # Check that we did not lose particles
        self.assertEqual(sum(part_dist), n_part)
        # Check that we can still access all the particles
        # This basically checks if part_node and local_particles
        # is still in a valid state after the particle exchange
        self.assertEqual(sum(self.S.part[:].type), n_part)
if __name__ == "__main__":
    # Print the compiled-in feature list before running the suite.
    print("Features: ", espressomd.features())
    ut.main()
| gpl-3.0 |
OpenLinkedSocialData/gmane1 | gmane-politics-organizations-metareciclagem/scripts/testTriplify.py | 4 | 1765 | import importlib, os
import multiprocessing as mp
from IPython.lib.deepreload import reload as dreload
import gmane as g, percolation as P
G=g
importlib.reload(g.listDataStructures)
importlib.reload(g.loadMessages)
importlib.reload(g.triplifyList)
importlib.reload(P.rdf)
importlib.reload(P.utils)
importlib.reload(g.utils)
dreload(g,exclude="pytz")
#lm=g.LoadMessages("gmane.ietf.rfc822",10,basedir="~/.gmane2/")
#ds=g.ListDataStructures(lm)
#
#dl=g.DownloadGmaneData(dpath)
#dl.downloadedStats() # might take a while
dpath='/disco/.gmane/'
dpath='/home/r/.gmane/'
dpath='/home/r/.gmane4/'
load_msgs=[]
data_structs=[]
scriptpath=os.path.realpath(__file__)
fpath="./publishing/"
umbrella_dir="gmane1/"
#for list_stat in dl.lists:
# list_id=list_stat[0]
#for list_id in ['gmane.comp.gcc.libstdc++.devel']:
#for list_id in ['gmane.comp.java.hadoop.hive.user']:
#for list_id in ['gmane.comp.web.egroupware.user', 'gmane.culture.language.basque.eibartarrak','gmane.org.operators.nznog', 'gmane.science.nmr.relax.scm',"gmane.linux.fbdev.devel",]:
for list_id in ['gmane.politics.organizations.metareciclagem', 'gmane.comp.gcc.libstdc++.devel', 'gmane.linux.audio.devel', 'gmane.linux.audio.users']:
# lm=g.LoadMessages(list_id,basedir=dpath,n_messages=20000)
# lm=g.LoadMessages(list_id,basedir=dpath,n_messages=200)
lm=g.LoadMessages(list_id,basedir=dpath)
ds=g.ListDataStructures(lm)
foo=G.triplifyList.makeRepo(ds,fpath,dpath+list_id,"Linked data of the email list with Gmane id: {}".format(list_id),scriptpath=scriptpath,umbrella_dir=umbrella_dir)
mm= ds.messages
ids=ds.message_ids
print("first: ", mm[ids[0]][2], "last:", mm[ids[-1]][2])
def hardClean(text):
    """Keep only alphanumeric characters and those listed in ``allowed``.

    NOTE(review): ``allowed`` is not defined anywhere in this script, so
    calling this function as-is raises NameError — confirm where
    ``allowed`` is supposed to come from.
    """
    return "".join(c for c in text if c.isalnum() or c in allowed)
| cc0-1.0 |
shadda/AutobahnPython | examples/wamp/rpc/simple/example1/server.py | 27 | 2408 | ###############################################################################
##
## Copyright 2011,2012 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.python import log
from twisted.internet import reactor, defer
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.websocket import listenWS
from autobahn.wamp import exportRpc, \
WampServerFactory, \
WampServerProtocol
def multiply(x, y):
    """
    A free standing function exposed over WAMP RPC by the server.

    Returns the product of x and y.
    """
    product = x * y
    return product
class RpcServer1Protocol(WampServerProtocol):
    """
    A minimalistic RPC server demonstrating the three ways of exposing
    callables over WAMP.
    """

    def onSessionOpen(self):
        ## When the WAMP session has been established, register callables
        ## to be remoted (made available for RPC):
        ## 1) a free-standing function under an explicit URI,
        ## 2) a plain method under an explicit URI,
        ## 3) all @exportRpc-decorated methods under a base URI.
        ##
        self.registerProcedureForRpc("http://example.com/simple/calc#mul", multiply)
        self.registerMethodForRpc("http://example.com/simple/calc#sub", self, RpcServer1Protocol.doSub)
        self.registerForRpc(self, "http://example.com/simple/calc#")

    @exportRpc("add")
    def doAdd(self, x, y):
        """
        A method that we remote by using the exportRpc decorator.
        """
        return x + y

    def doSub(self, x, y):
        """
        A method that we remote explicitly (see onSessionOpen).
        """
        return x - y
if __name__ == '__main__':
    # Pass 'debug' on the command line to get twisted + WAMP logging.
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        log.startLogging(sys.stdout)
        debug = True
    else:
        debug = False
    # WAMP-over-WebSocket RPC endpoint on port 9000.
    factory = WampServerFactory("ws://localhost:9000", debugWamp = debug)
    factory.protocol = RpcServer1Protocol
    factory.setProtocolOptions(allowHixie76 = True)
    listenWS(factory)
    # Static file server for the demo page on port 8080.
    webdir = File(".")
    web = Site(webdir)
    reactor.listenTCP(8080, web)
    reactor.run()
| apache-2.0 |
Jai-Chaudhary/termite-data-server | web2py/applications-original/admin/languages/af.py | 20 | 3519 | # coding: utf8
{
'!langcode!': 'af',
'!langname!': 'Afrikaanse',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s %%{row} deleted': '%s rows deleted',
'%s %%{row} updated': '%s rows updated',
'(requires internet access)': '(vereis internet toegang)',
'(something like "it-it")': '(iets soos "it-it")',
'@markmin\x01Searching: **%s** %%{file}': 'Soek: **%s** lêre',
'About': 'oor',
'About application': 'Oor program',
'Additional code for your application': 'Additionele kode vir u application',
'Admin language': 'Admin taal',
'Application name:': 'Program naam:',
'Change admin password': 'verander admin wagwoord',
'Check for upgrades': 'soek vir upgrades',
'Clean': 'maak skoon',
'Compile': 'kompileer',
'Controllers': 'Beheerders',
'Create': 'skep',
'Deploy': 'deploy',
'Deploy on Google App Engine': 'Stuur na Google App Engine toe',
'Edit': 'wysig',
'Edit application': 'Wysig program',
'Errors': 'foute',
'Help': 'hulp',
'Install': 'installeer',
'Installed applications': 'Geinstalleerde apps',
'Languages': 'Tale',
'License for': 'Lisensie vir',
'Logout': 'logout',
'Models': 'Modelle',
'Modules': 'Modules',
'New application wizard': 'Nuwe app wizard',
'New simple application': 'Nuwe eenvoudige app',
'Overwrite installed app': 'skryf oor geinstalleerde program',
'Pack all': 'pack alles',
'Plugins': 'Plugins',
'Powered by': 'Aangedryf deur',
'Site': 'site',
'Start wizard': 'start wizard',
'Static files': 'Static files',
'Sure you want to delete this object?': 'Is jy seker jy will hierde object verwyder?',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'There are no plugins': 'Daar is geen plugins',
'These files are served without processing, your images go here': 'Hierdie lêre is sonder veranderinge geserved, jou images gaan hier',
'To create a plugin, name a file/folder plugin_[name]': 'Om n plugin te skep, noem n lêer/gids plugin_[name]',
'Translation strings for the application': 'Vertaling woorde vir die program',
'Uninstall': 'verwyder',
'Upload & install packed application': 'Oplaai & install gepakte program',
'Upload a package:': 'Oplaai n package:',
'Use an url:': 'Gebruik n url:',
'Views': 'Views',
'administrative interface': 'administrative interface',
'and rename it:': 'en verander die naam:',
'collapse/expand all': 'collapse/expand all',
'controllers': 'beheerders',
'create file with filename:': 'skep lêer met naam:',
'created by': 'geskep deur',
'crontab': 'crontab',
'currently running': 'loop tans',
'database administration': 'database administration',
'direction: ltr': 'direction: ltr',
'download layouts': 'aflaai layouts',
'download plugins': 'aflaai plugins',
'exposes': 'exposes',
'extends': 'extends',
'filter': 'filter',
'includes': 'includes',
'languages': 'tale',
'loading...': 'laai...',
'models': 'modelle',
'modules': 'modules',
'plugins': 'plugins',
'shell': 'shell',
'static': 'static',
'test': 'toets',
'update all languages': 'update all languages',
'upload': 'oplaai',
'upload file:': 'oplaai lêer:',
'upload plugin file:': 'upload plugin lêer:',
'versioning': 'versioning',
'views': 'views',
'web2py Recent Tweets': 'web2py Onlangse Tweets',
}
| bsd-3-clause |
leeseulstack/openstack | neutron/tests/unit/cisco/l3/device_handling_test_support.py | 8 | 6536 | # Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from novaclient import exceptions as nova_exc
from oslo.config import cfg
from oslo.utils import excutils
from neutron import context as n_context
from neutron.i18n import _LE
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
class DeviceHandlingTestSupportMixin(object):
    """Test helpers for Cisco L3 device-handling tests.

    Provides mocks for the L3 admin tenant, the management network,
    novaclient service-VM operations and config-drive file I/O.
    """

    @property
    def _core_plugin(self):
        # The currently loaded Neutron core plugin.
        return manager.NeutronManager.get_plugin()

    def _mock_l3_admin_tenant(self):
        # Mock l3 admin tenant so DeviceHandlingMixin.l3_tenant_id always
        # returns a fixed id.
        self.tenant_id_fcn_p = mock.patch(
            'neutron.plugins.cisco.db.l3.device_handling_db.'
            'DeviceHandlingMixin.l3_tenant_id')
        self.tenant_id_fcn = self.tenant_id_fcn_p.start()
        self.tenant_id_fcn.return_value = "L3AdminTenantId"

    def _create_mgmt_nw_for_tests(self, fmt):
        # Create the management network and its subnet owned by the
        # (mocked) L3 admin tenant.
        self._mgmt_nw = self._make_network(fmt,
                                           cfg.CONF.general.management_network,
                                           True, tenant_id="L3AdminTenantId",
                                           shared=False)
        self._mgmt_subnet = self._make_subnet(fmt, self._mgmt_nw,
                                              "10.0.100.1", "10.0.100.0/24",
                                              ip_version=4)

    def _remove_mgmt_nw_for_tests(self):
        # Tear down ports and subnet before removing the management network.
        q_p = "network_id=%s" % self._mgmt_nw['network']['id']
        subnets = self._list('subnets', query_params=q_p)
        if subnets:
            for p in self._list('ports', query_params=q_p).get('ports'):
                self._delete('ports', p['id'])
            self._delete('subnets', self._mgmt_subnet['subnet']['id'])
        self._delete('networks', self._mgmt_nw['network']['id'])

    # Function used to mock novaclient services list
    def _novaclient_services_list(self, all=True):
        services = set(['nova-conductor', 'nova-cert', 'nova-scheduler',
                        'nova-compute', 'nova-consoleauth'])
        full_list = [FakeResource(binary=res) for res in services]
        _all = all
        def response():
            # Closure standing in for novaclient's services.list();
            # with all=False only a subset of services is reported.
            if _all:
                return full_list
            else:
                return full_list[2:]
        return response

    # Function used to mock novaclient servers create
    def _novaclient_servers_create(self, instance_name, image_id, flavor_id,
                                   nics, files, config_drive):
        fake_vm = FakeResource()
        # Bind the requested ports to the fake VM like Nova would.
        for nic in nics:
            p_dict = {'port': {'device_id': fake_vm.id,
                               'device_owner': 'nova'}}
            self._core_plugin.update_port(n_context.get_admin_context(),
                                          nic['port-id'], p_dict)
        return fake_vm

    # Function used to mock novaclient servers delete
    def _novaclient_servers_delete(self, vm_id):
        # Delete all ports bound to the VM; a port deletion failure is
        # logged and surfaced as an InternalServerError (like Nova).
        q_p = "device_id=%s" % vm_id
        ports = self._list('ports', query_params=q_p)
        for port in ports.get('ports', []):
            try:
                self._delete('ports', port['id'])
            except Exception as e:
                with excutils.save_and_reraise_exception(reraise=False):
                    LOG.error(_LE('Failed to delete port %(p_id)s for vm '
                                  'instance %(v_id)s due to %(err)s'),
                              {'p_id': port['id'], 'v_id': vm_id, 'err': e})
                raise nova_exc.InternalServerError()

    def _mock_svc_vm_create_delete(self, plugin):
        # Mock novaclient methods for creation/deletion of service VMs
        mock.patch(
            'neutron.plugins.cisco.l3.service_vm_lib.n_utils.find_resource',
            lambda *args, **kw: FakeResource()).start()
        self._nclient_services_mock = mock.MagicMock()
        self._nclient_services_mock.list = self._novaclient_services_list()
        mock.patch.object(plugin._svc_vm_mgr._nclient, 'services',
                          self._nclient_services_mock).start()
        nclient_servers_mock = mock.MagicMock()
        nclient_servers_mock.create = self._novaclient_servers_create
        nclient_servers_mock.delete = self._novaclient_servers_delete
        mock.patch.object(plugin._svc_vm_mgr._nclient, 'servers',
                          nclient_servers_mock).start()

    def _mock_io_file_ops(self):
        # Mock library functions for config drive file operations
        cfg_template = '\n'.join(['interface GigabitEthernet1',
                                  'ip address <ip> <mask>',
                                  'no shutdown'])
        m = mock.mock_open(read_data=cfg_template)
        # mock_open does not support iteration by default; wire it up so
        # "for line in file" works on the fake file object.
        m.return_value.__iter__.return_value = cfg_template.splitlines()
        mock.patch('neutron.plugins.cisco.l3.hosting_device_drivers.'
                   'csr1kv_hd_driver.open', m, create=True).start()

    def _test_remove_all_hosting_devices(self):
        """Removes all hosting devices created during a test."""
        plugin = manager.NeutronManager.get_service_plugins()[
            constants.L3_ROUTER_NAT]
        context = n_context.get_admin_context()
        plugin.delete_all_hosting_devices(context, True)

    def _get_fake_resource(self, tenant_id=None, id=None):
        # Minimal resource dict; missing ids are filled with fresh uuids.
        return {'id': id or _uuid(),
                'tenant_id': tenant_id or _uuid()}

    def _get_test_context(self, user_id=None, tenant_id=None, is_admin=False):
        return n_context.Context(user_id, tenant_id, is_admin,
                                 load_admin_roles=True)
# Used to fake Glance images, Nova VMs and Nova services
# Used to fake Glance images, Nova VMs and Nova services
class FakeResource(object):
    def __init__(self, id=None, enabled='enabled', state='up', binary=None):
        # Falsy ids (None, '') are replaced with a fresh uuid.
        self.id = id or _uuid()
        self.status = enabled
        self.state = state
        self.binary = binary
| apache-2.0 |
safwanrahman/kitsune | kitsune/announcements/tasks.py | 6 | 1837 | from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
import bleach
from celery import task
from kitsune.announcements.models import Announcement
from kitsune.sumo.decorators import timeit
from kitsune.sumo.email_utils import make_mail, safe_translation, send_messages
@task()
@timeit
def send_group_email(announcement_id):
    """Build and send the announcement emails to a group."""
    try:
        announcement = Announcement.objects.get(pk=announcement_id)
    except Announcement.DoesNotExist:
        # Announcement was deleted before the task ran; nothing to send.
        return

    group = announcement.group
    recipients = User.objects.filter(groups__in=[group])

    html_body = announcement.content_parsed
    # Plain-text alternative: strip all markup from the rendered content.
    text_body = bleach.clean(html_body, tags=[], strip=True).strip()
    context_vars = {
        'content': text_body,
        'content_html': html_body,
        'domain': Site.objects.get_current().domain,
    }

    @safe_translation
    def _make_mail(locale, user):
        """Build one localized message for *user*."""
        subject = _('New announcement for {group}').format(
            group=group.name)
        return make_mail(
            subject=subject,
            text_template='announcements/email/announcement.ltxt',
            html_template='announcements/email/announcement.html',
            context_vars=context_vars,
            from_email=settings.TIDINGS_FROM_ADDRESS,
            to_email=user.email)

    # Localize each email to the recipient's profile locale.
    send_messages([
        _make_mail(user.profile.locale or settings.LANGUAGE_CODE, user)
        for user in recipients])
| bsd-3-clause |
mikewiebe-ansible/ansible | lib/ansible/modules/network/fortios/fortios_system_mac_address_table.py | 13 | 9593 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_mac_address_table
short_description: Configure MAC address tables in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and mac_address_table category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_mac_address_table:
description:
- Configure MAC address tables.
default: null
type: dict
suboptions:
interface:
description:
- Interface name. Source system.interface.name.
type: str
mac:
description:
- MAC address.
required: true
type: str
reply_substitute:
description:
- New MAC for reply traffic.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure MAC address tables.
fortios_system_mac_address_table:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_mac_address_table:
interface: "<your_own_value> (source system.interface.name)"
mac: "<your_own_value>"
reply_substitute: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open an authenticated session on the FortiGate described by *data*.

    :param data: module parameters (host/username/password/https/ssl_verify)
    :param fos: FortiOSAPI instance used to establish the session
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS is the default; it is only disabled when explicitly set falsy.
    use_https = not ('https' in data and not data['https'])
    fos.https('on' if use_https else 'off')

    fos.login(host, username, password, verify=ssl_verify)
def filter_system_mac_address_table_data(json):
    """Strip *json* down to the keys the mac-address-table API accepts.

    Keys that are absent or explicitly None are dropped.
    """
    option_list = ['interface', 'mac', 'reply_substitute']
    return dict((key, json[key]) for key in option_list
                if key in json and json[key] is not None)
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys, replacing '_' with '-'.

    Ansible argument names use underscores while the FortiOS API expects
    hyphens; this converts arbitrarily nested dict/list structures.
    Non-container values are returned unchanged.
    """
    if isinstance(data, list):
        # Bug fix: rebuild the list so converted elements actually replace
        # the originals — the old code assigned the conversion result to
        # the loop variable, which was a no-op for the list contents.
        return [underscore_to_hyphen(elem) for elem in data]
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        return new_data
    return data
def system_mac_address_table(data, fos):
    """Create or delete one MAC address table entry according to *data*.

    Returns the raw FortiGate response dict.
    """
    vdom = data['vdom']
    state = data['state']
    entry = data['system_mac_address_table']
    # Drop unset options, then convert key style for the FortiOS API.
    payload = underscore_to_hyphen(
        filter_system_mac_address_table_data(entry))

    if state == "present":
        return fos.set('system',
                       'mac-address-table',
                       data=payload,
                       vdom=vdom)

    elif state == "absent":
        # The MAC address is the table's primary key.
        return fos.delete('system',
                          'mac-address-table',
                          mkey=payload['mac'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True when a FortiGate reply indicates success.

    A DELETE that comes back 404 also counts as success: the object was
    already gone, which is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
    """Run the mac-address-table operation and summarize the outcome.

    Returns a tuple (is_error, changed, response).
    """
    # NOTE(review): if 'system_mac_address_table' is falsy, `resp` is never
    # bound and the return line raises NameError — confirm callers always
    # supply the sub-dict (AnsibleModule's argument_spec suggests they do).
    if data['system_mac_address_table']:
        resp = system_mac_address_table(data, fos)

    is_error = not is_successful_status(resp)
    changed = resp['status'] == "success"
    return is_error, changed, resp
def main():
    """Entry point: parse module arguments and apply the configuration."""
    # Ansible argument spec; connection options plus the resource sub-dict.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "system_mac_address_table": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "interface": {"required": False, "type": "str"},
                "mac": {"required": True, "type": "str"},
                "reply_substitute": {"required": False, "type": "str"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI transport: reuse Ansible's persistent connection socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_system(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: talk to the device directly via fortiosapi.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_system(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
jules185/IoT_Hackathon | .homeassistant/deps/sqlalchemy/dialects/mssql/adodbapi.py | 33 | 2692 | # mssql/adodbapi.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mssql+adodbapi
:name: adodbapi
:dbapi: adodbapi
:connectstring: mssql+adodbapi://<username>:<password>@<dsnname>
:url: http://adodbapi.sourceforge.net/
.. note::
The adodbapi dialect is not implemented SQLAlchemy versions 0.6 and
above at this time.
"""
import datetime
from sqlalchemy import types as sqltypes, util
from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
import sys
class MSDateTime_adodbapi(MSDateTime):
    """DateTime type that repairs adodbapi's date-only result values."""

    def result_processor(self, dialect, coltype):
        def _coerce(value):
            # adodbapi returns datetimes that carry an empty time portion
            # as plain datetime.date objects; promote those back to full
            # datetime.datetime. Everything else passes through untouched.
            if type(value) is datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            return value

        return _coerce
class MSDialect_adodbapi(MSDialect):
    """MSSQL dialect speaking to the database through adodbapi."""

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    # NOTE(review): True only on narrow (UCS-2) Python builds — confirm
    # whether this condition is intentional or inverted.
    supports_unicode = sys.maxunicode == 65535
    supports_unicode_statements = True
    driver = 'adodbapi'

    @classmethod
    def import_dbapi(cls):
        # Deferred import so the dialect module loads without adodbapi.
        import adodbapi as module
        return module

    # Swap in the adodbapi-aware DateTime type (see MSDateTime_adodbapi).
    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.DateTime: MSDateTime_adodbapi
        }
    )

    def create_connect_args(self, url):
        # Build an OLEDB connection string from the SQLAlchemy URL query.
        def check_quote(token):
            # Values containing ';' would break the key=value list; quote them.
            if ";" in str(token):
                token = "'%s'" % token
            return token

        keys = dict(
            (k, check_quote(v)) for k, v in url.query.items()
        )

        connectors = ["Provider=SQLOLEDB"]
        if 'port' in keys:
            connectors.append("Data Source=%s, %s" %
                              (keys.get("host"), keys.get("port")))
        else:
            connectors.append("Data Source=%s" % keys.get("host"))
        connectors.append("Initial Catalog=%s" % keys.get("database"))
        user = keys.get("user")
        if user:
            connectors.append("User Id=%s" % user)
            connectors.append("Password=%s" % keys.get("password", ""))
        else:
            # No credentials supplied: fall back to Windows authentication.
            connectors.append("Integrated Security=SSPI")
        return [[";".join(connectors)], {}]

    def is_disconnect(self, e, connection, cursor):
        # adodbapi reports dropped connections via DatabaseError text.
        return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \
            "'connection failure'" in str(e)

dialect = MSDialect_adodbapi
| mit |
facelessuser/sublime-markdown-popups | st3/mdpopups/pygments/lexers/theorem.py | 1 | 18877 | # -*- coding: utf-8 -*-
"""
pygments.lexers.theorem
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for theorem-proving languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from ..lexer import RegexLexer, default, words
from ..token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
class CoqLexer(RegexLexer):
    """
    For the `Coq <http://coq.inria.fr/>`_ theorem prover.

    .. versionadded:: 1.5
    """

    name = 'Coq'
    aliases = ['coq']
    filenames = ['*.v']
    mimetypes = ['text/x-coq']

    keywords1 = (
        # Vernacular commands
        'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
        'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
        'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
        'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
        'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
        'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
        'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
        'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
        'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
        'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
        'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
        'outside', 'Check',
    )
    keywords2 = (
        # Gallina
        'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
        'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
        'for', 'of', 'nosimpl', 'with', 'as',
    )
    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )
    keywords4 = (
        # Tactics
        'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
        'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
        'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
        'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
        'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
        'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
        'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
        'split', 'left', 'right', 'autorewrite', 'tauto',
    )
    keywords5 = (
        # Terminators
        'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
        'assumption', 'solve', 'contradiction', 'discriminate',
    )
    keywords6 = (
        # Control
        'do', 'last', 'first', 'try', 'idtac', 'repeat',
    )
    # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
    # 'downto', 'else', 'end', 'exception', 'external', 'false',
    # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
    # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
    # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
    # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
    # 'type', 'val', 'virtual', 'when', 'while', 'with'
    keyopts = (
        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
        '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
        '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
        r'/\\', r'\\/',
        u'Π', u'λ',
    )
    operators = r'[!$%&*+\./:<=>?@^|~-]'
    word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'
    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list',
                  'array')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\(\*', Comment, 'comment'),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
            # A capitalized name followed by '.' starts a dotted module path.
            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
            (r'\b([A-Z][\w\']*)', Name.Class),
            # Reversed so that longer operators listed later win over their
            # prefixes in the alternation.
            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
            (r"[^\W\d][\w']*", Name),
            (r'\d[\d_]*', Number.Integer),
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            # BUG FIX: the decimal point was an unescaped '.' (any char).
            # NOTE(review): the Number.Integer rule above still shadows this
            # float rule; kept as-is to avoid changing highlighting order.
            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword),  # a stray quote is another syntax element
            (r'"', String.Double, 'string'),
            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        'comment': [
            # Coq comments nest; '#push' re-enters this state on '(*'.
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^"]+', String.Double),
            (r'""', String.Double),  # doubled quote is an escaped quote
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][\w\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
            default('#pop')
        ],
    }

    def analyse_text(text):
        # A leading '(*' comment is a strong hint of Coq source.
        if text.startswith('(*'):
            return True
class IsabelleLexer(RegexLexer):
    """
    For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.

    .. versionadded:: 2.0
    """

    name = 'Isabelle'
    aliases = ['isabelle']
    filenames = ['*.thy']
    mimetypes = ['text/x-isabelle']

    # The keyword groups below mirror Isabelle's command categories; each
    # group is mapped onto a distinct token type in 'root' further down.
    keyword_minor = (
        'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
        'class_instance', 'class_relation', 'code_module', 'congs',
        'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
        'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
        'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
        'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
        'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
        'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
        'type_constructor', 'unchecked', 'unsafe', 'where',
    )

    keyword_diag = (
        'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
        'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
        'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
        'print_abbrevs', 'print_antiquotations', 'print_attributes',
        'print_binds', 'print_bnfs', 'print_bundles',
        'print_case_translations', 'print_cases', 'print_claset',
        'print_classes', 'print_codeproc', 'print_codesetup',
        'print_coercions', 'print_commands', 'print_context',
        'print_defn_rules', 'print_dependencies', 'print_facts',
        'print_induct_rules', 'print_inductives', 'print_interps',
        'print_locale', 'print_locales', 'print_methods', 'print_options',
        'print_orders', 'print_quot_maps', 'print_quotconsts',
        'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
        'print_rules', 'print_simpset', 'print_state', 'print_statement',
        'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
        'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
        'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
        'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
        'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
    )

    keyword_thy = ('theory', 'begin', 'end')

    keyword_section = ('header', 'chapter')

    keyword_subsection = (
        'section', 'subsection', 'subsubsection', 'sect', 'subsect',
        'subsubsect',
    )

    keyword_theory_decl = (
        'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
        'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
        'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
        'code_abort', 'code_class', 'code_const', 'code_datatype',
        'code_identifier', 'code_include', 'code_instance', 'code_modulename',
        'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
        'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
        'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
        'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
        'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
        'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
        'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
        'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
        'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
        'lifting_forget', 'lifting_update', 'local_setup', 'locale',
        'method_setup', 'nitpick_params', 'no_adhoc_overloading',
        'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
        'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
        'overloading', 'parse_ast_translation', 'parse_translation',
        'partial_function', 'primcorec', 'primrec', 'primrec_new',
        'print_ast_translation', 'print_translation', 'quickcheck_generator',
        'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
        'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
        'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
        'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
        'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
        'text_raw', 'theorems', 'translations', 'type_notation',
        'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
        'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
        'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
        'bnf_axiomatization', 'cartouche', 'datatype_compat',
        'free_constructors', 'functor', 'nominal_function',
        'nominal_termination', 'permanent_interpretation',
        'binds', 'defining', 'smt2_status', 'term_cartouche',
        'boogie_file', 'text_cartouche',
    )

    keyword_theory_script = ('inductive_cases', 'inductive_simps')

    keyword_theory_goal = (
        'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
        'crunch', 'crunch_ignore',
        'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
        'lift_definition', 'nominal_inductive', 'nominal_inductive2',
        'nominal_primrec', 'pcpodef', 'primcorecursive',
        'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
        'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
        'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
        'theorem', 'typedef', 'wrap_free_constructors',
    )

    keyword_qed = ('by', 'done', 'qed')
    # Proof-abandoning commands are highlighted as errors on purpose.
    keyword_abandon_proof = ('sorry', 'oops')

    keyword_proof_goal = ('have', 'hence', 'interpret')

    keyword_proof_block = ('next', 'proof')

    keyword_proof_chain = (
        'finally', 'from', 'then', 'ultimately', 'with',
    )

    keyword_proof_decl = (
        'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
        'txt', 'txt_raw', 'unfolding', 'using', 'write',
    )

    keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')

    keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')

    keyword_proof_script = (
        'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
    )

    # Plain symbols; words() escapes them, so '(' etc. are safe here.
    operators = (
        '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
        '+', '-', '!', '?',
    )

    proof_operators = ('{', '}', '.', '..')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\(\*', Comment, 'comment'),
            (r'\{\*', Comment, 'text'),
            (words(operators), Operator),
            (words(proof_operators), Operator.Word),
            (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
            (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
            (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
            (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            # Isabelle symbol notation, e.g. \<forall>.
            (r'\\<\w*>', Text.Symbol),
            (r"[^\W\d][.\w']*", Name),
            (r"\?[^\W\d][.\w']*", Name),   # schematic variables
            (r"'[^\W\d][.\w']*", Name.Type),  # type variables
            (r'\d[\d_]*', Name),  # display numbers as name
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'"', String, 'string'),
            (r'`', String.Other, 'fact'),
        ],
        'comment': [
            # (* ... *) comments nest.
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'text': [
            # {* ... *} text blocks (non-nesting).
            (r'[^*}]+', Comment),
            (r'\*\}', Comment, '#pop'),
            (r'\*', Comment),
            (r'\}', Comment),
        ],
        'string': [
            (r'[^"\\]+', String),
            (r'\\<\w*>', String.Symbol),
            (r'\\"', String),
            (r'\\', String),
            (r'"', String, '#pop'),
        ],
        'fact': [
            # Back-quoted fact references.
            (r'[^`\\]+', String.Other),
            (r'\\<\w*>', String.Symbol),
            (r'\\`', String.Other),
            (r'\\', String.Other),
            (r'`', String.Other, '#pop'),
        ],
    }
class LeanLexer(RegexLexer):
    """
    For the `Lean <https://github.com/leanprover/lean>`_
    theorem prover.

    .. versionadded:: 2.0
    """
    name = 'Lean'
    aliases = ['lean']
    filenames = ['*.lean']
    mimetypes = ['text/x-lean']

    flags = re.MULTILINE | re.UNICODE

    # Command-level keywords (FIX: removed a duplicate 'print' entry).
    keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
                 'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
                 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
                 'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
                 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
                 'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
                 'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
                 'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends',
                 'open', 'example', 'constant', 'constants', 'opaque', 'reducible', 'irreducible'
                 )

    # Term-level keywords.
    keywords2 = (
        'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
        'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc', 'match'
    )

    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )

    # Operators; words() escapes these, so regex metacharacters are safe.
    # (FIX: removed a duplicate '`' entry.)
    operators = (
        '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', '`',
        '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
        '<-', '=', '==', '>', '_', '|', '||', '~', '=>', '<=', '>=',
        '/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
        u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞', u'⌟', u'≡',
        u'⟨', u'⟩'
    )

    punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'/-', Comment, 'comment'),
            (r'--.*?$', Comment.Single),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(operators), Name.Builtin.Pseudo),
            (words(punctuation), Operator),
            # Identifiers may use Greek letters, letterlike symbols and
            # sub-/superscript characters.
            (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
             u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
             u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
            (r'\d+', Number.Integer),
            (r'"', String.Double, 'string'),
            (r'[~?][a-z][\w\']*:', Name.Variable)
        ],
        'comment': [
            # Multiline comments; /- ... -/ nests via '#push'.
            (r'[^/-]', Comment.Multiline),
            (r'/-', Comment.Multiline, '#push'),
            (r'-/', Comment.Multiline, '#pop'),
            (r'[/-]', Comment.Multiline)
        ],
        'string': [
            (r'[^\\"]+', String.Double),
            (r'\\[n"\\]', String.Escape),
            ('"', String.Double, '#pop'),
        ],
    }
| mit |
mrb25/groupme-bot | bin/initialization.py | 1 | 3872 | import datetime
import time
from datetime import timedelta, datetime
# import boto3
# from boto3.dynamodb.conditions import Key
from bin import messageHandler
from bin.selectobjects import *
class Initialization(object):
    """Wires up the GroupMe group/bot selection and runs the polling loop."""

    # NOTE(review): defined without `self`; callers apparently invoke it as
    # Initialization.runbot(<flag>) -- confirm before converting to
    # @staticmethod (an instance call would pass the instance as runLoop).
    def runbot(runLoop):
        """Poll the selected group for new messages while runLoop is truthy."""
        testgroup = SelectObjects.selectgroup(groupy.Group.list())  # Select Group to work from
        testbot = SelectObjects.selectbots(groupy.Bot.list())  # Select bot to work with
        # DynamoDB persistence is currently disabled:
        # dynamodb = boto3.resource('dynamodb')
        # table = dynamodb.Table('Groupme-chat')  # SQL table to work from
        # usertable = dynamodb.Table('User-Commands')  # The SQL table that stores the last time user used a command
        timedelay = 0  # minimum seconds between commands per user
        prevmessage = ""
        print("Working Group is: " + str(testgroup))  # check what group is being watched
        print("Working Bot is: " + str(testbot))  # check what bot is the fuctioning one
        handler = messageHandler.MessageHandler(testbot, testgroup)
        sleepstart = 3  # heavy-sleep window start hour (03:00)
        sleepend = 9    # heavy-sleep window end hour (09:00)
        lastmessage = datetime.now()
        # With the user table disabled there is no stored "last command"
        # time; use a fixed timestamp far in the past so the rate limit
        # never blocks (matches the old '2000-05-08 02:02:02' fallback).
        fallback_prev = datetime(2000, 5, 8, 2, 2, 2)
        while runLoop:
            now = datetime.now()
            if now.hour >= sleepstart and now.hour <= sleepend:
                print("####### Heavy sleep mode enabled #######")
                time.sleep(30)  # Heavy sleep mode between 3 - 9 am
            elif (now - lastmessage) > timedelta(minutes=30):
                print("####### Light sleep mode enabled #######")
                time.sleep(8)  # Light sleep when chat is inactive
            else:
                time.sleep(0.2)  # Just waits a moment so my ras pi doesn't turn to lava.
            message = testgroup.messages().first  # Most recent message in the groupchat
            # Picture-only messages have text=None; the AttributeError from
            # .strip() below is treated as "not a command".
            try:
                if handler.validcommand(message.text.strip().lower()):
                    # BUG FIX: the original read `response['Items']`,
                    # `prevmsgtime` and `usertable`, whose definitions are
                    # all commented out above, so every valid command raised
                    # NameError (not caught by `except AttributeError`).
                    # Use the fallback timestamp until the DynamoDB lookup
                    # is restored.
                    prevmsgtime = fallback_prev
                    msgtime = message.created_at
                    # Stops users from using commands too much
                    if (msgtime - prevmsgtime) > timedelta(seconds=timedelay):
                        # usertable.put_item(
                        #     Item={
                        #         'userid': message.user_id,
                        #         'time': str(message.created_at)
                        #     }
                        # )
                        handler.handle(message)
            except AttributeError:
                pass
            else:
                # No exception: also scan for messages the bot will "like".
                handler.parseforlikes(message)
            if message.text != prevmessage:
                print(message.text)
                prevmessage = message.text
                lastmessage = datetime.now()
            # if message.text is not None:  # Stops picture only messages from breaking put.item
            #     table.put_item(
            #         Item={
            #             'messageCount': testgroup.message_count,
            #             'userid': message.user_id,
            #             'message': message.text.translate('@#%$&').lower(),
            #             'time': str(message.created_at)
            #         }
            #     )
| bsd-2-clause |
seecr/meresco-components | test/autocomplete/__init__.py | 4 | 1277 | ## begin license ##
#
# "Meresco Components" are components to build searchengines, repositories
# and archives, based on "Meresco Core".
#
# Copyright (C) 2007-2009 SURF Foundation. http://www.surf.nl
# Copyright (C) 2007 SURFnet. http://www.surfnet.nl
# Copyright (C) 2007-2010 Seek You Too (CQ2) http://www.cq2.nl
# Copyright (C) 2007-2009 Stichting Kennisnet Ict op school. http://www.kennisnetictopschool.nl
# Copyright (C) 2012 Seecr (Seek You Too B.V.) http://seecr.nl
#
# This file is part of "Meresco Components"
#
# "Meresco Components" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Components" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Components"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
| gpl-2.0 |
krux/kibana-pkg | kibana-4.0.2-linux-x64/node/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py | 604 | 1533 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
    """Create a FlockTool and hand it the command-line arguments."""
    FlockTool().Dispatch(args)
class FlockTool(object):
    """This class emulates the 'flock' command."""

    def Dispatch(self, args):
        """Dispatches a string command to a method."""
        if len(args) < 1:
            raise Exception("Not enough arguments")
        # e.g. 'flock' -> ExecFlock.
        method = "Exec%s" % self._CommandifyName(args[0])
        getattr(self, method)(*args[1:])

    def _CommandifyName(self, name_string):
        """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
        return name_string.title().replace('-', '')

    def ExecFlock(self, lockfile, *cmd_list):
        """Emulates the most basic behavior of Linux's flock(1).

        Opens (creating if needed) `lockfile`, takes an exclusive write
        lock on it, then runs `cmd_list` and returns its exit status.
        """
        # Rely on exception handling to report errors.
        # Note that the stock python on SunOS has a bug
        # where fcntl.flock(fd, LOCK_EX) always fails
        # with EBADF, that's why we use this F_SETLK
        # hack instead.
        # NOTE: 0666 is a Python 2 octal literal; this file targets Python 2.
        fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
        # struct flock layout for F_SETLK: type, whence, start, len, ...
        op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
        fcntl.fcntl(fd, fcntl.F_SETLK, op)
        return subprocess.call(cmd_list)
# Script entry point: main() returns None, so the exit status is 0.
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
| mit |
lad1337/Sick-Beard | lib/hachoir_parser/container/mkv.py | 90 | 20373 | #
# Matroska parser
# Author Julien Muchembled <jm AT jm10.no-ip.com>
# Created: 8 june 2006
#
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, Link,
MissingField, ParserError,
Enum as _Enum, String as _String,
Float32, Float64,
NullBits, Bits, Bit, RawBytes, Bytes,
Int16, GenericInteger)
from lib.hachoir_core.endian import BIG_ENDIAN
from lib.hachoir_core.iso639 import ISO639_2
from lib.hachoir_core.tools import humanDatetime
from lib.hachoir_core.text_handler import textHandler, hexadecimal
from lib.hachoir_parser.container.ogg import XiphInt
from datetime import datetime, timedelta
class RawInt(GenericInteger):
    """
    Raw integer: have to be used in BIG_ENDIAN!

    EBML encodes an element ID/size as a variable-length integer whose
    byte count is given by the position of the first set bit of the
    first byte.
    """
    def __init__(self, parent, name, description=None):
        # Start with a single byte and grow below.
        GenericInteger.__init__(self, parent, name, False, 8, description)
        i = GenericInteger.createValue(self)
        if i == 0:
            # A zero first byte would mean a length beyond 8 bytes.
            raise ParserError('Invalid integer length!')
        # One extra byte for every leading zero bit of the first byte.
        while i < 0x80:
            self._size += 8
            i <<= 1
class Unsigned(RawInt):
    # EBML variable-length unsigned integer with the length-marker bit
    # stripped from the value.
    def __init__(self, parent, name, description=None):
        RawInt.__init__(self, parent, name, description)

    def hasValue(self):
        return True

    def createValue(self):
        # NOTE: '/' is Python 2 integer division (this module uses xrange).
        # 'header' is the length-descriptor bit to subtract.
        header = 1 << self._size / 8 * 7
        value = RawInt.createValue(self) - header
        if value + 1 == header:
            # All value bits set encodes "unknown value" -> None.
            return None
        return value
class Signed(Unsigned):
    # Signed variant: shifts the unsigned range so it is centered on zero.
    def createValue(self):
        # Python 2 integer division; precedence: (size/8*7) - 1.
        header = 1 << self._size / 8 * 7 - 1
        value = RawInt.createValue(self) - 3 * header + 1
        if value == header:
            # Maximum encoded value means "unknown" -> None.
            return None
        return value
# Field factories: each takes the enclosing EBML element ('parent') and
# builds a payload field sized from the element's 'size' sub-field.

def Enum(parent, enum):
    # Integer payload displayed through an enumeration mapping.
    return _Enum(GenericInteger(parent, 'enum', False, parent['size'].value*8), enum)

def Bool(parent):
    # Any non-zero payload is rendered as True.
    return textHandler(GenericInteger(parent, 'bool', False, parent['size'].value*8),
        lambda chunk: str(chunk.value != 0))

def UInt(parent):
    return GenericInteger(parent, 'unsigned', False, parent['size'].value*8)

def SInt(parent):
    return GenericInteger(parent, 'signed', True, parent['size'].value*8)

def String(parent):
    return _String(parent, 'string', parent['size'].value, charset="ASCII")

def EnumString(parent, enum):
    return _Enum(String(parent), enum)

def Binary(parent):
    return RawBytes(parent, 'binary', parent['size'].value)
class AttachedFile(Bytes):
    # Payload of a Matroska AttachedFile/FileData element; exposes the
    # sibling FileName/FileMimeType entries as stream metadata.
    def __init__(self, parent):
        Bytes.__init__(self, parent, 'file', parent['size'].value, None)

    def _getFilename(self):
        # Cached lookup of the sibling FileName entry; None if absent.
        if not hasattr(self, "_filename"):
            try:
                self._filename = self["../../FileName/unicode"].value
            except MissingField:
                self._filename = None
        return self._filename

    def createDescription(self):
        filename = self._getFilename()
        if filename:
            return 'File "%s"' % filename
        return "('Filename' entry not found)"

    def _createInputStream(self, **args):
        # Propagate mime type and filename (when present) as stream tags.
        tags = args.setdefault("tags",[])
        try:
            tags.append(("mime", self["../../FileMimeType/string"].value))
        except MissingField:
            pass
        filename = self._getFilename()
        if filename:
            tags.append(("filename", filename))
        return Bytes._createInputStream(self, **args)
def UTF8(parent):
    # UTF-8 string payload sized from the element's 'size' sub-field.
    return _String(parent,'unicode', parent['size'].value, charset='UTF-8')

def Float(parent):
    # Matroska floats are 4 or 8 bytes; any other size is kept as raw
    # bytes and flagged as invalid.
    size = parent['size'].value
    if size == 4:
        return Float32(parent, 'float')
    elif size == 8:
        return Float64(parent, 'double')
    else:
        return RawBytes(parent, 'INVALID_FLOAT', size)
# Matroska DateUTC epoch: nanoseconds since 2001-01-01.
TIMESTAMP_T0 = datetime(2001, 1, 1)

def dateToDatetime(value):
    # 'value' is in nanoseconds; timedelta takes microseconds, hence //1000.
    return TIMESTAMP_T0 + timedelta(microseconds=value//1000)

def dateToString(field):
    return humanDatetime(dateToDatetime(field.value))

def Date(parent):
    return textHandler(GenericInteger(parent, 'date', True, parent['size'].value*8),
        dateToString)

def SeekID(parent):
    # Displays a SeekID payload as the name of the segment-level element
    # it points at (looked up in the 'segment' table defined below, which
    # is resolved at call time).
    return textHandler(GenericInteger(parent, 'binary', False, parent['size'].value*8),
        lambda chunk: segment.get(chunk.value, (hexadecimal(chunk),))[0])
def CueClusterPosition(parent):
    # Link field resolving a cue's byte offset to the Cluster field it
    # addresses (offset is relative to the start of the Segment payload).
    class Cluster(Link):
        def createValue(self):
            parent = self.parent
            segment = parent['.....']
            # Convert the byte offset into an absolute bit address.
            pos = parent['unsigned'].value * 8 + segment[2].address
            return segment.getFieldByAddress(pos, feed=False)
    return Cluster(parent, 'cluster')
def CueTrackPositions(parent):
    # Link field resolving a cue entry to the matching block inside the
    # referenced cluster (same track, same cluster-relative timecode).
    class Block(Link):
        def createValue(self):
            parent = self.parent
            time = parent['../CueTime/unsigned'].value
            track = parent['CueTrack/unsigned'].value
            cluster = parent['CueClusterPosition/cluster'].value
            # CueTime is absolute; block timecodes are cluster-relative.
            time -= cluster['Timecode/unsigned'].value
            for field in cluster:
                if field.name.startswith('BlockGroup['):
                    for path in 'Block/block', 'SimpleBlock':
                        try:
                            block = field[path]
                            if block['track'].value == track and \
                                    block['timecode'].value == time:
                                return field
                        except MissingField:
                            pass
            parent.error('Cue point not found')
            return self
    return Block(parent, 'block')
class Lace(FieldSet):
    # A laced group of frames inside a Block. The parsing strategy is
    # picked by the lacing mode BEFORE FieldSet.__init__ runs, because
    # createFields is consulted during construction.
    def __init__(self, parent, lacing, size):
        self.n_frames = parent['n_frames'].value
        # lacing: 0=Xiph, 1=fixed, 2=EBML (Block already subtracted 1).
        self.createFields = ( self.parseXiph, self.parseFixed, self.parseEBML )[lacing]
        FieldSet.__init__(self, parent, 'Lace', size=size * 8)

    def parseXiph(self):
        # Xiph lacing: explicit size per frame, last frame takes the rest.
        for i in xrange(self.n_frames):
            yield XiphInt(self, 'size[]')
        for i in xrange(self.n_frames):
            yield RawBytes(self, 'frame[]', self['size['+str(i)+']'].value)
        yield RawBytes(self,'frame[]', (self._size - self.current_size) / 8)

    def parseEBML(self):
        # EBML lacing: first size absolute, following sizes are signed
        # deltas; last frame takes the rest.
        yield Unsigned(self, 'size')
        for i in xrange(1, self.n_frames):
            yield Signed(self, 'dsize[]')
        size = self['size'].value
        yield RawBytes(self, 'frame[]', size)
        for i in xrange(self.n_frames-1):
            size += self['dsize['+str(i)+']'].value
            yield RawBytes(self, 'frame[]', size)
        yield RawBytes(self,'frame[]', (self._size - self.current_size) / 8)

    def parseFixed(self):
        # Fixed lacing: all n_frames+1 frames share the same size.
        # (Python 2 integer division.)
        n = self.n_frames + 1
        size = self._size / 8 / n
        for i in xrange(n):
            yield RawBytes(self, 'frame[]', size)
class Block(FieldSet):
    # Matroska Block / SimpleBlock payload: track number, relative
    # timecode, flags, then either one raw frame or a Lace of frames.
    def __init__(self, parent):
        FieldSet.__init__(self, parent, 'block')
        self._size = 8 * parent['size'].value

    def lacing(self):
        return _Enum(Bits(self, 'lacing', 2), [ 'none', 'Xiph', 'fixed', 'EBML' ])

    def createFields(self):
        yield Unsigned(self, 'track')
        yield Int16(self, 'timecode')

        # The flag byte layout differs between Block and SimpleBlock;
        # for any other parent (e.g. BlockVirtual) it is skipped as
        # reserved and no frame data is parsed.
        if self.parent._name == 'Block':
            yield NullBits(self, 'reserved[]', 4)
            yield Bit(self, 'invisible')
            yield self.lacing()
            yield NullBits(self, 'reserved[]', 1)
        elif self.parent._name == 'SimpleBlock[]':
            yield Bit(self, 'keyframe')
            yield NullBits(self, 'reserved', 3)
            yield Bit(self, 'invisible')
            yield self.lacing()
            yield Bit(self, 'discardable')
        else:
            yield NullBits(self, 'reserved', 8)
            return

        # Remaining payload (Python 2 integer division).
        size = (self._size - self.current_size) / 8
        lacing = self['lacing'].value
        if lacing:
            # Stored count is frames-1; displayed as the real count.
            yield textHandler(GenericInteger(self, 'n_frames', False, 8),
                lambda chunk: str(chunk.value+1))
            yield Lace(self, lacing - 1, size - 1)
        else:
            yield RawBytes(self,'frame', size)
# ---------------------------------------------------------------------------
# EBML/Matroska element tables.
# Each table maps an element ID to (field name, parser), where the parser is
# either a field factory (UInt, String, ...) or a nested table of child
# elements. A trailing '[]' in the name marks elements that may repeat.
# ---------------------------------------------------------------------------

# Top-level EBML header.
ebml = {
    0x1A45DFA3: ('EBML[]', {
        0x4286: ('EBMLVersion', UInt),
        0x42F7: ('EBMLReadVersion', UInt),
        0x42F2: ('EBMLMaxIDLength', UInt),
        0x42F3: ('EBMLMaxSizeLength', UInt),
        0x4282: ('DocType', String),
        0x4287: ('DocTypeVersion', UInt),
        0x4285: ('DocTypeReadVersion', UInt)
    })
}

# EBML signature slot (element 0x1B538667, resolved in EBML.__init__).
signature = {
    0x7E8A: ('SignatureAlgo', UInt),
    0x7E9A: ('SignatureHash', UInt),
    0x7EA5: ('SignaturePublicKey', Binary),
    0x7EB5: ('Signature', Binary),
    0x7E5B: ('SignatureElements', {
        0x7E7B: ('SignatureElementList[]', {
            0x6532: ('SignedElement[]', Binary)
        })
    })
}

# One chapter entry (used by segment_chapters below).
chapter_atom = {
    0x73C4: ('ChapterUID', UInt),
    0x91: ('ChapterTimeStart', UInt),
    0x92: ('ChapterTimeEnd', UInt),
    0x98: ('ChapterFlagHidden', Bool),
    0x4598: ('ChapterFlagEnabled', Bool),
    0x6E67: ('ChapterSegmentUID', Binary),
    0x6EBC: ('ChapterSegmentEditionUID', Binary),
    0x63C3: ('ChapterPhysicalEquiv', UInt),
    0x8F: ('ChapterTrack', {
        0x89: ('ChapterTrackNumber[]', UInt)
    }),
    0x80: ('ChapterDisplay[]', {
        0x85: ('ChapString', UTF8),
        0x437C: ('ChapLanguage[]', String),
        0x437E: ('ChapCountry[]', String)
    }),
    0x6944: ('ChapProcess[]', {
        0x6955: ('ChapProcessCodecID', UInt),
        0x450D: ('ChapProcessPrivate', Binary),
        0x6911: ('ChapProcessCommand[]', {
            0x6922: ('ChapProcessTime', UInt),
            0x6933: ('ChapProcessData', Binary)
        })
    })
}

simple_tag = {
    0x45A3: ('TagName', UTF8),
    0x447A: ('TagLanguage', String),
    0x44B4: ('TagDefault', Bool), # 0x4484
    0x4487: ('TagString', UTF8),
    0x4485: ('TagBinary', Binary)
}

segment_seek = {
    0x4DBB: ('Seek[]', {
        0x53AB: ('SeekID', SeekID),
        0x53AC: ('SeekPosition', UInt)
    })
}

segment_info = {
    0x73A4: ('SegmentUID', Binary),
    0x7384: ('SegmentFilename', UTF8),
    0x3CB923: ('PrevUID', Binary),
    0x3C83AB: ('PrevFilename', UTF8),
    0x3EB923: ('NextUID', Binary),
    0x3E83BB: ('NextFilename', UTF8),
    0x4444: ('SegmentFamily[]', Binary),
    0x6924: ('ChapterTranslate[]', {
        0x69FC: ('ChapterTranslateEditionUID[]', UInt),
        0x69BF: ('ChapterTranslateCodec', UInt),
        0x69A5: ('ChapterTranslateID', Binary)
    }),
    0x2AD7B1: ('TimecodeScale', UInt),
    0x4489: ('Duration', Float),
    0x4461: ('DateUTC', Date),
    0x7BA9: ('Title', UTF8),
    0x4D80: ('MuxingApp', UTF8),
    0x5741: ('WritingApp', UTF8)
}

# Cluster contents: the actual audio/video block data.
segment_clusters = {
    0xE7: ('Timecode', UInt),
    0x5854: ('SilentTracks', {
        0x58D7: ('SilentTrackNumber[]', UInt)
    }),
    0xA7: ('Position', UInt),
    0xAB: ('PrevSize', UInt),
    0xA0: ('BlockGroup[]', {
        0xA1: ('Block', Block),
        0xA2: ('BlockVirtual[]', Block),
        0x75A1: ('BlockAdditions', {
            0xA6: ('BlockMore[]', {
                0xEE: ('BlockAddID', UInt),
                0xA5: ('BlockAdditional', Binary)
            })
        }),
        0x9B: ('BlockDuration', UInt),
        0xFA: ('ReferencePriority', UInt),
        0xFB: ('ReferenceBlock[]', SInt),
        0xFD: ('ReferenceVirtual', SInt),
        0xA4: ('CodecState', Binary),
        0x8E: ('Slices[]', {
            0xE8: ('TimeSlice[]', {
                0xCC: ('LaceNumber', UInt),
                0xCD: ('FrameNumber', UInt),
                0xCB: ('BlockAdditionID', UInt),
                0xCE: ('Delay', UInt),
                0xCF: ('Duration', UInt)
            })
        })
    }),
    0xA3: ('SimpleBlock[]', Block)
}

tracks_video = {
    0x9A: ('FlagInterlaced', Bool),
    0x53B8: ('StereoMode', lambda parent: Enum(parent, \
        [ 'mono', 'right eye', 'left eye', 'both eyes' ])),
    0xB0: ('PixelWidth', UInt),
    0xBA: ('PixelHeight', UInt),
    0x54AA: ('PixelCropBottom', UInt),
    0x54BB: ('PixelCropTop', UInt),
    0x54CC: ('PixelCropLeft', UInt),
    0x54DD: ('PixelCropRight', UInt),
    0x54B0: ('DisplayWidth', UInt),
    0x54BA: ('DisplayHeight', UInt),
    0x54B2: ('DisplayUnit', lambda parent: Enum(parent, \
        [ 'pixels', 'centimeters', 'inches' ])),
    0x54B3: ('AspectRatioType', lambda parent: Enum(parent, \
        [ 'free resizing', 'keep aspect ratio', 'fixed' ])),
    0x2EB524: ('ColourSpace', Binary),
    0x2FB523: ('GammaValue', Float)
}

tracks_audio = {
    0xB5: ('SamplingFrequency', Float),
    0x78B5: ('OutputSamplingFrequency', Float),
    0x9F: ('Channels', UInt),
    0x7D7B: ('ChannelPositions', Binary),
    0x6264: ('BitDepth', UInt)
}

tracks_content_encodings = {
    0x6240: ('ContentEncoding[]', {
        0x5031: ('ContentEncodingOrder', UInt),
        0x5032: ('ContentEncodingScope', UInt),
        0x5033: ('ContentEncodingType', UInt),
        0x5034: ('ContentCompression', {
            0x4254: ('ContentCompAlgo', UInt),
            0x4255: ('ContentCompSettings', Binary)
        }),
        0x5035: ('ContentEncryption', {
            0x47e1: ('ContentEncAlgo', UInt),
            0x47e2: ('ContentEncKeyID', Binary),
            0x47e3: ('ContentSignature', Binary),
            0x47e4: ('ContentSigKeyID', Binary),
            0x47e5: ('ContentSigAlgo', UInt),
            0x47e6: ('ContentSigHashAlgo', UInt),
        })
    })
}

segment_tracks = {
    0xAE: ('TrackEntry[]', {
        0xD7: ('TrackNumber', UInt),
        0x73C5: ('TrackUID', UInt),
        0x83: ('TrackType', lambda parent: Enum(parent, {
            0x01: 'video',
            0x02: 'audio',
            0x03: 'complex',
            0x10: 'logo',
            0x11: 'subtitle',
            0x12: 'buttons',
            0x20: 'control'
        })),
        0xB9: ('FlagEnabled', Bool),
        0x88: ('FlagDefault', Bool),
        0x55AA: ('FlagForced[]', Bool),
        0x9C: ('FlagLacing', Bool),
        0x6DE7: ('MinCache', UInt),
        0x6DF8: ('MaxCache', UInt),
        0x23E383: ('DefaultDuration', UInt),
        0x23314F: ('TrackTimecodeScale', Float),
        0x537F: ('TrackOffset', SInt),
        0x55EE: ('MaxBlockAdditionID', UInt),
        0x536E: ('Name', UTF8),
        0x22B59C: ('Language', lambda parent: EnumString(parent, ISO639_2)),
        0x86: ('CodecID', String),
        0x63A2: ('CodecPrivate', Binary),
        0x258688: ('CodecName', UTF8),
        0x7446: ('AttachmentLink', UInt),
        0x3A9697: ('CodecSettings', UTF8),
        0x3B4040: ('CodecInfoURL[]', String),
        0x26B240: ('CodecDownloadURL[]', String),
        0xAA: ('CodecDecodeAll', Bool),
        0x6FAB: ('TrackOverlay[]', UInt),
        0x6624: ('TrackTranslate[]', {
            0x66FC: ('TrackTranslateEditionUID[]', UInt),
            0x66BF: ('TrackTranslateCodec', UInt),
            0x66A5: ('TrackTranslateTrackID', Binary)
        }),
        0xE0: ('Video', tracks_video),
        0xE1: ('Audio', tracks_audio),
        0x6d80: ('ContentEncodings', tracks_content_encodings)
    })
}

# Cue (seek index) entries; the CueTrackPositions/CueClusterPosition
# factories take extra positional parsers (3-tuples) to build link fields.
segment_cues = {
    0xBB: ('CuePoint[]', {
        0xB3: ('CueTime', UInt),
        0xB7: ('CueTrackPositions[]', CueTrackPositions, {
            0xF7: ('CueTrack', UInt),
            0xF1: ('CueClusterPosition', CueClusterPosition, UInt),
            0x5378: ('CueBlockNumber', UInt),
            0xEA: ('CueCodecState', UInt),
            0xDB: ('CueReference[]', {
                0x96: ('CueRefTime', UInt),
                0x97: ('CueRefCluster', UInt),
                0x535F: ('CueRefNumber', UInt),
                0xEB: ('CueRefCodecState', UInt)
            })
        })
    })
}

segment_attachments = {
    0x61A7: ('AttachedFile[]', {
        0x467E: ('FileDescription', UTF8),
        0x466E: ('FileName', UTF8),
        0x4660: ('FileMimeType', String),
        0x465C: ('FileData', AttachedFile),
        0x46AE: ('FileUID', UInt),
        0x4675: ('FileReferral', Binary)
    })
}

segment_chapters = {
    0x45B9: ('EditionEntry[]', {
        0x45BC: ('EditionUID', UInt),
        0x45BD: ('EditionFlagHidden', Bool),
        0x45DB: ('EditionFlagDefault', Bool),
        0x45DD: ('EditionFlagOrdered', Bool),
        0xB6: ('ChapterAtom[]', chapter_atom)
    })
}

segment_tags = {
    0x7373: ('Tag[]', {
        0x63C0: ('Targets', {
            0x68CA: ('TargetTypeValue', UInt),
            0x63CA: ('TargetType', String),
            0x63C5: ('TrackUID[]', UInt),
            0x63C9: ('EditionUID[]', UInt),
            0x63C4: ('ChapterUID[]', UInt),
            0x63C6: ('AttachmentUID[]', UInt)
        }),
        0x67C8: ('SimpleTag[]', simple_tag)
    })
}

# Top-level Segment children; also used by SeekID to name seek targets.
segment = {
    0x114D9B74: ('SeekHead[]', segment_seek),
    0x1549A966: ('Info[]', segment_info),
    0x1F43B675: ('Cluster[]', segment_clusters),
    0x1654AE6B: ('Tracks[]', segment_tracks),
    0x1C53BB6B: ('Cues', segment_cues),
    0x1941A469: ('Attachments', segment_attachments),
    0x1043A770: ('Chapters', segment_chapters),
    0x1254C367: ('Tags[]', segment_tags)
}
class EBML(FieldSet):
    """A single EBML element: an id, a size, then a typed payload or children.

    *ids* maps EBML ids to (name, handler...) tuples; ids not listed there
    fall back to the few ids that are valid anywhere in an EBML tree.
    """

    def __init__(self, parent, ids):
        FieldSet.__init__(self, parent, "?[]")
        # Resolve the element's name (and payload handler) from its id.
        ident = self['id'].value
        self.val = ids.get(ident)
        if not self.val:
            # Global ids allowed at any level of the document.
            fallback = {
                0xBF: ('CRC-32[]', Binary),
                0xEC: ('Void[]', Binary),
                0x1B538667: ('SignatureSlot[]', signature),
            }
            self.val = fallback.get(ident, ('Unknown[]', Binary))
        self._name = self.val[0]

        # Derive the total element size (in bits) from the size field.
        size = self['size']
        if size.value is not None:
            self._size = size.address + size.size + size.value * 8
        elif self._parent._parent:
            raise ParserError("Unknown length (only allowed for the last Level 0 element)")
        elif self._parent._size is not None:
            # Last Level 0 element: consume whatever remains of the parent.
            self._size = self._parent._size - self.address

    def createFields(self):
        """Yield the id/size headers followed by the element's content."""
        yield RawInt(self, 'id')
        yield Unsigned(self, 'size')
        for handler in self.val[1:]:
            if callable(handler):
                # Leaf element: parse the payload with the given field class.
                yield handler(self)
            else:
                # Container element: parse child EBML elements until the end.
                while not self.eof:
                    yield EBML(self, handler)
class MkvFile(Parser):
    """Matroska / WebM multimedia container parser (an EBML document)."""

    # Magic number of the EBML header element that starts every file.
    EBML_SIGNATURE = 0x1A45DFA3
    PARSER_TAGS = {
        "id": "matroska",
        "category": "container",
        "file_ext": ("mka", "mkv", "webm"),
        "mime": (
            u"video/x-matroska",
            u"audio/x-matroska",
            u"video/webm",
            u"audio/webm"),
        "min_size": 5*8,
        "magic": (("\x1A\x45\xDF\xA3", 0),),
        "description": "Matroska multimedia container"
    }
    endian = BIG_ENDIAN

    def _getDoctype(self):
        # self[0] is the EBML header; its DocType child distinguishes
        # 'matroska' from 'webm'.
        return self[0]['DocType/string'].value

    def validate(self):
        # Cheap check first: the stream must start with the EBML magic.
        if self.stream.readBits(0, 32, self.endian) != self.EBML_SIGNATURE:
            return False
        try:
            first = self[0]
        except ParserError:
            return False
        # CPython 2 chained comparison: None compares less than any number,
        # so this is true only when self._size is set and is smaller than
        # the first chunk's size.
        if None < self._size < first._size:
            return "First chunk size is invalid"
        if self._getDoctype() not in ('matroska', 'webm'):
            return "Stream isn't a matroska document."
        return True

    def createFields(self):
        # The EBML header element comes first, then one or more Segments.
        hdr = EBML(self, ebml)
        yield hdr
        while not self.eof:
            yield EBML(self, { 0x18538067: ('Segment[]', segment) })

    def createContentSize(self):
        # End of the first Segment = position of its size field, plus the
        # declared payload size (bytes -> bits), plus the size field itself.
        field = self["Segment[0]/size"]
        return field.absolute_address + field.value * 8 + field.size

    def createDescription(self):
        if self._getDoctype() == 'webm':
            return 'WebM video'
        else:
            return 'Matroska video'

    def createMimeType(self):
        if self._getDoctype() == 'webm':
            return u"video/webm"
        else:
            return u"video/x-matroska"
| gpl-3.0 |
pypa/pip | src/pip/_vendor/urllib3/util/timeout.py | 27 | 10003 | from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
from ..exceptions import TimeoutStateError
# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()
# Use time.monotonic if available.
current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
    """Timeout configuration for urllib3 connections.

    A :class:`Timeout` bundles three related limits:

    * ``connect`` -- how long to wait for a connection attempt to succeed,
    * ``read`` -- how long to wait between consecutive reads of the response,
    * ``total`` -- a combined budget: the read timeout is reduced by however
      long the connect phase took, and the shorter of the specific/total
      values always wins.

    Each value may be an int, a float, ``None`` (no limit at all) or the
    :attr:`DEFAULT_TIMEOUT` sentinel (use the system-wide socket default).
    Instances are stored per pool; :meth:`clone` produces a per-request copy
    so each request gets its own start/stop state.

    Note that these timeouts bound the gaps between socket operations, not
    the wall-clock duration of the whole request; a slowly trickling server
    can keep a request alive far longer than any single timeout value.
    """

    #: Sentinel object representing the global default from ``socket.py``.
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        self._connect = self._validate_timeout(connect, "connect")
        self._read = self._validate_timeout(read, "read")
        self.total = self._validate_timeout(total, "total")
        # Monotonic timestamp recorded by start_connect(); None until then.
        self._start_connect = None

    def __repr__(self):
        return "%s(connect=%r, read=%r, total=%r)" % (
            type(self).__name__,
            self._connect,
            self._read,
            self.total,
        )

    # __str__ provided for backwards compatibility
    __str__ = __repr__

    @classmethod
    def _validate_timeout(cls, value, name):
        """Validate a single timeout value.

        :param value: candidate timeout (int, float, None or sentinel).
        :param name: attribute name, used only in error messages.
        :return: the value unchanged (the "not given" sentinel is normalized
            to :attr:`DEFAULT_TIMEOUT`).
        :raises ValueError: for booleans, non-numeric values, and numbers
            less than or equal to zero.
        """
        # Normalize the "argument omitted" sentinel to the socket default.
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        # None (no timeout) and the default sentinel pass through untouched.
        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        # bool is a subclass of int, so reject it explicitly before the
        # numeric checks below would silently accept True/False as 1/0.
        if isinstance(value, bool):
            raise ValueError(
                "Timeout cannot be a boolean value. It must "
                "be an int, float or None."
            )

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value)
            )

        try:
            if value <= 0:
                raise ValueError(
                    "Attempted to set %s timeout to %s, but the "
                    "timeout cannot be set to a value less "
                    "than or equal to 0." % (name, value)
                )
        except TypeError:
            # Python 3 refuses to order-compare some types that float()
            # nevertheless accepts; treat those as non-numeric too.
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value)
            )

        return value

    @classmethod
    def from_float(cls, timeout):
        """Wrap a legacy single-number timeout in a :class:`Timeout`.

        httplib applies one value to both the connect() and recv() socket
        calls; mirror that by using *timeout* for both individual limits.

        :param timeout: legacy timeout value (number, sentinel, or None).
        :return: equivalent :class:`Timeout` instance.
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """Return a fresh copy with the same settings and an unstarted timer.

        ``copy.deepcopy`` is deliberately avoided here: it would duplicate
        the ``_GLOBAL_DEFAULT_TIMEOUT`` sentinel, which ``socket.py``
        recognizes by identity.

        :rtype: :class:`Timeout`
        """
        return Timeout(connect=self._connect, read=self._read, total=self.total)

    def start_connect(self):
        """Record the start of a connect() attempt.

        :return: the recorded (monotonic, when available) start time.
        :raises urllib3.exceptions.TimeoutStateError: if the timer was
            already started.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """Return seconds elapsed since :meth:`start_connect`.

        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if the timer was
            never started.
        """
        if self._start_connect is None:
            raise TimeoutStateError(
                "Can't get connect duration for timer that has not started."
            )
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """Effective timeout for establishing a connection.

        :return: the smaller of the connect and total limits, honoring
            ``None`` and the :attr:`DEFAULT_TIMEOUT` sentinel.
        """
        if self.total is None:
            return self._connect
        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total
        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """Effective timeout for reading the response.

        When a total budget is set alongside a specific read timeout, the
        time already spent connecting is charged against the budget and the
        smaller of the two remaining limits (floored at 0) is used.

        :return: int/float, :attr:`Timeout.DEFAULT_TIMEOUT` or None.
        :raises urllib3.exceptions.TimeoutStateError: if a total budget must
            be charged but :meth:`start_connect` was never called.
        """
        has_total = (self.total is not None
                     and self.total is not self.DEFAULT_TIMEOUT)
        has_read = (self._read is not None
                    and self._read is not self.DEFAULT_TIMEOUT)

        if has_total and has_read:
            if self._start_connect is None:
                # The connect phase never ran; the full read timeout applies.
                return self._read
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        if has_total:
            return max(0, self.total - self.get_connect_duration())
        return self._read
| mit |
TeamTwisted/external_chromium_org | tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py | 35 | 2545 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidTraceviewProfiler(profiler.Profiler):
  """Collects a Traceview on Android."""

  # Scratch directory on the device where per-process .trace files land.
  _DEFAULT_DEVICE_DIR = '/data/local/tmp/traceview'

  def __init__(self, browser_backend, platform_backend, output_path, state):
    # Ensures the device scratch directory exists and is empty, then starts
    # 'am profile' method profiling for every browser process (one .trace
    # file per pid).
    super(AndroidTraceviewProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    if self._browser_backend.adb.device().FileExists(self._DEFAULT_DEVICE_DIR):
      # Reuse the existing directory, but clear out stale traces first.
      self._browser_backend.adb.RunShellCommand(
          'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
    else:
      self._browser_backend.adb.RunShellCommand(
          'mkdir -p ' + self._DEFAULT_DEVICE_DIR)
      # World-writable so the (non-root) browser processes can write traces.
      self._browser_backend.adb.RunShellCommand(
          'chmod 777 ' + self._DEFAULT_DEVICE_DIR)
    # List of (pid, device-side trace path) pairs started below.
    self._trace_files = []
    for pid in self._GetProcessOutputFileMap().iterkeys():
      device_dump_file = '%s/%s.trace' % (self._DEFAULT_DEVICE_DIR, pid)
      self._trace_files.append((pid, device_dump_file))
      self._browser_backend.adb.RunShellCommand('am profile %s start %s' %
                                                (pid, device_dump_file))

  @classmethod
  def name(cls):
    # Command-line name used to select this profiler.
    return 'android-traceview'

  @classmethod
  def is_supported(cls, browser_type):
    # Traceview only applies to Android browsers.
    if browser_type == 'any':
      return android_browser_finder.CanFindAvailableBrowsers()
    return browser_type.startswith('android')

  def CollectProfile(self):
    # Stops profiling for every started pid, pulls the whole device scratch
    # directory to the host output path, and returns the device-side paths
    # of the collected trace files.
    output_files = []
    for pid, trace_file in self._trace_files:
      self._browser_backend.adb.RunShellCommand('am profile %s stop' % pid)
      # Wait until the trace file has actually been flushed on the device.
      util.WaitFor(lambda: self._FileSize(trace_file) > 0, timeout=10)
      output_files.append(trace_file)
    self._browser_backend.adb.device().old_interface.Adb().Pull(
        self._DEFAULT_DEVICE_DIR, self._output_path)
    self._browser_backend.adb.RunShellCommand(
        'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
    print 'Traceview profiles available in ', self._output_path
    print 'Use third_party/android_tools/sdk/tools/monitor '
    print 'then use "File->Open File" to visualize them.'
    return output_files

  def _FileSize(self, file_name):
    # Ls() maps basename -> tuple whose first item is the size; a missing
    # file falls back to the (0, ) default, i.e. size 0.
    f = self._browser_backend.adb.device().Ls(file_name)
    return f.get(os.path.basename(file_name), (0, ))[0]
| bsd-3-clause |
rsignell-usgs/notebook | system-test/Theme_1_Baseline/ioos_data_size.py | 1 | 7358 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <markdowncell>
# #Estimate how many TB of data are served by IOOS
# <markdowncell>
# Estimate dataset size from the OPeNDAP DDS. Here we use regular expressions to parse the DDS and just the variable size (32 or 64 bit Int or Float) by their shapes. This represents the size in memory, not on disk, since the data could be compressed. But the data in memory is in some sense a more true representation of the quantity of data available by the service.
# <codecell>
from owslib.csw import CatalogueServiceWeb
from owslib import fes
import pandas as pd
import datetime as dt
import requests
import re
import time
from __future__ import print_function
# <codecell>
def service_urls(records, service_string='urn:x-esri:specification:ServiceType:odp:url'):
    """Extract all endpoint URLs of a given ServiceType from CSW records.

    Unfortunately the scheme strings differ between CSW-ISO services, e.g.:
        NODC geoportal: 'urn:x-esri:specification:ServiceType:OPeNDAP'
        NGDC geoportal: 'urn:x-esri:specification:ServiceType:odp:url'

    :param records: mapping of record id -> owslib record; each record's
        ``references`` is a list of dicts with 'scheme' and 'url' keys.
    :param service_string: scheme string identifying the wanted service type.
    :return: list of matching URLs (first matching reference per record).
    """
    urls = []
    # .values() replaces the Python-2-only .iteritems() (the key was unused),
    # keeping this function working under both Python 2 and Python 3.
    for rec in records.values():
        # First reference whose scheme matches, or None if there is none.
        url = next((d['url'] for d in rec.references
                    if d['scheme'] == service_string), None)
        if url is not None:
            urls.append(url)
    return urls
# <markdowncell>
# ## Find OpenDAP endpoints from NGDC CSW
# <codecell>
# Connection parameters for the NGDC/IOOS geoportal CSW endpoint.
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' # NGDC/IOOS Geoportal
dap_timeout=4 # timeout for DAP response
csw_timeout=60 # timeout for CSW response
csw = CatalogueServiceWeb(endpoint,timeout=csw_timeout)
csw.version
# <codecell>
[op.name for op in csw.operations]
# <codecell>
csw.get_operation_by_name('GetRecords').constraints
# <codecell>
for oper in csw.operations:
    print(oper.name)
# <codecell>
csw.get_operation_by_name('GetRecords').constraints
# <markdowncell>
# Since the supported ISO queryables contain `apiso:ServiceType`, we can use CSW to find all datasets with services that contain the string "dap"
# <codecell>
# NOTE(review): the bare 'except' below swallows every exception (including
# KeyboardInterrupt); acceptable in an interactive notebook, not in a library.
try:
    csw.get_operation_by_name('GetDomain')
    csw.getdomain('apiso:ServiceType', 'property')
    print(csw.results['values'])
except:
    print('GetDomain not supported')
# <markdowncell>
# Since this CSW service doesn't provide us a list of potential values for `apiso:ServiceType`, we guess `opendap`, which seems to work:
# <codecell>
val = 'opendap'
# Case-insensitive wildcard match on the apiso:ServiceType property.
service_type = fes.PropertyIsLike(propertyname='apiso:ServiceType',literal=('*%s*' % val),
                                  escapeChar='\\',wildCard='*',singleChar='?')
filter_list = [ service_type]
# <codecell>
csw.getrecords2(constraints=filter_list,maxrecords=10000,esn='full')
len(csw.records.keys())
# <markdowncell>
# By printing out the references from a random record, we see that for this CSW the DAP URL is identified by
# `urn:x-esri:specification:ServiceType:odp:url`
# <codecell>
# NOTE(review): 'random' is never imported in this script; this cell relies
# on it being pre-imported in the interactive session -- confirm.
choice=random.choice(list(csw.records.keys()))
print(choice)
csw.records[choice].references
# <markdowncell>
# Get all the OPeNDAP endpoints
# <codecell>
dap_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:odp:url')
len(dap_urls)
# <codecell>
def calc_dsize(txt):
    """Estimate dataset size (in megabytes) from an OPeNDAP DDS document.

    Approximate method: for every 32/64 bit Int or Float variable, multiply
    its bit width by the variable's dimension sizes, then convert the summed
    bit count to megabytes.

    :param txt: the DDS response body as a string.
    :return: estimated size in megabytes (float).
    """
    # Match the bit width following "Float"/"Int" and every dimension size
    # immediately preceding a "]".  For a line like
    #   Float32 Total_precipitation_surface_6_Hour_Accumulation[time2 = 74][y = 303][x = 491];
    # this yields [32, 74, 303, 491] and correctly skips numbers embedded
    # in variable/dimension names (the 6 in "..._6_Hour_...").
    pattern = re.compile(r'\d+(?=])|(?<=Float)\d+|(?<=Int)\d+')
    total_bits = 0
    # Each DDS declaration is terminated by ';'.
    # (The original used len(map(...)) and the builtin reduce(), both of
    # which break on Python 3, and shadowed the builtin 'all'.)
    for decl in txt.split(';'):
        numbers = [int(num) for num in pattern.findall(decl)]
        # A sized variable needs at least a bit width plus one dimension.
        if len(numbers) >= 2:
            var_bits = 1
            for factor in numbers:
                var_bits *= factor
            total_bits += var_bits
    return total_bits / 1.0e6 / 8.  # bits -> megabytes
# <codecell>
def tot_dsize(url, dap_timeout=2):
    """Fetch a dataset's DDS and estimate its in-memory size.

    :param url: OPeNDAP base URL of the dataset.
    :param dap_timeout: timeout (seconds) for the HTTP request.
    :return: (size_in_megabytes, http_status_code); the status code is -1
        when the request itself failed (timeout, connection error, ...).
    """
    dds_url = url + '.dds'
    tot = 0
    try:
        response = requests.get(dds_url, verify=True, timeout=dap_timeout)
    except requests.RequestException:
        # Narrowed from a bare 'except:': only request failures (timeouts,
        # DNS/connection errors, ...) should map to the -1 status code.
        return tot, -1
    if response.status_code == 200:
        # Total size over all variables ...
        tot = calc_dsize(response.text)
        # ... minus the size of "MAPS:" coordinate variables, which would
        # otherwise be double counted.
        maps = re.compile('MAPS:(.*?)}', re.MULTILINE | re.DOTALL)
        map_text = ''.join(maps.findall(response.text))
        if map_text:
            tot -= calc_dsize(map_text)
    return tot, response.status_code
# <codecell>
# Probe every OPeNDAP endpoint and partition results into good/bad lists.
time0 = time.time()
good_data=[]
bad_data=[]
count=0
for url in dap_urls:
    count += 1
    dtot, status_code = tot_dsize(url,dap_timeout=dap_timeout)
    if status_code==200:
        good_data.append([url,dtot])
        print('[{}]Good:{},{}'.format(count,url,dtot), end='\r')
    else:
        bad_data.append([url,status_code])
        print('[{}]Fail:{},{}'.format(count,url,status_code), end='\r')
print('Elapsed time={} minutes'.format((time.time()-time0)/60.))
# <codecell>
print('Elapsed time={} minutes'.format((time.time()-time0)/60.))
# <codecell>
len(bad_data)
# <codecell>
bad_data[0][0]
# <markdowncell>
# So how much data are we serving?
# <codecell>
# NOTE(review): 'sum' shadows the builtin of the same name for the rest of
# the session.
sum=0
for ds in good_data:
    sum +=ds[1]
print('{} terabytes'.format(sum/1.e6))
# <codecell>
url=[]
size=[]
for item in good_data:
    url.append(item[0])
    size.append(item[1])
# <codecell>
d={}
d['url']=url
d['size']=size
# <codecell>
good = pd.DataFrame(d)
# <codecell>
good.head()
# <codecell>
# NOTE(review): DataFrame.sort was deprecated/removed in later pandas
# versions (use sort_values) -- confirm the pandas version pinned here.
good=good.sort(['size'],ascending=0)
# <codecell>
good.head()
# <codecell>
url=[]
code=[]
for item in bad_data:
    url.append(item[0])
    code.append(item[1])
# <codecell>
d={}
d['url']=url
d['code']=code
bad = pd.DataFrame(d)
# <codecell>
bad.head()
# <codecell>
# IPython magic: change working directory (not valid plain Python).
cd /usgs/data2/notebook/system-test/Theme_1_Baseline
# <codecell>
td = dt.datetime.today().strftime('%Y-%m-%d')
# <codecell>
bad.to_csv('bad'+td+'.csv')
# <codecell>
good.to_csv('good'+td+'.csv')
# <codecell>
bad=bad.sort(['url','code'],ascending=[0,0])
# <codecell>
bad = pd.read_csv('bad'+td+'.csv',index_col=0)
good = pd.read_csv('good'+td+'.csv',index_col=0)
# <codecell>
bad.head()
# <codecell>
# Count failing endpoints per data provider.
recs = bad[bad['url'].str.contains('neracoos')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('ucar')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('tamu')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('axiom')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('caricoos')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('secoora')]
print(len(recs))
# <codecell>
recs = bad[bad['url'].str.contains('nanoos')]
print(len(recs))
# <codecell>
# NOTE(review): at this point 'recs' holds the *nanoos* subset, yet the file
# is named axiom.csv -- likely a stale cell; confirm intent.
recs.to_csv('axiom.csv')
# <codecell>
# IPython shell magics: commit the CSV outputs (not valid plain Python).
!git add *.csv
# <codecell>
!git commit -m 'new csv'
# <codecell>
!git push
# <codecell>
| mit |
mzdaniel/oh-mainline | vendor/packages/scrapy/scrapy/utils/response.py | 17 | 3134 | """
This module provides some useful functions for working with
scrapy.http.Response objects
"""
import os
import re
import weakref
import webbrowser
import tempfile
from twisted.web import http
from twisted.web.http import RESPONSES
from w3lib import html
from scrapy.http import Response, HtmlResponse
def body_or_str(obj, unicode=True):
    """Return the body of *obj* as unicode (default) or as raw bytes.

    *obj* may be a Response or any string; byte strings are assumed to be
    UTF-8 encoded.
    """
    assert isinstance(obj, (Response, basestring)), \
        "obj must be Response or basestring, not %s" % type(obj).__name__
    if isinstance(obj, Response):
        return obj.body_as_unicode() if unicode else obj.body
    if isinstance(obj, str):
        # Byte string: decode when unicode output was requested.
        return obj.decode('utf-8') if unicode else obj
    # Unicode string: encode when raw bytes were requested.
    return obj if unicode else obj.encode('utf-8')
# Per-response cache for get_base_url(); weak keys let entries disappear
# together with the Response objects they describe.
_baseurl_cache = weakref.WeakKeyDictionary()
def get_base_url(response):
    """Return the base url of the given response, joined with the response url"""
    if response not in _baseurl_cache:
        # Only the first 4kB are searched: a <base> tag belongs in <head>.
        head_chunk = response.body_as_unicode()[0:4096]
        _baseurl_cache[response] = html.get_base_url(
            head_chunk, response.url, response.encoding)
    return _baseurl_cache[response]
# Strip <noscript>/<script> sections before looking for a meta refresh, so
# redirects mentioned inside scripts are not mistaken for real ones.
_noscript_re = re.compile(u'<noscript>.*?</noscript>', re.IGNORECASE | re.DOTALL)
_script_re = re.compile(u'<script.*?>.*?</script>', re.IGNORECASE | re.DOTALL)
# Per-response cache for get_meta_refresh(); weak keys avoid leaking Responses.
_metaref_cache = weakref.WeakKeyDictionary()
def get_meta_refresh(response):
    """Parse the http-equiv refresh parameter from the given response"""
    if response not in _metaref_cache:
        # Look at the first 4kB only, with script/noscript sections removed.
        head_chunk = response.body_as_unicode()[0:4096]
        head_chunk = _noscript_re.sub(u'', head_chunk)
        head_chunk = _script_re.sub(u'', head_chunk)
        _metaref_cache[response] = html.get_meta_refresh(
            head_chunk, response.url, response.encoding)
    return _metaref_cache[response]
def response_status_message(status):
    """Return status code plus status text descriptive message

    >>> response_status_message(200)
    '200 OK'
    >>> response_status_message(404)
    '404 Not Found'
    """
    # Unknown codes render as "<code> None".
    status_text = http.responses.get(int(status))
    return '%s %s' % (status, status_text)
def response_httprepr(response):
    """Return raw HTTP representation (as string) of the given response. This
    is provided only for reference, since it's not the exact stream of bytes
    that was received (that's not exposed by Twisted).
    """
    parts = ["HTTP/1.1 %d %s\r\n" % (response.status,
                                     RESPONSES.get(response.status, ''))]
    if response.headers:
        parts.append(response.headers.to_string() + "\r\n")
    parts.append("\r\n")
    parts.append(response.body)
    return "".join(parts)
def open_in_browser(response, _openfunc=webbrowser.open):
    """Open the given response in a local web browser, populating the <base>
    tag for external links to work
    """
    # XXX: this implementation is a bit dirty and could be improved
    if not isinstance(response, HtmlResponse):
        raise TypeError("Unsupported response type: %s" %
                        response.__class__.__name__)
    body = response.body
    if '<base' not in body:
        # Inject a <base> right after <head> so relative links resolve
        # against the original URL instead of the local temp file.
        body = body.replace('<head>', '<head><base href="%s">' % response.url)
    fd, fname = tempfile.mkstemp('.html')
    os.write(fd, body)
    os.close(fd)
    return _openfunc("file://%s" % fname)
| agpl-3.0 |
bGN4/lib_scanner | module/OMP.py | 1 | 72145 | #!/usr/bin python2.7
# -*- coding: utf-8 -*-
import os
import ssl
import time
import socket
import base64
import logging
import functools
from xml.etree import ElementTree
__version__ = '3.0.0.0'
#PROGRAM = sys.argv[0]
AUTHNAME = 'https://github.com/bGN4'
AUTHEMAIL = 'https://github.com/bGN4'
class OMPError(Exception):
"""Base class for OMP errors."""
def __str__(self):
return repr(self)
class _ErrorResponse(OMPError):
def __init__(self, cmd, *args):
if cmd.endswith('_response'):
cmd = cmd[:-9]
super(_ErrorResponse, self).__init__(cmd, *args)
def __str__(self):
return '%s %s' % self.args[1:3]
class ClientError(_ErrorResponse):
    """command issued could not be executed due to error made by the client"""

class ServerError(_ErrorResponse):
    """error occurred in the manager during the processing of this command"""

class ResultError(OMPError):
    """Get invalid answer from Server"""
    def __str__(self):
        # %-formats self.args directly: works when the exception carries
        # exactly one argument (the command name).
        return 'Result Error: answer from command %s is invalid' % self.args

class AuthFailedError(OMPError):
    """Authentication failed."""
def XMLNode(tag, *kids, **attrs):
    """Build an ElementTree element.

    :param tag: element tag name.
    :param kids: children; a string becomes the element's text (at most one),
        anything else is appended as a child element.
    :param attrs: XML attributes for the element.
    :return: the new xml.etree Element.
    """
    # 'basestring' does not exist on Python 3 (NameError in the original).
    # This tuple covers (str, unicode) on Python 2 and plain str on Python 3;
    # the true branch is only evaluated on Python 2, where str is bytes.
    string_types = (str, unicode) if str is bytes else (str,)
    node = ElementTree.Element(tag, attrs)
    for kid in kids:
        if isinstance(kid, string_types):
            # Only one text child makes sense per element.
            assert node.text is None
            node.text = kid
        else:
            node.append(kid)
    return node
class OMP:
_socket = None
_omp_path = ''
_host = ''
_port = 0
_verbose = False
_username = ''
_password = ''
_format = None
_config_file = ''
_pretty_print = False
_cmd_sequence = []
_cmd_string = ''
_xml = ''
_action_task_list = ( 'delete', 'pause', 'resume_or_start_task', 'resume_paused_task', 'resume_stopped_task', 'start', 'stop' )
_new_del_object_list = ( 'agent', 'config', 'alert', 'filter', 'lsc_credential', 'note', 'override', 'port_list', 'port_range', 'report', 'report_format', 'schedule', 'slave', 'target', 'task' )
_get_objects_list = ( 'agents', 'configs', 'alerts', 'filters', 'lsc_credentials', 'notes', 'overrides', 'port_lists', 'reports', 'report_formats', 'schedules', 'slaves', 'targets', 'tasks', 'settings', 'dependencies', 'info', 'nvts', 'nvt_families', 'nvt_feed_checksum', 'preferences', 'results', 'system_reports', 'target_locators', 'version' )
_modify_object_list = ( 'config', 'filter', 'lsc_credential', 'note', 'override', 'report', 'report_format', 'schedule', 'target', 'task', 'setting' )
_nvt_plugins_blacklist = {'Buffer overflow' : { '1.3.6.1.4.1.25623.1.0.900651' : {'name' : 'Mini-stream CastRipper Stack Overflow Vulnerability'}},
'Default Accounts' : { '1.3.6.1.4.1.25623.1.0.103239' : {'name' : 'SSH Brute Force Logins with default Credentials'}},
'Denial of Service' : { '1.3.6.1.4.1.25623.1.0.800327' : {'name' : 'BreakPoint Software Hex Workshop Denial of Service vulnerability'},
'1.3.6.1.4.1.25623.1.0.100305' : {'name' : "Dopewars Server 'REQUESTJET' Message Remote Denial of Service Vulnerability"},
'1.3.6.1.4.1.25623.1.0.800216' : {'name' : 'PGP Desktop Denial of Service Vulnerability'}},
'Gain a shell remotely' : { '1.3.6.1.4.1.25623.1.0.103922' : {'name' : 'Loadbalancer.org Enterprise VA 7.5.2 Static SSH Key'}},
'General' : { '1.3.6.1.4.1.25623.1.0.14629' : {'name' : 'IlohaMail Detection'},
'1.3.6.1.4.1.25623.1.0.800907' : {'name' : 'NullLogic Groupware Multiple Vulnerabilities (Linux)'},
'1.3.6.1.4.1.25623.1.0.800904' : {'name' : 'NullLogic Groupware Version Detection (Linux)'},
'1.3.6.1.4.1.25623.1.0.11962' : {'name' : 'Xoops myheader.php URL Cross Site Scripting Vulnerability'}},
'Privilege escalation' : { '1.3.6.1.4.1.25623.1.0.800560' : {'name' : 'Adobe Flash Media Server Privilege Escalation Vulnerability'}},
'Product detection' : { '1.3.6.1.4.1.25623.1.0.900355' : {'name' : 'Bitweaver Version Detection'},
'1.3.6.1.4.1.25623.1.0.900614' : {'name' : 'Detecting the cubecart version'},
'1.3.6.1.4.1.25623.1.0.105162' : {'name' : 'F5 Networks BIG-IP Webinterface Detection'},
'1.3.6.1.4.1.25623.1.0.800612' : {'name' : 'Foswiki Version Detection'},
'1.3.6.1.4.1.25623.1.0.103594' : {'name' : 'Grandstream GXP Detection'},
'1.3.6.1.4.1.25623.1.0.800295' : {'name' : 'Limny Version Detection'},
'1.3.6.1.4.1.25623.1.0.103740' : {'name' : 'Plesk Detection'},
'1.3.6.1.4.1.25623.1.0.103532' : {'name' : 'Scrutinizer Detection'},
'1.3.6.1.4.1.25623.1.0.800399' : {'name' : 'TWiki Version Detection'},
'1.3.6.1.4.1.25623.1.0.803979' : {'name' : 'TYPO3 Detection'},
'1.3.6.1.4.1.25623.1.0.901001' : {'name' : 'TikiWiki Version Detection'}},
'Service detection' : { '1.3.6.1.4.1.25623.1.0.100846' : {'name' : "Barracuda Spam & Virus Firewall Detection"},
'1.3.6.1.4.1.25623.1.0.800901' : {'name' : "Clicknet CMS Version Detection"},
'1.3.6.1.4.1.25623.1.0.801381' : {'name' : "CruxSoftware Products Version Detection"},
'1.3.6.1.4.1.25623.1.0.902533' : {'name' : "Cybozu Products Version Detection"},
'1.3.6.1.4.1.25623.1.0.901044' : {'name' : "eFront Version Detection"},
'1.3.6.1.4.1.25623.1.0.100911' : {'name' : "FreeNAS Detection"},
'1.3.6.1.4.1.25623.1.0.900256' : {'name' : "FrontAccounting Detection"},
'1.3.6.1.4.1.25623.1.0.900583' : {'name' : "Fuzzylime(cms) Version Detection"},
'1.3.6.1.4.1.25623.1.0.900746' : {'name' : "geccBBlite Version Detection"},
'1.3.6.1.4.1.25623.1.0.902309' : {'name' : "Haudenschilt Family Connections CMS (FCMS) Version Detection"},
'1.3.6.1.4.1.25623.1.0.20834' : {'name' : "Inter-Asterisk eXchange Protocol Detection"},
'1.3.6.1.4.1.25623.1.0.900744' : {'name' : "JAG (Just Another Guestbook) Version Detection"},
'1.3.6.1.4.1.25623.1.0.900352' : {'name' : "LimeSurvey Version Detection"},
'1.3.6.1.4.1.25623.1.0.100208' : {'name' : "Name Server Daemon Detection"},
'1.3.6.1.4.1.25623.1.0.902023' : {'name' : "Netpet CMS Version Detection"},
'1.3.6.1.4.1.25623.1.0.800779' : {'name' : "OpenMairie Products Version Detection"},
'1.3.6.1.4.1.25623.1.0.800735' : {'name' : "phpCOIN Version Detection"},
'1.3.6.1.4.1.25623.1.0.100106' : {'name' : "phpMyFAQ Detection"},
'1.3.6.1.4.1.25623.1.0.800557' : {'name' : "Simple Machines Forum Version Detection"},
'1.3.6.1.4.1.25623.1.0.801390' : {'name' : "SimpNews Version Detection"},
'1.3.6.1.4.1.25623.1.0.801242' : {'name' : "sNews Version Detection"},
'1.3.6.1.4.1.25623.1.0.800622' : {'name' : "Vanilla Version Detection"},
'1.3.6.1.4.1.25623.1.0.801091' : {'name' : "YABSoft Advanced Image Hosting Script (AIHS) Version Detection"}},
'Web Servers' : { '1.3.6.1.4.1.25623.1.0.802418' : {'name' : "IBM WebSphere Application Server Hash Collisions DOS Vulnerability"},
'1.3.6.1.4.1.25623.1.0.100245' : {'name' : "RaidenHTTPD Cross Site Scripting and Local File Include Vulnerabilities"}},
'Web application abuses' : { '1.3.6.1.4.1.25623.1.0.100089' : {'name' : "Acute Control Panel SQL Injection Vulnerability and Remote File Include Vulnerability"},
'1.3.6.1.4.1.25623.1.0.801414' : {'name' : "AdPeeps 'index.php' Multiple Vulnerabilities."},
'1.3.6.1.4.1.25623.1.0.105082' : {'name' : "ALCASAR Remote Code Execution Vulnerability"},
'1.3.6.1.4.1.25623.1.0.100070' : {'name' : "AWStats 'awstats.pl' Multiple Path Disclosure Vulnerability"},
'1.3.6.1.4.1.25623.1.0.100177' : {'name' : "Axigen Mail Server HTML Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.805365' : {'name' : "Balero CMS Multiple Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.100393' : {'name' : "Barracuda IM Firewall 'smtp_test.cgi' Cross-Site Scripting Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.100847' : {'name' : "Barracuda Networks Multiple Products 'view_help.cgi' Directory Traversal Vulnerability"},
'1.3.6.1.4.1.25623.1.0.805564' : {'name' : "BigAce CMS Cross-Site Scripting Vulnerability"},
'1.3.6.1.4.1.25623.1.0.801151' : {'name' : "Bigforum 'profil.php' SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.803791' : {'name' : "BlogEngine.NET 'sioc.axd' Information Disclosure Vulnerability"},
'1.3.6.1.4.1.25623.1.0.803792' : {'name' : "Burden 'burden_user_rememberme' Authentication Bypass Vulnerability"},
'1.3.6.1.4.1.25623.1.0.902611' : {'name' : "Chyrp Multiple Directory Traversal Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.800789' : {'name' : "CMSQlite 'index.php' SQL Injection and Directory Traversal Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.802122' : {'name' : "Copyscape SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.100060' : {'name' : "Cryptographp 'index.php' Local File Include Vulnerability"},
'1.3.6.1.4.1.25623.1.0.801952' : {'name' : "DmxReady Secure Document Library SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.100022' : {'name' : "Dragan Mitic Apoll 'admin/index.php' SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.800909' : {'name' : "Drupal Information Disclosure Vulnerability"},
'1.3.6.1.4.1.25623.1.0.105935' : {'name' : "Drupal Session Hijacking Vulnerability"},
'1.3.6.1.4.1.25623.1.0.800908' : {'name' : "Drupal XSS and Code Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.800616' : {'name' : "FlashChat Role Filter Security Bypass Vulnerability"},
'1.3.6.1.4.1.25623.1.0.902056' : {'name' : "FreePHPBlogSoftware 'default_theme.php' Remote File Inclusion Vulnerability"},
'1.3.6.1.4.1.25623.1.0.804509' : {'name' : "Ganesha Digital Library Multiple Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.804489' : {'name' : "GNU Bash Environment Variable Handling Shell Remote Command Execution Vulnerability"},
'1.3.6.1.4.1.25623.1.0.801445' : {'name' : "Irokez CMS 'id' Parameter SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.103487' : {'name' : "Kerio WinRoute Firewall Web Server Remote Source Code Disclosure Vulnerability"},
'1.3.6.1.4.1.25623.1.0.101008' : {'name' : "Multiple Cross Site Scripting and SQL Injection vulnerabilities in XRMS"},
'1.3.6.1.4.1.25623.1.0.801454' : {'name' : "NetArt Media Car Portal Multiple Cross-site Scripting Vulnerabilities"},
'1.3.6.1.4.1.25623.1.0.801518' : {'name' : "NetArtMedia WebSiteAdmin Directory Traversal Vulnerability"},
'1.3.6.1.4.1.25623.1.0.800734' : {'name' : "OpenCart SQL Injection Vulnerability"},
'1.3.6.1.4.1.25623.1.0.103760' : {'name' : "OpenNetAdmin 'ona.log' File Remote PHP Code Execution Vulnerability"},
'1.3.6.1.4.1.25623.1.0.110187' : {'name' : "PHP version smaller than 5.2.9"}}}
_task_cache_dictionary = {'__example_task_name__' : {'task_id' : '',
'target_id' : '',
'report_id' : '',
'port_list_id' : '',
'status' : '',
'progress' : ''}}
_targets_dictionary = {'Localhost' : 'b493b7a8-7489-11df-a3ec-002264764cea'}
_port_lists_dictionary = {'All IANA assigned TCP 2012-02-10' : '33d0cd82-57c6-11e1-8ed1-406186ea4fc5',
'All IANA assigned TCP and UDP 2012-02-10' : '4a4717fe-57d2-11e1-9a26-406186ea4fc5',
'All privileged TCP' : '492b72f4-56fe-11e1-98a7-406186ea4fc5',
'All privileged TCP and UDP' : '5f2029f6-56fe-11e1-bb94-406186ea4fc5',
'All TCP' : 'fd591a34-56fd-11e1-9f27-406186ea4fc5',
'All TCP and Nmap 5.51 top 100 UDP' : '730ef368-57e2-11e1-a90f-406186ea4fc5',
'All TCP and Nmap 5.51 top 1000 UDP' : '9ddce1ae-57e7-11e1-b13c-406186ea4fc5',
'Nmap 5.51 top 2000 TCP and top 100 UDP' : 'ab33f6b0-57f8-11e1-96f5-406186ea4fc5',
'OpenVAS Default' : 'c7e03b6c-3bbe-11e1-a057-406186ea4fc5'}
_scan_configs_dictionary = {'empty' : '085569ce-73ed-11df-83c3-002264764cea',
'Discovery' : '8715c877-47a0-438d-98a3-27c7a6ab2196',
'Host Discovery' : '2d3f051c-55ba-11e3-bf43-406186ea4fc5',
'System Discovery' : 'bbca7412-a950-11e3-9109-406186ea4fc5',
'Full and fast' : 'daba56c8-73ec-11df-a475-002264764cea',
'Full and fast ultimate' : '698f691e-7489-11df-9d8c-002264764cea',
'Full and very deep' : '708f25c4-7489-11df-8094-002264764cea',
'Full and very deep ultimate' : '74db13d6-7489-11df-91b9-002264764cea'}
_report_formats_dictionary = {'ARF' : '910200ca-dc05-11e1-954f-406186ea4fc5',
'CPE' : '5ceff8ba-1f62-11e1-ab9f-406186ea4fc5',
'HTML' : '6c248850-1f62-11e1-b082-406186ea4fc5',
'ITG' : '77bd6c4a-1f62-11e1-abf0-406186ea4fc5',
'LaTeX' : 'a684c02c-b531-11e1-bdc2-406186ea4fc5',
'NBE' : '9ca6fe72-1f62-11e1-9e7c-406186ea4fc5',
'PDF' : 'c402cc3e-b531-11e1-9163-406186ea4fc5',
'TXT' : 'a3810a62-1f62-11e1-9219-406186ea4fc5',
'XML' : 'a994b278-1f62-11e1-96ac-406186ea4fc5'}
def __init__(self, host=None, port=None, username=None, password=None, verbose=False, config_file=None, _format=None, pretty_print=True):
self._socket = None
self._host = '127.0.0.1' if host is None else host
self._port = 9390 if port is None else port
self._omp_path = '/usr/bin/omp'
self._verbose = verbose
self._username = 'admin' if username is None else username
self._password = 'admin' if password is None else password
self._format = _format
self._config_file = config_file
self._pretty_print = pretty_print
def _open(self, username=None, password=None):
    """Open an SSL connection to the manager and authenticate.

    Falls back to the credentials stored at construction time.
    NOTE(review): ssl.wrap_socket() does no certificate validation (and is
    removed in modern Python) -- confirm this is acceptable here.
    """
    if username is None:
        username = self._username
    if password is None:
        password = self._password
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self._socket = sock = ssl.wrap_socket(sock)
    sock.connect((self._host, self._port))
    self._authenticate(username, password)
def _close(self):
self._socket.close()
self._socket = None
def _send(self, data):
    """Write `data` to the open socket and return the parsed XML reply.

    `data` may be an Element (serialized as UTF-8 XML straight onto the
    socket) or a (unicode) string.  The reply is read in BLOCK_SIZE chunks
    and fed to an incremental parser until a short read.
    NOTE(review): a reply that is an exact multiple of BLOCK_SIZE would
    block on the next recv; `unicode` and XMLTreeBuilder also make this
    Python-2-only code.
    """
    BLOCK_SIZE = 1024
    if ElementTree.iselement(data):
        #print '>>>', etree.tostring(data)
        root = ElementTree.ElementTree(data)
        root.write(self._socket, 'utf-8')
    else:
        if isinstance(data, unicode):
            data = data.encode('utf-8')
        self._socket.send(data)
    # incremental parse of the response stream
    parser = ElementTree.XMLTreeBuilder()
    while True:
        res = self._socket.recv(BLOCK_SIZE)
        #print repr(res)
        parser.feed(res)
        if len(res) < BLOCK_SIZE:
            break
    root = parser.close()
    #print '<<<', etree.tostring(root)
    return root
def _check_response2(self, response):
status = response.get('status')
if status is None:
raise RunTimeError('response is missing status: %s' % ElementTree.tostring(response))
if status.startswith('4'):
raise ClientError(response.tag, status, response.get('status_text'))
elif status.startswith('5'):
raise ServerError(response.tag, status, response.get('status_text'))
return status
def _authenticate(self, username, password):
    """Send an <authenticate> request over the already-open socket.

    Returns the response text on success; a ClientError (4xx status from
    _check_response2) is converted into AuthFailedError.
    """
    request = XMLNode('authenticate',
                      XMLNode('credentials',
                              XMLNode('username', username),
                              XMLNode('password', password),
                              ))
    try:
        response = self._send(request)
        self._check_response2(response)
        return response.text
    # if not status: connection closed, raise error
    except ClientError:
        raise AuthFailedError(username)
def _xml_command(self, xml='<help/>'):
    """Open a connection, send raw XML over the socket, close, return the parsed reply."""
    # pretty-print the request to the debug log when it has nested markup
    if xml.find('<',1) != -1:
        from xml.dom import minidom
        reparsed = minidom.parseString(xml)
        # [39:] skips the '<?xml ...?>' declaration minidom prepends
        logging.debug( 'XML:\n' + reparsed.toprettyxml(indent="    " , encoding="utf-8")[39:].strip() )
    self._open()
    response = self._send(xml)
    self._close()
    return response
def _generate(self, xml='<help/>'):
assert( isinstance(xml, str) )
self._cmd_sequence = ['omp']
self._cmd_sequence.extend( ['-h', self._host] )
self._cmd_sequence.extend( ['-p', str(self._port)] )
self._cmd_sequence.extend( ['-u', self._username] )
self._cmd_sequence.extend( ['-w', self._password] )
if( self._verbose ): self._cmd_sequence.append('-v')
if( self._pretty_print ): self._cmd_sequence.append('-i')
self._cmd_string = functools.reduce(lambda x,y: x+' '+y, self._cmd_sequence) + ' -X ' + '"' + xml.replace('"', "'") + '"'
self._cmd_sequence.extend(['-X', xml])
def _cmd_execute(self, xml='<help/>'):
    """Run the external `omp` client with the given XML command.

    Returns the parsed root Element of its stdout, or None when the output
    is not well-formed XML.
    """
    xml = xml.strip()
    self._generate( xml )
    # pretty-print the request to the debug log when it has nested markup
    if xml.find('<',1) != -1:
        from xml.dom import minidom
        reparsed = minidom.parseString(xml)
        # [39:] skips the '<?xml ...?>' declaration minidom prepends
        logging.debug( 'XML:\n' + reparsed.toprettyxml(indent="    " , encoding="utf-8")[39:].strip() )
    import subprocess
    # STARTUPINFO only exists on Windows; there, hide the console window
    try:
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        si.wShowWindow = subprocess.SW_HIDE
    except AttributeError as e:
        si = None
    logging.debug( self._cmd_sequence )
    str_stdout, str_stderr = subprocess.Popen(self._cmd_sequence, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=si).communicate()
    logging.debug( 'len(str_stdout)=%d, len(str_stderr)=%d'%(len(str_stdout), len(str_stderr)) )
    try:
        parsed = ElementTree.XML( str_stdout )
    except ElementTree.ParseError:
        logging.error( '[FAIL] Parse response error !!!' )
        logging.debug( 'stdout:\n' + str_stdout )
        logging.debug( 'stderr:\n' + str_stderr )
        return None
    return parsed
def _download_reports_easy(self, report_id, format_id, ToDisk=None):
cmd_str = 'omp'
cmd_str = cmd_str + ' -h ' + self._host
cmd_str = cmd_str + ' -p ' + str(self._port)
cmd_str = cmd_str + ' -u ' + self._username
cmd_str = cmd_str + ' -w ' + self._password
cmd_str = cmd_str + ' --get-report ' + report_id
cmd_str = cmd_str + ' --format ' + format_id
cmd_str = cmd_str + ' > ' + ToDisk
def _download_reports(self, xml='<help/>', ToDisk=None):
    """Run `omp` with `xml`, parse its stdout, and save the report to ToDisk.

    Raw stdout is spooled to '<ToDisk>.tmp'; when the parsed reply holds a
    <report> element its base64 payload is decoded into ToDisk.  Returns
    the parsed root Element, or None on parse failure.
    NOTE(review): base64.decode/StringIO usage is Python-2 style, and
    ToDisk is opened (truncated) even when parsing fails.
    """
    assert( isinstance(ToDisk, str) )
    self._generate( xml )
    import subprocess
    # STARTUPINFO only exists on Windows; there, hide the console window
    try:
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        si.wShowWindow = subprocess.SW_HIDE
    except AttributeError as e:
        si = None
    logging.debug('Download report to %s'%(ToDisk))
    tmpToDisk = ToDisk + '.tmp'
    with open(tmpToDisk, 'wb+') as temp_file, open(ToDisk, 'wb') as dest_file:
        logging.debug( self._cmd_sequence )
        subprocess.Popen(self._cmd_sequence, stdout=temp_file, startupinfo=si).communicate()
        temp_file.seek(0)  # rewind the spool file before parsing it
        logging.debug('omp exit, parse response ...')
        try:
            parsed = ElementTree.parse( temp_file )
        except ElementTree.ParseError:
            parsed = None
            logging.error( '[FAIL] Parse response error !!!' )
        if isinstance(parsed, ElementTree.ElementTree):
            report = parsed.find('report')
            parsed = parsed.getroot()
            if isinstance(report, ElementTree.Element):
                logging.debug('begin base64 decode ...')
                import base64, StringIO
                base64_str = StringIO.StringIO( report.text )
                base64.decode(base64_str, dest_file)
                logging.info('[ OK ] base64 decode OK')
                # drop the (large) payload from the tree we hand back
                report.text = ''
                logging.info('[ OK ] Save report file to %s'%(ToDisk))
    # best-effort removal of the spool file
    if os.path.isfile( tmpToDisk ):
        try:
            os.unlink( tmpToDisk )
        except Exception as e:
            logging.error('[FAIL] Unlink file %s with %s: %s'%(tmpToDisk, e.__class__.__name__, str(e.args)))
    return parsed
def _check_response(self, name, result_et, object_='', status='200'):
if isinstance(result_et, ElementTree.Element):
if result_et.get('status') == status:
object_id = result_et.get('id')
if object_id:
if name not in self._task_cache_dictionary:
self._task_cache_dictionary[name] = {}
self._task_cache_dictionary[name]['%s_id'%(object_)] = object_id
logging.info( '[ OK ] create %s_id=%s'%(object_, object_id) )
return object_id
logging.error( list(result_et.items()) )
return None
def _create_object(self, object_='', name=None, comment=None):
    """Build a <create_X> request Element via _modify_object's '_create_' mode (not yet sent)."""
    assert( object_ in self._new_del_object_list )
    return self._modify_object(object_, '_create_', name, comment)
def _delete_object(self, object_='', object_id='', ultimate=True):
    """Delete an object by id; ultimate=True bypasses the trashcan.

    Returns True on status 200, False on 202 'OK, request submitted',
    None on any other outcome.
    """
    assert( object_ in self._new_del_object_list and isinstance(object_id, str) and object_id )
    # ultimate is serialized via %d, so True/False become "1"/"0"
    xml_template = '''<delete_%s %s_id="%s" ultimate="%d"/>''' % (object_, object_, object_id, ultimate)
    result_et = self._cmd_execute( xml_template )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            logging.info( '[ OK ] delete %s_id=%s %s'%(object_, object_id, 'permanently' if ultimate else 'to trashcan') )
            return True
        elif result_et.get('status') == '202' and result_et.get('status_text') == 'OK, request submitted':
            logging.warn( list(result_et.items()) )
            return False
        else:
            logging.error( list(result_et.items()) )
    return None
def _get_objects(self, objects_='', object_id='', actions=None, filter_=None, filt_id=None, details=None, trash=None, sort_order='descending', sort_field=None):
assert( objects_ in self._get_objects_list and isinstance(object_id, str) )
get_objects = ElementTree.Element('get_%s'%(objects_))
if object_id: get_objects.attrib['%s_id'%(objects_[:-1] if objects_.endswith('s') else objects_)] = object_id
if details is True: get_objects.attrib['details'] = '%d'%(True)
if isinstance(sort_order, str) and isinstance(sort_field, str) and sort_field:
get_objects.attrib['sort_order'] = 'descending' if sort_order == 'descending' else 'ascending'
get_objects.attrib['sort_field'] = sort_field
return get_objects
def _modify_object(self, object_='', object_id='', name=None, comment=None):
assert( object_id and ( object_id == '_create_' or object_ in self._modify_object_list ) )
modify_object = ElementTree.Element('%s_%s'%('create'if(object_id=='_create_')else'modify', object_))
if object_id != '_create_':
modify_object.attrib['%s_id'%(object_)] = object_id
if name is not None: ElementTree.SubElement(modify_object, 'name').text = name
if comment is not None: ElementTree.SubElement(modify_object, 'comment').text = comment
return modify_object
def get_settings(self, setting_id='', filter_=None, first=None, max_=None, sort_order='descending', sort_field=None):
    """Build a <get_settings> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_settings_et = self._get_objects('settings', setting_id, None, filter_, None, None, None, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_settings_et) )
def get_dependencies(self, nvt_oid=''):
    """Not implemented; placeholder for the OMP get_dependencies command."""
    pass
def get_info(self, info_id='', filter_=None, filt_id=None, details=None, type_=None, name=None):
    """Build a <get_info> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_info_et = self._get_objects('info', info_id, None, filter_, filt_id, details, None, None, None)
    #result_et = self._cmd_execute( ElementTree.tostring(get_info_et) )
def get_nvts(self, nvt_oid='', actions=None, details=None, sort_order='descending', sort_field=None, config_id=None, preferences=None, preference_count=None, timeout=None, family=None):
    """Fetch NVTs, optionally scoped to a config/family.

    Returns {oid: name} on status 200, None otherwise.
    """
    get_nvts_et = self._get_objects('nvts', nvt_oid, actions, None, None, details, None, sort_order, sort_field)
    if preferences is True: get_nvts_et.attrib['preferences'] = '%d'%(True)
    if preference_count is True: get_nvts_et.attrib['preference_count'] = '%d'%(True)
    if timeout is True: get_nvts_et.attrib['timeout'] = '%d'%(True)
    if config_id is not None: get_nvts_et.attrib['config_id'] = config_id
    if family is not None: get_nvts_et.attrib['family'] = family
    result_et = self._cmd_execute( ElementTree.tostring(get_nvts_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            result_dict = {}
            for nvt_et in result_et.findall('nvt'):
                name_ = nvt_et.find('name')
                # dead store removed: oid_ was assigned but never used
                result_dict[nvt_et.get('oid')] = name_.text if name_ is not None else None
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def get_nvt_families(self, sort_order=None):
    """Not implemented; placeholder for the OMP get_nvt_families command."""
    pass
def get_nvt_feed_checksum(self, algorithm=None):
    """Not implemented; placeholder for the OMP get_nvt_feed_checksum command."""
    pass
def get_preferences(self, nvt_oid=None, config_id=None, preference=None):
    """Not implemented; placeholder for the OMP get_preferences command."""
    pass
def get_results(self, result_id='', task_id=None, notes=None, note_details=None, overrides=None, override_details=None, apply_overrides=None):
    """Build a <get_results> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_results_et = self._get_objects('results', result_id, None, None, None, None, None, None, None)
    #result_et = self._cmd_execute( ElementTree.tostring(get_results_et) )
def get_system_reports(self, slave_id=None, name=None, duration=None, brief=False):
    """Fetch system/performance reports; returns {name: record} or None.

    Each record may carry 'title', 'format', 'duration' and 'report'.  A
    report body of exactly four blank-line-separated sections whose third
    section is '/proc/meminfo:' is parsed into (key, value) tuples;
    otherwise the raw text is stored.
    """
    get_system_reports_et = ElementTree.Element('get_system_reports')
    if name and isinstance(name, str): get_system_reports_et.attrib['name'] = name
    if slave_id and isinstance(slave_id, str): get_system_reports_et.attrib['slave_id'] = slave_id
    if isinstance(duration, int): get_system_reports_et.attrib['duration'] = '%d'%(duration)
    if brief is True: get_system_reports_et.attrib['brief'] = '%d'%(True)
    result_et = self._cmd_execute( ElementTree.tostring(get_system_reports_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            result_dict = {}
            for system_report in result_et.findall('system_report'):
                name_ = system_report.find('name')
                title = system_report.find('title')
                report = system_report.find('report')
                if name_ is not None and name_.text and report is not None and report.text:
                    format_ = report.get('format')
                    duration_ = report.get('duration')
                    result_dict[name_.text] = {}
                    if title is not None and title.text:
                        result_dict[name_.text]['title'] = title.text
                    if format_:
                        result_dict[name_.text]['format'] = format_
                    if duration_:
                        result_dict[name_.text]['duration'] = duration_
                    # meminfo-shaped bodies become (key, value) tuple lists
                    report_text_list = report.text.split('\n\n')
                    if len(report_text_list)==4 and report_text_list[2] == '/proc/meminfo:':
                        try:
                            result_dict[name_.text]['report'] = [tuple([y.strip() for y in x.split(':')]) for x in report_text_list[1].splitlines() + report_text_list[3].splitlines()]
                            continue
                        except ValueError as e:
                            logging.warn('ValueError: '+str(e.args))
                    result_dict[name_.text]['report'] = report.text
            logging.debug( result_dict )
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def get_target_locators(self):
    """Not implemented; placeholder for the OMP get_target_locators command."""
    pass
def get_version(self):
    """Return the manager's protocol version string (stripped), or None."""
    response = self._cmd_execute('<get_version/>')
    if not isinstance(response, ElementTree.Element):
        return None
    if response.get('status') == '200':
        version = response.find('version')
        if version is not None:
            return version.text.strip()
    return None
def get_agents(self, agent_id='', filter_=None, filt_id=None, trash=None, format_=None, sort_order='descending', sort_field=None):
    """Build a <get_agents> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_agents_et = self._get_objects('agents', agent_id, None, filter_, filt_id, None, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_agents_et) )
def delete_agent(self, agent_id='', ultimate=True):
    """Delete the agent with agent_id (permanently when ultimate=True)."""
    return self._delete_object('agent', agent_id, ultimate)
def get_configs(self, config_id='', actions=None, trash=None, export=None, families=None, preferences=None, sort_order='descending', sort_field=None):
    """List scan configurations.

    Returns {name: {'config_id': id}} on 200, {} on 404, None on any
    other outcome.
    """
    get_configs_et = self._get_objects('configs', config_id, actions, None, None, None, trash, sort_order, sort_field)
    result_et = self._cmd_execute( ElementTree.tostring(get_configs_et) )
    result_dict = {}
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            for item in result_et.findall('config'):
                name = item.find('name')
                # dead stores removed: comment/creation_time/modification_time
                # were looked up but never used
                config_id_ = item.get('id')
                if config_id_ and name is not None:
                    result_dict[name.text.strip()] = {'config_id':config_id_.strip()}
            return result_dict
        elif result_et.get('status') == '404':
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def create_config(self, name, comment=None, copy=None, rcfile=None, response=None):
    """Create a scan configuration.

    On status 201 the new id is cached in _scan_configs_dictionary and
    returned; otherwise the failure is logged and None is returned.
    """
    request = self._create_object('config', name, comment)
    for tag, text in (('copy', copy), ('rcfile', rcfile)):
        if text is not None:
            ElementTree.SubElement(request, tag).text = text
    if response is not None:
        pass  # accepted for API symmetry; currently unused
    result_et = self._cmd_execute( ElementTree.tostring(request) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '201':
            config_id = result_et.get('id')
            if config_id:
                self._scan_configs_dictionary[name] = config_id
                logging.info( '[ OK ] create config_id={}'.format(config_id) )
                return config_id
        logging.error( list(result_et.items()) )
    return None
def modify_config(self, config_id, preference=None, family=None, nvt=None, name=None, comment=None):
    """Modify a scan config's preference / family selection / NVT selection.

    Expected shapes: preference={'oid','name','value'};
    family={'growing', 'family': [{'name','all','growing'}, ...]};
    nvt={'family', 'nvt': [{'oid'}, ...]}.
    Returns config_id on status 200, None otherwise.
    """
    modify_config_et = self._modify_object('config', config_id, name, comment)
    if isinstance(preference, dict):
        preference_et = ElementTree.SubElement(modify_config_et, 'preference')
        ElementTree.SubElement(preference_et, 'nvt').attrib['oid'] = preference.get('oid')
        ElementTree.SubElement(preference_et, 'name').text = preference.get('name')
        ElementTree.SubElement(preference_et, 'value').text = preference.get('value')
    if isinstance(family, dict):
        family_selection_et = ElementTree.SubElement(modify_config_et, 'family_selection')
        ElementTree.SubElement(family_selection_et, 'growing').text = family.get('growing')
        for item in family.get('family'):
            family_et = ElementTree.SubElement(family_selection_et, 'family')
            ElementTree.SubElement(family_et, 'name').text = item.get('name')
            ElementTree.SubElement(family_et, 'all').text = item.get('all')
            ElementTree.SubElement(family_et, 'growing').text = item.get('growing')
    if isinstance(nvt, dict):
        nvt_selection_et = ElementTree.SubElement(modify_config_et, 'nvt_selection')
        ElementTree.SubElement(nvt_selection_et, 'family').text = nvt.get('family')
        for item in nvt.get('nvt'):
            ElementTree.SubElement(nvt_selection_et, 'nvt').attrib['oid'] = item.get('oid')
    # NOTE(review): sent over the raw socket (_xml_command), unlike sibling
    # methods which shell out via _cmd_execute -- confirm this is intended
    result_et = self._xml_command( ElementTree.tostring(modify_config_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            logging.info( '[ OK ] modify config_id={}'.format(config_id) )
            return config_id
        logging.error( list(result_et.items()) )
    return None
def delete_config(self, config_id='', ultimate=True):
    """Delete the scan config with config_id (permanently when ultimate=True)."""
    return self._delete_object('config', config_id, ultimate)
def get_alerts(self, alert_id='', filter_=None, trash=None, sort_order='descending', sort_field=None):
    """Build a <get_alerts> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_alerts_et = self._get_objects('alerts', alert_id, None, filter_, None, None, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_alerts_et) )
def delete_alert(self, alert_id='', ultimate=True):
    """Delete the alert with alert_id (permanently when ultimate=True)."""
    return self._delete_object('alert', alert_id, ultimate)
def get_filters(self, filter_id='', actions=None, filter_=None, filt_id=None, trash=None, alerts=None):
    """Build a <get_filters> request.

    BUG FIX: filter_id was also forwarded in the filt_id slot of
    _get_objects, shadowing the caller's filt_id argument; forward
    filt_id instead.
    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_filters_et = self._get_objects('filters', filter_id, actions, filter_, filt_id, None, trash, None, None)
    #result_et = self._cmd_execute( ElementTree.tostring(get_filters_et) )
def delete_filter(self, filter_id='', ultimate=True):
    """Delete the filter with filter_id (permanently when ultimate=True)."""
    return self._delete_object('filter', filter_id, ultimate)
def get_lsc_credentials(self, lsc_credential_id='', actions=None, trash=None, format_=None, sort_order='descending', sort_field=None):
    """Build a <get_lsc_credentials> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_lsc_credentials_et = self._get_objects('lsc_credentials', lsc_credential_id, actions, None, None, None, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_lsc_credentials_et) )
def delete_lsc_credential(self, lsc_credential_id='', ultimate=True):
    """Delete the LSC credential with lsc_credential_id (permanently when ultimate=True)."""
    return self._delete_object('lsc_credential', lsc_credential_id, ultimate)
def get_notes(self, note_id='', filter_=None, filt_id=None, nvt_oid=None, task_id=None, details=None, result=None, sort_order='descending', sort_field=None):
    """Build a <get_notes> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_notes_et = self._get_objects('notes', note_id, None, filter_, filt_id, details, None, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_notes_et) )
def delete_note(self, note_id='', ultimate=True):
    """Delete the note with note_id (permanently when ultimate=True)."""
    return self._delete_object('note', note_id, ultimate)
def get_overrides(self, override_id='', filter_=None, filt_id=None, nvt_oid=None, task_id=None, details=None, result=None, sort_order='descending', sort_field=None):
    """Build a <get_overrides> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_overrides_et = self._get_objects('overrides', override_id, None, filter_, filt_id, details, None, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_overrides_et) )
def delete_override(self, override_id='', ultimate=True):
    """Delete the override with override_id (permanently when ultimate=True)."""
    return self._delete_object('override', override_id, ultimate)
def get_port_lists(self, port_list_id='', targets=True, details=None, trash=None, sort_order='descending', sort_field=None):
    """List port lists; optionally include the targets using each.

    Returns {name: {'port_list_id': id, 'targets': {target_id: name}}}
    on status 200, None otherwise.
    """
    get_port_lists_et = self._get_objects('port_lists', port_list_id, None, None, None, details, trash, sort_order, sort_field)
    if isinstance(targets, bool): get_port_lists_et.attrib['targets'] = '%d'%(targets)
    result_et = self._cmd_execute( ElementTree.tostring(get_port_lists_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            result_dict = {}
            for item in result_et.findall('port_list'):
                name = item.find('name')
                targets_ = item.find('targets')
                port_list_id_ = item.get('id')
                if port_list_id_ and name is not None and name.text:
                    result_dict[name.text.strip()] = {'port_list_id':port_list_id_.strip()}
                    if targets_ is not None:
                        result_dict[name.text.strip()]['targets'] = {}
                        for target_ in targets_.findall('target'):
                            target_name = target_.find('name')
                            target_id_ = target_.get('id')
                            if target_id_ and target_name is not None and target_name.text:
                                result_dict[name.text.strip()]['targets'][target_id_] = target_name.text
            # human-readable summary for the logs
            result_str = ''
            for key in result_dict:
                result_str += '\n%36s %s'%(result_dict[key].get('port_list_id'), key)
            logging.info( '[ OK ] get_port_lists%s (Total:%d)'%(' port_list_id=%s'%(port_list_id) if port_list_id else '', len(result_dict)) )
            logging.debug( 'get_port_lists%s %s'%(' port_list_id=%s'%(port_list_id) if port_list_id else '', result_str) )
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def create_port_list(self, name='', comment=None, port_range=None, get_port_lists_response=None):
    """Create a port list; returns its id via _check_response (201), else None."""
    request = self._create_object('port_list', name, comment)
    if port_range is not None:
        ElementTree.SubElement(request, 'port_range').text = port_range
    if get_port_lists_response is not None:
        pass  # accepted for API symmetry; currently unused
    result_et = self._cmd_execute( ElementTree.tostring(request) )
    return self._check_response(name, result_et, 'port_list', '201')
def delete_port_list(self, port_list_id='', ultimate=True):
    """Delete the port list with port_list_id (permanently when ultimate=True)."""
    return self._delete_object('port_list', port_list_id, ultimate)
def delete_port_range(self, port_range_id='', ultimate=True):
    """Delete the port range with port_range_id (permanently when ultimate=True)."""
    return self._delete_object('port_range', port_range_id, ultimate)
def get_reports(self,
                report_id = '',
                format_id = _report_formats_dictionary.get('XML'),
                type_ = None,
                alert_id = None,
                first_result = 1,
                max_results = None,
                filter_ = None,
                filt_id = None,
                sort_order = 'descending',
                sort_field = None,
                levels = None,
                search_phrase = None,
                min_cvss_base = None,
                notes = True,
                note_details = None,
                overrides = True,
                override_details = None,
                result_hosts_only = True,
                host = None,
                host_first_result = None,
                host_max_results = None,
                host_levels = None,
                pos = None,
                delta_report_id = None,
                delta_states = None,
                autofp = 0,
                show_closed_cves = False,
                ToDisk = None):
    """Fetch a report in the given format and optionally save it to ToDisk.

    XML-format reports are written as an XML tree; any other format is
    base64-decoded to ToDisk.  Returns True when a <report> element was
    received, False otherwise.
    """
    get_reports_et = self._get_objects('reports', report_id, None, filter_, filt_id, None, None, sort_order, sort_field)
    if format_id: get_reports_et.attrib['format_id'] = format_id
    if isinstance(first_result , int): get_reports_et.attrib['first_result'] = '%d'%(first_result)
    if isinstance(max_results , int): get_reports_et.attrib['max_results'] = '%d'%(max_results)
    if isinstance(autofp , int): get_reports_et.attrib['autofp'] = '%d'%(autofp)
    if isinstance(notes , bool): get_reports_et.attrib['notes'] = '%d'%(notes)
    if isinstance(overrides , bool): get_reports_et.attrib['overrides'] = '%d'%(overrides)
    # BUG FIX: this attribute was serialized from `overrides` instead of
    # `result_hosts_only`, silently ignoring the caller's argument.
    if isinstance(result_hosts_only, bool): get_reports_et.attrib['result_hosts_only'] = '%d'%(result_hosts_only)
    if isinstance(show_closed_cves , bool): get_reports_et.attrib['show_closed_cves'] = '%d'%(show_closed_cves)
    result_et = self._cmd_execute( ElementTree.tostring(get_reports_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            report = result_et.find('report')
            if isinstance(report, ElementTree.Element):
                content_type = report.get('content_type')
                if format_id==self._report_formats_dictionary.get('XML'):
                    if isinstance(ToDisk, str):
                        ElementTree.ElementTree(report).write(ToDisk)
                else:
                    if isinstance(ToDisk, str):
                        # NOTE(review): str.decode('base64') is Python-2-only
                        text = report.text.decode('base64')
                        with open(ToDisk, 'wb') as dest_file:
                            dest_file.write( text )
                logging.info( 'get_report %s %s'%('content_type=%s'%(content_type) if content_type is not None else '', 'location=%s'%(ToDisk) if isinstance(ToDisk, str) else '') )
                return True
        logging.error( list(result_et.items()) )
    return False
def delete_report(self, report_id='', ultimate=True):
    """Delete the report with report_id (permanently when ultimate=True)."""
    return self._delete_object('report', report_id, ultimate)
def get_report_formats(self, report_format_id='', trash=None, export=None, params=None, sort_order='descending', sort_field=None):
    """Build a <get_report_formats> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_report_formats_et = self._get_objects('report_formats', report_format_id, None, None, None, None, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_report_formats_et) )
def delete_report_format(self, report_format_id='', ultimate=True):
    """Delete the report format with report_format_id (permanently when ultimate=True)."""
    return self._delete_object('report_format', report_format_id, ultimate)
def get_schedules(self, schedule_id='', details=None, trash=None, sort_order='descending', sort_field=None):
    """Build a <get_schedules> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_schedules_et = self._get_objects('schedules', schedule_id, None, None, None, details, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_schedules_et) )
def delete_schedule(self, schedule_id='', ultimate=True):
    """Delete the schedule with schedule_id (permanently when ultimate=True)."""
    return self._delete_object('schedule', schedule_id, ultimate)
def get_slaves(self, slave_id='', trash=None, tasks=None, sort_order='descending', sort_field=None):
    """Build a <get_slaves> request.

    NOTE(review): the execute call is commented out -- this sends nothing
    and implicitly returns None (incomplete implementation).
    """
    get_slaves_et = self._get_objects('slaves', slave_id, None, None, None, None, trash, sort_order, sort_field)
    #result_et = self._cmd_execute( ElementTree.tostring(get_slaves_et) )
def delete_slave(self, slave_id='', ultimate=True):
    """Delete the slave with slave_id (permanently when ultimate=True)."""
    return self._delete_object('slave', slave_id, ultimate)
def get_targets(self, target_id='', actions=None, filter_=None, filt_id=None, trash=None, tasks=True, sort_order='descending', sort_field=None):
    """List targets; optionally include the tasks using each.

    Returns {name: {'target_id', 'port_list_id', 'hosts',
    'tasks': {task_id: name}}} on status 200, None otherwise.  Targets
    without a port list id are skipped.
    """
    get_targets_et = self._get_objects('targets', target_id, actions, filter_, filt_id, None, trash, sort_order, sort_field)
    if isinstance(tasks, bool): get_targets_et.attrib['tasks'] = '%d'%(tasks)
    result_et = self._cmd_execute( ElementTree.tostring(get_targets_et) )
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            result_dict = {}
            for item in result_et.findall('target'):
                name = item.find('name')
                hosts = item.find('hosts')
                port_list = item.find('port_list')
                tasks_ = item.find('tasks')
                target_id_ = item.get('id')
                port_list_id_ = None if port_list is None else port_list.get('id')
                if target_id_ and port_list_id_ and name is not None and port_list is not None and name.text:
                    result_dict[name.text.strip()] = {'target_id':target_id_.strip(), 'port_list_id':port_list_id_.strip()}
                    if hosts is not None and hosts.text:
                        result_dict[name.text.strip()]['hosts'] = hosts.text
                    if tasks_ is not None:
                        result_dict[name.text.strip()]['tasks'] = {}
                        for task_ in tasks_.findall('task'):
                            task_name = task_.find('name')
                            task_id_ = task_.get('id')
                            if task_id_ and task_name is not None and task_name.text:
                                result_dict[name.text.strip()]['tasks'][task_id_] = task_name.text
            # human-readable summary for the logs
            result_str = ''
            for key in result_dict:
                result_str += '\n%36s %36s %s'%(result_dict[key].get('target_id'), result_dict[key].get('port_list_id'), key)
            logging.info( '[ OK ] get_targets%s (Total:%d)'%(' targets_id=%s'%(target_id) if target_id else '', len(result_dict)) )
            logging.debug( 'get_targets%s %s'%(' targets_id=%s'%(target_id) if target_id else '', result_str) )
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def create_target(self, hosts=None, port_list_id=None, ssh_lsc_credential_id=None, smb_lsc_credential_id=None, target_locator_username=None, target_locator_password=None, name='', comment=None, copy=None, port_range=None):
    """Create a scan target; returns the new target_id (status 201) or None.

    BUG FIX: ssh_lsc_credential_id was dropped -- a literal None was
    forwarded in its slot when delegating to modify_target.  It is now
    passed through (modify_target currently ignores credential arguments,
    so behavior is unchanged until that support lands).
    """
    create_target_et = self.modify_target('_create_', hosts, port_list_id, ssh_lsc_credential_id, smb_lsc_credential_id, target_locator_username, target_locator_password, name, comment)
    return self._check_response(name, self._cmd_execute( ElementTree.tostring(create_target_et) ), 'target', '201')
def modify_target(self, target_id ='', hosts=None, port_list_id=None, ssh_lsc_credential_id=None, smb_lsc_credential_id=None, target_locator_username=None, target_locator_password=None, name=None, comment=None):
    """Build a create/modify <target> request.

    In '_create_' mode the unsent Element is returned for the caller to
    submit.  NOTE(review): the credential/locator parameters are accepted
    but never serialized, and the modify path's execute call is commented
    out -- it returns True without sending anything.
    """
    modify_target_et = self._modify_object('target', target_id, name, comment)
    if hosts is not None: ElementTree.SubElement(modify_target_et, 'hosts').text = hosts
    if port_list_id is not None: ElementTree.SubElement(modify_target_et, 'port_list', {'id':port_list_id})
    if target_id == '_create_': return modify_target_et
    #result_et = self._cmd_execute( ElementTree.tostring(modify_target_et) )
    return True
def delete_target(self, target_id ='', ultimate=True):
    """Delete the target with target_id (permanently when ultimate=True)."""
    return self._delete_object('target', target_id, ultimate)
def get_tasks(self,
              task_id = '',
              actions = None,
              details = None,
              trash = None,
              rcfile = None,
              apply_overrides = None,
              sort_order = 'descending',
              sort_field = None):
    """List tasks with their status and progress.

    Returns {name: {'task_id', 'status', 'progress'}} on status 200
    (also {} on 404), None on any other outcome.
    """
    get_tasks_et = self._get_objects('tasks', task_id, actions, None, None, details, trash, sort_order, sort_field)
    result_et = self._cmd_execute( ElementTree.tostring(get_tasks_et) )
    result_dict = {}
    result_str = ''
    if isinstance(result_et, ElementTree.Element):
        if result_et.get('status') == '200':
            for item in result_et.findall('task'):
                name = item.find('name')
                status = item.find('status')
                progress = item.find('progress')
                task_id_ = item.get('id')
                if task_id_ and name is not None and status is not None and progress is not None and name.text and status.text and progress.text:
                    result_dict[name.text.strip()] = {'task_id':task_id_.strip(), 'status':status.text.strip(), 'progress':progress.text.strip()}
            # human-readable summary for the logs
            for k, v in result_dict.items():
                result_str += '\n%36s %-16s %3s %5s'%(v.get('task_id'), v.get('status'), v.get('progress'), k)
            # NOTE(review): result_str[1:] is interpolated after 'task_id=' --
            # the plain task_id looks intended; confirm before changing
            logging.info( '[ OK ] get_tasks%s'%(' task_id=%s'%(result_str[1:]) if task_id else ' (Total:%d)'%(len(result_dict))) )
            logging.debug( 'get_tasks%s %s'%(' task_id=%s'%(task_id) if task_id else '', result_str) )
            return result_dict
        elif result_et.get('status') == '404':
            return result_dict
        logging.error( list(result_et.items()) )
    return None
def create_task(self,
                config_id = _scan_configs_dictionary.get('empty'),
                target_id = '',
                alert_id = None,
                schedule_id = None,
                slave_id = None,
                observers = None,
                pref_max_checks = 8,
                pref_max_hosts = 20,
                pref_in_assets = True,
                rcfile = None,
                name = '',
                comment = None):
    """Create a scan task bound to config_id/target_id.

    Delegates request construction to modify_task's '_create_' mode and
    returns the new task_id via _check_response (status 201), else None.
    """
    request = self.modify_task('_create_', alert_id, schedule_id, slave_id, observers, pref_max_checks, pref_max_hosts, pref_in_assets, rcfile, name, comment, None, None)
    for tag, ident in (('config', config_id), ('target', target_id)):
        if ident:
            ElementTree.SubElement(request, tag, {'id': ident})
    return self._check_response(name, self._cmd_execute( ElementTree.tostring(request) ), 'task', '201')
def modify_task(self,
                task_id = '',
                alert_id = None,
                schedule_id = None,
                slave_id = None,
                observers = None,
                pref_max_checks = None,
                pref_max_hosts = None,
                pref_in_assets = None,
                rcfile = None,
                name = None,
                comment = None,
                file_name = None,
                file_action = None):
    """Build a create/modify <task> request with scanner preferences.

    In '_create_' mode the unsent Element is returned for create_task to
    submit.  NOTE(review): alert/schedule/slave/observers/rcfile/file_*
    parameters are accepted but never serialized, and the modify path's
    execute call is commented out -- it returns True without sending.
    """
    modify_task_et = self._modify_object('task', task_id, name, comment)
    # only emit a <preferences> block when at least one preference is given
    if isinstance(pref_max_checks, int) or isinstance(pref_max_hosts, int) or isinstance(pref_in_assets, bool):
        preferences_et = ElementTree.SubElement(modify_task_et, 'preferences')
        if isinstance(pref_max_checks, int):
            preference_et = ElementTree.SubElement(preferences_et, 'preference')
            ElementTree.SubElement(preference_et , 'scanner_name').text = 'max_checks'
            ElementTree.SubElement(preference_et , 'value').text = '%d'%(pref_max_checks)
        if isinstance(pref_max_hosts, int):
            preference_et = ElementTree.SubElement(preferences_et, 'preference')
            ElementTree.SubElement(preference_et , 'scanner_name').text = 'max_hosts'
            ElementTree.SubElement(preference_et , 'value').text = '%d'%(pref_max_hosts)
        if isinstance(pref_in_assets, bool):
            preference_et = ElementTree.SubElement(preferences_et, 'preference')
            ElementTree.SubElement(preference_et , 'scanner_name').text = 'in_assets'
            ElementTree.SubElement(preference_et , 'value').text = 'no' if pref_in_assets==False else 'yes'
    if task_id == '_create_': return modify_task_et
    #result_et = self._cmd_execute( ElementTree.tostring(modify_task_et) )
    return True
def delete_task(self, task_id ='', ultimate=True):
    """Delete the task with task_id (permanently when ultimate=True)."""
    return self._delete_object('task', task_id, ultimate)
def _action_task(self, action='start', task_id='', name=None):
    """Send an '<ACTION_task task_id=.../>' command for a task.

    When `name` matches a cached task its cached task_id is used, and any
    report_id in the reply is cached back.  Returns a dict with 'status'
    and, when present, 'report_id'; empty dict when no reply parsed.
    """
    if name is not None and name in self._task_cache_dictionary:
        task_id = self._task_cache_dictionary[name].get('task_id')
    assert( task_id and action in self._action_task_list )
    # the action verb is expanded into the element name: <start_task .../>
    xml_template = '''<%s_task task_id="%s"/>''' % (action, task_id)
    result_et = self._cmd_execute( xml_template )
    result = {}
    if isinstance(result_et, ElementTree.Element):
        status = result_et.get('status')
        result['status'] = status
        if status == '200' or status == '202':
            report_id_et = result_et.find('report_id')
            report_id = report_id_et.text if isinstance(report_id_et, ElementTree.Element) and report_id_et.text else ''
            if report_id:
                if name is not None and name in self._task_cache_dictionary:
                    self._task_cache_dictionary[name]['report_id'] = report_id
                result['report_id'] = report_id
            logging.info( '[ OK ] %s_task status=%s task_id=%s %s %s'%(action, status, task_id, 'name=%s'%(name) if name is not None else '', 'report_id=%s'%(report_id) if report_id else '') )
            return result
        logging.error( list(result_et.items()) )
    return result
def pause_task(self, task_id=''):
    """Pause a running task; returns the _action_task result dict."""
    return self._action_task('pause', task_id)
def resume_or_start_task(self, task_id=''):
    """Resume the task if paused/stopped, otherwise start it.

    BUG FIX: the action was 'resume_or_start_task', which _action_task
    expands to '<resume_or_start_task_task .../>' (doubled suffix); pass
    'resume_or_start' so the command becomes '<resume_or_start_task .../>'.
    """
    return self._action_task('resume_or_start', task_id)
def resume_paused_task(self, task_id=''):
return self._action_task('resume_paused_task', task_id)
def resume_stopped_task(self, task_id=''):
return self._action_task('resume_stopped_task', task_id)
def start_task(self, task_id='', name=None):
return self._action_task('start', task_id, name).get('report_id')
def stop_task(self, task_id=''):
return self._action_task('stop', task_id).get('status')
    def empty_trashcan(self):
        """Send <empty_trashcan/>.

        Returns True on status 200, False on any other reply, and None when
        the command could not be executed at all.
        """
        result_et = self._cmd_execute( '<empty_trashcan/>' )
        if isinstance(result_et, ElementTree.Element):
            if result_et.get('status') == '200':
                logging.info( '[ OK ] empty trashcan' )
                return True
            logging.error( '[FAIL] empty trashcan' )
            logging.debug( list(result_et.items()) )
            return False
        return None
    def WaitForTask(self, task_id='', wait_status='Stopped', times=10, sleep=1):
        """Poll get_tasks() until *task_id* reaches *wait_status*.

        wait_status: one of 'Deleted', 'Running', 'Stopped', 'Paused'.
        Polls up to *times* times, sleeping *sleep* seconds between polls.
        Returns True when the desired state is reached, False otherwise.
        """
        assert( task_id and wait_status in ('Deleted', 'Running', 'Stopped', 'Paused') and isinstance(times, int) and isinstance(sleep, int) and times>0 and sleep>0 )
        for i in range(times):
            result_dict = self.get_tasks(task_id)
            if not isinstance(result_dict, dict):
                break
            if len(result_dict)==0 and wait_status == 'Deleted':
                # Task vanished from the list: the delete completed.
                logging.info( '[ OK ] task(id=%s) is not in the task list'%(task_id) )
                return True
            elif len(result_dict)!=1:
                break
            else:
                task_status = list(result_dict.items())[0][1].get('status')
                if wait_status == 'Deleted':
                    # Keep polling only while the delete is still pending.
                    if task_status == 'Ultimate Delete Requested' or task_status == 'Delete Requested':
                        pass
                    else:
                        break
                # NOTE(review): when wait_status is 'Deleted' and the delete is
                # still pending, the chain below falls into its final `else`
                # and returns False instead of sleeping and retrying — confirm
                # this is the intended behaviour.
                if wait_status == 'Running':
                    if task_status == 'Running':
                        logging.info( '[ OK ] task(id=%s) is Running'%(task_id) )
                        return True
                    elif task_status in ('Requested', 'Resume Requested'):
                        pass
                    else:
                        break
                elif wait_status == 'Stopped':
                    if task_status == 'Stopped':
                        logging.info( '[ OK ] task(id=%s) is Stopped'%(task_id) )
                        return True
                    elif task_status == 'Stop Requested':
                        pass
                    else:
                        break
                elif wait_status == 'Paused':
                    if task_status == 'Paused':
                        logging.info( '[ OK ] task(id=%s) is Paused'%(task_id) )
                        return True
                    elif task_status == 'Pause Requested':
                        pass
                    else:
                        break
                else:
                    return False
            logging.debug('Waiting for task(id=%s) %s, current state is %s, will sleep %ds and retry %d times...'%(task_id, wait_status, task_status, sleep, times-i-1))
            time.sleep(sleep)
        logging.error('[FAIL] get_tasks(task_id=%s) wait_for_task:%s times=%d sleep=%d'%(task_id, wait_status, times, sleep))
        logging.debug('get_tasks_result_dict:\n'+str(result_dict))
        return False
def WaitForAnyRunningTask(self, sleep=30):
while True:
tsk_str = ''
running = {k:v for k, v in self.get_tasks().items() if v.get('status')=='Running'}
t_count = len(running)
if t_count == 0:
break
for k, v in running.items():
tsk_str += '\n%36s %-16s %3s %5s'%(v.get('task_id'), v.get('status'), v.get('progress'), k)
logging.info( '[ ] Waiting for {} tasks to die'.format(t_count) )
logging.debug( 'WaitForAnyRunningTask{}'.format( tsk_str ) )
time.sleep( sleep )
return True
    def RunNewTask(self, name, hosts, port_range, config_name):
        """Create a port list, target and task named *name*, then start it.

        Returns the new task id on success. On partial failure the helper
        objects that were created are deleted again and None is returned.

        The single-iteration `for i in (1,):` loops emulate a structured
        goto: `break` skips the remaining creation steps and falls through
        to the rollback code below.
        """
        assert( isinstance(name, str) and isinstance(hosts, str) and isinstance(port_range, str) and config_name in self._scan_configs_dictionary )
        if self.create_port_list(name=name, port_range=port_range) is not None:
            for i in (1,):
                if self.create_target(name=name, hosts=hosts, port_list_id=self._task_cache_dictionary[name]['port_list_id']) is None: break
                for i in (1,):
                    if self.create_task(name=name, config_id=self._scan_configs_dictionary.get( config_name ), target_id=self._task_cache_dictionary[name]['target_id']) is None: break
                    for i in (1,):
                        if self.start_task(self._task_cache_dictionary[name]['task_id'], name) is None: break
                        # Everything succeeded: hand the caller the task id.
                        return self._task_cache_dictionary[name]['task_id']
            # Rollback path — reached only via one of the breaks above.
            # NOTE(review): the cache entry may lack 'task_id'/'target_id' keys
            # when an early step failed; confirm create_* seeds all keys.
            self.delete_task(self._task_cache_dictionary[name]['task_id'])
            self.delete_target(self._task_cache_dictionary[name]['target_id'])
            self.delete_port_list(self._task_cache_dictionary[name]['port_list_id'])
        return None
def DelTaskByName(self, name):
if name in self._task_cache_dictionary:
if self.delete_task(self._task_cache_dictionary[name].get('task_id')) == False:
self.WaitForTask(self._task_cache_dictionary[name].get('task_id'), 'Deleted')
self.delete_target(self._task_cache_dictionary[name].get('target_id'))
self.delete_port_list(self._task_cache_dictionary[name].get('port_list_id'))
self._task_cache_dictionary.pop(name)
return
    def DisasterRecovery(self):
        # TODO: placeholder — no recovery logic implemented yet.
        pass
    def CustomConfig(self, name, copy=None, comment='Custom Config'):
        """Create a scan config called *name* (cloned from config id *copy*).

        Applies a fixed set of scanner preferences — preference values are
        base64-encoded strings as required by OMP — and then enables every
        NVT of each family listed in self._nvt_plugins_blacklist except the
        blacklisted oids. Returns the new config id on success, None when a
        config with that name already exists or creation failed.
        """
        configs = self.get_configs()
        if name not in configs:
            config_id = self.create_config(name, comment, copy)
            if config_id:
                # Decoded base64 values: 'eWVz'='yes', 'bm8='='no',
                # 'MTI3'='127', 'RGVidWc='='Debug', 'VmVyYm9zZQ=='='Verbose',
                # 'QWdncmVzc2l2ZQ=='='Aggressive',
                # 'MjIsODAwNiwzMzg5LDgzNjA='='22,8006,3389,8360'.
                assert( self.modify_config(config_id, {'oid':'', 'name':'log_whole_attack', 'value':'eWVz'}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.12288', 'name':'Global variable settings[entry]:Debug level', 'value':'MTI3'}, None, None) and
                #self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.12288', 'name':'Global variable settings[entry]:HTTP User-Agent', 'value':'MzYwdnVsblNjYW5uZXJCeUxDWA=='}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.12288', 'name':'Global variable settings[radio]:Log verbosity', 'value':'RGVidWc='}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.12288', 'name':'Global variable settings[radio]:Report verbosity', 'value':'VmVyYm9zZQ=='}, None, None) and
                #self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.14259', 'name':'Nmap (NASL wrapper)[entry]:Ports scanned in parallel (min)', 'value':'MTA='}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.14259', 'name':'Nmap (NASL wrapper)[radio]:Timing policy :', 'value':'QWdncmVzc2l2ZQ=='}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.100315', 'name':'Ping Host[checkbox]:Report about unrechable Hosts', 'value':'eWVz'}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.100315', 'name':'Ping Host[checkbox]:Use ARP', 'value':'bm8='}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.100315', 'name':'Ping Host[checkbox]:Use nmap', 'value':'eWVz'}, None, None) and
                self.modify_config(config_id, {'oid':'1.3.6.1.4.1.25623.1.0.100315', 'name':'Ping Host[entry]:nmap additional ports for -PA', 'value':'MjIsODAwNiwzMzg5LDgzNjA='}, None, None) )
                # Whitelist every NVT in the family except the blacklisted oids.
                for (family, bnvts) in self._nvt_plugins_blacklist.items():
                    nvts = [{'oid':oid} for oid in self.get_nvts(family=family) if oid not in bnvts]
                    assert( self.modify_config(config_id, None, None, {'family':family,'nvt':nvts}) )
                self._scan_configs_dictionary[name] = config_id
                return config_id
        return None
def CleanUp(self):
skip = []
objects_dict = self.get_tasks(sort_field='status')
if isinstance(objects_dict, dict):
for k, v in objects_dict.items():
if v.get('status') in ('Requested', 'Running'):
skip.append(k)
logging.warn('[ ] task %s(id=%s) is running, skip'%(k, v.get('task_id')))
del objects_dict[k]
for k, v in objects_dict.items():
self.stop_task( v.get('task_id') )
for k, v in objects_dict.items():
if self.delete_task( v.get('task_id') ) == False:
self.WaitForTask(v.get('task_id'), 'Deleted')
objects_dict = self.get_targets()
if isinstance(objects_dict, dict):
for k, v in objects_dict.items():
if k in self._targets_dictionary or k in skip: continue
self.delete_target( v.get('target_id') )
objects_dict = self.get_port_lists()
if isinstance(objects_dict, dict):
for k, v in objects_dict.items():
if k in self._port_lists_dictionary or k in skip: continue
self.delete_port_list( v.get('port_list_id') )
objects_dict = self.get_configs()
if isinstance(objects_dict, dict):
for k, v in objects_dict.items():
if k in self._scan_configs_dictionary or k in skip: continue
self.delete_config( v.get('config_id') )
self.empty_trashcan()
self._task_cache_dictionary.clear()
def Main():
    """Demo/driver: clean up the manager; the code after the early `return`
    is a full create-scan-report-delete cycle kept for manual debugging.

    Bug fix: the debug section referenced an undefined name `named`
    (`tasks[named]`, `_task_cache_dictionary[named]`, `DelTaskByName(name=named)`)
    which raised NameError; the variable is called `name`. The lone print
    statement is parenthesized (same output under Python 2).
    """
    omp = OMP()
    print(omp.get_version())
    omp.CleanUp()
    #print omp.CustomConfig('Custom', omp._scan_configs_dictionary.get('Full and fast'))
    #print( omp.get_system_reports() )
    return
    name = 'named'
    omp.RunNewTask(name=name, hosts='127.0.0.1', port_range='T:22,80,443,U:53', config_name='empty')
    print( omp._task_cache_dictionary )
    while True:
        time.sleep(2)
        tasks = omp.get_tasks()
        print( tasks )
        if tasks[name]['status']=='Done':
            omp.get_reports(report_id=omp._task_cache_dictionary[name].get('report_id'), format_id=omp._report_formats_dictionary.get('PDF'), ToDisk='report.pdf')
            break
    print( omp._task_cache_dictionary )
    omp.DelTaskByName(name=name)
    print( omp._task_cache_dictionary )
if __name__ == '__main__':
    # Debug-level logging with bare messages (no timestamps), then run the demo.
    logging.basicConfig(level=logging.DEBUG, format='%(message)s')
    Main()
| mit |
pk400/catering | myvenv/lib/python3.4/site-packages/django/db/models/functions.py | 69 | 7053 | """
Classes that represent database functions.
"""
from django.db.models import (
DateTimeField, Func, IntegerField, Transform, Value,
)
class Coalesce(Func):
    """
    Chooses, from left to right, the first non-null expression and returns it.
    """
    function = 'COALESCE'

    def __init__(self, *expressions, **extra):
        # COALESCE with fewer than two args is meaningless in SQL.
        if len(expressions) < 2:
            raise ValueError('Coalesce must take at least two expressions')
        super(Coalesce, self).__init__(*expressions, **extra)

    def as_oracle(self, compiler, connection):
        # we can't mix TextField (NCLOB) and CharField (NVARCHAR), so convert
        # all fields to NCLOB when we expect NCLOB
        if self.output_field.get_internal_type() == 'TextField':
            class ToNCLOB(Func):
                function = 'TO_NCLOB'
            # Wrap every source expression in TO_NCLOB(...) in place.
            expressions = [
                ToNCLOB(expression) for expression in self.get_source_expressions()]
            self.set_source_expressions(expressions)
        return super(Coalesce, self).as_sql(compiler, connection)
class ConcatPair(Func):
    """
    A helper class that concatenates two arguments together. This is used
    by `Concat` because not all backend databases support more than two
    arguments.
    """
    function = 'CONCAT'

    def __init__(self, left, right, **extra):
        super(ConcatPair, self).__init__(left, right, **extra)

    def as_sqlite(self, compiler, connection):
        # SQLite has no CONCAT(); render "a || b" instead. Note the unbound
        # super(...).as_sql call on the coalesced *copy*, which leaves `self`
        # untouched.
        coalesced = self.coalesce()
        coalesced.arg_joiner = ' || '
        coalesced.template = '%(expressions)s'
        return super(ConcatPair, coalesced).as_sql(compiler, connection)

    def as_mysql(self, compiler, connection):
        # Use CONCAT_WS with an empty separator so that NULLs are ignored.
        self.function = 'CONCAT_WS'
        self.template = "%(function)s('', %(expressions)s)"
        return super(ConcatPair, self).as_sql(compiler, connection)

    def coalesce(self):
        # null on either side results in null for expression, wrap with coalesce
        c = self.copy()
        expressions = [
            Coalesce(expression, Value('')) for expression in c.get_source_expressions()
        ]
        c.set_source_expressions(expressions)
        return c
class Concat(Func):
    """
    Concatenate two or more text expressions.

    Backends whose CONCAT yields NULL when any argument is NULL get each
    argument wrapped in COALESCE by the ConcatPair helper, so the result is
    always non-null.
    """
    function = None
    template = "%(expressions)s"

    def __init__(self, *expressions, **extra):
        if len(expressions) < 2:
            raise ValueError('Concat must take at least two expressions')
        paired = self._paired(expressions)
        super(Concat, self).__init__(paired, **extra)

    def _paired(self, expressions):
        # Fold the argument list right-to-left into nested two-argument
        # ConcatPair nodes, since some backends only support binary CONCAT:
        # [a, b, c, d] -> ConcatPair(a, ConcatPair(b, ConcatPair(c, d)))
        head, tail = expressions[0], expressions[1:]
        if len(tail) == 1:
            return ConcatPair(head, tail[0])
        return ConcatPair(head, self._paired(tail))
class Greatest(Func):
    """
    Return the maximum of the given expressions.

    NULL handling is backend-specific: Postgres returns the largest
    non-null expression, while MySQL, Oracle and SQLite return NULL as soon
    as any expression is NULL.
    """
    function = 'GREATEST'

    def __init__(self, *expressions, **extra):
        if len(expressions) < 2:
            raise ValueError('Greatest must take at least two expressions')
        super(Greatest, self).__init__(*expressions, **extra)

    def as_sqlite(self, compiler, connection):
        """SQLite spells GREATEST as MAX."""
        return super(Greatest, self).as_sql(compiler, connection, function='MAX')
class Least(Func):
    """
    Return the minimum of the given expressions.

    NULL handling is backend-specific: Postgres returns the smallest
    non-null expression, while MySQL, Oracle and SQLite return NULL as soon
    as any expression is NULL.
    """
    function = 'LEAST'

    def __init__(self, *expressions, **extra):
        if len(expressions) < 2:
            raise ValueError('Least must take at least two expressions')
        super(Least, self).__init__(*expressions, **extra)

    def as_sqlite(self, compiler, connection):
        """SQLite spells LEAST as MIN."""
        return super(Least, self).as_sql(compiler, connection, function='MIN')
class Length(Transform):
    """Transform yielding the number of characters in the expression."""
    function = 'LENGTH'
    lookup_name = 'length'

    def __init__(self, expression, **extra):
        # LENGTH always produces an integer unless the caller overrides it.
        field = extra.pop('output_field', IntegerField())
        super(Length, self).__init__(expression, output_field=field, **extra)

    def as_mysql(self, compiler, connection):
        # MySQL's LENGTH() counts bytes; CHAR_LENGTH() counts characters.
        self.function = 'CHAR_LENGTH'
        return super(Length, self).as_sql(compiler, connection)
class Lower(Transform):
    """Transform applying the database's LOWER() to the expression."""
    function = 'LOWER'
    lookup_name = 'lower'

    def __init__(self, expression, **extra):
        super(Lower, self).__init__(expression, **extra)
class Now(Func):
    """Database-side current timestamp (CURRENT_TIMESTAMP)."""
    template = 'CURRENT_TIMESTAMP'

    def __init__(self, output_field=None, **extra):
        # Default to DateTimeField so annotations/comparisons resolve
        # without an explicit output_field.
        if output_field is None:
            output_field = DateTimeField()
        super(Now, self).__init__(output_field=output_field, **extra)

    def as_postgresql(self, compiler, connection):
        # Postgres' CURRENT_TIMESTAMP means "the time at the start of the
        # transaction". We use STATEMENT_TIMESTAMP to be cross-compatible with
        # other databases.
        self.template = 'STATEMENT_TIMESTAMP()'
        return self.as_sql(compiler, connection)
class Substr(Func):
    """SQL SUBSTRING: extract part of a string expression."""
    function = 'SUBSTRING'

    def __init__(self, expression, pos, length=None, **extra):
        """
        expression: the name of a field, or an expression returning a string
        pos: an integer > 0, or an expression returning an integer
        length: an optional number of characters to return
        """
        # Plain integers are validated and wrapped; expressions pass through.
        if not hasattr(pos, 'resolve_expression'):
            if pos < 1:
                raise ValueError("'pos' must be greater than 0")
            pos = Value(pos)
        args = [expression, pos]
        if length is not None:
            if not hasattr(length, 'resolve_expression'):
                length = Value(length)
            args.append(length)
        super(Substr, self).__init__(*args, **extra)

    def as_sqlite(self, compiler, connection):
        # SQLite and Oracle both use the short spelling SUBSTR.
        self.function = 'SUBSTR'
        return super(Substr, self).as_sql(compiler, connection)

    def as_oracle(self, compiler, connection):
        self.function = 'SUBSTR'
        return super(Substr, self).as_sql(compiler, connection)
class Upper(Transform):
    """Transform applying the database's UPPER() to the expression."""
    function = 'UPPER'
    lookup_name = 'upper'

    def __init__(self, expression, **extra):
        super(Upper, self).__init__(expression, **extra)
| mit |
naziris/HomeSecPi | venv/lib/python2.7/site-packages/werkzeug/contrib/sessions.py | 295 | 12450 | # -*- coding: utf-8 -*-
r"""
werkzeug.contrib.sessions
~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains some helper classes that help one to add session
support to a python WSGI application. For full client-side session
storage see :mod:`~werkzeug.contrib.securecookie` which implements a
secure, client-side session storage.
Application Integration
=======================
::
from werkzeug.contrib.sessions import SessionMiddleware, \
FilesystemSessionStore
app = SessionMiddleware(app, FilesystemSessionStore())
The current session will then appear in the WSGI environment as
`werkzeug.session`. However it's recommended to not use the middleware
but the stores directly in the application. However for very simple
scripts a middleware for sessions could be sufficient.
This module does not implement methods or ways to check if a session is
expired. That should be done by a cronjob and storage specific. For
example to prune unused filesystem sessions one could check the modified
time of the files. It sessions are stored in the database the new()
method should add an expiration timestamp for the session.
For better flexibility it's recommended to not use the middleware but the
store and session object directly in the application dispatching::
session_store = FilesystemSessionStore()
def application(environ, start_response):
request = Request(environ)
sid = request.cookies.get('cookie_name')
if sid is None:
request.session = session_store.new()
else:
request.session = session_store.get(sid)
response = get_the_response_object(request)
if request.session.should_save:
session_store.save(request.session)
response.set_cookie('cookie_name', request.session.sid)
return response(environ, start_response)
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import sys
import tempfile
from os import path
from time import time
from random import random
from hashlib import sha1
from pickle import dump, load, HIGHEST_PROTOCOL
from werkzeug.datastructures import CallbackDict
from werkzeug.utils import dump_cookie, parse_cookie
from werkzeug.wsgi import ClosingIterator
from werkzeug.posixemulation import rename
from werkzeug._compat import PY2, text_type
#: Matches a 40-character lowercase sha1 hexdigest — the session id format.
_sha1_re = re.compile(r'^[a-f0-9]{40}$')
def _urandom():
    """Return 30 bytes of randomness from the OS.

    Falls back to the (non-cryptographic, predictable) `random` module on
    platforms without os.urandom() — only exotic platforms hit this path.
    """
    if hasattr(os, 'urandom'):
        return os.urandom(30)
    return str(random()).encode('ascii')


def generate_key(salt=None):
    """Generate a 40-character hex session key from salt + time + randomness.

    Fix: a non-None text salt previously crashed the b''.join under
    Python 3; text salts are now encoded to ASCII bytes. None/bytes salts
    behave exactly as before (None -> b'None').
    """
    if salt is None:
        salt = repr(salt)
    if not isinstance(salt, bytes):
        salt = str(salt).encode('ascii')
    return sha1(b''.join([
        salt,
        str(time()).encode('ascii'),
        _urandom()
    ])).hexdigest()
class ModificationTrackingDict(CallbackDict):
    """Dict that flips `self.modified` to True on direct item changes."""
    __slots__ = ('modified',)

    def __init__(self, *args, **kwargs):
        def on_update(self):
            self.modified = True
        # Start unmodified, then seed the initial contents via dict.update,
        # which bypasses CallbackDict and therefore does NOT fire on_update.
        self.modified = False
        CallbackDict.__init__(self, on_update=on_update)
        dict.update(self, *args, **kwargs)

    def copy(self):
        """Create a flat copy of the dict."""
        # NOTE(review): only the slot attributes are copied here; the mapping
        # entries themselves are not — confirm this matches the intended
        # upstream semantics before relying on copy() for data.
        missing = object()
        result = object.__new__(self.__class__)
        for name in self.__slots__:
            val = getattr(self, name, missing)
            if val is not missing:
                setattr(result, name, val)
        return result

    def __copy__(self):
        return self.copy()
class Session(ModificationTrackingDict):
    """Subclass of a dict that keeps track of direct object changes. Changes
    in mutable structures are not tracked, for those you have to set
    `modified` to `True` by hand.
    """
    __slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new')

    def __init__(self, data, sid, new=False):
        ModificationTrackingDict.__init__(self, data)
        self.sid = sid
        self.new = new

    def __repr__(self):
        flag = '*' if self.should_save else ''
        return '<%s %s%s>' % (type(self).__name__, dict.__repr__(self), flag)

    @property
    def should_save(self):
        """True if the session should be saved.

        .. versionchanged:: 0.6
           By default the session is now only saved if the session is
           modified, not if it is new like it was before.
        """
        return self.modified
class SessionStore(object):
    """Baseclass for all session stores. The Werkzeug contrib module does not
    implement any useful stores besides the filesystem store, application
    developers are encouraged to create their own stores.

    :param session_class: The session class to use. Defaults to
                          :class:`Session`.
    """

    def __init__(self, session_class=None):
        self.session_class = Session if session_class is None else session_class

    def is_valid_key(self, key):
        """Check that *key* looks like a sha1 hexdigest."""
        return _sha1_re.match(key) is not None

    def generate_key(self, salt=None):
        """Return a fresh random session key."""
        return generate_key(salt)

    def new(self):
        """Create a brand-new empty session with a fresh sid."""
        return self.session_class({}, self.generate_key(), True)

    def save(self, session):
        """Persist *session* (no-op in the base class)."""

    def save_if_modified(self, session):
        """Persist *session* only when it reports unsaved changes."""
        if session.should_save:
            self.save(session)

    def delete(self, session):
        """Remove *session* from the store (no-op in the base class)."""

    def get(self, sid):
        """Get a session for this sid or a new session object. This method
        has to check if the session key is valid and create a new session if
        that wasn't the case.
        """
        return self.session_class({}, sid, True)
#: used for temporary files by the filesystem session store; files carrying
#: this suffix are in-progress writes and are skipped by list().
_fs_transaction_suffix = '.__wz_sess'
class FilesystemSessionStore(SessionStore):
    """Simple example session store that saves sessions on the filesystem.
    This store works best on POSIX systems and Windows Vista / Windows
    Server 2008 and newer.

    .. versionchanged:: 0.6
       `renew_missing` was added. Previously this was considered `True`,
       now the default changed to `False` and it can be explicitly
       deactivated.

    :param path: the path to the folder used for storing the sessions.
                 If not provided the default temporary directory is used.
    :param filename_template: a string template used to give the session
                              a filename. ``%s`` is replaced with the
                              session id.
    :param session_class: The session class to use. Defaults to
                          :class:`Session`.
    :param renew_missing: set to `True` if you want the store to
                          give the user a new sid if the session was
                          not yet saved.
    """

    def __init__(self, path=None, filename_template='werkzeug_%s.sess',
                 session_class=None, renew_missing=False, mode=0o644):
        SessionStore.__init__(self, session_class)
        if path is None:
            path = tempfile.gettempdir()
        self.path = path
        # On Python 2 the template must be bytes to join with encoded sids.
        if isinstance(filename_template, text_type) and PY2:
            filename_template = filename_template.encode(
                sys.getfilesystemencoding() or 'utf-8')
        assert not filename_template.endswith(_fs_transaction_suffix), \
            'filename templates may not end with %s' % _fs_transaction_suffix
        self.filename_template = filename_template
        self.renew_missing = renew_missing
        self.mode = mode

    def get_session_filename(self, sid):
        # out of the box, this should be a strict ASCII subset but
        # you might reconfigure the session object to have a more
        # arbitrary string.
        if isinstance(sid, text_type) and PY2:
            sid = sid.encode(sys.getfilesystemencoding() or 'utf-8')
        return path.join(self.path, self.filename_template % sid)

    def save(self, session):
        """Atomically persist *session*: write to a temp file in the same
        directory, then rename it over the final name."""
        fn = self.get_session_filename(session.sid)
        fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix,
                                   dir=self.path)
        f = os.fdopen(fd, 'wb')
        try:
            dump(dict(session), f, HIGHEST_PROTOCOL)
        finally:
            f.close()
        try:
            rename(tmp, fn)
            os.chmod(fn, self.mode)
        except (IOError, OSError):
            # Best-effort: a failed rename/chmod leaves the old session intact.
            pass

    def delete(self, session):
        """Remove the session file; missing files are ignored."""
        fn = self.get_session_filename(session.sid)
        try:
            os.unlink(fn)
        except OSError:
            pass

    def get(self, sid):
        """Load the session for *sid*, or return a fresh/empty one on any
        problem (bad key, unreadable file, corrupt pickle)."""
        if not self.is_valid_key(sid):
            return self.new()
        try:
            f = open(self.get_session_filename(sid), 'rb')
        except IOError:
            if self.renew_missing:
                return self.new()
            data = {}
        else:
            try:
                try:
                    data = load(f)
                except Exception:
                    # Corrupt session data: start over with an empty dict.
                    data = {}
            finally:
                f.close()
        return self.session_class(data, sid, False)

    def list(self):
        """Lists all sessions in the store.

        .. versionadded:: 0.6
        """
        before, after = self.filename_template.split('%s', 1)
        filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before),
                                                    re.escape(after)))
        result = []
        for filename in os.listdir(self.path):
            #: this is a session that is still being saved.
            if filename.endswith(_fs_transaction_suffix):
                continue
            match = filename_re.match(filename)
            if match is not None:
                result.append(match.group(1))
        return result
class SessionMiddleware(object):
    """A simple middleware that puts the session object of a store provided
    into the WSGI environ. It automatically sets cookies and restores
    sessions.

    However a middleware is not the preferred solution because it won't be as
    fast as sessions managed by the application itself and will put a key into
    the WSGI environment only relevant for the application which is against
    the concept of WSGI.

    The cookie parameters are the same as for the :func:`~dump_cookie`
    function just prefixed with ``cookie_``. Additionally `max_age` is
    called `cookie_age` and not `cookie_max_age` because of backwards
    compatibility.
    """

    def __init__(self, app, store, cookie_name='session_id',
                 cookie_age=None, cookie_expires=None, cookie_path='/',
                 cookie_domain=None, cookie_secure=None,
                 cookie_httponly=False, environ_key='werkzeug.session'):
        self.app = app
        self.store = store
        self.cookie_name = cookie_name
        self.cookie_age = cookie_age
        self.cookie_expires = cookie_expires
        self.cookie_path = cookie_path
        self.cookie_domain = cookie_domain
        self.cookie_secure = cookie_secure
        self.cookie_httponly = cookie_httponly
        self.environ_key = environ_key

    def __call__(self, environ, start_response):
        # Restore the session from the request cookie (or create a new one)
        # and expose it to the wrapped application via the environ.
        cookie = parse_cookie(environ.get('HTTP_COOKIE', ''))
        sid = cookie.get(self.cookie_name, None)
        if sid is None:
            session = self.store.new()
        else:
            session = self.store.get(sid)
        environ[self.environ_key] = session

        def injecting_start_response(status, headers, exc_info=None):
            # Save eagerly and emit the Set-Cookie header before the response
            # headers are sent; the ClosingIterator below catches late
            # modifications made while the response body is produced.
            if session.should_save:
                self.store.save(session)
                headers.append(('Set-Cookie', dump_cookie(self.cookie_name,
                                                          session.sid, self.cookie_age,
                                                          self.cookie_expires, self.cookie_path,
                                                          self.cookie_domain, self.cookie_secure,
                                                          self.cookie_httponly)))
            return start_response(status, headers, exc_info)
        return ClosingIterator(self.app(environ, injecting_start_response),
                               lambda: self.store.save_if_modified(session))
| apache-2.0 |
tboyce021/home-assistant | tests/components/minio/test_minio.py | 10 | 5220 | """Tests for Minio Hass related code."""
import asyncio
import json
import pytest
from homeassistant.components.minio import (
CONF_ACCESS_KEY,
CONF_HOST,
CONF_LISTEN,
CONF_LISTEN_BUCKET,
CONF_PORT,
CONF_SECRET_KEY,
CONF_SECURE,
DOMAIN,
QueueListener,
)
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, call, patch
from tests.components.minio.common import TEST_EVENT
@pytest.fixture(name="minio_client")
def minio_client_fixture():
    """Patch Minio client."""
    # Yield the mocked client *instance* (minio_mock.return_value) so the
    # tests can assert on calls the integration makes.
    with patch("homeassistant.components.minio.minio_helper.Minio") as minio_mock:
        minio_client_mock = minio_mock.return_value
        yield minio_client_mock
@pytest.fixture(name="minio_client_event")
def minio_client_event_fixture():
    """Patch helper function for minio notification stream."""
    with patch("homeassistant.components.minio.minio_helper.Minio") as minio_mock:
        minio_client_mock = minio_mock.return_value

        response_mock = MagicMock()
        stream_mock = MagicMock()

        # The notification stream yields two empty keep-alive chunks before
        # delivering the JSON-encoded test event.
        stream_mock.__next__.side_effect = [
            "",
            "",
            bytearray(json.dumps(TEST_EVENT), "utf-8"),
        ]

        response_mock.stream.return_value = stream_mock
        # _url_open is the internal Minio call the helper uses to open the
        # long-polling notification request.
        minio_client_mock._url_open.return_value = response_mock

        yield minio_client_mock
async def test_minio_services(hass, caplog, minio_client):
    """Test Minio services."""
    hass.config.allowlist_external_dirs = {"/test"}

    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_HOST: "localhost",
                CONF_PORT: "9000",
                CONF_ACCESS_KEY: "abcdef",
                CONF_SECRET_KEY: "0123456789",
                CONF_SECURE: "true",
            }
        },
    )

    await hass.async_start()
    await hass.async_block_till_done()

    assert "Setup of domain minio took" in caplog.text

    # Call services: each service should map 1:1 onto a Minio client call.
    await hass.services.async_call(
        DOMAIN,
        "put",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fput_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()

    await hass.services.async_call(
        DOMAIN,
        "get",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fget_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()

    await hass.services.async_call(
        DOMAIN, "remove", {"key": "some_key", "bucket": "some_bucket"}, blocking=True
    )
    assert minio_client.remove_object.call_args == call("some_bucket", "some_key")
    minio_client.reset_mock()
async def test_minio_listen(hass, caplog, minio_client_event):
    """Test minio listen on notifications."""
    minio_client_event.presigned_get_object.return_value = "http://url"

    events = []

    @callback
    def event_callback(event):
        """Handle event callbback."""
        events.append(event)

    hass.bus.async_listen("minio", event_callback)

    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_HOST: "localhost",
                CONF_PORT: "9000",
                CONF_ACCESS_KEY: "abcdef",
                CONF_SECRET_KEY: "0123456789",
                CONF_SECURE: "true",
                CONF_LISTEN: [{CONF_LISTEN_BUCKET: "test"}],
            }
        },
    )

    await hass.async_start()
    await hass.async_block_till_done()

    assert "Setup of domain minio took" in caplog.text

    # The notification arrives from a background thread; yield control to
    # the event loop until the listener has fired the bus event.
    while not events:
        await asyncio.sleep(0)

    assert 1 == len(events)
    event = events[0]

    # Field values must match TEST_EVENT from tests.components.minio.common.
    assert DOMAIN == event.event_type
    assert "s3:ObjectCreated:Put" == event.data["event_name"]
    assert "5jJkTAo.jpg" == event.data["file_name"]
    assert "test" == event.data["bucket"]
    assert "5jJkTAo.jpg" == event.data["key"]
    assert "http://url" == event.data["presigned_url"]
    assert 0 == len(event.data["metadata"])
async def test_queue_listener():
    """Tests QueueListener firing events on Home Assistant event bus."""
    hass = MagicMock()

    queue_listener = QueueListener(hass)
    queue_listener.start()

    queue_entry = {
        "event_name": "s3:ObjectCreated:Put",
        "bucket": "some_bucket",
        "key": "some_dir/some_file.jpg",
        "presigned_url": "http://host/url?signature=secret",
        "metadata": {},
    }

    queue_listener.queue.put(queue_entry)
    # stop() joins the worker thread, guaranteeing the entry was processed
    # before we inspect the mock below.
    queue_listener.stop()

    call_domain, call_event = hass.bus.fire.call_args[0]

    # The listener is expected to add "file_name" (basename of the key).
    expected_event = {
        "event_name": "s3:ObjectCreated:Put",
        "file_name": "some_file.jpg",
        "bucket": "some_bucket",
        "key": "some_dir/some_file.jpg",
        "presigned_url": "http://host/url?signature=secret",
        "metadata": {},
    }

    assert DOMAIN == call_domain
    assert json.dumps(expected_event, sort_keys=True) == json.dumps(
        call_event, sort_keys=True
    )
| apache-2.0 |
sciCloud/OLiMS | lims/tests/test_instruments.py | 2 | 4678 | from dependencies.dependency import _createObjectByType
from lims.utils import tmpID
from lims.testing import BIKA_FUNCTIONAL_TESTING
from lims.tests.base import BikaFunctionalTestCase
from lims.idserver import renameAfterCreation
from dependencies.dependency import login, logout
from dependencies.dependency import TEST_USER_NAME
from dependencies.dependency import date
import unittest
# Prefer unittest2 (Python < 2.7 backport) when installed; fall back to the
# stdlib module. NOTE(review): the unconditional `import unittest` above makes
# the except branch redundant, but it is kept for clarity/compatibility.
try:
    import unittest2 as unittest
except ImportError: # Python 2.7
    import unittest
class TestInstrumentAlerts(BikaFunctionalTestCase):
layer = BIKA_FUNCTIONAL_TESTING
def setUp(self):
super(TestInstrumentAlerts, self).setUp()
login(self.portal, TEST_USER_NAME)
def test_instrument_validation(self):
# Getting all instruments
instrument_names = self.portal.bika_setup.bika_instruments.keys()
# Setting validation dates
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
today = date.today()
# Getting last valid validation
lastval = instrument.getLatestValidValidation()
if not lastval:
# Creating a new validation
cal_obj = _createObjectByType("InstrumentValidation", instrument, tmpID())
cal_obj.edit(
title='test',
DownFrom=today.strftime("%Y/%m/%d"),
DownTo=today.strftime("%Y/%m/%d"),
Instrument=instrument
)
cal_obj.unmarkCreationFlag()
renameAfterCreation(cal_obj)
else:
# Updating last validation
lastval.setDownTo(today.strftime("%Y/%m/%d"))
lastval.setDownFrom(today.strftime("%Y/%m/%d"))
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
self.assertTrue(instrument.isValidationInProgress())
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
anotherday = '2014/11/27'
lastval = instrument.getLatestValidValidation()
lastval.setDownTo(anotherday)
lastval.setDownFrom(anotherday)
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
self.assertFalse(instrument.isValidationInProgress())
def test_instrument_calibration(self):
# Getting all instruments
instrument_names = self.portal.bika_setup.bika_instruments.keys()
# Setting calibration dates
for instrument_name in instrument_names:
# Getting each instrument
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
today = date.today()
# Getting last valid calibration
lastcal = instrument.getLatestValidCalibration()
if not lastcal:
# Creating a new calibration
cal_obj = _createObjectByType("InstrumentCalibration", instrument, tmpID())
cal_obj.edit(
title='test',
DownFrom=today.strftime("%Y/%m/%d"),
DownTo=today.strftime("%Y/%m/%d"),
Instrument=instrument
)
cal_obj.unmarkCreationFlag()
renameAfterCreation(cal_obj)
else:
# Updating last calibration
lastcal.setDownTo(today)
lastcal.setDownFrom(today)
# Testing calibration state
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
self.assertTrue(instrument.isCalibrationInProgress())
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
anotherday = '2014/11/27'
lastcal = instrument.getLatestValidCalibration()
lastcal.setDownTo(anotherday)
lastcal.setDownFrom(anotherday)
for instrument_name in instrument_names:
instrument = self.portal.bika_setup.bika_instruments[instrument_name]
self.assertFalse(instrument.isCalibrationInProgress())
def tearDown(self):
    """Log the test user out, then let the base class tear down the fixture."""
    logout()
    super(TestInstrumentAlerts, self).tearDown()
def test_suite():
    """Collect this module's tests into a suite bound to the functional layer."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(TestInstrumentAlerts))
    # The layer attribute tells the zope test runner which fixture to use.
    suite.layer = BIKA_FUNCTIONAL_TESTING
    return suite | agpl-3.0 |
eesatfan/openpli-enigma2 | lib/python/Components/Harddisk.py | 6 | 25890 | import os
import time
from Tools.CList import CList
from SystemInfo import SystemInfo
from Components.Console import Console
import Task
def readFile(filename):
    """Return the contents of *filename* with surrounding whitespace stripped.

    Uses a with-statement so the file handle is closed even when read()
    raises; the previous version leaked the handle on a read error.
    """
    with open(filename) as f:
        return f.read().strip()
def getProcMounts():
    """Parse /proc/mounts into a list of whitespace-split field lists.

    Returns [] when /proc/mounts cannot be opened.  The mount-point field
    (index 1) has the kernel's octal space escaping (\\040) undone.
    """
    try:
        mounts = open("/proc/mounts", 'r')
    except IOError, ex:
        print "[Harddisk] Failed to open /proc/mounts", ex
        return []
    result = [line.strip().split(' ') for line in mounts]
    for item in result:
        # Spaces are encoded as \040 in mounts
        item[1] = item[1].replace('\\040', ' ')
    return result
def isFileSystemSupported(filesystem):
    """Return True when the running kernel lists *filesystem* in /proc/filesystems.

    NOTE(review): on a read error this falls off the end of the except
    handler and implicitly returns None (falsy) instead of False.
    """
    try:
        for fs in open('/proc/filesystems', 'r'):
            if fs.strip().endswith(filesystem):
                return True
        return False
    except Exception, ex:
        print "[Harddisk] Failed to read /proc/filesystems:", ex
def findMountPoint(path):
    """Return the mount point that contains *path*.

    Example: findMountPoint("/media/hdd/some/file") -> "/media/hdd"
    """
    candidate = os.path.abspath(path)
    while True:
        if os.path.ismount(candidate):
            return candidate
        # Walk one level up; this terminates at the filesystem root,
        # which is always a mount point.
        candidate = os.path.dirname(candidate)
# How the /dev tree is managed on this box (detected in Harddisk.__init__):
# udev-style flat /dev/sdX nodes vs. the legacy devfs /dev/discs hierarchy.
DEVTYPE_UDEV = 0
DEVTYPE_DEVFS = 1
class Harddisk:
    """One physical block device (an entry below /sys/block).

    Handles both udev-managed (/dev/sdX) and legacy devfs (/dev/discs/...)
    device layouts, mount/unmount helpers, (re)initialisation and check
    jobs, and a software idle timer that spins the disk down by hand.
    """

    def __init__(self, device, removable):
        # device: kernel block device name without the /dev prefix, e.g. "sda".
        self.device = device

        # Detect which /dev layout the running system uses.
        if os.access("/dev/.udev", 0):
            self.type = DEVTYPE_UDEV
        elif os.access("/dev/.devfsd", 0):
            self.type = DEVTYPE_DEVFS
        else:
            print "Unable to determine structure of /dev"

        # Idle/spin-down bookkeeping, driven by startIdle()/runIdle() below.
        self.max_idle_time = 0
        self.idle_running = False
        self.last_access = time.time()
        self.last_stat = 0
        self.timer = None
        self.is_sleeping = False

        self.dev_path = ''
        self.disk_path = ''
        self.mount_path = None
        self.mount_device = None
        self.phys_path = os.path.realpath(self.sysfsPath('device'))

        if self.type == DEVTYPE_UDEV:
            self.dev_path = '/dev/' + self.device
            self.disk_path = self.dev_path

        elif self.type == DEVTYPE_DEVFS:
            # devfs: locate the /dev/discs entry whose major/minor numbers
            # match this device's sysfs "dev" file.
            tmp = readFile(self.sysfsPath('dev')).split(':')
            s_major = int(tmp[0])
            s_minor = int(tmp[1])
            for disc in os.listdir("/dev/discs"):
                dev_path = os.path.realpath('/dev/discs/' + disc)
                disk_path = dev_path + '/disc'
                try:
                    rdev = os.stat(disk_path).st_rdev
                except OSError:
                    continue
                if s_major == os.major(rdev) and s_minor == os.minor(rdev):
                    self.dev_path = dev_path
                    self.disk_path = disk_path
                    break

        print "new Harddisk", self.device, '->', self.dev_path, '->', self.disk_path
        if not removable:
            self.startIdle()

    def __lt__(self, ob):
        # Order disks by device name (sda < sdb < ...), used by list.sort().
        return self.device < ob.device

    def partitionPath(self, n):
        """Return the /dev path of partition number *n* (*n* is a string)."""
        if self.type == DEVTYPE_UDEV:
            return self.dev_path + n
        elif self.type == DEVTYPE_DEVFS:
            return self.dev_path + '/part' + n

    def sysfsPath(self, filename):
        """Return the path of *filename* below this device's /sys/block node."""
        return os.path.join('/sys/block/', self.device, filename)

    def stop(self):
        """Tear down the idle poller (called when the disk disappears)."""
        if self.timer:
            self.timer.stop()
            self.timer.callback.remove(self.runIdle)

    def bus(self):
        """Classify the disk: "Internal", "External" or "External (CF)"."""
        # CF (7025 specific)
        if self.type == DEVTYPE_UDEV:
            ide_cf = False # FIXME
        elif self.type == DEVTYPE_DEVFS:
            ide_cf = self.device[:2] == "hd" and "host0" not in self.dev_path

        # A PCI-attached device is considered built in.
        internal = "pci" in self.phys_path

        if ide_cf:
            ret = "External (CF)"
        elif internal:
            ret = "Internal"
        else:
            ret = "External"
        return ret

    def diskSize(self):
        """Return the disk size in MB (decimal units), or 0 when unreadable."""
        line = readFile(self.sysfsPath('size'))
        try:
            cap = int(line)
        except:
            return 0;
        # sysfs reports 512-byte sectors; convert to (decimal) megabytes.
        return cap / 1000 * 512 / 1000

    def capacity(self):
        """Return a human-readable capacity string, '' for an unknown size."""
        cap = self.diskSize()
        if cap == 0:
            return ""
        if cap < 1000:
            return "%03d MB" % cap
        return "%d.%03d GB" % (cap/1000, cap%1000)

    def model(self):
        """Return a vendor/model string for the disk, '-?-' on failure."""
        try:
            if self.device[:2] == "hd":
                return readFile('/proc/ide/' + self.device + '/model')
            elif self.device[:2] == "sd":
                vendor = readFile(self.sysfsPath('device/vendor'))
                model = readFile(self.sysfsPath('device/model'))
                return vendor + '(' + model + ')'
            else:
                raise Exception, "no hdX or sdX"
        except Exception, e:
            print "[Harddisk] Failed to get model:", e
            return "-?-"

    def free(self):
        """Free space of the mounted filesystem in MB, or -1 when not mounted."""
        dev = self.findMount()
        if dev:
            stat = os.statvfs(dev)
            return (stat.f_bfree/1000) * (stat.f_bsize/1000)
        return -1

    def numPartitions(self):
        """Count this disk's partitions by scanning /dev, or -1 on error.

        Starts at -1 because the whole-disk node itself matches the prefix
        test as well.
        """
        numPart = -1
        if self.type == DEVTYPE_UDEV:
            try:
                devdir = os.listdir('/dev')
            except OSError:
                return -1
            for filename in devdir:
                if filename.startswith(self.device):
                    numPart += 1

        elif self.type == DEVTYPE_DEVFS:
            try:
                idedir = os.listdir(self.dev_path)
            except OSError:
                return -1
            for filename in idedir:
                if filename.startswith("disc"):
                    numPart += 1
                if filename.startswith("part"):
                    numPart += 1
        return numPart

    def mountDevice(self):
        """Find the first mounted partition of this disk in /proc/mounts.

        Caches device and mount point and returns the mount point, or None.
        """
        for parts in getProcMounts():
            if os.path.realpath(parts[0]).startswith(self.dev_path):
                self.mount_device = parts[0]
                self.mount_path = parts[1]
                return parts[1]

    def enumMountDevices(self):
        """Yield every mount point backed by this disk."""
        for parts in getProcMounts():
            if os.path.realpath(parts[0]).startswith(self.dev_path):
                yield parts[1]

    def findMount(self):
        """Return the cached mount point, probing /proc/mounts on first use."""
        if self.mount_path is None:
            return self.mountDevice()
        return self.mount_path

    def unmount(self):
        """umount this disk; return the umount exit status (0 when not mounted)."""
        dev = self.mountDevice()
        if dev is None:
            # not mounted, return OK
            return 0
        cmd = 'umount ' + dev
        print "[Harddisk]", cmd
        res = os.system(cmd)
        return (res >> 8)

    def createPartition(self):
        """Create one partition spanning the disk (legacy sfdisk one-shot)."""
        cmd = 'printf "8,\n;0,0\n;0,0\n;0,0\ny\n" | sfdisk -f -uS ' + self.disk_path
        res = os.system(cmd)
        return (res >> 8)

    def mkfs(self):
        # No longer supported, use createInitializeJob instead
        return 1

    def mount(self):
        """Mount this disk's filesystem, preferring an /etc/fstab entry.

        Returns the shell exit status, or -1 when fstab is unreadable or
        nothing could be mounted.
        """
        # try mounting through fstab first
        if self.mount_device is None:
            dev = self.partitionPath("1")
        else:
            # if previously mounted, use the same spot
            dev = self.mount_device
        try:
            fstab = open("/etc/fstab")
            lines = fstab.readlines()
        except IOError:
            return -1
        fstab.close()
        for line in lines:
            parts = line.strip().split(" ")
            fspath = os.path.realpath(parts[0])
            if fspath == dev:
                print "[Harddisk] mounting:", fspath
                cmd = "mount -t auto " + fspath
                res = os.system(cmd)
                return (res >> 8)
        # device is not in fstab
        res = -1
        if self.type == DEVTYPE_UDEV:
            # we can let udev do the job, re-read the partition table
            res = os.system('sfdisk -R ' + self.disk_path)
            # give udev some time to make the mount, which it will do asynchronously
            from time import sleep
            sleep(3)
        return (res >> 8)

    def fsck(self):
        # No longer supported, use createCheckJob instead
        return 1

    def killPartitionTable(self):
        """Zero the first 9 sectors of the disk, wiping the partition table."""
        zero = 512 * '\0'
        h = open(self.dev_path, 'wb')
        # delete first 9 sectors, which will likely kill the first partition too
        for i in range(9):
            h.write(zero)
        h.close()

    def killPartition(self, n):
        """Zero the first 3 sectors of partition *n* (destroys its superblock)."""
        zero = 512 * '\0'
        part = self.partitionPath(n)
        h = open(part, 'wb')
        for i in range(3):
            h.write(zero)
        h.close()

    def createInitializeJob(self):
        """Build a Task.Job that wipes, repartitions, formats and remounts
        the disk.  Tasks append themselves to *job* on construction, so
        rebinding `task` each time is intentional."""
        job = Task.Job(_("Initializing storage device..."))
        size = self.diskSize()
        print "[HD] size: %s MB" % size

        task = UnmountTask(job, self)

        task = Task.PythonTask(job, _("Removing partition table"))
        task.work = self.killPartitionTable
        task.weighting = 1

        task = Task.LoggingTask(job, _("Rereading partition table"))
        task.weighting = 1
        task.setTool('sfdisk')
        task.args.append('-R')
        task.args.append(self.disk_path)

        # Wait until the old partition device node has disappeared.
        task = Task.ConditionTask(job, _("Waiting for partition"), timeoutCount=20)
        task.check = lambda: not os.path.exists(self.partitionPath("1"))
        task.weighting = 1

        # parted is required for >2TB disks (GPT); install it on demand.
        if os.path.exists('/usr/sbin/parted'):
            use_parted = True
        else:
            if size > 2097151:
                addInstallTask(job, 'parted')
                use_parted = True
            else:
                use_parted = False

        task = Task.LoggingTask(job, _("Creating partition"))
        task.weighting = 5
        if use_parted:
            task.setTool('parted')
            if size < 1024:
                # On very small devices, align to block only
                alignment = 'min'
            else:
                # Prefer optimal alignment for performance
                alignment = 'opt'
            if size > 2097151:
                parttype = 'gpt'
            else:
                parttype = 'msdos'
            task.args += ['-a', alignment, '-s', self.disk_path, 'mklabel', parttype, 'mkpart', 'primary', '0%', '100%']
        else:
            task.setTool('sfdisk')
            task.args.append('-f')
            task.args.append('-uS')
            task.args.append(self.disk_path)
            if size > 128000:
                # Start at sector 8 to better support 4k aligned disks
                print "[HD] Detected >128GB disk, using 4k alignment"
                task.initial_input = "8,\n;0,0\n;0,0\n;0,0\ny\n"
            else:
                # Smaller disks (CF cards, sticks etc) don't need that
                task.initial_input = "0,\n;\n;\n;\ny\n"

        task = Task.ConditionTask(job, _("Waiting for partition"))
        task.check = lambda: os.path.exists(self.partitionPath("1"))
        task.weighting = 1

        task = MkfsTask(job, _("Creating filesystem"))
        big_o_options = ["dir_index"]
        if isFileSystemSupported("ext4"):
            task.setTool("mkfs.ext4")
            if size > 20000:
                try:
                    # Parse the kernel version out of /proc/version.
                    version = map(int, open("/proc/version","r").read().split(' ', 4)[2].split('.',2)[:2])
                    if (version[0] > 3) or ((version[0] > 2) and (version[1] >= 2)):
                        # Linux version 3.2 supports bigalloc and -C option, use 256k blocks
                        task.args += ["-C", "262144"]
                        big_o_options.append("bigalloc")
                except Exception, ex:
                    print "Failed to detect Linux version:", ex
        else:
            task.setTool("mkfs.ext3")
        if size > 250000:
            # No more than 256k i-nodes (prevent problems with fsck memory requirements)
            task.args += ["-T", "largefile", "-N", "262144"]
            big_o_options.append("sparse_super")
        elif size > 16384:
            # between 16GB and 250GB: 1 i-node per megabyte
            task.args += ["-T", "largefile"]
            big_o_options.append("sparse_super")
        elif size > 2048:
            # Over 2GB: 32 i-nodes per megabyte
            task.args += ["-T", "largefile", "-N", str(size * 32)]
        task.args += ["-m0", "-O", ",".join(big_o_options), self.partitionPath("1")]

        task = MountTask(job, self)
        task.weighting = 3

        task = Task.ConditionTask(job, _("Waiting for mount"), timeoutCount=20)
        task.check = self.mountDevice
        task.weighting = 1

        return job

    def initialize(self):
        # no longer supported
        return -5

    def check(self):
        # no longer supported
        return -5

    def createCheckJob(self):
        """Build a Task.Job that runs fsck on the filesystem and remounts it."""
        job = Task.Job(_("Checking filesystem..."))
        if self.findMount():
            # Unmount first when the filesystem is currently mounted.
            UnmountTask(job, self)
            dev = self.mount_device
        else:
            # otherwise, assume there is one partition
            dev = self.partitionPath("1")
        task = Task.LoggingTask(job, "fsck")
        task.setTool('fsck.ext3')
        task.args.append('-f')
        task.args.append('-p')
        task.args.append(dev)
        MountTask(job, self)
        task = Task.ConditionTask(job, _("Waiting for mount"))
        task.check = self.mountDevice
        return job

    def createExt4ConversionJob(self):
        """Build a Task.Job that converts an existing ext3 filesystem to ext4.

        NOTE(review): the user-facing message below says "You system" (typo
        for "Your system"); left unchanged because it is runtime text.
        """
        if not isFileSystemSupported('ext4'):
            raise Exception, _("You system does not support ext4")
        job = Task.Job(_("Converting ext3 to ext4..."))
        if not os.path.exists('/sbin/tune2fs'):
            addInstallTask(job, 'e2fsprogs-tune2fs')
        if self.findMount():
            # Unmount first when the filesystem is currently mounted.
            UnmountTask(job, self)
            dev = self.mount_device
        else:
            # otherwise, assume there is one partition
            dev = self.partitionPath("1")
        task = Task.LoggingTask(job, "fsck")
        task.setTool('fsck.ext3')
        task.args.append('-p')
        task.args.append(dev)
        task = Task.LoggingTask(job, "tune2fs")
        task.setTool('tune2fs')
        task.args.append('-O')
        task.args.append('extents,uninit_bg,dir_index')
        task.args.append('-o')
        task.args.append('journal_data_writeback')
        task.args.append(dev)
        task = Task.LoggingTask(job, "fsck")
        task.setTool('fsck.ext4')
        task.postconditions = [] # ignore result, it will always "fail"
        task.args.append('-f')
        task.args.append('-p')
        task.args.append('-D')
        task.args.append(dev)
        MountTask(job, self)
        task = Task.ConditionTask(job, _("Waiting for mount"))
        task.check = self.mountDevice
        return job

    def getDeviceDir(self):
        return self.dev_path

    def getDeviceName(self):
        return self.disk_path

    # the HDD idle poll daemon.
    # as some harddrives have a buggy standby timer, we are doing this by hand here.
    # first, we disable the hardware timer. then, we check every now and then if
    # any access has been made to the disc. If there has been no access over a specifed time,
    # we set the hdd into standby.
    def readStats(self):
        """Return (reads completed, writes completed) from the sysfs stat
        file, or (-1, -1) when it cannot be read."""
        try:
            l = open("/sys/block/%s/stat" % self.device).read()
        except IOError:
            return -1,-1
        data = l.split(None,5)
        return (int(data[0]), int(data[4]))

    def startIdle(self):
        """Disable the drive's own standby timer and start the software poller."""
        from enigma import eTimer

        # disable HDD standby timer
        if self.bus() == "External":
            Console().ePopen(("sdparm", "sdparm", "--set=SCT=0", self.disk_path))
        else:
            Console().ePopen(("hdparm", "hdparm", "-S0", self.disk_path))

        self.timer = eTimer()
        self.timer.callback.append(self.runIdle)
        self.idle_running = True
        self.setIdleTime(self.max_idle_time) # kick the idle polling loop

    def runIdle(self):
        """Poll timer callback: spin the disk down once max_idle_time has
        elapsed without any read/write activity."""
        if not self.max_idle_time:
            return
        t = time.time()

        idle_time = t - self.last_access

        stats = self.readStats()
        l = sum(stats)

        if l != self.last_stat and l >= 0: # access
            self.last_stat = l
            self.last_access = t
            idle_time = 0
            self.is_sleeping = False

        if idle_time >= self.max_idle_time and not self.is_sleeping:
            self.setSleep()
            self.is_sleeping = True

    def setSleep(self):
        """Issue a standby/stop command appropriate for the bus type."""
        if self.bus() == "External":
            Console().ePopen(("sdparm", "sdparm", "--flexible", "--readonly", "--command=stop", self.disk_path))
        else:
            Console().ePopen(("hdparm", "hdparm", "-y", self.disk_path))

    def setIdleTime(self, idle):
        """Set the idle timeout in seconds; 0 disables the poller."""
        self.max_idle_time = idle
        if self.idle_running:
            if not idle:
                self.timer.stop()
            else:
                self.timer.start(idle * 100, False) # poll 10 times per period.

    def isSleeping(self):
        return self.is_sleeping
class Partition:
    """One mount point / mountable device known to the HarddiskManager."""
    # for backward compatibility, force_mounted actually means "hotplug"
    def __init__(self, mountpoint, device = None, description = "", force_mounted = False):
        self.mountpoint = mountpoint
        self.description = description
        self.force_mounted = mountpoint and force_mounted
        self.is_hotplug = force_mounted # so far; this might change.
        self.device = device

    def __str__(self):
        return "Partition(mountpoint=%s,description=%s,device=%s)" % (self.mountpoint,self.description,self.device)

    def stat(self):
        """Return os.statvfs of the mount point; raises OSError when unmounted."""
        if self.mountpoint:
            return os.statvfs(self.mountpoint)
        else:
            raise OSError, "Device %s is not mounted" % self.device

    def free(self):
        """Free bytes available to non-root users, or None when unavailable."""
        try:
            s = self.stat()
            return s.f_bavail * s.f_bsize
        except OSError:
            return None

    def total(self):
        """Total filesystem size in bytes, or None when unavailable."""
        try:
            s = self.stat()
            return s.f_blocks * s.f_bsize
        except OSError:
            return None

    def tabbedDescription(self):
        """Description for list display: description, tab, mount point."""
        if self.mountpoint.startswith('/media/net'):
            # Network devices have a user defined name
            return self.description
        return self.description + '\t' + self.mountpoint

    def mounted(self, mounts = None):
        """True when hotplug-forced or the mount point appears in /proc/mounts."""
        # THANK YOU PYTHON FOR STRIPPING AWAY f_fsid.
        # TODO: can os.path.ismount be used?
        if self.force_mounted:
            return True
        if self.mountpoint:
            if mounts is None:
                mounts = getProcMounts()
            for parts in mounts:
                if parts[1] == self.mountpoint:
                    return True
        return False

    def filesystem(self, mounts = None):
        """Return the filesystem type backing the mount point, '' when unknown."""
        if self.mountpoint:
            if mounts is None:
                mounts = getProcMounts()
            for fields in mounts:
                if fields[1] == self.mountpoint:
                    return fields[2]
        return ''
# Map of hardware model -> { sysfs physical-path prefix: user friendly name },
# used by HarddiskManager.getUserfriendlyDeviceName to label USB slots etc.
DEVICEDB = \
    {"dm8000":
        {
            # dm8000:
            "/devices/platform/brcm-ehci.0/usb1/1-1/1-1.1/1-1.1:1.0": "Front USB Slot",
            "/devices/platform/brcm-ehci.0/usb1/1-1/1-1.2/1-1.2:1.0": "Back, upper USB Slot",
            "/devices/platform/brcm-ehci.0/usb1/1-1/1-1.3/1-1.3:1.0": "Back, lower USB Slot",
            "/devices/platform/brcm-ehci-1.1/usb2/2-1/2-1:1.0/host1/target1:0:0/1:0:0:0": "DVD Drive",
        },
    "dm800":
        {
            # dm800:
            "/devices/platform/brcm-ehci.0/usb1/1-2/1-2:1.0": "Upper USB Slot",
            "/devices/platform/brcm-ehci.0/usb1/1-1/1-1:1.0": "Lower USB Slot",
        },
    "dm7025":
        {
            # dm7025:
            "/devices/pci0000:00/0000:00:14.1/ide1/1.0": "CF Card Slot", #hdc
            "/devices/pci0000:00/0000:00:14.1/ide0/0.0": "Internal Harddisk"
        }
    }
def addInstallTask(job, package):
    """Append two tasks to *job*: refresh the opkg package feeds, then
    install *package*.  Tasks register themselves with the job when built."""
    update_step = Task.LoggingTask(job, "update packages")
    update_step.setTool('opkg')
    update_step.args.append('update')

    install_step = Task.LoggingTask(job, "Install " + package)
    install_step.setTool('opkg')
    install_step.args.append('install')
    install_step.args.append(package)
class HarddiskManager:
    """Keeps track of all block devices and mount points on the receiver.

    Populated once at import time by scanning /sys/block, then updated via
    the add/remove hotplug callbacks.  Observers subscribe through
    on_partition_list_change.
    """
    def __init__(self):
        self.hdd = [ ]
        self.cd = ""
        self.partitions = [ ]
        self.devices_scanned_on_init = [ ]
        self.on_partition_list_change = CList()
        self.enumerateBlockDevices()
        # Find stuff not detected by the enumeration
        p = (
            ("/media/hdd", _("Hard disk")),
            ("/media/card", _("Card")),
            ("/media/cf", _("Compact flash")),
            ("/media/mmc1", _("MMC card")),
            ("/media/net", _("Network mount")),
            ("/media/net1", _("Network mount %s") % ("1")),
            ("/media/net2", _("Network mount %s") % ("2")),
            ("/media/net3", _("Network mount %s") % ("3")),
            ("/media/ram", _("Ram disk")),
            ("/media/usb", _("USB stick")),
            ("/", _("Internal flash"))
        )
        # Register well-known mount points that are mounted but were not
        # discovered through /sys/block (network mounts, flash, ...).
        known = set([os.path.normpath(a.mountpoint) for a in self.partitions if a.mountpoint])
        for m,d in p:
            if (m not in known) and os.path.ismount(m):
                self.partitions.append(Partition(mountpoint=m, description=d))

    def getBlockDevInfo(self, blockdev):
        """Inspect /sys/block/<blockdev> and return a tuple
        (error, blacklisted, removable, is_cdrom, partitions, medium_found)."""
        devpath = "/sys/block/" + blockdev
        error = False
        removable = False
        blacklisted = False
        is_cdrom = False
        partitions = []
        try:
            removable = bool(int(readFile(devpath + "/removable")))
            dev = int(readFile(devpath + "/dev").split(':')[0])
            if dev in (1, 7, 31, 253): # ram, loop, mtdblock, romblock
                blacklisted = True
            if blockdev[0:2] == 'sr':
                is_cdrom = True
            if blockdev[0:2] == 'hd':
                try:
                    media = readFile("/proc/ide/%s/media" % blockdev)
                    if "cdrom" in media:
                        is_cdrom = True
                except IOError:
                    error = True
            # check for partitions
            if not is_cdrom:
                for partition in os.listdir(devpath):
                    if partition[0:len(blockdev)] != blockdev:
                        continue
                    partitions.append(partition)
            else:
                self.cd = blockdev
        except IOError:
            error = True
        # check for medium
        medium_found = True
        try:
            open("/dev/" + blockdev).close()
        except IOError, err:
            if err.errno == 159: # no medium present
                medium_found = False

        return error, blacklisted, removable, is_cdrom, partitions, medium_found

    def enumerateBlockDevices(self):
        """Scan /sys/block once at startup and register everything found."""
        print "[Harddisk] enumerating block devices..."
        for blockdev in os.listdir("/sys/block"):
            error, blacklisted, removable, is_cdrom, partitions, medium_found = self.addHotplugPartition(blockdev)
            if not error and not blacklisted and medium_found:
                for part in partitions:
                    self.addHotplugPartition(part)
                self.devices_scanned_on_init.append((blockdev, removable, is_cdrom, medium_found))

    def getAutofsMountpoint(self, device):
        return "/autofs/%s" % (device)

    def getMountpoint(self, device):
        """Return where /dev/<device> is mounted, or None."""
        dev = "/dev/%s" % device
        for item in getProcMounts():
            if item[0] == dev:
                return item[1]
        return None

    def addHotplugPartition(self, device, physdev = None):
        # device is the device name, without /dev
        # physdev is the physical device path, which we (might) use to determine the userfriendly name
        if not physdev:
            dev, part = self.splitDeviceName(device)
            try:
                physdev = os.path.realpath('/sys/block/' + dev + '/device')[4:]
            except OSError:
                physdev = dev
                print "couldn't determine blockdev physdev for device", device

        error, blacklisted, removable, is_cdrom, partitions, medium_found = self.getBlockDevInfo(device)
        if not blacklisted and medium_found:
            description = self.getUserfriendlyDeviceName(device, physdev)
            p = Partition(mountpoint = self.getMountpoint(device), description = description, force_mounted = True, device = device)
            self.partitions.append(p)
            if p.mountpoint: # Plugins won't expect unmounted devices
                self.on_partition_list_change("add", p)
            # see if this is a harddrive
            l = len(device)
            if l and not device[l-1].isdigit():
                self.hdd.append(Harddisk(device, removable))
                self.hdd.sort()
                SystemInfo["Harddisk"] = True
        return error, blacklisted, removable, is_cdrom, partitions, medium_found

    def removeHotplugPartition(self, device):
        """Forget a device that was unplugged, stopping its idle poller."""
        for x in self.partitions[:]:
            if x.device == device:
                self.partitions.remove(x)
                if x.mountpoint: # Plugins won't expect unmounted devices
                    self.on_partition_list_change("remove", x)
        l = len(device)
        if l and not device[l-1].isdigit():
            for hdd in self.hdd:
                if hdd.device == device:
                    hdd.stop()
                    self.hdd.remove(hdd)
                    break
            SystemInfo["Harddisk"] = len(self.hdd) > 0

    def HDDCount(self):
        return len(self.hdd)

    def HDDList(self):
        """Return [(display string, Harddisk), ...] for every known disk."""
        list = [ ]
        for hd in self.hdd:
            hdd = hd.model() + " - " + hd.bus()
            cap = hd.capacity()
            if cap != "":
                hdd += " (" + cap + ")"
            list.append((hdd, hd))
        return list

    def getCD(self):
        return self.cd

    def getMountedPartitions(self, onlyhotplug = False, mounts=None):
        """Return mounted Partition objects, dropping a wholedisk entry when
        one of its partitions is present as well."""
        if mounts is None:
            mounts = getProcMounts()
        parts = [x for x in self.partitions if (x.is_hotplug or not onlyhotplug) and x.mounted(mounts)]
        devs = set([x.device for x in parts])
        for devname in devs.copy():
            if not devname:
                continue
            dev, part = self.splitDeviceName(devname)
            if part and dev in devs: # if this is a partition and we still have the wholedisk, remove wholedisk
                devs.remove(dev)
        # return all devices which are not removed due to being a wholedisk when a partition exists
        return [x for x in parts if not x.device or x.device in devs]

    def splitDeviceName(self, devname):
        # this works for: sdaX, hdaX, sr0 (which is in fact dev="sr0", part=""). It doesn't work for other names like mtdblock3, but they are blacklisted anyway.
        dev = devname[:3]
        part = devname[3:]
        for p in part:
            if not p.isdigit():
                return devname, 0
        return dev, part and int(part) or 0

    def getUserfriendlyDeviceName(self, dev, phys):
        """Build a display name from the sysfs model file, overridden by the
        per-box DEVICEDB table; partitions > 1 get a suffix."""
        dev, part = self.splitDeviceName(dev)
        description = "External Storage %s" % dev
        try:
            description = readFile("/sys" + phys + "/model")
        except IOError, s:
            print "couldn't read model: ", s
        from Tools.HardwareInfo import HardwareInfo
        for physdevprefix, pdescription in DEVICEDB.get(HardwareInfo().device_name,{}).items():
            if phys.startswith(physdevprefix):
                description = pdescription
        # not wholedisk and not partition 1
        if part and part != 1:
            description += " (Partition %d)" % part
        return description

    def addMountedPartition(self, device, desc):
        """Register an externally mounted location unless already known."""
        for x in self.partitions:
            if x.mountpoint == device:
                #already_mounted
                return
        self.partitions.append(Partition(mountpoint=device, description=desc))

    def removeMountedPartition(self, mountpoint):
        for x in self.partitions[:]:
            if x.mountpoint == mountpoint:
                self.partitions.remove(x)
                self.on_partition_list_change("remove", x)

    def setDVDSpeed(self, device, speed = 0):
        """Set the read speed of a CD/DVD drive via ioctl 0x5322.

        NOTE(review): 0x5322 is presumably CDROM_SELECT_SPEED — confirm
        against linux/cdrom.h.
        """
        ioctl_flag=int(0x5322)
        if not device.startswith('/'):
            device = "/dev/" + device
        try:
            from fcntl import ioctl
            cd = open(device)
            ioctl(cd.fileno(), ioctl_flag, speed)
            cd.close()
        except Exception, ex:
            print "[Harddisk] Failed to set %s speed to %s" % (device, speed), ex
class UnmountTask(Task.LoggingTask):
    """Task that force-unmounts every mount point backed by a Harddisk and
    removes the now-empty mount directories afterwards."""
    def __init__(self, job, hdd):
        Task.LoggingTask.__init__(self, job, _("Unmount"))
        self.hdd = hdd
        self.mountpoints = []

    def prepare(self):
        try:
            # The nomount marker file stops the automounter from re-mounting
            # the device while the job is running.
            dev = self.hdd.disk_path.split('/')[-1]
            open('/dev/nomount.%s' % dev, "wb").close()
        except Exception, e:
            print "ERROR: Failed to create /dev/nomount file:", e
        self.setTool('umount')
        self.args.append('-f')
        for dev in self.hdd.enumMountDevices():
            self.args.append(dev)
            # NOTE(review): one ReturncodePostcondition is appended per
            # mount point, so several mounts add duplicate postconditions.
            self.postconditions.append(Task.ReturncodePostcondition())
            self.mountpoints.append(dev)
        if not self.mountpoints:
            print "UnmountTask: No mountpoints found?"
            # Nothing to unmount: degrade to a no-op command.
            self.cmd = 'true'
            self.args = [self.cmd]

    def afterRun(self):
        for path in self.mountpoints:
            try:
                os.rmdir(path)
            except Exception, ex:
                print "Failed to remove path '%s':" % path, ex
class MountTask(Task.LoggingTask):
    """Task that (re)mounts a Harddisk's filesystem, via /etc/fstab when
    possible, otherwise by letting udev re-read the partition table."""
    def __init__(self, job, hdd):
        Task.LoggingTask.__init__(self, job, _("Mount"))
        self.hdd = hdd

    def prepare(self):
        try:
            # Remove the nomount marker created by UnmountTask so the
            # automounter may act on this device again.
            dev = self.hdd.disk_path.split('/')[-1]
            os.unlink('/dev/nomount.%s' % dev)
        except Exception, e:
            print "ERROR: Failed to remove /dev/nomount file:", e
        # try mounting through fstab first
        if self.hdd.mount_device is None:
            dev = self.hdd.partitionPath("1")
        else:
            # if previously mounted, use the same spot
            dev = self.hdd.mount_device
        fstab = open("/etc/fstab")
        lines = fstab.readlines()
        fstab.close()
        for line in lines:
            parts = line.strip().split(" ")
            fspath = os.path.realpath(parts[0])
            if os.path.realpath(fspath) == dev:
                self.setCmdline("mount -t auto " + fspath)
                self.postconditions.append(Task.ReturncodePostcondition())
                return
        # device is not in fstab
        if self.hdd.type == DEVTYPE_UDEV:
            # we can let udev do the job, re-read the partition table
            # Sorry for the sleep 2 hack...
            self.setCmdline('sleep 2; sfdisk -R ' + self.hdd.disk_path)
            self.postconditions.append(Task.ReturncodePostcondition())
class MkfsTask(Task.LoggingTask):
    """mkfs task that translates mke2fs progress output into job progress."""
    def prepare(self):
        # Tracks which phase of mke2fs output we are currently parsing.
        self.fsck_state = None

    def processOutput(self, data):
        print "[Mkfs]", data
        if 'Writing inode tables:' in data:
            self.fsck_state = 'inode'
        elif 'Creating journal' in data:
            self.fsck_state = 'journal'
            self.setProgress(80)
        elif 'Writing superblocks ' in data:
            self.setProgress(95)
        elif self.fsck_state == 'inode':
            if '/' in data:
                # Inode phase prints "current/total" counters separated by
                # backspace characters; map them onto 0..80% progress.
                try:
                    d = data.strip(' \x08\r\n').split('/',1)
                    if ('\x08' in d[1]):
                        d[1] = d[1].split('\x08',1)[0]
                    self.setProgress(80*int(d[0])/int(d[1]))
                except Exception, e:
                    print "[Mkfs] E:", e
                return # don't log the progess
        self.log.append(data)
# Module-level singleton, created (and devices scanned) at import time.
harddiskmanager = HarddiskManager()

# Publish ext4 availability for the rest of the UI.
SystemInfo["ext4"] = isFileSystemSupported("ext4")
| gpl-2.0 |
WarriorIng64/patient-data-sharing | calculate_percentage.py | 1 | 4032 | # This file is part of patient-data-sharing.
# Copyright (C) 2014 Christopher Kyle Horton <chorton@ltu.edu>
# patient-data-sharing is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# patient-data-sharing is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with patient-data-sharing. If not, see <http://www.gnu.org/licenses/>.
# MCS 5603 Intro to Bioinformatics, Fall 2014
# Christopher Kyle Horton (000516274), chorton@ltu.edu
# Last modified: 11/18/2014
import argparse
import csv
import suite
# --help banner: tool name/version plus a summary of what the script computes.
desc = suite.SUITE_NAME + "/calculate_percentage " + suite.SUITE_VERSION
desc += """\nCalculates the percentage of selected patients who are negative
for both estrogen and progesterone receptors."""
def are_negative_for(percent, what):
    """Format one report line.

    *percent* is a fraction (0.0-1.0) rendered as a two-decimal percentage;
    *what* names the receptor group the patients are negative for.
    """
    template = '{:.2%} are negative for {}.'
    return template.format(percent, what)
#============================================================================
# Main program code
#============================================================================

parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=desc
    )
parser.add_argument("reply", help="Input reply file.")
parser.add_argument("-o",
                    "--outfile",
                    type=str,
                    help="Output file name. Use standard output if omitted.")
args = parser.parse_args()
suite.check_file_exists(args.reply)

# Get the number of patients in the reply and tally those negative for both
# estrogen and progesterone receptors
total_rows = 0
total_both_negative = 0
# The two single-receptor counters are exclusive: ER-negative only and
# PgR-negative only, respectively.
total_er_negative = 0
total_pgr_negative = 0
with open(args.reply, 'r') as f:
    reader = csv.DictReader(f)
    for row in reader:
        total_rows += 1
        if int(row['er']) == suite.NEGATIVE:
            if int(row['pgr']) == suite.NEGATIVE:
                total_both_negative += 1
            else:
                total_er_negative += 1
        elif int(row['pgr']) == suite.NEGATIVE:
            total_pgr_negative += 1

if total_rows < 1:
    print "No data found to analyze."
    exit(2)

# Derive the percentages; "any" aggregates the three disjoint categories.
any_negative_total = total_both_negative + total_er_negative + total_pgr_negative
both_percentage = float(total_both_negative) / float(total_rows)
er_percentage = float(total_er_negative) / float(total_rows)
pgr_percentage = float(total_pgr_negative) / float(total_rows)
any_negative_percentage = float(any_negative_total) / float(total_rows)
not_negative_percentage = 1.0 - any_negative_percentage

total_str = '{} total patients were examined.'.format(total_rows)
er_str = are_negative_for(er_percentage, 'only estrogen receptors')
pgr_str = are_negative_for(pgr_percentage, 'only progesterone receptors')
both_str = are_negative_for(both_percentage,
                            'both estrogen and progesterone receptors')
any_str = are_negative_for(any_negative_percentage,
                           'either estrogen or progesterone receptors, or both')
not_str = are_negative_for(not_negative_percentage,
                           'neither estrogen nor progesterone receptors')

if args.outfile:
    # Write to file
    with open(args.outfile, 'w') as f:
        f.write(total_str + '\n')
        f.write(er_str + '\n')
        f.write(pgr_str + '\n')
        f.write(both_str + '\n')
        f.write(any_str + '\n')
        f.write(not_str + '\n')
else:
    # Print to standard output
    print total_str
    print er_str
    print pgr_str
    print both_str
    print any_str
    print not_str

exit(0)
| gpl-3.0 |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/tlslite/tlslite/utils/asn1parser.py | 206 | 1191 | # Author: Trevor Perrin
# Patch from Google adding getChildBytes()
#
# See the LICENSE file for legal information regarding use of this file.
"""Class for parsing ASN.1"""
from .compat import *
from .codec import *
#Takes a byte array which has a DER TLV field at its head
class ASN1Parser(object):
    """Parse the single ASN.1 DER TLV element at the head of a byte array.

    Minimal reader: skips the Type octet, decodes the (definite) Length
    and exposes the Value; children of a SEQUENCE are reached by index.
    """
    def __init__(self, bytes):
        p = Parser(bytes)
        p.get(1) #skip Type

        #Get Length
        self.length = self._getASN1Length(p)

        #Get Value
        self.value = p.getFixBytes(self.length)

    #Assuming this is a sequence...
    def getChild(self, which):
        """Return child number *which* (0-based) wrapped in an ASN1Parser."""
        return ASN1Parser(self.getChildBytes(which))

    def getChildBytes(self, which):
        """Return the raw TLV bytes (type + length + value) of child *which*.

        Skips over the preceding children; markIndex remembers where the
        requested child's TLV started.
        """
        p = Parser(self.value)
        for x in range(which+1):
            markIndex = p.index
            p.get(1) #skip Type
            length = self._getASN1Length(p)
            p.getFixBytes(length)
        return p.bytes[markIndex : p.index]

    #Decode the ASN.1 DER length field
    def _getASN1Length(self, p):
        """Short form: one octet <= 127 is the length itself.
        Long form: 0x80 | n, followed by n big-endian length octets."""
        firstLength = p.get(1)
        if firstLength<=127:
            return firstLength
        else:
            lengthLength = firstLength & 0x7F
            return p.get(lengthLength)
| bsd-3-clause |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/scons-2.0.1/engine/SCons/Tool/zip.py | 61 | 3317 | """SCons.Tool.zip
Tool-specific initialization for zip.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/zip.py 5134 2010/08/16 23:02:40 bdeegan"
import os.path
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util
# Prefer the internal zipfile module; fall back to an external 'zip' tool.
try:
    import zipfile
    internal_zip = 1
except ImportError:
    internal_zip = 0

if internal_zip:
    zipcompression = zipfile.ZIP_DEFLATED

    def zip(target, source, env):
        """Builder action: archive the *source* nodes into target[0].

        Directory sources are walked recursively; only regular files are
        stored.  $ZIPCOMPRESSION selects stored vs. deflated entries.
        """
        compression = env.get('ZIPCOMPRESSION', 0)
        zf = zipfile.ZipFile(str(target[0]), 'w', compression)
        try:
            for s in source:
                if s.isdir():
                    for dirpath, dirnames, filenames in os.walk(str(s)):
                        for fname in filenames:
                            path = os.path.join(dirpath, fname)
                            if os.path.isfile(path):
                                zf.write(path)
                else:
                    zf.write(str(s))
        finally:
            # Close even when a write raises, so a partial archive is not
            # left open (the previous version leaked the handle on error).
            zf.close()
else:
    zipcompression = 0
    zip = "$ZIP $ZIPFLAGS ${TARGET.abspath} $SOURCES"
# Wrap the zip action so targets are rebuilt when $ZIPCOMPRESSION changes.
zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION'])

ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'),
                                   source_factory = SCons.Node.FS.Entry,
                                   source_scanner = SCons.Defaults.DirScanner,
                                   suffix = '$ZIPSUFFIX',
                                   multi = 1)
def generate(env):
    """Add Builders and construction variables for zip to an Environment."""
    # Register the Zip builder only if the Environment lacks one.
    try:
        bld = env['BUILDERS']['Zip']
    except KeyError:
        bld = ZipBuilder
        env['BUILDERS']['Zip'] = bld

    env['ZIP'] = 'zip'
    env['ZIPFLAGS'] = SCons.Util.CLVar('')
    env['ZIPCOM'] = zipAction
    env['ZIPCOMPRESSION'] = zipcompression
    env['ZIPSUFFIX'] = '.zip'
def exists(env):
    # The tool is usable when the zipfile module imported, or when an
    # external 'zip' executable can be detected.
    return internal_zip or env.Detect('zip')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| bsd-3-clause |
banglakit/spaCy | spacy/tests/gold/test_biluo.py | 4 | 1837 | # coding: utf-8
from __future__ import unicode_literals
from ...gold import biluo_tags_from_offsets
from ...tokens.doc import Doc
import pytest
def test_gold_biluo_U(en_vocab):
    """A single-token entity gets the U (unit) tag."""
    words = [('I', True), ('flew', True), ('to', True),
             ('London', False), ('.', True)]
    doc = Doc(en_vocab, orths_and_spaces=words)
    offsets = [(len("I flew to "), len("I flew to London"), 'LOC')]
    assert biluo_tags_from_offsets(doc, offsets) == ['O', 'O', 'O', 'U-LOC', 'O']
def test_gold_biluo_BL(en_vocab):
    """A two-token entity gets B (begin) followed by L (last)."""
    words = [('I', True), ('flew', True), ('to', True), ('San', True),
             ('Francisco', False), ('.', True)]
    doc = Doc(en_vocab, orths_and_spaces=words)
    offsets = [(len("I flew to "), len("I flew to San Francisco"), 'LOC')]
    biluo = biluo_tags_from_offsets(doc, offsets)
    assert biluo == ['O', 'O', 'O', 'B-LOC', 'L-LOC', 'O']
def test_gold_biluo_BIL(en_vocab):
    """A three-token entity gets B, I (inside) and L tags."""
    words = [('I', True), ('flew', True), ('to', True), ('San', True),
             ('Francisco', True), ('Valley', False), ('.', True)]
    doc = Doc(en_vocab, orths_and_spaces=words)
    offsets = [(len("I flew to "), len("I flew to San Francisco Valley"), 'LOC')]
    biluo = biluo_tags_from_offsets(doc, offsets)
    assert biluo == ['O', 'O', 'O', 'B-LOC', 'I-LOC', 'L-LOC', 'O']
def test_gold_biluo_misalign(en_vocab):
    """An entity boundary inside the token 'Valley.' marks tokens with '-'."""
    words = [('I', True), ('flew', True), ('to', True), ('San', True),
             ('Francisco', True), ('Valley.', False)]
    doc = Doc(en_vocab, orths_and_spaces=words)
    offsets = [(len("I flew to "), len("I flew to San Francisco Valley"), 'LOC')]
    biluo = biluo_tags_from_offsets(doc, offsets)
    assert biluo == ['O', 'O', 'O', '-', '-', '-']
| mit |
elan17/Conway-s-Game-of-Life-Python3 | Interfaz.py | 1 | 5954 | import shutil
import Nucleo
import multiprocessing
import time
class Interfaz:
    """Console front-end for the Game of Life engine (Nucleo.Mundo).

    Handles all user interaction (mode, cycle timing, cycle limit, file
    paths), renders the visible window of the world to the terminal and
    prints run statistics at the end.
    """

    def __init__(self, center=(0, 0)):
        # center: world coordinate displayed at the middle of the screen.
        self.archivos = Archivos()
        # Leave a 2-cell margin so the rendered frame fits in the terminal.
        self.altura = shutil.get_terminal_size().lines - 2
        self.anchura = shutil.get_terminal_size().columns - 2
        self.carga = self.cargar()  # [0] load path, [1] save path
        self.mundo = Nucleo.Mundo(coordinates=self.archivos.load(self.carga[0]))
        self.printear = False  # True -> render every cycle (graphic mode)
        self.limite = 0        # cycles to run (negative = run forever)
        self.tiempo = 0        # minimum seconds between rendered cycles
        self.center = center
        self.diagonal = self.get_diagonal()  # [0] top_left, [1] bot_right

    def inicio(self):
        """Interactive setup: choose graphic/compute-only mode, then run."""
        print("Bienvenido al Juego de la Vida")
        print("Idea original de John Conway")
        respuesta = ""
        while respuesta not in ("1", "2"):
            respuesta = input("\n\nElija el modo:\n1-Gráfico\n2-Solo cálculo\n[1/2]-->")
        if respuesta == "1":
            self.printear = True
            tiempo = None
            # Keep asking until the user types a valid float.
            while tiempo is None:
                respuesta1 = input("Elija el tiempo entre ciclos\n-->")
                try:
                    tiempo = float(respuesta1)
                    self.tiempo = tiempo
                except ValueError:
                    pass
            self.printear = True  # NOTE(review): redundant, set above already
        self.limit()
        self.control()

    def cargar(self):  # Ask the user for the load/save file paths
        load = input("Elija la ruta del archivo a cargar\n-->")
        save = input("Elija la ruta del archivo donde volcar los resultados\n-->")
        return load, save

    def limit(self):
        """Ask for the cycle limit until a valid integer is entered."""
        limite = None
        while limite is None:
            respuesta2 = input("Elija el limite de ciclos(números negativos = ciclos infinitos)"
                               "(No acepta decimales)\n-->")
            try:
                limite = int(respuesta2)
            except ValueError:
                pass
        self.limite = limite

    def control(self):
        """Main loop: run cycles on a worker pool, render, report statistics."""
        pool = multiprocessing.Pool(multiprocessing.cpu_count())
        tiempo_inicial = time.time()
        resultados = {}
        contador = 0
        # A negative limit is never reached by the counter -> infinite run.
        while contador != self.limite:
            tiempo_ciclo = time.time()
            resultados = self.mundo.run(pool)
            if self.printear:
                self.printea(resultados["cells"])
            # Sleep only for whatever is left of the per-cycle time budget.
            tiempo = self.tiempo - (time.time() - tiempo_ciclo)
            if tiempo > 0:
                time.sleep(tiempo)
            contador += 1
        tiempo = time.time() - tiempo_inicial
        # NOTE(review): a limit of 0 reaches the divisions below with
        # self.limite == 0 and raises ZeroDivisionError -- confirm whether
        # 0 should be rejected in limit().
        print("Tiempo total: " + str(tiempo))
        print("Número de células: " + str(len(resultados["cells"])))
        print("Media por ciclo: " + str(tiempo/self.limite))
        print("Ciclos por segundo: " + str(self.limite/tiempo))
        self.archivos.save(resultados["cells"], arch=self.carga[1])

    def printea(self, cells, living="#", dead=" "):
        """Render the visible window of *cells* (live coordinates) as text.

        living/dead are the glyphs used for live and empty positions.
        """
        mapa = ""
        top = self.diagonal[0]
        bot = self.diagonal[1]
        coordinates = top
        # Walk the window left-to-right, top-to-bottom; the sentinel is the
        # first position of the row just below the bottom edge.
        while coordinates != (top[0], bot[1]-1):
            if coordinates in cells:
                mapa += living
            else:
                mapa += dead
            if coordinates[0] == bot[0]:  # jump to the next line
                coordinates = (top[0], coordinates[1]-1)
                mapa += "\n"
            else:  # advance one character to the right
                coordinates = (coordinates[0]+1, coordinates[1])
        print(mapa)

    def get_diagonal(self):
        """Return the (top_left, bottom_right) corners of the visible window."""
        top = (int(- self.anchura/2 + self.center[0]), int(self.altura/2 - self.center[1]))
        bot = (int(self.anchura/2 + self.center[0]), int(self.center[1] - self.altura/2))
        return top, bot
class Archivos:
    """Plaintext persistence for board patterns.

    Format: one row per line, 'O' for a living cell and any other character
    (conventionally '.') for a dead one; lines starting with '!' are
    comments and blank lines are ignored.
    """

    def load(self, arch="./output.txt"):
        """Read a pattern file and return the list of live (x, y) coordinates.

        The pattern is centred on the origin: x starts at -width/2 and y
        starts at +height/2, decreasing one row at a time.
        """
        cells = []
        with open(arch, "r") as fh:
            lineas = fh.read().split("\n")
        # Drop blank lines and '!' comments.  The previous implementation
        # popped items from the list while iterating over it, which skipped
        # the element following every removal and could leave blank or
        # comment lines in the grid, shifting every coordinate.
        lineas = [linea for linea in lineas if linea != "" and linea[0] != "!"]
        if not lineas:
            return cells  # empty or comment-only file -> no live cells
        ylen = len(lineas)
        xlen = len(lineas[0])
        countery = int(ylen / 2)
        for fila in lineas:
            counterx = int(-xlen / 2)
            for caracter in fila:
                if caracter == "O":
                    cells.append((counterx, countery))
                counterx += 1
            countery -= 1
        return cells

    def save(self, celulas, arch="./output.txt"):
        """Write the live cells of *celulas* (a {(x, y): ...} dict) to *arch*."""
        # Shift coordinates so every value is non-negative; the row walk
        # below relies on the left edge being x == 0.
        cells = self.coordenadas_positivas(celulas)
        claves = cells.keys()
        mapa = ""
        top = [0, 0]  # becomes (min x, max y): top-left corner
        bot = [0, 0]  # becomes (max x, min y): bottom-right corner
        for x in claves:
            if x[0] < top[0]:
                top[0] = x[0]
            elif x[0] > bot[0]:
                bot[0] = x[0]
            if x[1] > top[1]:
                top[1] = x[1]
            elif x[1] < bot[1]:
                bot[1] = x[1]
        coordinates = tuple(top)
        limite = tuple(bot)
        # Walk the bounding box row by row, left to right, top to bottom;
        # the sentinel is the first position below the last row.
        while coordinates != (0, limite[1] - 1):
            if coordinates in cells:
                mapa += "O"
            else:
                mapa += "."
            if coordinates[0] == bot[0]:
                # End of the row: jump to the start of the line below.
                coordinates = (top[0], coordinates[1] - 1)
                mapa += "\n"
            else:
                # Advance one character to the right.
                coordinates = (coordinates[0] + 1, coordinates[1])
        with open(arch, "w") as fh:
            fh.write(mapa)

    def coordenadas_positivas(self, cells):
        """Return a copy of *cells* translated so every coordinate is >= 0.

        save() needs this so its bounding-box walk can start at x == 0.
        """
        claves = list(cells.keys())
        diccionario = {}
        min_x = 0
        min_y = 0
        for coord in claves:
            if coord[0] < min_x:
                min_x = coord[0]
            if coord[1] < min_y:
                min_y = coord[1]
        for coord in claves:
            diccionario[(coord[0] - min_x, coord[1] - min_y)] = None
        return diccionario
if __name__ == "__main__":
    # Entry point: build the console UI centred on the origin and start
    # the interactive setup.
    interfaz = Interfaz()
    interfaz.inicio()
| gpl-3.0 |
jjaner/essentia-musicbricks | test/src/unittest/rhythm/test_bpmhistogramdescriptors.py | 10 | 3299 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestBpmHistogramDescriptors(TestCase):
    """Unit tests for the BpmHistogramDescriptors algorithm."""

    @staticmethod
    def _intervals(bpms):
        # Convert BPM values to inter-beat intervals in seconds.
        return [60. / bpm for bpm in bpms]

    def _descriptors(self, intervals):
        # Run the algorithm; returns (bpm1, weight1, spread1, bpm2, weight2, spread2).
        return BpmHistogramDescriptors()(intervals)

    def _assertAllZero(self, outputs):
        bpm1, weight1, spread1, bpm2, weight2, spread2 = outputs
        for value in (bpm1, weight1, spread1, bpm2, weight2, spread2):
            self.assertEqual(value, 0)

    def testEmpty(self):
        self._assertAllZero(self._descriptors([]))

    def testZero(self):
        self._assertAllZero(self._descriptors([0]))

    def testOne(self):
        bpm1, weight1, spread1, bpm2, weight2, spread2 = self._descriptors([0.5])
        self.assertEqual(bpm1, 120)
        self.assertEqual(weight1, 1)
        self.assertEqual(spread1, 0)
        self.assertEqual(bpm2, 0)
        self.assertEqual(weight2, 0)
        self.assertEqual(spread2, 0)

    def testAbnormalValues(self):
        # Out-of-range BPMs (negative, > upper bound) must yield all zeros.
        self._assertAllZero(self._descriptors(self._intervals([-100, 300])))

    def testRounding(self):
        bpm1 = self._descriptors([60. / 100.5])[0]
        self.assertEqual(bpm1, 101)

    def testRegression(self):
        bpms = [118, 119, 120, 120, 121, 122, 98, 99, 99, 100,
                100, 100, 100, 101, 101, 102]
        intervals = self._intervals(bpms)
        intervals.append(0)  # an extra zero must be dropped by the algorithm
        bpm1, weight1, spread1, bpm2, weight2, spread2 = self._descriptors(intervals)
        self.assertAlmostEqual(bpm1, 100, 1e-5)
        self.assertAlmostEqual(weight1, 0.5, 1e-5)
        self.assertAlmostEqual(spread1, 0.2, 1e-5)
        self.assertAlmostEqual(bpm2, 120, 1e-5)
        self.assertAlmostEqual(weight2, 0.25, 1e-5)
        self.assertAlmostEqual(spread2, 0.333333, 1e-5)
# Standard Essentia test boilerplate: collect every test method of the
# class and run them verbosely when this file is executed directly.
suite = allTests(TestBpmHistogramDescriptors)

if __name__ == '__main__':
    TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 |
dgzurita/odoo | addons/crm_project_issue/project_issue.py | 380 | 2373 |
from openerp.osv import osv, fields
class crm_lead_to_project_issue_wizard(osv.TransientModel):
    """ wizard to convert a Lead into a Project Issue and move the Mail Thread """
    _name = "crm.lead2projectissue.wizard"
    _inherit = 'crm.partner.binding'

    _columns = {
        # Defaults to the lead the wizard was launched from (active_id).
        "lead_id": fields.many2one("crm.lead", "Lead", domain=[("type", "=", "lead")]),
        # Only projects configured to track issues are valid targets.
        "project_id": fields.many2one("project.project", "Project", domain=[("use_issues", "=", True)])
    }

    _defaults = {
        "lead_id": lambda self, cr, uid, context=None: context.get('active_id')
    }

    def action_lead_to_project_issue(self, cr, uid, ids, context=None):
        """Convert the selected lead(s) into project issues.

        For each wizard: resolve or create the partner, create the issue,
        move the lead's mail thread onto it and delete the lead.  Returns
        an act_window opening the form view of the created issue.
        """
        # get the wizards and models
        wizards = self.browse(cr, uid, ids, context=context)
        Lead = self.pool["crm.lead"]
        Issue = self.pool["project.issue"]

        for wizard in wizards:
            # get the lead to transform
            lead = wizard.lead_id

            partner = self._find_matching_partner(cr, uid, context=context)
            if not partner and (lead.partner_name or lead.contact_name):
                # No existing match: let the lead logic create/assign one.
                partner_ids = Lead.handle_partner_assignation(cr, uid, [lead.id], context=context)
                partner = partner_ids[lead.id]

            # create new project.issue
            vals = {
                "name": lead.name,
                "description": lead.description,
                "email_from": lead.email_from,
                "project_id": wizard.project_id.id,
                "partner_id": partner,
                "user_id": None
            }
            # NOTE(review): context=None (rather than context) is passed to
            # create() and unlink() below -- looks unintended; confirm.
            issue_id = Issue.create(cr, uid, vals, context=None)
            # move the mail thread
            Lead.message_change_thread(cr, uid, lead.id, issue_id, "project.issue", context=context)
            # delete the lead
            Lead.unlink(cr, uid, [lead.id], context=None)

        # return the action to go to the form view of the new Issue
        view_id = self.pool.get('ir.ui.view').search(cr, uid, [('model', '=', 'project.issue'), ('name', '=', 'project_issue_form_view')])
        return {
            'name': 'Issue created',
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': view_id,
            'res_model': 'project.issue',
            'type': 'ir.actions.act_window',
            'res_id': issue_id,
            'context': context
        }
| agpl-3.0 |
blaa/WifiStalker | wifistalker/model/db.py | 1 | 1433 | # Author: Tomasz bla Fortuna
# License: GPLv2
import re
from time import time
import datetime
from frames import Frames
from knowledge import Knowledge
#from map import Map
from geo import Geo
from graph import Graph
class DB(object):
"Link to MongoDB + model factory"
def __init__(self, db_conn, db_name):
from pymongo import MongoClient
print "Opening MongoDB connection"
self.conn = MongoClient(host=db_conn)
self.db = self.conn[db_name]
# Open subcollections
self.knowledge = Knowledge(self)
self.frames = Frames(self)
#self.map = Map(self)
self.geo = Geo(self)
# Logging
from wifistalker import Log
header = 'DB'
self.log = Log(self, use_stdout=True, header=header)
# Log collection
self._log = self['log']
self._log.ensure_index('stamp_utc', expireAfterSeconds=60*60)
def get_graph(self, mac):
"Create graph object"
return Graph(self, mac)
def __getitem__(self, collection):
"Create/get a collection"
return self.db[collection]
def log_add(self, s):
obj = {
'msg': s,
'stamp_utc': datetime.datetime.utcnow(),
}
self._log.insert(obj)
def log_get(self, count=10):
logs = list(self._log.find().sort('stamp_utc', -1)[:count])
return [x['msg'] for x in reversed(logs)]
| gpl-2.0 |
glaubitz/fs-uae-debian | arcade/OpenGL/WGL/NV/vertex_array_range.py | 8 | 4786 | '''OpenGL extension NV.vertex_array_range
This module customises the behaviour of the
OpenGL.raw.WGL.NV.vertex_array_range to provide a more
Python-friendly API
Overview (from the spec)
The goal of this extension is to permit extremely high vertex
processing rates via OpenGL vertex arrays even when the CPU lacks
the necessary data movement bandwidth to keep up with the rate
at which the vertex engine can consume vertices. CPUs can keep
up if they can just pass vertex indices to the hardware and
let the hardware "pull" the actual vertex data via Direct Memory
Access (DMA). Unfortunately, the current OpenGL 1.1 vertex array
functionality has semantic constraints that make such an approach
hard. Hence, the vertex array range extension.
This extension provides a mechanism for deferring the pulling of
vertex array elements to facilitate DMAed pulling of vertices for
fast, efficient vertex array transfers. The OpenGL client need only
pass vertex indices to the hardware which can DMA the actual index's
vertex data directly out of the client address space.
The OpenGL 1.1 vertex array functionality specifies a fairly strict
coherency model for when OpenGL extracts vertex data from a vertex
array and when the application can update the in memory
vertex array data. The OpenGL 1.1 specification says "Changes
made to array data between the execution of Begin and the
corresponding execution of End may affect calls to ArrayElement
that are made within the same Begin/End period in non-sequential
ways. That is, a call to ArrayElement that precedes a change to
array data may access the changed data, and a call that follows
a change to array data may access the original data."
This means that by the time End returns (and DrawArrays and
DrawElements return since they have implicit Ends), the actual vertex
array data must be transferred to OpenGL. This strict coherency model
prevents us from simply passing vertex element indices to the hardware
and having the hardware "pull" the vertex data out (which is often
long after the End for the primitive has returned to the application).
Relaxing this coherency model and bounding the range from which
vertex array data can be pulled is key to making OpenGL vertex
array transfers faster and more efficient.
The first task of the vertex array range extension is to relax
the coherency model so that hardware can indeed "pull" vertex
data from the OpenGL client's address space long after the application
has completed sending the geometry primitives requiring the vertex
data.
The second problem with the OpenGL 1.1 vertex array functionality is
the lack of any guidance from the API about what region of memory
vertices can be pulled from. There is no size limit for OpenGL 1.1
vertex arrays. Any vertex index that points to valid data in all
enabled arrays is fair game. This makes it hard for a vertex DMA
engine to pull vertices since they can be potentially pulled from
anywhere in the OpenGL client address space.
The vertex array range extension specifies a range of the OpenGL
client's address space where vertices can be pulled. Vertex indices
that access any array elements outside the vertex array range
are specified to be undefined. This permits hardware to DMA from
finite regions of OpenGL client address space, making DMA engine
implementation tractable.
The extension is specified such that an (error free) OpenGL client
using the vertex array range functionality could no-op its vertex
array range commands and operate equivalently to using (if slower
than) the vertex array range functionality.
Because different memory types (local graphics memory, AGP memory)
have different DMA bandwidths and caching behavior, this extension
includes a window system dependent memory allocator to allocate
cleanly the most appropriate memory for constructing a vertex array
range. The memory allocator provided allows the application to
tradeoff the desired CPU read frequency, CPU write frequency, and
memory priority while still leaving it up to OpenGL implementation
the exact memory type to be allocated.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/vertex_array_range.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.WGL import _types, _glgets
from OpenGL.raw.WGL.NV.vertex_array_range import *
from OpenGL.raw.WGL.NV.vertex_array_range import _EXTENSION_NAME
def glInitVertexArrayRangeNV():
    """Return whether the WGL_NV_vertex_array_range extension is available."""
    from OpenGL import extensions
    available = extensions.hasGLExtension(_EXTENSION_NAME)
    return available
### END AUTOGENERATED SECTION | gpl-2.0 |
bionicbone/Marlin | buildroot/share/scripts/createTemperatureLookupMarlin.py | 89 | 6252 | #!/usr/bin/python
"""Thermistor Value Lookup Table Generator
Generates lookup to temperature values for use in a microcontroller in C format based on:
http://en.wikipedia.org/wiki/Steinhart-Hart_equation
The main use is for Arduino programs that read data from the circuit board described here:
http://reprap.org/wiki/Temperature_Sensor_v2.0
Usage: python createTemperatureLookup.py [options]
Options:
-h, --help show this help
--rp=... pull-up resistor
--t1=ttt:rrr low temperature temperature:resistance point (around 25 degC)
--t2=ttt:rrr middle temperature temperature:resistance point (around 150 degC)
--t3=ttt:rrr high temperature temperature:resistance point (around 250 degC)
--num-temps=... the number of temperature points to calculate (default: 36)
"""
from math import *
import sys
import getopt
"Constants"
ZERO = 273.15 # zero point of Kelvin scale
VADC = 5 # ADC voltage
VCC = 5 # supply voltage
ARES = pow(2,10) # 10 Bit ADC resolution
VSTEP = VADC / ARES # ADC voltage resolution
TMIN = 0 # lowest temperature in table
TMAX = 350 # highest temperature in table
class Thermistor:
    "Class to do the thermistor maths"
    # Fits the Steinhart-Hart model 1/T = a + b*ln(R) + c*ln(R)^3 through
    # three (temperature, resistance) calibration points, then converts
    # between ADC counts, voltage, resistance and temperature.

    def __init__(self, rp, t1, r1, t2, r2, t3, r3):
        # rp: pull-up resistance [Ohm]; (tN, rN): calibration points with
        # temperature in degC and resistance in Ohm.
        l1 = log(r1)
        l2 = log(r2)
        l3 = log(r3)
        y1 = 1.0 / (t1 + ZERO)  # convert degC to inverse Kelvin
        y2 = 1.0 / (t2 + ZERO)
        y3 = 1.0 / (t3 + ZERO)
        # Solve the 3x3 linear system for the Steinhart-Hart coefficients.
        x = (y2 - y1) / (l2 - l1)
        y = (y3 - y1) / (l3 - l1)
        c = (y - x) / ((l3 - l2) * (l1 + l2 + l3))
        b = x - c * (l1**2 + l2**2 + l1*l2)
        a = y1 - (b + l1**2 *c)*l1

        if c < 0:
            # A negative cubic coefficient is physically implausible for an
            # NTC thermistor; warn and flip the sign so the fit stays usable.
            print "//////////////////////////////////////////////////////////////////////////////////////"
            print "// WARNING: negative coefficient 'c'! Something may be wrong with the measurements! //"
            print "//////////////////////////////////////////////////////////////////////////////////////"
            c = -c
        self.c1 = a  # Steinhart-Hart coefficients
        self.c2 = b
        self.c3 = c
        self.rp = rp  # pull-up resistance

    def resol(self, adc):
        "Convert ADC reading into a resolution"
        # Temperature difference between two adjacent ADC counts (degC/count).
        res = self.temp(adc)-self.temp(adc+1)
        return res

    def voltage(self, adc):
        "Convert ADC reading into a Voltage"
        return adc * VSTEP  # convert the 10 bit ADC value to a voltage

    def resist(self, adc):
        "Convert ADC reading into a resistance in Ohms"
        # Voltage divider: thermistor to ground, rp pull-up to VCC.
        r = self.rp * self.voltage(adc) / (VCC - self.voltage(adc))  # resistance of thermistor
        return r

    def temp(self, adc):
        "Convert ADC reading into a temperature in Celcius"
        l = log(self.resist(adc))
        Tinv = self.c1 + self.c2*l + self.c3* l**3  # inverse temperature [1/K]
        return (1/Tinv) - ZERO  # temperature back in degC

    def adc(self, temp):
        "Convert temperature into a ADC reading"
        # Invert Steinhart-Hart (cubic in ln R) via Cardano's formula to get
        # R, then map R through the divider back to an ADC count.
        x = (self.c1 - (1.0 / (temp+ZERO))) / (2*self.c3)
        y = sqrt((self.c2 / (3*self.c3))**3 + x**2)
        r = exp((y-x)**(1.0/3) - (y+x)**(1.0/3))
        return (r / (self.rp + r)) * ARES
def main(argv):
    "Default values"
    t1 = 25          # low calibration temperature in degC (around 25 degC)
    r1 = 100000      # resistance at low temperature (100 kOhm)
    t2 = 150         # middle calibration temperature in degC
    r2 = 1641.9      # resistance at middle temperature (1.6 kOhm)
    t3 = 250         # high calibration temperature in degC
    r3 = 226.15      # resistance at high temperature (226.15 Ohm)
    rp = 4700;       # pull-up resistor (4.7 kOhm)
    num_temps = 36;  # number of entries for look-up table

    try:
        opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="])
    except getopt.GetoptError as err:
        # Unknown option or missing argument: show the error plus the help.
        print str(err)
        usage()
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt == "--rp":
            rp = int(arg)
        elif opt == "--t1":
            # value is a "temperature:resistance" pair
            arg = arg.split(':')
            t1 = float(arg[0])
            r1 = float(arg[1])
        elif opt == "--t2":
            arg = arg.split(':')
            t2 = float(arg[0])
            r2 = float(arg[1])
        elif opt == "--t3":
            arg = arg.split(':')
            t3 = float(arg[0])
            r3 = float(arg[1])
        elif opt == "--num-temps":
            num_temps = int(arg)

    t = Thermistor(rp, t1, r1, t2, r2, t3, r3)

    # NOTE(review): `increment` and `min_temp` below are computed but never
    # used -- confirm before removing.
    increment = int((ARES-1)/(num_temps-1));
    step = (TMIN-TMAX) / (num_temps-1)   # negative: table runs hot -> cold
    low_bound = t.temp(ARES-1);          # coldest temperature the ADC can express
    up_bound = t.temp(1);                # hottest temperature the ADC can express
    min_temp = int(TMIN if TMIN > low_bound else low_bound)
    max_temp = int(TMAX if TMAX < up_bound else up_bound)
    temps = range(max_temp, TMIN+step, step);

    # Emit the C lookup table, one "{ raw, degC }" row per temperature.
    print "// Thermistor lookup table for Marlin"
    print "// ./createTemperatureLookupMarlin.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps)
    print "// Steinhart-Hart Coefficients: a=%.15g, b=%.15g, c=%.15g " % (t.c1, t.c2, t.c3)
    print "// Theoretical limits of termistor: %.2f to %.2f degC" % (low_bound, up_bound)
    print
    print "#define NUMTEMPS %s" % (len(temps))
    print "const short temptable[NUMTEMPS][2] PROGMEM = {"
    for temp in temps:
        adc = t.adc(temp)
        # The last row gets a space instead of the trailing comma.
        print " { (short) (%7.2f * OVERSAMPLENR ), %4s }%s // v=%.3f\tr=%.3f\tres=%.3f degC/count" % (adc , temp, \
                        ',' if temp != temps[-1] else ' ', \
                        t.voltage(adc), \
                        t.resist( adc), \
                        t.resol( adc) \
                        )
    print "};"
def usage():
    # The module docstring doubles as the command-line help text.
    print __doc__
if __name__ == "__main__":
main(sys.argv[1:])
| gpl-3.0 |
paweljasinski/ironpython3 | Src/StdLib/Lib/encodings/cp1254.py | 272 | 13502 | """ Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions are single-pass charmap lookups
    # against the tables defined at the bottom of this generated module.

    def encode(self, input, errors='strict'):
        # Characters with no cp1254 mapping are handled per *errors*.
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # Each byte maps through the 256-entry decoding table.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding keeps no state between calls, so *final* is unused.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]

class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]

class StreamWriter(Codec, codecs.StreamWriter):
    # The stream classes just combine the stateless Codec with buffering.
    pass

class StreamReader(Codec, codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo the codecs registry uses for 'cp1254'."""
    return codecs.CodecInfo(
        name='cp1254',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
'\u2030' # 0x89 -> PER MILLE SIGN
'\u0160' # 0x8A -> LATIN CAPITAL LETTER S WITH CARON
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\u02dc' # 0x98 -> SMALL TILDE
'\u2122' # 0x99 -> TRADE MARK SIGN
'\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u011e' # 0xD0 -> LATIN CAPITAL LETTER G WITH BREVE
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0130' # 0xDD -> LATIN CAPITAL LETTER I WITH DOT ABOVE
'\u015e' # 0xDE -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\u011f' # 0xF0 -> LATIN SMALL LETTER G WITH BREVE
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0131' # 0xFD -> LATIN SMALL LETTER DOTLESS I
'\u015f' # 0xFE -> LATIN SMALL LETTER S WITH CEDILLA
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
# Inverse (character -> byte) map built mechanically from decoding_table.
encoding_table = codecs.charmap_build(decoding_table)
| apache-2.0 |
ecosoft-odoo/odoo | addons/l10n_ar/__openerp__.py | 260 | 1695 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Cubic ERP - Teradata SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Argentina Localization Chart Account',
'version': '1.0',
'description': """
Argentinian accounting chart and tax localization.
==================================================
Plan contable argentino e impuestos de acuerdo a disposiciones vigentes
""",
'author': ['Cubic ERP'],
'website': 'http://cubicERP.com',
'category': 'Localization/Account Charts',
'depends': ['account_chart'],
'data':[
'account_tax_code.xml',
'l10n_ar_chart.xml',
'account_tax.xml',
'l10n_ar_wizard.xml',
],
'demo': [],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
broferek/ansible | lib/ansible/modules/utilities/logic/import_tasks.py | 45 | 1712 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'
}
DOCUMENTATION = r'''
---
author: Ansible Core Team (@ansible)
module: import_tasks
short_description: Import a task list
description:
- Imports a list of tasks to be added to the current playbook for subsequent execution.
version_added: "2.4"
options:
free-form:
description:
- The name of the imported file is specified directly without any other option.
- Most keywords, including loops and conditionals, only applied to the imported tasks, not to this statement itself.
- If you need any of those to apply, use M(include_tasks) instead.
notes:
- This is a core feature of Ansible, rather than a module, and cannot be overridden like a module.
seealso:
- module: import_playbook
- module: import_role
- module: include_role
- module: include_tasks
- ref: playbooks_reuse_includes
description: More information related to including and importing playbooks, roles and tasks.
'''
EXAMPLES = r'''
- hosts: all
tasks:
- debug:
msg: task1
- name: Include task list in play
import_tasks: stuff.yaml
- debug:
msg: task10
- hosts: all
tasks:
- debug:
msg: task1
- name: Apply conditional to all imported tasks
import_tasks: stuff.yaml
when: hostvar is defined
'''
RETURN = r'''
# This module does not return anything except tasks to execute.
'''
| gpl-3.0 |
beck/django | tests/user_commands/management/commands/hal.py | 372 | 1024 | from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """Test command used to exercise argument parsing in management commands."""

    help = "Useless command."

    def add_arguments(self, parser):
        # Zero or more positional app labels.
        parser.add_argument(
            'args', metavar='app_label', nargs='*',
            help='Specify the app label(s) to works on.',
        )
        # Optional flag that makes the command exit early.
        parser.add_argument(
            '--empty', action='store_true', dest='empty', default=False,
            help="Do nothing.",
        )

    def handle(self, *app_labels, **options):
        labels = set(app_labels)

        # --empty short-circuits everything else.
        if options['empty']:
            self.stdout.write("Dave, I can't do that.")
            return

        if not labels:
            raise CommandError("I'm sorry Dave, I'm afraid I can't do that.")

        # raise an error if some --parameter is flowing from options to args
        if any(label.startswith('--') for label in labels):
            raise CommandError("Sorry, Dave, I can't let you do that.")

        self.stdout.write("Dave, my mind is going. I can feel it. I can feel it.")
| bsd-3-clause |
vorwerkc/pymatgen | pymatgen/analysis/graphs.py | 1 | 112413 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Module for graph representations of crystals.
"""
import copy
import logging
import os.path
import subprocess
import warnings
from collections import defaultdict, namedtuple
from itertools import combinations
from operator import itemgetter
import networkx as nx
import networkx.algorithms.isomorphism as iso
import numpy as np
from monty.json import MSONable
from monty.os.path import which
from networkx.drawing.nx_agraph import write_dot
from networkx.readwrite import json_graph
from scipy.spatial import KDTree
from scipy.stats import describe
from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure
from pymatgen.core.structure import FunctionalGroups
from pymatgen.util.coord import lattice_points_in_supercell
from pymatgen.vis.structure_vtk import EL_COLORS
# igraph is an optional dependency: when importable it provides a much
# faster graph-isomorphism check (see _isomorphic below); otherwise the
# code falls back to networkx.
try:
    import igraph

    IGRAPH_AVAILABLE = True
except ImportError:
    IGRAPH_AVAILABLE = False

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

__author__ = "Matthew Horton, Evan Spotte-Smith, Samuel Blau"
__version__ = "0.1"
__maintainer__ = "Matthew Horton"
__email__ = "mkhorton@lbl.gov"
__status__ = "Production"
__date__ = "August 2017"

# Lightweight record describing one neighbor returned by
# StructureGraph.get_connected_sites: the neighboring site itself, its
# lattice image, its index in the parent structure, the edge weight (may
# be None) and the distance to it.
ConnectedSite = namedtuple("ConnectedSite", "site, jimage, index, weight, dist")
def _compare(g1, g2, i1, i2):
"""
Helper function called by isomorphic to ensure comparison of node identities.
"""
return g1.vs[i1]["species"] == g2.vs[i2]["species"]
def _igraph_from_nxgraph(graph):
    """
    Helper function that converts a networkx graph object into an igraph graph object.

    :param graph: networkx graph whose nodes carry "specie" and "coords"
        attributes
    :return: igraph.Graph with one vertex per node (named by the stringified
        node index, carrying the same specie/coords data) and the same edges
    """
    nodes = graph.nodes(data=True)
    new_igraph = igraph.Graph()
    # vertices must exist before edges can reference them by name
    for node in nodes:
        new_igraph.add_vertex(name=str(node[0]), species=node[1]["specie"], coords=node[1]["coords"])
    # edges reference vertices by their string names assigned above
    new_igraph.add_edges([(str(edge[0]), str(edge[1])) for edge in graph.edges()])
    return new_igraph
def _isomorphic(frag1, frag2):
"""
Internal function to check if two graph objects are isomorphic, using igraph if
if is available and networkx if it is not.
"""
f1_nodes = frag1.nodes(data=True)
f2_nodes = frag2.nodes(data=True)
if len(f1_nodes) != len(f2_nodes):
return False
f2_edges = frag2.edges()
if len(f2_edges) != len(f2_edges):
return False
f1_comp_dict = {}
f2_comp_dict = {}
for node in f1_nodes:
if node[1]["specie"] not in f1_comp_dict:
f1_comp_dict[node[1]["specie"]] = 1
else:
f1_comp_dict[node[1]["specie"]] += 1
for node in f2_nodes:
if node[1]["specie"] not in f2_comp_dict:
f2_comp_dict[node[1]["specie"]] = 1
else:
f2_comp_dict[node[1]["specie"]] += 1
if f1_comp_dict != f2_comp_dict:
return False
if IGRAPH_AVAILABLE:
ifrag1 = _igraph_from_nxgraph(frag1)
ifrag2 = _igraph_from_nxgraph(frag2)
return ifrag1.isomorphic_vf2(ifrag2, node_compat_fn=_compare)
nm = iso.categorical_node_match("specie", "ERROR")
return nx.is_isomorphic(frag1.to_undirected(), frag2.to_undirected(), node_match=nm)
class StructureGraph(MSONable):
"""
This is a class for annotating a Structure with
bond information, stored in the form of a graph. A "bond" does
not necessarily have to be a chemical bond, but can store any
kind of information that connects two Sites.
"""
    def __init__(self, structure, graph_data=None):
        """
        If constructing this class manually, use the `with_empty_graph`
        method or `with_local_env_strategy` method (using an algorithm
        provided by the `local_env` module, such as O'Keeffe).

        This class that contains connection information:
        relationships between sites represented by a Graph structure,
        and an associated structure object.

        This class uses the NetworkX package to store and operate
        on the graph itself, but contains a lot of helper methods
        to make associating a graph with a given crystallographic
        structure easier.

        Use cases for this include storing bonding information,
        NMR J-couplings, Heisenberg exchange parameters, etc.

        For periodic graphs, class stores information on the graph
        edges of what lattice image the edge belongs to.

        :param structure: a Structure object
        :param graph_data: dict containing graph information in
            dict format (not intended to be constructed manually,
            see as_dict method for format)
        """
        if isinstance(structure, StructureGraph):
            # just make a copy from input
            graph_data = structure.as_dict()["graphs"]
        self.structure = structure
        self.graph = nx.readwrite.json_graph.adjacency_graph(graph_data)
        # tidy up edge attr dicts, reading to/from json duplicates
        # information
        for u, v, k, d in self.graph.edges(keys=True, data=True):
            if "id" in d:
                del d["id"]
            if "key" in d:
                del d["key"]
            # ensure images are tuples (conversion to lists happens
            # when serializing back from json), it's important images
            # are hashable/immutable
            if "to_jimage" in d:
                d["to_jimage"] = tuple(d["to_jimage"])
            if "from_jimage" in d:
                d["from_jimage"] = tuple(d["from_jimage"])
    @classmethod
    def with_empty_graph(cls, structure, name="bonds", edge_weight_name=None, edge_weight_units=None):
        """
        Constructor for StructureGraph, returns a StructureGraph
        object with an empty graph (no edges, only nodes defined
        that correspond to Sites in Structure).

        :param structure (Structure):
        :param name (str): name of graph, e.g. "bonds"
        :param edge_weight_name (str): name of edge weights,
            e.g. "bond_length" or "exchange_constant"
        :param edge_weight_units (str): name of edge weight units
            e.g. "Å" or "eV"
        :return (StructureGraph):
        """
        # weights without units are ambiguous, so insist units are supplied
        # (an empty string is acceptable for dimensionless weights)
        if edge_weight_name and (edge_weight_units is None):
            raise ValueError(
                "Please specify units associated "
                "with your edge weights. Can be "
                "empty string if arbitrary or "
                "dimensionless."
            )
        # construct graph with one node per site
        # graph attributes don't change behavior of graph,
        # they're just for book-keeping
        graph = nx.MultiDiGraph(
            edge_weight_name=edge_weight_name,
            edge_weight_units=edge_weight_units,
            name=name,
        )
        graph.add_nodes_from(range(len(structure)))
        graph_data = json_graph.adjacency_data(graph)
        return cls(structure, graph_data=graph_data)
    @staticmethod
    def with_edges(structure, edges):
        """
        Constructor for StructureGraph, using pre-existing or pre-defined edges
        with optional edge parameters.

        :param structure: Structure object
        :param edges: dict representing the bonds
            (format: {(from_index, to_index, from_image, to_image): props},
            where props is a dictionary of properties, including weight.
            Props should be None if no additional properties are to be
            specified.
        :return: sg, a StructureGraph
        """
        sg = StructureGraph.with_empty_graph(structure, name="bonds", edge_weight_name="weight", edge_weight_units="")
        for edge, props in edges.items():
            # each key must be a 4-tuple; anything unsubscriptable raises
            try:
                from_index = edge[0]
                to_index = edge[1]
                from_image = edge[2]
                to_image = edge[3]
            except TypeError:
                raise ValueError("Edges must be given as (from_index, to_index," " from_image, to_image) tuples")
            if props is not None:
                # "weight" is split out of props because add_edge takes it
                # as a dedicated argument
                if "weight" in props.keys():
                    weight = props["weight"]
                    del props["weight"]
                else:
                    weight = None
                if len(props.items()) == 0:
                    props = None
            else:
                weight = None
            nodes = sg.graph.nodes
            if not (from_index in nodes and to_index in nodes):
                raise ValueError(
                    "Edges cannot be added if nodes are not" " present in the graph. Please check your" " indices."
                )
            sg.add_edge(
                from_index,
                to_index,
                from_jimage=from_image,
                to_jimage=to_image,
                weight=weight,
                edge_properties=props,
            )
        sg.set_node_attributes()
        return sg
    @staticmethod
    def with_local_env_strategy(structure, strategy, weights=False):
        """
        Constructor for StructureGraph, using a strategy
        from :Class: `pymatgen.analysis.local_env`.

        :param structure: Structure object
        :param strategy: an instance of a
            :Class: `pymatgen.analysis.local_env.NearNeighbors` object
        :param weights: if True, use weights from local_env class
            (consult relevant class for their meaning)
        :return: a StructureGraph with edges determined by the strategy
        """
        if not strategy.structures_allowed:
            raise ValueError(
                "Chosen strategy is not designed for use with structures! " "Please choose another strategy."
            )
        sg = StructureGraph.with_empty_graph(structure, name="bonds")
        for n, neighbors in enumerate(strategy.get_all_nn_info(structure)):
            for neighbor in neighbors:
                # local_env will always try to add two edges
                # for any one bond, one from site u to site v
                # and another form site v to site u: this is
                # harmless, so warn_duplicates=False
                sg.add_edge(
                    from_index=n,
                    from_jimage=(0, 0, 0),
                    to_index=neighbor["site_index"],
                    to_jimage=neighbor["image"],
                    weight=neighbor["weight"] if weights else None,
                    warn_duplicates=False,
                )
        return sg
    @property
    def name(self):
        """
        :return: Name of graph (stored as a graph-level attribute)
        """
        return self.graph.graph["name"]
    @property
    def edge_weight_name(self):
        """
        :return: Name of the edge weight property of graph
        """
        return self.graph.graph["edge_weight_name"]
    @property
    def edge_weight_unit(self):
        """
        :return: Units of the edge weight property of graph
        """
        return self.graph.graph["edge_weight_units"]
    def add_edge(
        self,
        from_index,
        to_index,
        from_jimage=(0, 0, 0),
        to_jimage=None,
        weight=None,
        warn_duplicates=True,
        edge_properties=None,
    ):
        """
        Add edge to graph.

        Since physically a 'bond' (or other connection
        between sites) doesn't have a direction, from_index,
        from_jimage can be swapped with to_index, to_jimage.

        However, images will always always be shifted so that
        from_index < to_index and from_jimage becomes (0, 0, 0).

        :param from_index: index of site connecting from
        :param to_index: index of site connecting to
        :param from_jimage (tuple of ints): lattice vector of periodic
            image, e.g. (1, 0, 0) for periodic image in +x direction
        :param to_jimage (tuple of ints): lattice vector of image
        :param weight (float): e.g. bond length
        :param warn_duplicates (bool): if True, will warn if
            trying to add duplicate edges (duplicate edges will not
            be added in either case)
        :param edge_properties (dict): any other information to
            store on graph edges, similar to Structure's site_properties
        :return:
        """
        # this is not necessary for the class to work, but
        # just makes it neater
        if to_index < from_index:
            to_index, from_index = from_index, to_index
            to_jimage, from_jimage = from_jimage, to_jimage
        # constrain all from_jimages to be (0, 0, 0),
        # initial version of this class worked even if
        # from_jimage != (0, 0, 0), but making this
        # assumption simplifies logic later
        if not np.array_equal(from_jimage, (0, 0, 0)):
            shift = from_jimage
            from_jimage = np.subtract(from_jimage, shift)
            to_jimage = np.subtract(to_jimage, shift)
        # automatic detection of to_jimage if user doesn't specify
        # will try and detect all equivalent images and add multiple
        # edges if appropriate
        if to_jimage is None:
            # assume we want the closest site
            warnings.warn("Please specify to_jimage to be unambiguous, trying to automatically detect.")
            dist, to_jimage = self.structure[from_index].distance_and_image(self.structure[to_index])
            if dist == 0:
                # this will happen when from_index == to_index,
                # typically in primitive single-atom lattices
                images = [1, 0, 0], [0, 1, 0], [0, 0, 1]
                dists = []
                for image in images:
                    dists.append(
                        self.structure[from_index].distance_and_image(self.structure[from_index], jimage=image)[0]
                    )
                dist = min(dists)
            # all sites at (approximately) this distance are treated as
            # equivalent images and each gets its own edge (via recursion)
            equiv_sites = self.structure.get_neighbors_in_shell(
                self.structure[from_index].coords, dist, dist * 0.01, include_index=True
            )
            for nnsite in equiv_sites:
                to_jimage = np.subtract(nnsite.frac_coords, self.structure[from_index].frac_coords)
                to_jimage = np.round(to_jimage).astype(int)
                self.add_edge(
                    from_index=from_index,
                    from_jimage=(0, 0, 0),
                    to_jimage=to_jimage,
                    to_index=nnsite.index,
                )
            return
        # sanitize types
        from_jimage, to_jimage = (
            tuple(map(int, from_jimage)),
            tuple(map(int, to_jimage)),
        )
        from_index, to_index = int(from_index), int(to_index)
        # if edge is from site i to site i, constrain direction of edge
        # this is a convention to avoid duplicate hops
        if to_index == from_index:
            if to_jimage == (0, 0, 0):
                warnings.warn("Tried to create a bond to itself, " "this doesn't make sense so was ignored.")
                return
            # ensure that the first non-zero jimage index is positive
            # assumes that at least one non-zero index is present
            is_positive = [idx for idx in to_jimage if idx != 0][0] > 0
            if not is_positive:
                # let's flip the jimage,
                # e.g. (0, 1, 0) is equivalent to (0, -1, 0) in this case
                to_jimage = tuple(-idx for idx in to_jimage)
        # check we're not trying to add a duplicate edge
        # there should only ever be at most one edge
        # between a given (site, jimage) pair and another
        # (site, jimage) pair
        existing_edge_data = self.graph.get_edge_data(from_index, to_index)
        if existing_edge_data:
            for key, d in existing_edge_data.items():
                if d["to_jimage"] == to_jimage:
                    if warn_duplicates:
                        warnings.warn(
                            "Trying to add an edge that already exists from "
                            "site {} to site {} in {}.".format(from_index, to_index, to_jimage)
                        )
                    return
        # generic container for additional edge properties,
        # similar to site properties
        edge_properties = edge_properties or {}
        # NOTE: truthiness is used here, so a weight of exactly 0 is stored
        # as if no weight had been given
        if weight:
            self.graph.add_edge(from_index, to_index, to_jimage=to_jimage, weight=weight, **edge_properties)
        else:
            self.graph.add_edge(from_index, to_index, to_jimage=to_jimage, **edge_properties)
    def insert_node(
        self,
        i,
        species,
        coords,
        coords_are_cartesian=False,
        validate_proximity=False,
        site_properties=None,
        edges=None,
    ):
        """
        A wrapper around Structure.insert(), which also incorporates the new
        site into the StructureGraph.

        :param i: Index at which to insert the new site
        :param species: Species for the new site
        :param coords: 3x1 array representing coordinates of the new site
        :param coords_are_cartesian: Whether coordinates are cartesian.
            Defaults to False.
        :param validate_proximity: For Structure.insert(); if True (default
            False), distance will be checked to ensure that site can be safely
            added.
        :param site_properties: Site properties for Structure
        :param edges: List of dicts representing edges to be added to the
            StructureGraph. These edges must include the index of the new site i,
            and all indices used for these edges should reflect the
            StructureGraph AFTER the insertion, NOT before. Each dict should at
            least have a "to_index" and "from_index" key, and can also have a
            "weight" and a "properties" key.
        :return:
        """
        self.structure.insert(
            i,
            species,
            coords,
            coords_are_cartesian=coords_are_cartesian,
            validate_proximity=validate_proximity,
            properties=site_properties,
        )
        # shift existing node labels >= i up by one to make room for the
        # newly inserted site
        mapping = {}
        for j in range(len(self.structure) - 1):
            if j < i:
                mapping[j] = j
            else:
                mapping[j] = j + 1
        nx.relabel_nodes(self.graph, mapping, copy=False)
        self.graph.add_node(i)
        self.set_node_attributes()
        if edges is not None:
            for edge in edges:
                try:
                    self.add_edge(
                        edge["from_index"],
                        edge["to_index"],
                        from_jimage=(0, 0, 0),
                        to_jimage=edge["to_jimage"],
                        weight=edge.get("weight", None),
                        edge_properties=edge.get("properties", None),
                    )
                except KeyError:
                    raise RuntimeError("Some edges are invalid.")
def set_node_attributes(self):
"""
Gives each node a "specie" and a "coords" attribute, updated with the
current species and coordinates.
:return:
"""
species = {}
coords = {}
properties = {}
for node in self.graph.nodes():
species[node] = self.structure[node].specie.symbol
coords[node] = self.structure[node].coords
properties[node] = self.structure[node].properties
nx.set_node_attributes(self.graph, species, "specie")
nx.set_node_attributes(self.graph, coords, "coords")
nx.set_node_attributes(self.graph, properties, "properties")
def alter_edge(
self,
from_index,
to_index,
to_jimage=None,
new_weight=None,
new_edge_properties=None,
):
"""
Alters either the weight or the edge_properties of
an edge in the StructureGraph.
:param from_index: int
:param to_index: int
:param to_jimage: tuple
:param new_weight: alter_edge does not require
that weight be altered. As such, by default, this
is None. If weight is to be changed, it should be a
float.
:param new_edge_properties: alter_edge does not require
that edge_properties be altered. As such, by default,
this is None. If any edge properties are to be changed,
it should be a dictionary of edge properties to be changed.
:return:
"""
existing_edges = self.graph.get_edge_data(from_index, to_index)
# ensure that edge exists before attempting to change it
if not existing_edges:
raise ValueError(
"Edge between {} and {} cannot be altered;\
no edge exists between those sites.".format(
from_index, to_index
)
)
if to_jimage is None:
edge_index = 0
else:
for i, properties in existing_edges.items():
if properties["to_jimage"] == to_jimage:
edge_index = i
if new_weight is not None:
self.graph[from_index][to_index][edge_index]["weight"] = new_weight
if new_edge_properties is not None:
for prop in list(new_edge_properties.keys()):
self.graph[from_index][to_index][edge_index][prop] = new_edge_properties[prop]
    def break_edge(self, from_index, to_index, to_jimage=None, allow_reverse=False):
        """
        Remove an edge from the StructureGraph. If no image is given, this method will fail.

        :param from_index: int
        :param to_index: int
        :param to_jimage: tuple
        :param allow_reverse: If allow_reverse is True, then break_edge will
            attempt to break both (from_index, to_index) and, failing that,
            will attempt to break (to_index, from_index).
        :return:
        """
        # ensure that edge exists before attempting to remove it
        existing_edges = self.graph.get_edge_data(from_index, to_index)
        existing_reverse = None
        if to_jimage is None:
            raise ValueError("Image must be supplied, to avoid ambiguity.")
        if existing_edges:
            # NOTE(review): if no parallel edge matches to_jimage, edge_index
            # is never bound and remove_edge raises an opaque NameError —
            # confirm whether an explicit ValueError should be raised here.
            for i, properties in existing_edges.items():
                if properties["to_jimage"] == to_jimage:
                    edge_index = i
            self.graph.remove_edge(from_index, to_index, edge_index)
        else:
            if allow_reverse:
                existing_reverse = self.graph.get_edge_data(to_index, from_index)
            if existing_reverse:
                for i, properties in existing_reverse.items():
                    if properties["to_jimage"] == to_jimage:
                        edge_index = i
                self.graph.remove_edge(to_index, from_index, edge_index)
            else:
                raise ValueError(
                    "Edge cannot be broken between {} and {};\
                    no edge exists between those sites.".format(
                        from_index, to_index
                    )
                )
def remove_nodes(self, indices):
"""
A wrapper for Molecule.remove_sites().
:param indices: list of indices in the current Molecule (and graph) to
be removed.
:return:
"""
self.structure.remove_sites(indices)
self.graph.remove_nodes_from(indices)
mapping = {}
for correct, current in enumerate(sorted(self.graph.nodes)):
mapping[current] = correct
nx.relabel_nodes(self.graph, mapping, copy=False)
self.set_node_attributes()
def substitute_group(
self,
index,
func_grp,
strategy,
bond_order=1,
graph_dict=None,
strategy_params=None,
):
"""
Builds off of Structure.substitute to replace an atom in self.structure
with a functional group. This method also amends self.graph to
incorporate the new functional group.
NOTE: Care must be taken to ensure that the functional group that is
substituted will not place atoms to close to each other, or violate the
dimensions of the Lattice.
:param index: Index of atom to substitute.
:param func_grp: Substituent molecule. There are two options:
1. Providing an actual Molecule as the input. The first atom
must be a DummySpecies X, indicating the position of
nearest neighbor. The second atom must be the next
nearest atom. For example, for a methyl group
substitution, func_grp should be X-CH3, where X is the
first site and C is the second site. What the code will
do is to remove the index site, and connect the nearest
neighbor to the C atom in CH3. The X-C bond indicates the
directionality to connect the atoms.
2. A string name. The molecule will be obtained from the
relevant template in func_groups.json.
:param strategy: Class from pymatgen.analysis.local_env.
:param bond_order: A specified bond order to calculate the bond
length between the attached functional group and the nearest
neighbor site. Defaults to 1.
:param graph_dict: Dictionary representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. If None, then the algorithm
will attempt to automatically determine bonds using one of
a list of strategies defined in pymatgen.analysis.local_env.
:param strategy_params: dictionary of keyword arguments for strategy.
If None, default parameters will be used.
:return:
"""
def map_indices(grp):
grp_map = {}
# Get indices now occupied by functional group
# Subtracting 1 because the dummy atom X should not count
atoms = len(grp) - 1
offset = len(self.structure) - atoms
for i in range(atoms):
grp_map[i] = i + offset
return grp_map
if isinstance(func_grp, Molecule):
func_grp = copy.deepcopy(func_grp)
else:
try:
func_grp = copy.deepcopy(FunctionalGroups[func_grp])
except Exception:
raise RuntimeError("Can't find functional group in list. " "Provide explicit coordinate instead")
self.structure.substitute(index, func_grp, bond_order=bond_order)
mapping = map_indices(func_grp)
# Remove dummy atom "X"
func_grp.remove_species("X")
if graph_dict is not None:
for (u, v) in graph_dict.keys():
edge_props = graph_dict[(u, v)]
if "to_jimage" in edge_props.keys():
to_jimage = edge_props["to_jimage"]
del edge_props["to_jimage"]
else:
# By default, assume that all edges should stay remain
# inside the initial image
to_jimage = (0, 0, 0)
if "weight" in edge_props.keys():
weight = edge_props["weight"]
del edge_props["weight"]
self.add_edge(
mapping[u],
mapping[v],
to_jimage=to_jimage,
weight=weight,
edge_properties=edge_props,
)
else:
if strategy_params is None:
strategy_params = {}
strat = strategy(**strategy_params)
for site in mapping.values():
neighbors = strat.get_nn_info(self.structure, site)
for neighbor in neighbors:
self.add_edge(
from_index=site,
from_jimage=(0, 0, 0),
to_index=neighbor["site_index"],
to_jimage=neighbor["image"],
weight=neighbor["weight"],
warn_duplicates=False,
)
    def get_connected_sites(self, n, jimage=(0, 0, 0)):
        """
        Returns a named tuple of neighbors of site n:
        periodic_site, jimage, index, weight.
        Index is the index of the corresponding site
        in the original structure, weight can be
        None if not defined.

        :param n: index of Site in Structure
        :param jimage: lattice vector of site
        :return: list of ConnectedSite tuples,
            sorted by closest first
        """
        connected_sites = set()
        connected_site_images = set()
        # the graph is directed, so neighbors appear on both outgoing and
        # incoming edges; incoming edges are flipped below
        out_edges = [(u, v, d, "out") for u, v, d in self.graph.out_edges(n, data=True)]
        in_edges = [(u, v, d, "in") for u, v, d in self.graph.in_edges(n, data=True)]
        for u, v, d, dir in out_edges + in_edges:
            to_jimage = d["to_jimage"]
            if dir == "in":
                # reverse the edge so it reads "from n to neighbor"
                u, v = v, u
                to_jimage = np.multiply(-1, to_jimage)
            to_jimage = tuple(map(int, np.add(to_jimage, jimage)))
            site_d = self.structure[v].as_dict()
            site_d["abc"] = np.add(site_d["abc"], to_jimage).tolist()
            site = PeriodicSite.from_dict(site_d)
            # from_site if jimage arg != (0, 0, 0)
            relative_jimage = np.subtract(to_jimage, jimage)
            dist = self.structure[u].distance(self.structure[v], jimage=relative_jimage)
            weight = d.get("weight", None)
            # deduplicate on (site index, image) so each neighbor appears once
            if (v, to_jimage) not in connected_site_images:
                connected_site = ConnectedSite(site=site, jimage=to_jimage, index=v, weight=weight, dist=dist)
                connected_sites.add(connected_site)
                connected_site_images.add((v, to_jimage))
        # return list sorted by closest sites first
        connected_sites = list(connected_sites)
        connected_sites.sort(key=lambda x: x.dist)
        return connected_sites
def get_coordination_of_site(self, n):
"""
Returns the number of neighbors of site n.
In graph terms, simply returns degree
of node corresponding to site n.
:param n: index of site
:return (int):
"""
number_of_self_loops = sum([1 for n, v in self.graph.edges(n) if n == v])
return self.graph.degree(n) - number_of_self_loops
    def draw_graph_to_file(
        self,
        filename="graph",
        diff=None,
        hide_unconnected_nodes=False,
        hide_image_edges=True,
        edge_colors=False,
        node_labels=False,
        weight_labels=False,
        image_labels=False,
        color_scheme="VESTA",
        keep_dot=False,
        algo="fdp",
    ):
        """
        Draws graph using GraphViz.

        The networkx graph object itself can also be drawn
        with networkx's in-built graph drawing methods, but
        note that this might give misleading results for
        multigraphs (edges are super-imposed on each other).

        If visualization is difficult to interpret,
        `hide_image_edges` can help, especially in larger
        graphs.

        :param filename: filename to output, will detect filetype
            from extension (any graphviz filetype supported, such as
            pdf or png)
        :param diff (StructureGraph): an additional graph to
            compare with, will color edges red that do not exist in diff
            and edges green that are in diff graph but not in the
            reference graph
        :param hide_unconnected_nodes: if True, hide unconnected
            nodes
        :param hide_image_edges: if True, do not draw edges that
            go through periodic boundaries
        :param edge_colors (bool): if True, use node colors to
            color edges
        :param node_labels (bool): if True, label nodes with
            species and site index
        :param weight_labels (bool): if True, label edges with
            weights
        :param image_labels (bool): if True, label edges with
            their periodic images (usually only used for debugging,
            edges to periodic images always appear as dashed lines)
        :param color_scheme (str): "VESTA" or "JMOL"
        :param keep_dot (bool): keep GraphViz .dot file for later
            visualization
        :param algo: any graphviz algo, "neato" (for simple graphs)
            or "fdp" (for more crowded graphs) usually give good outputs
        :return:
        """
        if not which(algo):
            raise RuntimeError("StructureGraph graph drawing requires " "GraphViz binaries to be in the path.")
        # Developer note: NetworkX also has methods for drawing
        # graphs using matplotlib, these also work here. However,
        # a dedicated tool like GraphViz allows for much easier
        # control over graph appearance and also correctly displays
        # mutli-graphs (matplotlib can superimpose multiple edges).
        g = self.graph.copy()
        g.graph = {"nodesep": 10.0, "dpi": 300, "overlap": "false"}
        # add display options for nodes
        for n in g.nodes():
            # get label by species name
            label = "{}({})".format(str(self.structure[n].specie), n) if node_labels else ""
            # use standard color scheme for nodes
            c = EL_COLORS[color_scheme].get(str(self.structure[n].specie.symbol), [0, 0, 0])
            # get contrasting font color
            # magic numbers account for perceived luminescence
            # https://stackoverflow.com/questions/1855884/determine-font-color-based-on-background-color
            fontcolor = "#000000" if 1 - (c[0] * 0.299 + c[1] * 0.587 + c[2] * 0.114) / 255 < 0.5 else "#ffffff"
            # convert color to hex string
            color = "#{:02x}{:02x}{:02x}".format(c[0], c[1], c[2])
            g.add_node(
                n,
                fillcolor=color,
                fontcolor=fontcolor,
                label=label,
                fontname="Helvetica-bold",
                style="filled",
                shape="circle",
            )
        edges_to_delete = []
        # add display options for edges
        for u, v, k, d in g.edges(keys=True, data=True):
            # retrieve from/to images, set as origin if not defined
            to_image = d["to_jimage"]
            # set edge style
            d["style"] = "solid"
            if to_image != (0, 0, 0):
                d["style"] = "dashed"
                if hide_image_edges:
                    edges_to_delete.append((u, v, k))
            # don't show edge directions
            d["arrowhead"] = "none"
            # only add labels for images that are not the origin
            if image_labels:
                d["headlabel"] = "" if to_image == (0, 0, 0) else "to {}".format((to_image))
                d["arrowhead"] = "normal" if d["headlabel"] else "none"
            # optionally color edges using node colors
            color_u = g.nodes[u]["fillcolor"]
            color_v = g.nodes[v]["fillcolor"]
            d["color_uv"] = "{};0.5:{};0.5".format(color_u, color_v) if edge_colors else "#000000"
            # optionally add weights to graph
            if weight_labels:
                units = g.graph.get("edge_weight_units", "")
                if d.get("weight"):
                    d["label"] = "{:.2f} {}".format(d["weight"], units)
            # update edge with our new style attributes
            g.edges[u, v, k].update(d)
        # optionally remove periodic image edges,
        # these can be confusing due to periodic boundaries
        if hide_image_edges:
            for edge_to_delete in edges_to_delete:
                g.remove_edge(*edge_to_delete)
        # optionally hide unconnected nodes,
        # these can appear when removing periodic edges
        if hide_unconnected_nodes:
            g = g.subgraph([n for n in g.degree() if g.degree()[n] != 0])
        # optionally highlight differences with another graph
        if diff:
            diff = self.diff(diff, strict=True)
            green_edges = []
            red_edges = []
            for u, v, k, d in g.edges(keys=True, data=True):
                if (u, v, d["to_jimage"]) in diff["self"]:
                    # edge has been deleted
                    red_edges.append((u, v, k))
                elif (u, v, d["to_jimage"]) in diff["other"]:
                    # edge has been added
                    green_edges.append((u, v, k))
            for u, v, k in green_edges:
                g.edges[u, v, k].update({"color_uv": "#00ff00"})
            for u, v, k in red_edges:
                g.edges[u, v, k].update({"color_uv": "#ff0000"})
        basename, extension = os.path.splitext(filename)
        extension = extension[1:]
        write_dot(g, basename + ".dot")
        # render the .dot file with the requested GraphViz binary,
        # writing its stdout directly into the output file
        with open(filename, "w") as f:
            args = [algo, "-T", extension, basename + ".dot"]
            rs = subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)
            rs.communicate()
            if rs.returncode != 0:
                raise RuntimeError("{} exited with return code {}.".format(algo, rs.returncode))
        if not keep_dot:
            os.remove(basename + ".dot")
    @property
    def types_and_weights_of_connections(self):
        """
        Extract a dictionary summarizing the types and weights
        of edges in the graph.

        :return: A dictionary with keys specifying the
            species involved in a connection in alphabetical order
            (e.g. string 'Fe-O') and values which are a list of
            weights for those connections (e.g. bond lengths).
        """

        def get_label(u, v):
            # species pair label, alphabetically ordered so that
            # e.g. 'Fe-O' and 'O-Fe' collapse to the same key
            u_label = self.structure[u].species_string
            v_label = self.structure[v].species_string
            return "-".join(sorted((u_label, v_label)))

        types = defaultdict(list)
        for u, v, d in self.graph.edges(data=True):
            label = get_label(u, v)
            types[label].append(d["weight"])
        return dict(types)
    @property
    def weight_statistics(self):
        """
        Extract a statistical summary of edge weights present in
        the graph.

        :return: A dict with an 'all_weights' list and 'min',
            'max', 'mean', 'variance' keys (computed via
            scipy.stats.describe with NaNs omitted)
        """
        all_weights = [d.get("weight", None) for u, v, d in self.graph.edges(data=True)]
        stats = describe(all_weights, nan_policy="omit")
        return {
            "all_weights": all_weights,
            "min": stats.minmax[0],
            "max": stats.minmax[1],
            "mean": stats.mean,
            "variance": stats.variance,
        }
def types_of_coordination_environments(self, anonymous=False):
"""
Extract information on the different co-ordination environments
present in the graph.
:param anonymous: if anonymous, will replace specie names
with A, B, C, etc.
:return: a list of co-ordination environments,
e.g. ['Mo-S(6)', 'S-Mo(3)']
"""
motifs = set()
for idx, site in enumerate(self.structure):
centre_sp = site.species_string
connected_sites = self.get_connected_sites(idx)
connected_species = [connected_site.site.species_string for connected_site in connected_sites]
labels = []
for sp in set(connected_species):
count = connected_species.count(sp)
labels.append((count, sp))
labels = sorted(labels, reverse=True)
if anonymous:
mapping = {centre_sp: "A"}
available_letters = [chr(66 + i) for i in range(25)]
for label in labels:
sp = label[1]
if sp not in mapping:
mapping[sp] = available_letters.pop(0)
centre_sp = "A"
labels = [(label[0], mapping[label[1]]) for label in labels]
labels = ["{}({})".format(label[1], label[0]) for label in labels]
motif = "{}-{}".format(centre_sp, ",".join(labels))
motifs.add(motif)
return sorted(list(motifs))
def as_dict(self):
"""
As in :Class: `pymatgen.core.Structure` except
with using `to_dict_of_dicts` from NetworkX
to store graph information.
"""
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"structure": self.structure.as_dict(),
"graphs": json_graph.adjacency_data(self.graph),
}
return d
@classmethod
def from_dict(cls, d):
"""
As in :Class: `pymatgen.core.Structure` except
restoring graphs using `from_dict_of_dicts`
from NetworkX to restore graph information.
"""
s = Structure.from_dict(d["structure"])
return cls(s, d["graphs"])
    def __mul__(self, scaling_matrix):
        """
        Replicates the graph, creating a supercell,
        intelligently joining together
        edges that lie on periodic boundaries.
        In principle, any operations on the expanded
        graph could also be done on the original
        graph, but a larger graph can be easier to
        visualize and reason about.

        :param scaling_matrix: same as Structure.__mul__ (a scalar or a
            3-vector of integers; a full 3x3 matrix is not yet supported
            and raises NotImplementedError below)
        :return: a new StructureGraph for the supercell
        """
        # Developer note: a different approach was also trialed, using
        # a simple Graph (instead of MultiDiGraph), with node indices
        # representing both site index and periodic image. Here, the
        # number of nodes != number of sites in the Structure. This
        # approach has many benefits, but made it more difficult to
        # keep the graph in sync with its corresponding Structure.
        # Broadly, it would be easier to multiply the Structure
        # *before* generating the StructureGraph, but this isn't
        # possible when generating the graph using critic2 from
        # charge density.
        # Multiplication works by looking for the expected position
        # of an image node, and seeing if that node exists in the
        # supercell. If it does, the edge is updated. This is more
        # computationally expensive than just keeping track of
        # which new lattice images are present, but should hopefully be
        # easier to extend to a general 3x3 scaling matrix.
        # code adapted from Structure.__mul__
        scale_matrix = np.array(scaling_matrix, np.int16)
        if scale_matrix.shape != (3, 3):
            # scalar or 3-vector input: promote to a diagonal 3x3 matrix
            scale_matrix = np.array(scale_matrix * np.eye(3), np.int16)
        else:
            # TODO: test __mul__ with full 3x3 scaling matrices
            raise NotImplementedError("Not tested with 3x3 scaling matrices yet.")
        new_lattice = Lattice(np.dot(scale_matrix, self.structure.lattice.matrix))
        f_lat = lattice_points_in_supercell(scale_matrix)
        c_lat = new_lattice.get_cartesian_coords(f_lat)
        new_sites = []
        new_graphs = []
        for v in c_lat:
            # create a map of nodes from original graph to its image
            mapping = {n: n + len(new_sites) for n in range(len(self.structure))}
            for idx, site in enumerate(self.structure):
                s = PeriodicSite(
                    site.species,
                    site.coords + v,
                    new_lattice,
                    properties=site.properties,
                    coords_are_cartesian=True,
                    to_unit_cell=False,
                )
                new_sites.append(s)
            new_graphs.append(nx.relabel_nodes(self.graph, mapping, copy=True))
        new_structure = Structure.from_sites(new_sites)
        # merge all graphs into one big graph
        new_g = nx.MultiDiGraph()
        for new_graph in new_graphs:
            new_g = nx.union(new_g, new_graph)
        edges_to_remove = []  # tuple of (u, v, k)
        edges_to_add = []  # tuple of (u, v, attr_dict)
        # list of new edges inside supercell
        # for duplicate checking
        edges_inside_supercell = [{u, v} for u, v, d in new_g.edges(data=True) if d["to_jimage"] == (0, 0, 0)]
        new_periodic_images = []
        orig_lattice = self.structure.lattice
        # use k-d tree to match given position to an
        # existing Site in Structure
        kd_tree = KDTree(new_structure.cart_coords)
        # tolerance in Å for sites to be considered equal
        # this could probably be a lot smaller
        tol = 0.05
        for u, v, k, d in new_g.edges(keys=True, data=True):
            to_jimage = d["to_jimage"]  # for node v
            # reduce unnecessary checking
            if to_jimage != (0, 0, 0):
                # get index in original site
                n_u = u % len(self.structure)
                n_v = v % len(self.structure)
                # get fractional co-ordinates of where atoms defined
                # by edge are expected to be, relative to original
                # lattice (keeping original lattice has
                # significant benefits)
                v_image_frac = np.add(self.structure[n_v].frac_coords, to_jimage)
                u_frac = self.structure[n_u].frac_coords
                # using the position of node u as a reference,
                # get relative Cartesian co-ordinates of where
                # atoms defined by edge are expected to be
                v_image_cart = orig_lattice.get_cartesian_coords(v_image_frac)
                u_cart = orig_lattice.get_cartesian_coords(u_frac)
                v_rel = np.subtract(v_image_cart, u_cart)
                # now retrieve position of node v in
                # new supercell, and get absolute Cartesian
                # co-ordinates of where atoms defined by edge
                # are expected to be
                v_expec = new_structure[u].coords + v_rel
                # now search in new structure for these atoms
                # query returns (distance, index)
                v_present = kd_tree.query(v_expec)
                v_present = v_present[1] if v_present[0] <= tol else None
                # check if image sites now present in supercell
                # and if so, delete old edge that went through
                # periodic boundary
                if v_present is not None:
                    new_u = u
                    new_v = v_present
                    new_d = d.copy()
                    # node now inside supercell
                    new_d["to_jimage"] = (0, 0, 0)
                    edges_to_remove.append((u, v, k))
                    # make sure we don't try to add duplicate edges
                    # will remove two edges for every one we add
                    if {new_u, new_v} not in edges_inside_supercell:
                        # normalize direction
                        if new_v < new_u:
                            new_u, new_v = new_v, new_u
                        edges_inside_supercell.append({new_u, new_v})
                        edges_to_add.append((new_u, new_v, new_d))
                else:
                    # want to find new_v such that we have
                    # full periodic boundary conditions
                    # so that nodes on one side of supercell
                    # are connected to nodes on opposite side
                    v_expec_frac = new_structure.lattice.get_fractional_coords(v_expec)
                    # find new to_jimage
                    # use np.around to fix issues with finite precision leading to incorrect image
                    v_expec_image = np.around(v_expec_frac, decimals=3)
                    v_expec_image = v_expec_image - v_expec_image % 1
                    v_expec_frac = np.subtract(v_expec_frac, v_expec_image)
                    v_expec = new_structure.lattice.get_cartesian_coords(v_expec_frac)
                    v_present = kd_tree.query(v_expec)
                    v_present = v_present[1] if v_present[0] <= tol else None
                    if v_present is not None:
                        new_u = u
                        new_v = v_present
                        new_d = d.copy()
                        new_to_jimage = tuple(map(int, v_expec_image))
                        # normalize direction
                        if new_v < new_u:
                            new_u, new_v = new_v, new_u
                            new_to_jimage = tuple(np.multiply(-1, d["to_jimage"]).astype(int))
                        new_d["to_jimage"] = new_to_jimage
                        edges_to_remove.append((u, v, k))
                        if (new_u, new_v, new_to_jimage) not in new_periodic_images:
                            edges_to_add.append((new_u, new_v, new_d))
                            new_periodic_images.append((new_u, new_v, new_to_jimage))
        logger.debug("Removing {} edges, adding {} new edges.".format(len(edges_to_remove), len(edges_to_add)))
        # add/delete marked edges
        # NOTE(review): the loop variable below shadows the list it
        # iterates — harmless (the iterator is bound first) but confusing
        for edges_to_remove in edges_to_remove:
            new_g.remove_edge(*edges_to_remove)
        for (u, v, d) in edges_to_add:
            new_g.add_edge(u, v, **d)
        # return new instance of StructureGraph with supercell
        d = {
            "@module": self.__class__.__module__,
            "@class": self.__class__.__name__,
            "structure": new_structure.as_dict(),
            "graphs": json_graph.adjacency_data(new_g),
        }
        sg = StructureGraph.from_dict(d)
        return sg
def __rmul__(self, other):
return self.__mul__(other)
@classmethod
def _edges_to_string(cls, g):
header = "from to to_image "
header_line = "---- ---- ------------"
edge_weight_name = g.graph["edge_weight_name"]
if edge_weight_name:
print_weights = ["weight"]
edge_label = g.graph["edge_weight_name"]
edge_weight_units = g.graph["edge_weight_units"]
if edge_weight_units:
edge_label += " ({})".format(edge_weight_units)
header += " {}".format(edge_label)
header_line += " {}".format("-" * max([18, len(edge_label)]))
else:
print_weights = False
s = header + "\n" + header_line + "\n"
edges = list(g.edges(data=True))
# sort edges for consistent ordering
edges.sort(key=itemgetter(0, 1))
if print_weights:
for u, v, data in edges:
s += "{:4} {:4} {:12} {:.3e}\n".format(
u, v, str(data.get("to_jimage", (0, 0, 0))), data.get("weight", 0)
)
else:
for u, v, data in edges:
s += "{:4} {:4} {:12}\n".format(u, v, str(data.get("to_jimage", (0, 0, 0))))
return s
def __str__(self):
s = "Structure Graph"
s += "\nStructure: \n{}".format(self.structure.__str__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
def __repr__(self):
s = "Structure Graph"
s += "\nStructure: \n{}".format(self.structure.__repr__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
    def __len__(self):
        """
        :return: length of Structure / number of nodes in graph
            (the graph is expected to carry one node per site)
        """
        return len(self.structure)
def sort(self, key=None, reverse=False):
"""
Same as Structure.sort(), also remaps nodes in graph.
:param key:
:param reverse:
:return:
"""
old_structure = self.structure.copy()
# sort Structure
self.structure._sites = sorted(self.structure._sites, key=key, reverse=reverse)
# apply Structure ordering to graph
mapping = {idx: self.structure.index(site) for idx, site in enumerate(old_structure)}
self.graph = nx.relabel_nodes(self.graph, mapping, copy=True)
# normalize directions of edges
edges_to_remove = []
edges_to_add = []
for u, v, k, d in self.graph.edges(keys=True, data=True):
if v < u:
new_v, new_u, new_d = u, v, d.copy()
new_d["to_jimage"] = tuple(np.multiply(-1, d["to_jimage"]).astype(int))
edges_to_remove.append((u, v, k))
edges_to_add.append((new_u, new_v, new_d))
# add/delete marked edges
for edges_to_remove in edges_to_remove:
self.graph.remove_edge(*edges_to_remove)
for (u, v, d) in edges_to_add:
self.graph.add_edge(u, v, **d)
def __copy__(self):
return StructureGraph.from_dict(self.as_dict())
def __eq__(self, other):
"""
Two StructureGraphs are equal if they have equal Structures,
and have the same edges between Sites. Edge weights can be
different and StructureGraphs can still be considered equal.
:param other: StructureGraph
:return (bool):
"""
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
mapping = {tuple(site.frac_coords): self.structure.index(site) for site in other.structure}
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.frac_coords)])
edges = {(u, v, d["to_jimage"]) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {(u, v, d["to_jimage"]) for u, v, d in other_sorted.graph.edges(keys=False, data=True)}
return (edges == edges_other) and (self.structure == other_sorted.structure)
def diff(self, other, strict=True):
"""
Compares two StructureGraphs. Returns dict with
keys 'self', 'other', 'both' with edges that are
present in only one StructureGraph ('self' and
'other'), and edges that are present in both.
The Jaccard distance is a simple measure of the
dissimilarity between two StructureGraphs (ignoring
edge weights), and is defined by 1 - (size of the
intersection / size of the union) of the sets of
edges. This is returned with key 'dist'.
Important note: all node indices are in terms
of the StructureGraph this method is called
from, not the 'other' StructureGraph: there
is no guarantee the node indices will be the
same if the underlying Structures are ordered
differently.
:param other: StructureGraph
:param strict: if False, will compare bonds
from different Structures, with node indices
replaced by Species strings, will not count
number of occurrences of bonds
:return:
"""
if self.structure != other.structure and strict:
return ValueError("Meaningless to compare StructureGraphs if " "corresponding Structures are different.")
if strict:
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
mapping = {tuple(site.frac_coords): self.structure.index(site) for site in other.structure}
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.frac_coords)])
edges = {(u, v, d["to_jimage"]) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {(u, v, d["to_jimage"]) for u, v, d in other_sorted.graph.edges(keys=False, data=True)}
else:
edges = {
(str(self.structure[u].specie), str(self.structure[v].specie))
for u, v, d in self.graph.edges(keys=False, data=True)
}
edges_other = {
(str(other.structure[u].specie), str(other.structure[v].specie))
for u, v, d in other.graph.edges(keys=False, data=True)
}
if len(edges) == 0 and len(edges_other) == 0:
jaccard_dist = 0 # by definition
else:
jaccard_dist = 1 - len(edges.intersection(edges_other)) / len(edges.union(edges_other))
return {
"self": edges - edges_other,
"other": edges_other - edges,
"both": edges.intersection(edges_other),
"dist": jaccard_dist,
}
def get_subgraphs_as_molecules(self, use_weights=False):
"""
Retrieve subgraphs as molecules, useful for extracting
molecules from periodic crystals.
Will only return unique molecules, not any duplicates
present in the crystal (a duplicate defined as an
isomorphic subgraph).
:param use_weights (bool): If True, only treat subgraphs
as isomorphic if edges have the same weights. Typically,
this means molecules will need to have the same bond
lengths to be defined as duplicates, otherwise bond
lengths can differ. This is a fairly robust approach,
but will treat e.g. enantiomers as being duplicates.
:return: list of unique Molecules in Structure
"""
# creating a supercell is an easy way to extract
# molecules (and not, e.g., layers of a 2D crystal)
# without adding extra logic
if getattr(self, "_supercell_sg", None) is None:
self._supercell_sg = supercell_sg = self * (3, 3, 3)
# make undirected to find connected subgraphs
supercell_sg.graph = nx.Graph(supercell_sg.graph)
# find subgraphs
all_subgraphs = [supercell_sg.graph.subgraph(c) for c in nx.connected_components(supercell_sg.graph)]
# discount subgraphs that lie across *supercell* boundaries
# these will subgraphs representing crystals
molecule_subgraphs = []
for subgraph in all_subgraphs:
intersects_boundary = any(d["to_jimage"] != (0, 0, 0) for u, v, d in subgraph.edges(data=True))
if not intersects_boundary:
molecule_subgraphs.append(nx.MultiDiGraph(subgraph))
# add specie names to graph to be able to test for isomorphism
for subgraph in molecule_subgraphs:
for n in subgraph:
subgraph.add_node(n, specie=str(supercell_sg.structure[n].specie))
# now define how we test for isomorphism
def node_match(n1, n2):
return n1["specie"] == n2["specie"]
def edge_match(e1, e2):
if use_weights:
return e1["weight"] == e2["weight"]
return True
# prune duplicate subgraphs
unique_subgraphs = []
for subgraph in molecule_subgraphs:
already_present = [
nx.is_isomorphic(subgraph, g, node_match=node_match, edge_match=edge_match) for g in unique_subgraphs
]
if not any(already_present):
unique_subgraphs.append(subgraph)
# get Molecule objects for each subgraph
molecules = []
for subgraph in unique_subgraphs:
coords = [supercell_sg.structure[n].coords for n in subgraph.nodes()]
species = [supercell_sg.structure[n].specie for n in subgraph.nodes()]
molecule = Molecule(species, coords)
# shift so origin is at center of mass
molecule = molecule.get_centered_molecule()
molecules.append(molecule)
return molecules
class MolGraphSplitError(Exception):
    """
    Raised when an attempt to split a MoleculeGraph into two
    disconnected subgraphs fails (the graph remains connected).
    """
class MoleculeGraph(MSONable):
"""
This is a class for annotating a Molecule with
bond information, stored in the form of a graph. A "bond" does
not necessarily have to be a chemical bond, but can store any
kind of information that connects two Sites.
"""
def __init__(self, molecule, graph_data=None):
"""
If constructing this class manually, use the `with_empty_graph`
method or `with_local_env_strategy` method (using an algorithm
provided by the `local_env` module, such as O'Keeffe).
This class that contains connection information:
relationships between sites represented by a Graph structure,
and an associated structure object.
This class uses the NetworkX package to store and operate
on the graph itself, but contains a lot of helper methods
to make associating a graph with a given molecule easier.
Use cases for this include storing bonding information,
NMR J-couplings, Heisenberg exchange parameters, etc.
:param molecule: Molecule object
:param graph_data: dict containing graph information in
dict format (not intended to be constructed manually,
see as_dict method for format)
"""
if isinstance(molecule, MoleculeGraph):
# just make a copy from input
graph_data = molecule.as_dict()["graphs"]
self.molecule = molecule
self.graph = nx.readwrite.json_graph.adjacency_graph(graph_data)
# tidy up edge attr dicts, reading to/from json duplicates
# information
for u, v, k, d in self.graph.edges(keys=True, data=True):
if "id" in d:
del d["id"]
if "key" in d:
del d["key"]
# ensure images are tuples (conversion to lists happens
# when serializing back from json), it's important images
# are hashable/immutable
if "to_jimage" in d:
d["to_jimage"] = tuple(d["to_jimage"])
if "from_jimage" in d:
d["from_jimage"] = tuple(d["from_jimage"])
self.set_node_attributes()
@classmethod
def with_empty_graph(cls, molecule, name="bonds", edge_weight_name=None, edge_weight_units=None):
"""
Constructor for MoleculeGraph, returns a MoleculeGraph
object with an empty graph (no edges, only nodes defined
that correspond to Sites in Molecule).
:param molecule (Molecule):
:param name (str): name of graph, e.g. "bonds"
:param edge_weight_name (str): name of edge weights,
e.g. "bond_length" or "exchange_constant"
:param edge_weight_units (str): name of edge weight units
e.g. "Å" or "eV"
:return (MoleculeGraph):
"""
if edge_weight_name and (edge_weight_units is None):
raise ValueError(
"Please specify units associated "
"with your edge weights. Can be "
"empty string if arbitrary or "
"dimensionless."
)
# construct graph with one node per site
# graph attributes don't change behavior of graph,
# they're just for book-keeping
graph = nx.MultiDiGraph(
edge_weight_name=edge_weight_name,
edge_weight_units=edge_weight_units,
name=name,
)
graph.add_nodes_from(range(len(molecule)))
graph_data = json_graph.adjacency_data(graph)
return cls(molecule, graph_data=graph_data)
@staticmethod
def with_edges(molecule, edges):
"""
Constructor for MoleculeGraph, using pre-existing or pre-defined edges
with optional edge parameters.
:param molecule: Molecule object
:param edges: dict representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. Props should be None if no
additional properties are to be specified.
:return: mg, a MoleculeGraph
"""
mg = MoleculeGraph.with_empty_graph(molecule, name="bonds", edge_weight_name="weight", edge_weight_units="")
for edge, props in edges.items():
try:
from_index = edge[0]
to_index = edge[1]
except TypeError:
raise ValueError("Edges must be given as (from_index, to_index)" "tuples")
if props is not None:
if "weight" in props.keys():
weight = props["weight"]
del props["weight"]
else:
weight = None
if len(props.items()) == 0:
props = None
else:
weight = None
nodes = mg.graph.nodes
if not (from_index in nodes and to_index in nodes):
raise ValueError(
"Edges cannot be added if nodes are not" " present in the graph. Please check your" " indices."
)
mg.add_edge(from_index, to_index, weight=weight, edge_properties=props)
mg.set_node_attributes()
return mg
    @staticmethod
    def with_local_env_strategy(molecule, strategy):
        """
        Constructor for MoleculeGraph, using a strategy
        from :Class: `pymatgen.analysis.local_env`.

        :param molecule: Molecule object
        :param strategy: an instance of a
            :Class: `pymatgen.analysis.local_env.NearNeighbors` object
        :raises ValueError: if the strategy does not support molecules
        :return: mg, a MoleculeGraph
        """
        if not strategy.molecules_allowed:
            raise ValueError(
                "Chosen strategy is not designed for use with molecules! " "Please choose another strategy."
            )
        extend_structure = strategy.extend_structure_molecules
        mg = MoleculeGraph.with_empty_graph(molecule, name="bonds", edge_weight_name="weight", edge_weight_units="")
        # NearNeighbor classes only (generally) work with structures
        # molecules have to be boxed first
        coords = molecule.cart_coords
        if extend_structure:
            # box dimensions: molecular extent plus 100 Å padding per axis,
            # large enough that periodic images don't interact
            a = max(coords[:, 0]) - min(coords[:, 0]) + 100
            b = max(coords[:, 1]) - min(coords[:, 1]) + 100
            c = max(coords[:, 2]) - min(coords[:, 2]) + 100
            structure = molecule.get_boxed_structure(a, b, c, no_cross=True, reorder=False)
        else:
            structure = None
        for n in range(len(molecule)):
            # query the strategy against the boxed structure when one was
            # built, otherwise against the molecule directly
            if structure is None:
                neighbors = strategy.get_nn_info(molecule, n)
            else:
                neighbors = strategy.get_nn_info(structure, n)
            for neighbor in neighbors:
                # all bonds in molecules should not cross
                # (artificial) periodic boundaries
                if not np.array_equal(neighbor["image"], [0, 0, 0]):
                    continue
                # normalize edge direction: smaller site index first
                if n > neighbor["site_index"]:
                    from_index = neighbor["site_index"]
                    to_index = n
                else:
                    from_index = n
                    to_index = neighbor["site_index"]
                # warn_duplicates=False: duplicates are added here and the
                # parallel edges (key != 0) are pruned just below
                mg.add_edge(
                    from_index=from_index,
                    to_index=to_index,
                    weight=neighbor["weight"],
                    warn_duplicates=False,
                )
        duplicates = []
        for edge in mg.graph.edges:
            # edge is (u, v, key); key != 0 marks a parallel duplicate
            if edge[2] != 0:
                duplicates.append(edge)
        for duplicate in duplicates:
            mg.graph.remove_edge(duplicate[0], duplicate[1], key=duplicate[2])
        mg.set_node_attributes()
        return mg
@property
def name(self):
"""
:return: Name of graph
"""
return self.graph.graph["name"]
@property
def edge_weight_name(self):
"""
:return: Name of the edge weight property of graph
"""
return self.graph.graph["edge_weight_name"]
@property
def edge_weight_unit(self):
"""
:return: Units of the edge weight property of graph
"""
return self.graph.graph["edge_weight_units"]
def add_edge(
self,
from_index,
to_index,
weight=None,
warn_duplicates=True,
edge_properties=None,
):
"""
Add edge to graph.
Since physically a 'bond' (or other connection
between sites) doesn't have a direction, from_index,
from_jimage can be swapped with to_index, to_jimage.
However, images will always always be shifted so that
from_index < to_index and from_jimage becomes (0, 0, 0).
:param from_index: index of site connecting from
:param to_index: index of site connecting to
:param weight (float): e.g. bond length
:param warn_duplicates (bool): if True, will warn if
trying to add duplicate edges (duplicate edges will not
be added in either case)
:param edge_properties (dict): any other information to
store on graph edges, similar to Structure's site_properties
:return:
"""
# this is not necessary for the class to work, but
# just makes it neater
if to_index < from_index:
to_index, from_index = from_index, to_index
# sanitize types
from_index, to_index = int(from_index), int(to_index)
# check we're not trying to add a duplicate edge
# there should only ever be at most one edge
# between two sites
existing_edge_data = self.graph.get_edge_data(from_index, to_index)
if existing_edge_data and warn_duplicates:
warnings.warn(
"Trying to add an edge that already exists from " "site {} to site {}.".format(from_index, to_index)
)
return
# generic container for additional edge properties,
# similar to site properties
edge_properties = edge_properties or {}
if weight:
self.graph.add_edge(from_index, to_index, weight=weight, **edge_properties)
else:
self.graph.add_edge(from_index, to_index, **edge_properties)
def insert_node(
self,
i,
species,
coords,
validate_proximity=False,
site_properties=None,
edges=None,
):
"""
A wrapper around Molecule.insert(), which also incorporates the new
site into the MoleculeGraph.
:param i: Index at which to insert the new site
:param species: Species for the new site
:param coords: 3x1 array representing coordinates of the new site
:param validate_proximity: For Molecule.insert(); if True (default
False), distance will be checked to ensure that site can be safely
added.
:param site_properties: Site properties for Molecule
:param edges: List of dicts representing edges to be added to the
MoleculeGraph. These edges must include the index of the new site i,
and all indices used for these edges should reflect the
MoleculeGraph AFTER the insertion, NOT before. Each dict should at
least have a "to_index" and "from_index" key, and can also have a
"weight" and a "properties" key.
:return:
"""
self.molecule.insert(
i,
species,
coords,
validate_proximity=validate_proximity,
properties=site_properties,
)
mapping = {}
for j in range(len(self.molecule) - 1):
if j < i:
mapping[j] = j
else:
mapping[j] = j + 1
nx.relabel_nodes(self.graph, mapping, copy=False)
self.graph.add_node(i)
self.set_node_attributes()
if edges is not None:
for edge in edges:
try:
self.add_edge(
edge["from_index"],
edge["to_index"],
weight=edge.get("weight", None),
edge_properties=edge.get("properties", None),
)
except KeyError:
raise RuntimeError("Some edges are invalid.")
def set_node_attributes(self):
"""
Replicates molecule site properties (specie, coords, etc.) in the
MoleculeGraph.
:return:
"""
species = {}
coords = {}
properties = {}
for node in self.graph.nodes():
species[node] = self.molecule[node].specie.symbol
coords[node] = self.molecule[node].coords
properties[node] = self.molecule[node].properties
nx.set_node_attributes(self.graph, species, "specie")
nx.set_node_attributes(self.graph, coords, "coords")
nx.set_node_attributes(self.graph, properties, "properties")
def alter_edge(self, from_index, to_index, new_weight=None, new_edge_properties=None):
"""
Alters either the weight or the edge_properties of
an edge in the MoleculeGraph.
:param from_index: int
:param to_index: int
:param new_weight: alter_edge does not require
that weight be altered. As such, by default, this
is None. If weight is to be changed, it should be a
float.
:param new_edge_properties: alter_edge does not require
that edge_properties be altered. As such, by default,
this is None. If any edge properties are to be changed,
it should be a dictionary of edge properties to be changed.
:return:
"""
existing_edge = self.graph.get_edge_data(from_index, to_index)
# ensure that edge exists before attempting to change it
if not existing_edge:
raise ValueError(
"Edge between {} and {} cannot be altered;\
no edge exists between those sites.".format(
from_index, to_index
)
)
# Third index should always be 0 because there should only be one edge between any two nodes
if new_weight is not None:
self.graph[from_index][to_index][0]["weight"] = new_weight
if new_edge_properties is not None:
for prop in list(new_edge_properties.keys()):
self.graph[from_index][to_index][0][prop] = new_edge_properties[prop]
def break_edge(self, from_index, to_index, allow_reverse=False):
"""
Remove an edge from the MoleculeGraph
:param from_index: int
:param to_index: int
:param allow_reverse: If allow_reverse is True, then break_edge will
attempt to break both (from_index, to_index) and, failing that,
will attempt to break (to_index, from_index).
:return:
"""
# ensure that edge exists before attempting to remove it
existing_edge = self.graph.get_edge_data(from_index, to_index)
existing_reverse = None
if existing_edge:
self.graph.remove_edge(from_index, to_index)
else:
if allow_reverse:
existing_reverse = self.graph.get_edge_data(to_index, from_index)
if existing_reverse:
self.graph.remove_edge(to_index, from_index)
else:
raise ValueError(
"Edge cannot be broken between {} and {};\
no edge exists between those sites.".format(
from_index, to_index
)
)
def remove_nodes(self, indices):
"""
A wrapper for Molecule.remove_sites().
:param indices: list of indices in the current Molecule (and graph) to
be removed.
:return:
"""
self.molecule.remove_sites(indices)
self.graph.remove_nodes_from(indices)
mapping = {}
for correct, current in enumerate(sorted(self.graph.nodes)):
mapping[current] = correct
nx.relabel_nodes(self.graph, mapping, copy=False)
self.set_node_attributes()
def get_disconnected_fragments(self):
"""
Determine if the MoleculeGraph is connected. If it is not, separate the
MoleculeGraph into different MoleculeGraphs, where each resulting
MoleculeGraph is a disconnected subgraph of the original.
Currently, this function naively assigns the charge
of the total molecule to a single submolecule. A
later effort will be to actually accurately assign
charge.
NOTE: This function does not modify the original
MoleculeGraph. It creates a copy, modifies that, and
returns two or more new MoleculeGraph objects.
:return: list of MoleculeGraphs
"""
if nx.is_weakly_connected(self.graph):
return [copy.deepcopy(self)]
original = copy.deepcopy(self)
sub_mols = list()
# Had to use nx.weakly_connected_components because of deprecation
# of nx.weakly_connected_component_subgraphs
subgraphs = [original.graph.subgraph(c) for c in nx.weakly_connected_components(original.graph)]
for subg in subgraphs:
nodes = sorted(list(subg.nodes))
# Molecule indices are essentially list-based, so node indices
# must be remapped, incrementing from 0
mapping = {}
for i, n in enumerate(nodes):
mapping[n] = i
# just give charge to whatever subgraph has node with index 0
# TODO: actually figure out how to distribute charge
if 0 in nodes:
charge = self.molecule.charge
else:
charge = 0
# relabel nodes in graph to match mapping
new_graph = nx.relabel_nodes(subg, mapping)
species = nx.get_node_attributes(new_graph, "specie")
coords = nx.get_node_attributes(new_graph, "coords")
raw_props = nx.get_node_attributes(new_graph, "properties")
properties = {}
for prop_set in raw_props.values():
for prop in prop_set.keys():
if prop in properties:
properties[prop].append(prop_set[prop])
else:
properties[prop] = [prop_set[prop]]
# Site properties must be present for all atoms in the molecule
# in order to be used for Molecule instantiation
for k, v in properties.items():
if len(v) != len(species):
del properties[k]
new_mol = Molecule(species, coords, charge=charge, site_properties=properties)
graph_data = json_graph.adjacency_data(new_graph)
# create new MoleculeGraph
sub_mols.append(MoleculeGraph(new_mol, graph_data=graph_data))
return sub_mols
    def split_molecule_subgraphs(self, bonds, allow_reverse=False, alterations=None):
        """
        Split MoleculeGraph into two or more MoleculeGraphs by
        breaking a set of bonds. This function uses
        MoleculeGraph.break_edge repeatedly to create
        disjoint graphs (two or more separate molecules).
        This function does not only alter the graph
        information, but also changes the underlying
        Molecules.
        If the bonds parameter does not include sufficient
        bonds to separate two molecule fragments, then this
        function will fail.
        Currently, this function naively assigns the charge
        of the total molecule to a single submolecule. A
        later effort will be to actually accurately assign
        charge.
        NOTE: This function does not modify the original
        MoleculeGraph. It creates a copy, modifies that, and
        returns two or more new MoleculeGraph objects.
        NOTE(review): entries in `alterations` that carry a "weight"
        key ARE mutated in place (the key is deleted below) — confirm
        callers do not reuse the dict.

        :param bonds: list of tuples (from_index, to_index)
            representing bonds to be broken to split the MoleculeGraph.
        :param alterations: a dict {(from_index, to_index): alt},
            where alt is a dictionary including weight and/or edge
            properties to be changed following the split.
        :param allow_reverse: If allow_reverse is True, then break_edge will
            attempt to break both (from_index, to_index) and, failing that,
            will attempt to break (to_index, from_index).
        :raises MolGraphSplitError: if the graph is still connected after
            breaking all requested bonds
        :return: list of MoleculeGraphs
        """
        self.set_node_attributes()
        original = copy.deepcopy(self)
        for bond in bonds:
            original.break_edge(bond[0], bond[1], allow_reverse=allow_reverse)
        if nx.is_weakly_connected(original.graph):
            raise MolGraphSplitError(
                "Cannot split molecule; \
                MoleculeGraph is still connected."
            )
        # alter any bonds before partition, to avoid remapping
        if alterations is not None:
            for (u, v) in alterations.keys():
                if "weight" in alterations[(u, v)]:
                    weight = alterations[(u, v)]["weight"]
                    del alterations[(u, v)]["weight"]
                    edge_properties = alterations[(u, v)] if len(alterations[(u, v)]) != 0 else None
                    original.alter_edge(u, v, new_weight=weight, new_edge_properties=edge_properties)
                else:
                    original.alter_edge(u, v, new_edge_properties=alterations[(u, v)])
        return original.get_disconnected_fragments()
def build_unique_fragments(self):
    """
    Find all possible fragment combinations of the MoleculeGraph (in other
    words, all connected induced subgraphs).

    :return: dict mapping "<alphabetical formula> E<edge count>" keys to
        lists of unique MoleculeGraph fragments.
    """
    self.set_node_attributes()

    graph = self.graph.to_undirected()

    # find all possible fragments, aka connected induced subgraphs
    frag_dict = {}
    for ii in range(1, len(self.molecule)):
        for combination in combinations(graph.nodes, ii):
            # Sorted composition string cheaply buckets candidates before
            # the expensive isomorphism checks below.
            comp = "".join(sorted(str(self.molecule[idx].specie) for idx in combination))
            subgraph = nx.subgraph(graph, combination)
            if nx.is_connected(subgraph):
                mykey = comp + str(len(subgraph.edges()))
                if mykey not in frag_dict:
                    frag_dict[mykey] = [copy.deepcopy(subgraph)]
                else:
                    frag_dict[mykey].append(copy.deepcopy(subgraph))

    # narrow to all unique fragments using graph isomorphism
    unique_frag_dict = {}
    for key in frag_dict:
        unique_frags = []
        for frag in frag_dict[key]:
            found = False
            for f in unique_frags:
                if _isomorphic(frag, f):
                    found = True
                    break
            if not found:
                unique_frags.append(frag)
        unique_frag_dict[key] = copy.deepcopy(unique_frags)

    # convert back to molecule graphs
    unique_mol_graph_dict = {}
    for key in unique_frag_dict:
        unique_mol_graph_list = []
        for fragment in unique_frag_dict[key]:
            mapping = {e: i for i, e in enumerate(sorted(fragment.nodes))}
            remapped = nx.relabel_nodes(fragment, mapping)

            species = nx.get_node_attributes(remapped, "specie")
            coords = nx.get_node_attributes(remapped, "coords")

            edges = {}
            # Read edge data from the *remapped* graph (relabel_nodes
            # preserves edge attributes). The original queried `fragment`
            # with remapped indices, which returned None and silently
            # dropped all edge weights/properties whenever the fragment's
            # node labels were not already 0..n-1. The loop variable is
            # also renamed so it no longer shadows the outer `key`.
            for from_index, to_index, edge_key in remapped.edges:
                edge_props = remapped.get_edge_data(from_index, to_index, key=edge_key)
                edges[(from_index, to_index)] = edge_props

            unique_mol_graph_list.append(
                self.with_edges(
                    Molecule(species=species, coords=coords, charge=self.molecule.charge),
                    edges,
                )
            )

        frag_key = (
            str(unique_mol_graph_list[0].molecule.composition.alphabetical_formula)
            + " E"
            + str(len(unique_mol_graph_list[0].graph.edges()))
        )
        unique_mol_graph_dict[frag_key] = copy.deepcopy(unique_mol_graph_list)
    return unique_mol_graph_dict
def substitute_group(
    self,
    index,
    func_grp,
    strategy,
    bond_order=1,
    graph_dict=None,
    strategy_params=None,
):
    """
    Builds off of Molecule.substitute to replace an atom in self.molecule
    with a functional group. This method also amends self.graph to
    incorporate the new functional group.

    NOTE: using a MoleculeGraph will generally produce a different graph
    compared with using a Molecule or str (when not using graph_dict).

    :param index: Index of atom to substitute.
    :param func_grp: Substituent molecule. There are three options:
        1. Providing an actual molecule as the input. The first atom
           must be a DummySpecies X, indicating the position of
           nearest neighbor. The second atom must be the next
           nearest atom. For example, for a methyl group
           substitution, func_grp should be X-CH3, where X is the
           first site and C is the second site. What the code will
           do is to remove the index site, and connect the nearest
           neighbor to the C atom in CH3. The X-C bond indicates the
           directionality to connect the atoms.
        2. A string name. The molecule will be obtained from the
           relevant template in func_groups.json.
        3. A MoleculeGraph object.
    :param strategy: Class from pymatgen.analysis.local_env.
    :param bond_order: A specified bond order to calculate the bond
        length between the attached functional group and the nearest
        neighbor site. Defaults to 1.
    :param graph_dict: Dictionary representing the bonds of the functional
        group (format: {(u, v): props}, where props is a dictionary of
        properties, including weight). If None, then the algorithm
        will attempt to automatically determine bonds using one of
        a list of strategies defined in pymatgen.analysis.local_env.
    :param strategy_params: dictionary of keyword arguments for strategy.
        If None, default parameters will be used.
    :return:
    """

    def map_indices(grp):
        # Map functional-group atom indices to the indices those atoms
        # occupy at the end of self.molecule after substitution.
        grp_map = {}
        # Subtracting 1 because the dummy atom X should not count
        atoms = len(grp) - 1
        offset = len(self.molecule) - atoms
        for i in range(atoms):
            grp_map[i] = i + offset
        return grp_map

    # Work is simplified if a graph is already in place
    if isinstance(func_grp, MoleculeGraph):
        self.molecule.substitute(index, func_grp.molecule, bond_order=bond_order)
        mapping = map_indices(func_grp.molecule)
        for (u, v) in list(func_grp.graph.edges()):
            # Copy the props so the caller's MoleculeGraph is not mutated
            # (the original deleted "weight" from the live edge dict).
            edge_props = dict(func_grp.graph.get_edge_data(u, v)[0])
            weight = edge_props.pop("weight", None)
            self.add_edge(mapping[u], mapping[v], weight=weight, edge_properties=edge_props)
    else:
        if isinstance(func_grp, Molecule):
            func_grp = copy.deepcopy(func_grp)
        else:
            try:
                func_grp = copy.deepcopy(FunctionalGroups[func_grp])
            except Exception:
                raise RuntimeError("Can't find functional group in list. " "Provide explicit coordinate instead")
        self.molecule.substitute(index, func_grp, bond_order=bond_order)
        mapping = map_indices(func_grp)
        # Remove dummy atom "X"
        func_grp.remove_species("X")
        if graph_dict is not None:
            for (u, v) in graph_dict.keys():
                # Copy so the caller's dict is left intact, and reset the
                # weight for every edge: the original left ``weight``
                # unbound (NameError) for a first weightless edge and
                # carried a stale value over to subsequent edges.
                edge_props = dict(graph_dict[(u, v)])
                weight = edge_props.pop("weight", None)
                self.add_edge(
                    mapping[u],
                    mapping[v],
                    weight=weight,
                    edge_properties=edge_props,
                )
        else:
            if strategy_params is None:
                strategy_params = {}
            strat = strategy(**strategy_params)
            graph = self.with_local_env_strategy(func_grp, strat)
            for (u, v) in list(graph.graph.edges()):
                edge_props = dict(graph.graph.get_edge_data(u, v)[0])
                weight = edge_props.pop("weight", None)
                if 0 not in list(graph.graph.nodes()):
                    # If graph indices have different indexing
                    u, v = (u - 1), (v - 1)
                self.add_edge(
                    mapping[u],
                    mapping[v],
                    weight=weight,
                    edge_properties=edge_props,
                )
def replace_group(
    self,
    index,
    func_grp,
    strategy,
    bond_order=1,
    graph_dict=None,
    strategy_params=None,
):
    """
    Builds off of Molecule.substitute and MoleculeGraph.substitute_group
    to replace a functional group in self.molecule with a functional group.
    This method also amends self.graph to incorporate the new functional
    group.

    TODO: Figure out how to replace into a ring structure.

    :param index: Index of atom to substitute.
    :param func_grp: Substituent molecule. There are three options:
        1. Providing an actual molecule as the input. The first atom
           must be a DummySpecies X, indicating the position of
           nearest neighbor. The second atom must be the next
           nearest atom. For example, for a methyl group
           substitution, func_grp should be X-CH3, where X is the
           first site and C is the second site. What the code will
           do is to remove the index site, and connect the nearest
           neighbor to the C atom in CH3. The X-C bond indicates the
           directionality to connect the atoms.
        2. A string name. The molecule will be obtained from the
           relevant template in func_groups.json.
        3. A MoleculeGraph object.
    :param strategy: Class from pymatgen.analysis.local_env.
    :param bond_order: A specified bond order to calculate the bond
        length between the attached functional group and the nearest
        neighbor site. Defaults to 1.
    :param graph_dict: Dictionary representing the bonds of the functional
        group (format: {(u, v): props}, where props is a dictionary of
        properties, including weight). If None, then the algorithm
        will attempt to automatically determine bonds using one of
        a list of strategies defined in pymatgen.analysis.local_env.
    :param strategy_params: dictionary of keyword arguments for strategy.
        If None, default parameters will be used.
    :raises RuntimeError: if the atom at ``index`` is part of a ring.
    :return:
    """
    self.set_node_attributes()
    neighbors = self.get_connected_sites(index)

    # If the atom at index is terminal
    if len(neighbors) == 1:
        self.substitute_group(
            index,
            func_grp,
            strategy,
            bond_order=bond_order,
            graph_dict=graph_dict,
            strategy_params=strategy_params,
        )
    else:
        rings = self.find_rings(including=[index])
        if len(rings) != 0:
            # Fixed message: the original concatenated the string
            # fragments without separating spaces, producing
            # "replacementcannot ... ringstructure.".
            raise RuntimeError(
                "Currently functional group replacement "
                "cannot occur at an atom within a ring "
                "structure."
            )

        # Remove ``index`` and measure the size of each branch hanging
        # off its neighbors; keep the largest branch and replace the rest.
        sizes = {}
        disconnected = self.graph.to_undirected()
        disconnected.remove_node(index)
        for neighbor in neighbors:
            sizes[neighbor[2]] = len(nx.descendants(disconnected, neighbor[2]))

        keep = max(sizes, key=sizes.get)
        to_remove = {i for i in sizes if i != keep}

        self.remove_nodes(list(to_remove))
        self.substitute_group(
            index,
            func_grp,
            strategy,
            bond_order=bond_order,
            graph_dict=graph_dict,
            strategy_params=strategy_params,
        )
def find_rings(self, including=None):
    """
    Find ring structures in the MoleculeGraph.

    :param including: list of site indices. If including is not None,
        then find_rings will only return those rings including the
        specified sites. By default, this parameter is None, and all
        rings will be returned.
    :return: list of cycles, where each cycle is represented as a list
        of edge tuples (u, v) tracing the ring. (The method has always
        returned a list; an earlier docstring incorrectly described a
        dict.) Empty list if no cycle includes a requested index.
    """
    # Copies self.graph such that all edges (u, v) matched by edges (v, u)
    undirected = self.graph.to_undirected()
    directed = undirected.to_directed()

    cycles_nodes = []
    cycles_edges = []

    # Remove all two-edge cycles
    all_cycles = [c for c in nx.simple_cycles(directed) if len(c) > 2]

    # Using to_directed() will mean that each cycle always appears twice
    # So, we must also remove duplicates. A set of canonical (sorted)
    # node tuples makes the membership test O(1) instead of scanning a
    # list of sorted copies.
    seen = set()
    unique_cycles = []
    for cycle in all_cycles:
        canonical = tuple(sorted(cycle))
        if canonical not in seen:
            seen.add(canonical)
            unique_cycles.append(cycle)

    if including is None:
        cycles_nodes = unique_cycles
    else:
        for i in including:
            for cycle in unique_cycles:
                if i in cycle and cycle not in cycles_nodes:
                    cycles_nodes.append(cycle)

    # Convert each node cycle into its list of edges, closing the ring
    # by pairing each node with its predecessor (index -1 wraps around).
    for cycle in cycles_nodes:
        edges = []
        for i, e in enumerate(cycle):
            edges.append((cycle[i - 1], e))
        cycles_edges.append(edges)

    return cycles_edges
def get_connected_sites(self, n):
    """
    Returns a named tuple of neighbors of site n:
    periodic_site, jimage, index, weight.
    Index is the index of the corresponding site
    in the original structure; weight can be
    None if not defined.

    :param n: index of Site in Molecule
    :return: list of ConnectedSite tuples, sorted by closest first
    """
    connected_sites = set()

    incident = list(self.graph.out_edges(n, data=True)) + list(self.graph.in_edges(n, data=True))
    for u, v, d in incident:
        # The neighbor is whichever endpoint of the edge is not n.
        neighbor = u if v == n else v
        weight = d.get("weight", None)
        site = self.molecule[neighbor]
        dist = self.molecule[n].distance(site)
        connected_sites.add(
            ConnectedSite(site=site, jimage=(0, 0, 0), index=neighbor, weight=weight, dist=dist)
        )

    # the set de-duplicates; return closest sites first
    return sorted(connected_sites, key=lambda cs: cs.dist)
def get_coordination_of_site(self, n):
    """
    Returns the number of neighbors of site n.
    In graph terms, simply returns degree of node corresponding to
    site n, corrected for self-loop edges.

    :param n: index of site
    :return (int):
    """
    self_loops = sum(1 for u, v in self.graph.edges(n) if u == v)
    return self.graph.degree(n) - self_loops
def draw_graph_to_file(
    self,
    filename="graph",
    diff=None,
    hide_unconnected_nodes=False,
    hide_image_edges=True,
    edge_colors=False,
    node_labels=False,
    weight_labels=False,
    image_labels=False,
    color_scheme="VESTA",
    keep_dot=False,
    algo="fdp",
):
    """
    Draws graph using GraphViz.

    The networkx graph object itself can also be drawn
    with networkx's in-built graph drawing methods, but
    note that this might give misleading results for
    multigraphs (edges are super-imposed on each other).

    If visualization is difficult to interpret,
    `hide_image_edges` can help, especially in larger
    graphs.

    :param filename: filename to output, will detect filetype
        from extension (any graphviz filetype supported, such as
        pdf or png)
    :param diff (StructureGraph): an additional graph to
        compare with, will color edges red that do not exist in diff
        and edges green that are in diff graph but not in the
        reference graph
    :param hide_unconnected_nodes: if True, hide unconnected
        nodes
    :param hide_image_edges: if True, do not draw edges that
        go through periodic boundaries
    :param edge_colors (bool): if True, use node colors to
        color edges
    :param node_labels (bool): if True, label nodes with
        species and site index
    :param weight_labels (bool): if True, label edges with
        weights
    :param image_labels (bool): if True, label edges with
        their periodic images (usually only used for debugging,
        edges to periodic images always appear as dashed lines)
    :param color_scheme (str): "VESTA" or "JMOL"
    :param keep_dot (bool): keep GraphViz .dot file for later
        visualization
    :param algo: any graphviz algo, "neato" (for simple graphs)
        or "fdp" (for more crowded graphs) usually give good outputs
    :return:
    """
    if not which(algo):
        raise RuntimeError("StructureGraph graph drawing requires " "GraphViz binaries to be in the path.")

    # Developer note: NetworkX also has methods for drawing
    # graphs using matplotlib, these also work here. However,
    # a dedicated tool like GraphViz allows for much easier
    # control over graph appearance and also correctly displays
    # multi-graphs (matplotlib can superimpose multiple edges).
    g = self.graph.copy()
    g.graph = {"nodesep": 10.0, "dpi": 300, "overlap": "false"}

    # add display options for nodes
    for n in g.nodes():
        # get label by species name
        label = "{}({})".format(str(self.molecule[n].specie), n) if node_labels else ""

        # use standard color scheme for nodes
        c = EL_COLORS[color_scheme].get(str(self.molecule[n].specie.symbol), [0, 0, 0])

        # get contrasting font color
        # magic numbers account for perceived luminescence
        # https://stackoverflow.com/questions/1855884/determine-font-color-based-on-background-color
        fontcolor = "#000000" if 1 - (c[0] * 0.299 + c[1] * 0.587 + c[2] * 0.114) / 255 < 0.5 else "#ffffff"

        # convert color to hex string
        color = "#{:02x}{:02x}{:02x}".format(c[0], c[1], c[2])

        g.add_node(
            n,
            fillcolor=color,
            fontcolor=fontcolor,
            label=label,
            fontname="Helvetica-bold",
            style="filled",
            shape="circle",
        )

    edges_to_delete = []

    # add display options for edges
    for u, v, k, d in g.edges(keys=True, data=True):
        # retrieve from/to images, set as origin if not defined
        # (fixed: the original tested for the misspelled key "to_image"
        # while reading "to_jimage", so this branch never triggered and
        # periodic-image edges were never styled or hidden)
        if "to_jimage" in d:
            to_image = d["to_jimage"]
        else:
            to_image = (0, 0, 0)

        # set edge style
        d["style"] = "solid"
        if to_image != (0, 0, 0):
            d["style"] = "dashed"
            if hide_image_edges:
                edges_to_delete.append((u, v, k))

        # don't show edge directions
        d["arrowhead"] = "none"

        # only add labels for images that are not the origin
        if image_labels:
            d["headlabel"] = "" if to_image == (0, 0, 0) else "to {}".format((to_image))
            d["arrowhead"] = "normal" if d["headlabel"] else "none"

        # optionally color edges using node colors
        # (fixed: Graph.node was removed in NetworkX 2.4; the .nodes view
        # is the supported spelling, consistent with g.edges[...] below)
        color_u = g.nodes[u]["fillcolor"]
        color_v = g.nodes[v]["fillcolor"]
        d["color_uv"] = "{};0.5:{};0.5".format(color_u, color_v) if edge_colors else "#000000"

        # optionally add weights to graph
        if weight_labels:
            units = g.graph.get("edge_weight_units", "")
            if d.get("weight"):
                d["label"] = "{:.2f} {}".format(d["weight"], units)

        # update edge with our new style attributes
        g.edges[u, v, k].update(d)

    # optionally remove periodic image edges,
    # these can be confusing due to periodic boundaries
    if hide_image_edges:
        for edge_to_delete in edges_to_delete:
            g.remove_edge(*edge_to_delete)

    # optionally hide unconnected nodes,
    # these can appear when removing periodic edges
    if hide_unconnected_nodes:
        # (fixed: g.degree() yields (node, degree) pairs; the original
        # iterated the pairs themselves and then indexed with them,
        # which does not select the intended nodes)
        g = g.subgraph([n for n, degree in g.degree() if degree != 0])

    # optionally highlight differences with another graph
    if diff:
        diff = self.diff(diff, strict=True)
        green_edges = []
        red_edges = []
        for u, v, k, d in g.edges(keys=True, data=True):
            if (u, v, d["to_jimage"]) in diff["self"]:
                # edge has been deleted
                red_edges.append((u, v, k))
            elif (u, v, d["to_jimage"]) in diff["other"]:
                # edge has been added
                green_edges.append((u, v, k))
        for u, v, k in green_edges:
            g.edges[u, v, k].update({"color_uv": "#00ff00"})
        for u, v, k in red_edges:
            g.edges[u, v, k].update({"color_uv": "#ff0000"})

    basename, extension = os.path.splitext(filename)
    extension = extension[1:]

    write_dot(g, basename + ".dot")

    with open(filename, "w") as f:
        args = [algo, "-T", extension, basename + ".dot"]
        rs = subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)
        rs.communicate()
        if rs.returncode != 0:
            raise RuntimeError("{} exited with return code {}.".format(algo, rs.returncode))

    if not keep_dot:
        os.remove(basename + ".dot")
def as_dict(self):
    """
    As in :Class: `pymatgen.core.Molecule` except with using
    `to_dict_of_dicts` from NetworkX to store graph information.
    """
    return {
        "@module": self.__class__.__module__,
        "@class": self.__class__.__name__,
        "molecule": self.molecule.as_dict(),
        "graphs": json_graph.adjacency_data(self.graph),
    }
@classmethod
def from_dict(cls, d):
    """
    As in :Class: `pymatgen.core.Molecule` except
    restoring graphs using `from_dict_of_dicts`
    from NetworkX to restore graph information.
    """
    molecule = Molecule.from_dict(d["molecule"])
    graphs = d["graphs"]
    return cls(molecule, graphs)
@classmethod
def _edges_to_string(cls, g):
    """Render the edges of graph ``g`` as a plain-text table string."""
    header = "from to to_image "
    header_line = "---- ---- ------------"

    weight_name = g.graph["edge_weight_name"]
    show_weights = bool(weight_name)
    if show_weights:
        edge_label = g.graph["edge_weight_name"]
        units = g.graph["edge_weight_units"]
        if units:
            edge_label += " ({})".format(units)
        header += " {}".format(edge_label)
        header_line += " " + "-" * max(18, len(edge_label))

    lines = [header, header_line]

    # sort edges for consistent ordering
    ordered = sorted(g.edges(data=True), key=lambda e: (e[0], e[1]))
    for u, v, data in ordered:
        jimage = str(data.get("to_jimage", (0, 0, 0)))
        if show_weights:
            lines.append("{:4} {:4} {:12} {:.3e}".format(u, v, jimage, data.get("weight", 0)))
        else:
            lines.append("{:4} {:4} {:12}".format(u, v, jimage))

    return "\n".join(lines) + "\n"
def __str__(self):
    """Human-readable summary: molecule, graph name, and edge table."""
    return (
        "Molecule Graph"
        + "\nMolecule: \n{}".format(str(self.molecule))
        + "\nGraph: {}\n".format(self.name)
        + self._edges_to_string(self.graph)
    )
def __repr__(self):
    """Developer-oriented summary: molecule repr, graph name, edge table."""
    return (
        "Molecule Graph"
        + "\nMolecule: \n{}".format(repr(self.molecule))
        + "\nGraph: {}\n".format(self.name)
        + self._edges_to_string(self.graph)
    )
def __len__(self):
    """
    Number of sites in the underlying Molecule, which equals the
    number of nodes in the graph.

    :return: length of Molecule / number of nodes in graph
    """
    return len(self.molecule)
def sort(self, key=None, reverse=False):
    """
    Same as Molecule.sort(), also remaps nodes in graph.

    :param key: sort key for sites, passed through to ``sorted``
    :param reverse: if True, reverse the sort order
    :return:
    """
    old_molecule = self.molecule.copy()

    # sort Molecule
    self.molecule._sites = sorted(self.molecule._sites, key=key, reverse=reverse)

    # apply Molecule ordering to graph
    mapping = {idx: self.molecule.index(site) for idx, site in enumerate(old_molecule)}
    self.graph = nx.relabel_nodes(self.graph, mapping, copy=True)

    # normalize directions of edges so u <= v
    edges_to_remove = []
    edges_to_add = []
    for u, v, k, d in self.graph.edges(keys=True, data=True):
        if v < u:
            new_v, new_u, new_d = u, v, d.copy()
            new_d["to_jimage"] = (0, 0, 0)
            edges_to_remove.append((u, v, k))
            edges_to_add.append((new_u, new_v, new_d))

    # add/delete marked edges. Fixed: the original wrote
    # ``for edges_to_remove in edges_to_remove``, shadowing the list
    # with its own elements -- it happened to work, but is a latent
    # hazard and confuses readers.
    for edge in edges_to_remove:
        self.graph.remove_edge(*edge)
    for (u, v, d) in edges_to_add:
        self.graph.add_edge(u, v, **d)
def __copy__(self):
    """Return a copy by round-tripping through the dict representation."""
    serialized = self.as_dict()
    return MoleculeGraph.from_dict(serialized)
def __eq__(self, other):
    """
    Two MoleculeGraphs are equal if they have equal Molecules,
    and have the same edges between Sites. Edge weights can be
    different and MoleculeGraphs can still be considered equal.

    :param other: MoleculeGraph
    :return (bool):
    """
    # Reorder the other graph's sites to match this one, keyed by site
    # coordinates (used as a convenient hashable key).
    try:
        mapping = {tuple(site.coords): self.molecule.index(site) for site in other.molecule}
    except ValueError:
        # A site of ``other`` has no counterpart here.
        return False

    other_sorted = other.__copy__()
    other_sorted.sort(key=lambda site: mapping[tuple(site.coords)])

    edges = {(u, v) for u, v in self.graph.edges(keys=False)}
    edges_other = {(u, v) for u, v in other_sorted.graph.edges(keys=False)}

    return edges == edges_other and self.molecule == other_sorted.molecule
def isomorphic_to(self, other):
    """
    Checks if the graphs of two MoleculeGraphs are isomorphic to one
    another. In order to prevent problems with misdirected edges, both
    graphs are converted into undirected nx.Graph objects.

    :param other: MoleculeGraph object to be compared.
    :return: bool
    """
    # Cheap screens first (atom count, composition, edge count);
    # only then run the full isomorphism check.
    if len(self.molecule) != len(other.molecule):
        return False
    same_formula = (
        self.molecule.composition.alphabetical_formula
        == other.molecule.composition.alphabetical_formula
    )
    if not same_formula:
        return False
    if len(self.graph.edges()) != len(other.graph.edges()):
        return False
    return _isomorphic(self.graph, other.graph)
def diff(self, other, strict=True):
    """
    Compares two MoleculeGraphs. Returns dict with keys 'self',
    'other', 'both' with edges that are present in only one
    MoleculeGraph ('self' and 'other'), and edges that are present
    in both.

    The Jaccard distance is a simple measure of the dissimilarity
    between two MoleculeGraphs (ignoring edge weights), and is
    defined by 1 - (size of the intersection / size of the union) of
    the sets of edges. This is returned with key 'dist'.

    Important note: all node indices are in terms of the
    MoleculeGraph this method is called from, not the 'other'
    MoleculeGraph: there is no guarantee the node indices will be the
    same if the underlying Molecules are ordered differently.

    :param other: MoleculeGraph
    :param strict: if False, will compare bonds from different
        Molecules, with node indices replaced by Species strings,
        will not count number of occurrences of bonds
    :return: dict with keys 'self', 'other', 'both', 'dist'
    :raises ValueError: in strict mode, when the underlying Molecules
        differ.
    """
    if self.molecule != other.molecule and strict:
        # Fixed: the original ``return``-ed the ValueError instance
        # instead of raising it, silently handing the exception object
        # back to the caller.
        raise ValueError("Meaningless to compare MoleculeGraphs if " "corresponding Molecules are different.")

    if strict:
        # sort for consistent node indices, keyed by Cartesian coords
        # as a convenient hashable key. Fixed: the original used
        # site.frac_coords, but Molecule sites have no lattice /
        # fractional coordinates (__eq__ above already keys on coords).
        mapping = {tuple(site.coords): self.molecule.index(site) for site in other.molecule}
        other_sorted = other.__copy__()
        other_sorted.sort(key=lambda site: mapping[tuple(site.coords)])

        edges = {(u, v, d.get("to_jimage", (0, 0, 0))) for u, v, d in self.graph.edges(keys=False, data=True)}
        edges_other = {
            (u, v, d.get("to_jimage", (0, 0, 0))) for u, v, d in other_sorted.graph.edges(keys=False, data=True)
        }
    else:
        edges = {
            (str(self.molecule[u].specie), str(self.molecule[v].specie))
            for u, v, d in self.graph.edges(keys=False, data=True)
        }
        edges_other = {
            # Fixed: MoleculeGraph stores its sites on .molecule, not
            # .structure (the original raised AttributeError here).
            (str(other.molecule[u].specie), str(other.molecule[v].specie))
            for u, v, d in other.graph.edges(keys=False, data=True)
        }

    if len(edges) == 0 and len(edges_other) == 0:
        jaccard_dist = 0  # by definition
    else:
        jaccard_dist = 1 - len(edges.intersection(edges_other)) / len(edges.union(edges_other))

    return {
        "self": edges - edges_other,
        "other": edges_other - edges,
        "both": edges.intersection(edges_other),
        "dist": jaccard_dist,
    }
| mit |
cboothe/libkml | examples/python/hellolocation.py | 24 | 2551 | #!/usr/bin/env python
# Copyright 2008, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of Google Inc. nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This program demonstrates use of the KML DOM Python SWIG bindings
# for determining the location and bounding box of a feature and/or feature
# hierarchy using kmlengine.GetFeatureLatLon(), kmlengine.GetFeatureBounds(),
# and kmlengine.Bbox() methods.
import sys
import kmldom
import kmlengine
def ReadFile(filename):
    """Return the entire contents of the file at ``filename``.

    Uses ``with`` so the handle is closed deterministically; the
    original implementation leaked the handle until garbage collection.
    """
    with open(filename, 'r') as f:
        return f.read()
# Program main: read the file to memory, parse it, get and visit
# the root feature if such exists.
def main(inputkml):
    # Parse the KML document and obtain its root Feature element
    # via the libkml SWIG bindings.
    feature = kmlengine.GetRootFeature(kmldom.ParseKml(ReadFile(inputkml)))
    # GetFeatureLatLon returns (ok, lat, lon); print only on success.
    (status, lat, lon) = kmlengine.GetFeatureLatLon(feature)
    if status:
        print 'center',lat,lon
    # Compute the feature's bounding box and print each side on success.
    bbox = kmlengine.Bbox()
    status = kmlengine.GetFeatureBounds(feature, bbox)
    if status:
        print 'north',bbox.get_north()
        print 'south',bbox.get_south()
        print 'east',bbox.get_east()
        print 'west',bbox.get_west()
if __name__ == '__main__':
    # Expect exactly one command-line argument: the input KML path.
    if len(sys.argv) != 2:
        print 'usage: %s input.kml' % sys.argv[0]
        sys.exit(1)
    main(sys.argv[1])
| bsd-3-clause |
eoneil1942/voltdb-4.7fix | lib/python/voltcli/voltdb.d/rejoin.py | 1 | 1711 | # This file is part of VoltDB.
# Copyright (C) 2008-2014 VoltDB Inc.
#
# This file contains original code and/or modifications of original code.
# Any modifications made by VoltDB Inc. are licensed under the following
# terms and conditions:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# NOTE(review): ``VOLT`` is not imported in this file -- it is presumably
# injected into the module namespace by the voltcli framework when this
# command file is loaded; confirm against the voltcli loader.
@VOLT.Command(
    bundles = VOLT.ServerBundle('rejoin',
                                needs_catalog=False,
                                supports_live=True,
                                default_host=False,
                                safemode_available=False,
                                supports_daemon=True),
    description = 'Rejoin the current node to a VoltDB cluster.'
)
def rejoin(runner):
    # The rejoin behaviour is fully configured by the ServerBundle
    # above; the runner just executes the assembled command.
    runner.go()
| agpl-3.0 |
virogenesis/django-plugins | djangoplugins/models.py | 1 | 3425 | from __future__ import absolute_import
from dirtyfields import DirtyFieldsMixin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from djangoplugins.signals import django_plugin_enabled, django_plugin_disabled
from .utils import get_plugin_name, get_plugin_from_string
# Status codes stored in the ``status`` fields of PluginPoint and Plugin.
ENABLED = 0
DISABLED = 1
REMOVED = 2
STATUS_CHOICES = (
    (ENABLED, _('Enabled')),
    (DISABLED, _('Disabled')),
    (REMOVED, _('Removed')),
)
# Groupings used by Plugin.save() when deciding which signal to dispatch:
# only ENABLED counts as active; DISABLED and REMOVED are inactive.
STATUS_CHOICES_ENABLED = (ENABLED,)
STATUS_CHOICES_DISABLED = (DISABLED, REMOVED,)
class PluginPointManager(models.Manager):
    """Manager with a convenience lookup for plugin points."""

    def get_point(self, point):
        """Return the PluginPoint row matching the given point class."""
        path = get_plugin_name(point)
        return self.get(pythonpath=path)
@python_2_unicode_compatible
class PluginPoint(models.Model):
    """
    Database representation of a plugin point (extension point).

    pythonpath
        Full python path to the plugin point class.
    title
        Human-readable title, used as the string representation.
    status
        One of STATUS_CHOICES; ENABLED by default.
    """
    pythonpath = models.CharField(max_length=255)
    title = models.CharField(max_length=255)
    status = models.SmallIntegerField(choices=STATUS_CHOICES, default=ENABLED)
    objects = PluginPointManager()

    def __str__(self):
        return self.title
class PluginManager(models.Manager):
    """Manager with plugin-specific lookups."""

    def get_plugin(self, plugin):
        """Return the Plugin row for the given plugin class."""
        path = get_plugin_name(plugin)
        return self.get(pythonpath=path)

    def get_plugins_of(self, point):
        """Return the enabled plugins registered to the given point."""
        point_path = get_plugin_name(point)
        return self.filter(point__pythonpath=point_path, status=ENABLED)

    def get_by_natural_key(self, name):
        """Look a plugin up by its python path (natural key)."""
        return self.get(pythonpath=name)
@python_2_unicode_compatible
class Plugin(DirtyFieldsMixin, models.Model):
    """
    Database representation of a plugin.

    Fields ``name`` and ``title`` are synchronized from plugin classes.

    point
        Plugin point.
    pythonpath
        Full python path to plugin class, including class too.
    name
        Plugin slug name, must be unique within one plugin point.
    title
        Any verbose title of this plugin.
    index
        Using values from this field plugins are ordered.
    status
        Plugin status.
    """
    point = models.ForeignKey(PluginPoint)
    pythonpath = models.CharField(max_length=255, unique=True)
    name = models.CharField(max_length=255, null=True, blank=True)
    title = models.CharField(max_length=255, default='', blank=True)
    index = models.IntegerField(default=0)
    status = models.SmallIntegerField(choices=STATUS_CHOICES, default=ENABLED)
    objects = PluginManager()

    class Meta:
        unique_together = (("point", "name"),)
        order_with_respect_to = 'point'
        # ordering = ('index', 'id')

    def __str__(self):
        # Prefer the human-readable title, fall back to the slug name,
        # and finally to the full python path.
        if self.title:
            return self.title
        if self.name:
            return self.name
        return self.pythonpath

    def natural_key(self):
        # Paired with PluginManager.get_by_natural_key for serialization.
        return (self.pythonpath,)

    def is_active(self):
        # Only ENABLED counts as active (DISABLED/REMOVED do not).
        return self.status == ENABLED

    def get_plugin(self):
        # Instantiate the plugin class this row points at.
        plugin_class = get_plugin_from_string(self.pythonpath)
        return plugin_class()

    def save(self, *args, **kwargs):
        # Dispatch enabled/disabled signals only when an existing row's
        # ``status`` field actually changed (tracked by DirtyFieldsMixin).
        if "status" in self.get_dirty_fields().keys() and self.pk:
            if self.status in STATUS_CHOICES_ENABLED:
                django_plugin_enabled.send(sender=self.__class__,
                                           plugin=self.get_plugin())
            else:
                django_plugin_disabled.send(sender=self.__class__,
                                            plugin=self.get_plugin())
        return super(Plugin, self).save(*args, **kwargs)
| lgpl-3.0 |
grupoprog3/proyecto_final | Entrega Final/flask/Lib/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py | 2965 | 12784 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified based on win1251BulgarianCharToOrderMap, so
# only numbers <64 are certainly valid
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
# Model description for Bulgarian text in the ISO-8859-5 (Latin/Cyrillic)
# encoding, consumed by chardet's single-byte charset prober.
Latin5BulgarianModel = {
    'charToOrderMap': Latin5_BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "ISO-8859-5"
}
# Same language model, paired with the windows-1251 character-order map.
Win1251BulgarianModel = {
    'charToOrderMap': win1251BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "windows-1251"
}
# flake8: noqa
| apache-2.0 |
tejasbubane/google-appengine-gui-for-linux | launcher/dev_appserver_task_thread_unittest.py | 28 | 2595 | #!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit test for dev_appserver_task_thread.py"""
import unittest
import wx
import launcher
class DevAppServerTaskThreadTest(unittest.TestCase):
  """Tests for launcher.DevAppServerTaskThread run-state transitions.

  The test case doubles as both the fake controller and the fake project:
  the thread under test calls RunStateChanged() back on this object, and
  sets runstate attributes directly on it.
  """

  def setUp(self):
    # Some tests wander into wx, so we need a wx.App cranked up.
    self.app = wx.PySimpleApp()

  def RunStateChanged(self, project):
    """We use ourself as a fake controller for convenience."""
    self.project_changed = project

  def FakeCallAfter(self, callable, project):
    # Synchronous stand-in for wx.CallAfter so no event loop is needed.
    callable(project)

  def _CheckStop(self, datt, code, expected_state):
    """Stop the task with result `code` and verify the resulting state."""
    self.project_changed = None
    self.runstate = None
    datt._TaskDidStop(code)
    self.assertEqual(self.project_changed, self)
    self.assertEqual(self.runstate, expected_state)

  def testBasics(self):
    datt = launcher.DevAppServerTaskThread(self, self, None)
    # Replace wx.CallAfter so we don't have to run an event loop.
    orig_callafter = wx.CallAfter
    wx.CallAfter = self.FakeCallAfter
    try:
      self.project_changed = None
      self.runstate = launcher.Project.STATE_STOP
      datt._TaskWillStart()
      self.assertEqual(self.project_changed, self)
      self.assertEqual(self.runstate, launcher.Project.STATE_STARTING)

      self.project_changed = None
      self.runstate = None
      datt._TaskDidStart()
      self.assertEqual(self.project_changed, self)
      self.assertEqual(self.runstate, launcher.Project.STATE_RUN)

      # 42 falls into the failure zone.
      self._CheckStop(datt, 42, launcher.Project.STATE_DIED)
      # Exercise a successful result code. Zero is a success everywhere.
      self._CheckStop(datt, 0, launcher.Project.STATE_STOP)
      # Exercise some unsuccessful result codes.
      for code in (-42, 1, 100):
        self._CheckStop(datt, code, launcher.Project.STATE_DIED)
    finally:
      # BUG FIX: restore the real wx.CallAfter even when an assertion above
      # fails; previously a failure left the monkeypatch in place and could
      # corrupt any later test using wx.
      wx.CallAfter = orig_callafter
if __name__ == '__main__':
  # Allow running this test module directly.
  unittest.main()
| apache-2.0 |
jldaniel/Athena | Util/true_solution_single_run.py | 1 | 1400 | __author__ = 'jdaniel'
# Script that writes a single-run optimizer configuration batch file used to
# compute a "true solution" reference for the chosen test model.
from AthenaOpt.configuration import Configuration
from AthenaOpt.configuration import encode_configuration
from os import urandom
import json

__author__ = 'jldaniel'

# --- Run settings -----------------------------------------------------------
algorithm = 'SERIAL'
model = 'ZDT1'
pop_size = 2048
conv_tol = 0  # 0 presumably disables convergence-based early exit -- verify
crossover_index = 15  # presumably the SBX distribution index -- confirm in AthenaOpt
mutation_index = 20  # presumably the polynomial-mutation index -- confirm in AthenaOpt
crossover_probability = 0.8
mutation_probability = 1/30.0  # presumably 1 / number-of-variables -- confirm
n_runs = 1
generations = 2000
root_path = '/home/jdaniel/Desktop/Athena/True_Solutions'
output_file = '/home/jdaniel/Desktop/Athena/True_Solutions.batch'
# NOTE(review): n_runs, write_history and write_solution are assigned but
# never used below -- confirm whether they belong in the configuration.
write_history = False
write_solution = True

configurations = []

# Create a configuration
config = Configuration()
config.set_algorithm(algorithm)
config.set_model(model)
config.set_population_size(pop_size)
# NOTE(review): str.encode('hex') only exists on Python 2; Python 3 would
# need int.from_bytes(urandom(4), 'big') or binascii.hexlify instead.
seed = int(urandom(4).encode('hex'), 16)
config.set_seed(seed)
config.set_convergence_tolerance(conv_tol)
config.set_crossover_index(crossover_index)
config.set_crossover_probability(crossover_probability)
config.set_mutation_index(mutation_index)
config.set_mutation_probability(mutation_probability)
config.set_generations(generations)

directory = root_path + '/' + model
config.set_directory(directory)

name = model + '_true_solution'
config.set_name(name)

configurations.append(config)

# Serialize the (single-element) configuration list to the batch file.
configuration_json = json.dumps(configurations, default=encode_configuration, sort_keys=True, indent=2)

with open(output_file, 'w+') as data_file:
    data_file.write(configuration_json)
| mit |
ElectricSolstice/sudokuSolver | sudokuSolver.py | 1 | 3844 | #Functions that handle the logic of solving a sudoku
def sudokuEqual(one, two):
    """Return True when *one* is a single filled-in value equal to *two*.

    Cells holding several space-separated candidate values, or nothing at
    all, are never considered equal to anything (including each other).
    """
    is_single_value = one != "" and len(one.split(" ")) == 1
    return is_single_value and one == two
def checkRows(sudoku):
    """Return False if any row contains a duplicated filled-in value."""
    for row in sudoku:
        for i, left in enumerate(row):
            for right in row[i + 1:]:
                if sudokuEqual(left, right):
                    return False
    return True
def checkColumns(sudoku):
    """Return False if any column contains a duplicated filled-in value."""
    # The number of sudoku boxes in a row equals the number of columns.
    n_rows = len(sudoku)
    for col in range(len(sudoku[0])):
        for upper in range(n_rows):
            for lower in range(upper + 1, n_rows):
                if sudokuEqual(sudoku[upper][col], sudoku[lower][col]):
                    return False
    return True
def checkSquares(sudoku,squareLength,squareHeight):
    # Check every sub-square for duplicated filled-in values.
    # NOTE(review): the index arithmetic assumes each square has squareLength
    # columns and squareHeight rows, and that squares are visited left to
    # right before top to bottom -- confirm against the board layout used by
    # the caller.
    for square in range(len(sudoku[0])):
        for i in range(len(sudoku[0])):
            for j in range(i+1,len(sudoku[0])):
                #adds the square offset, checking the squares and boxes
                #from left to right rather than top to bottom
                if sudokuEqual(sudoku[int(i/squareLength)+int(square/squareHeight)*squareHeight]\
                        [i%squareLength+square%squareHeight*squareLength], \
                        sudoku[int(j/squareLength)+int(square/squareHeight)*squareHeight]\
                        [j%squareLength+square%squareHeight*squareLength]):
                    return False
    return True
def checkAll(sudoku, squareLength, squareHeight):
    """Return True when rows, columns and sub-squares are all conflict-free.

    ``and`` short-circuits exactly like the original nested ifs: columns are
    only checked when the rows pass, and squares only when both pass.
    """
    return (checkRows(sudoku) and
            checkColumns(sudoku) and
            checkSquares(sudoku, squareLength, squareHeight))
def bruteSolve(sudoku,symbols,squareLength,squareHeight):
    # Backtracking brute-force solver.  Returns the solved grid, or None when
    # no assignment of *symbols* satisfies the constraints.
    # NOTE(review): sudokuCopy is an alias, not a copy -- the input grid is
    # mutated in place while solving.
    sudokuCopy = sudoku
    solution = []   # stack of [row, box, value, symbol-iterator] placements
    backtrack = []  # placements popped off while unwinding a dead end
    for row in range(len(sudoku)):
        for box in range(len(sudoku[row])):
            #skip over squares that don't need to be solved
            if len(sudoku[row][box].split()) == 1:
                continue
            #solve by plugging in symbols and checking
            else:
                symbol = iter(symbols)
                currentRow = row
                currentBox = box
                #while true, try a symbol
                while True:
                    try:
                        value = next(symbol)
                        solution.append([currentRow,currentBox,value,symbol])
                        sudokuCopy[currentRow][currentBox] = value
                        # Consistent and nothing pending to redo: move on.
                        if checkAll(sudokuCopy,squareLength,squareHeight) and not backtrack:
                            break
                        # Consistent but earlier cells were unwound: resume
                        # filling the most recently unwound cell.
                        elif checkAll(sudokuCopy,squareLength,squareHeight):
                            symbol = backtrack[-1][3]
                            currentRow = backtrack[-1][0]
                            currentBox = backtrack[-1][1]
                            backtrack.pop()
                        # Conflict: undo this placement and try the next
                        # symbol for the same cell.
                        else:
                            solution.pop()
                            sudokuCopy[currentRow][currentBox] = ""
                    #when out of symbols to try for current
                    #box, try next symbol for previous box
                    except StopIteration:
                        if not solution:
                            return None
                        symbol = solution[-1][3]
                        pastRow = solution[-1][0]
                        pastBox = solution[-1][1]
                        # Reset the exhausted cell's iterator and swap the
                        # stack top so the previous cell is retried next.
                        solution[-1][3] = iter(symbols)
                        solution[-1][0] = currentRow
                        solution[-1][1] = currentBox
                        currentRow = pastRow
                        currentBox = pastBox
                        backtrack.append(solution.pop())
                        sudokuCopy[currentRow][currentBox] = ""
    return sudokuCopy
| bsd-2-clause |
itsjustshana/project2 | lib/jinja2/testsuite/tests.py | 497 | 2865 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.tests
~~~~~~~~~~~~~~~~~~~~~~
Who tests the tests?
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Markup, Environment
env = Environment()
class TestsTestCase(JinjaTestCase):
    """Exercises Jinja2's built-in ``is``-tests."""

    def test_defined(self):
        tmpl = env.from_string('{{ missing is defined }}|{{ true is defined }}')
        assert tmpl.render() == 'False|True'

    def test_even(self):
        tmpl = env.from_string('''{{ 1 is even }}|{{ 2 is even }}''')
        assert tmpl.render() == 'False|True'

    def test_odd(self):
        tmpl = env.from_string('''{{ 1 is odd }}|{{ 2 is odd }}''')
        assert tmpl.render() == 'True|False'

    def test_lower(self):
        tmpl = env.from_string('''{{ "foo" is lower }}|{{ "FOO" is lower }}''')
        assert tmpl.render() == 'True|False'

    def test_typechecks(self):
        tmpl = env.from_string('''
            {{ 42 is undefined }}
            {{ 42 is defined }}
            {{ 42 is none }}
            {{ none is none }}
            {{ 42 is number }}
            {{ 42 is string }}
            {{ "foo" is string }}
            {{ "foo" is sequence }}
            {{ [1] is sequence }}
            {{ range is callable }}
            {{ 42 is callable }}
            {{ range(5) is iterable }}
            {{ {} is mapping }}
            {{ mydict is mapping }}
            {{ [] is mapping }}
        ''')
        class MyDict(dict):
            pass
        # Whitespace inside the template is irrelevant: the rendered output
        # is split() before comparison.
        assert tmpl.render(mydict=MyDict()).split() == [
            'False', 'True', 'False', 'True', 'True', 'False',
            'True', 'True', 'True', 'True', 'False', 'True',
            'True', 'True', 'False'
        ]

    def test_sequence(self):
        tmpl = env.from_string(
            '{{ [1, 2, 3] is sequence }}|'
            '{{ "foo" is sequence }}|'
            '{{ 42 is sequence }}'
        )
        assert tmpl.render() == 'True|True|False'

    def test_upper(self):
        tmpl = env.from_string('{{ "FOO" is upper }}|{{ "foo" is upper }}')
        assert tmpl.render() == 'True|False'

    def test_sameas(self):
        tmpl = env.from_string('{{ foo is sameas false }}|'
                               '{{ 0 is sameas false }}')
        assert tmpl.render(foo=False) == 'True|False'

    def test_no_paren_for_arg1(self):
        tmpl = env.from_string('{{ foo is sameas none }}')
        assert tmpl.render(foo=None) == 'True'

    def test_escaped(self):
        # Shadows the module-level env on purpose: escaping is only tracked
        # when autoescape is on.
        env = Environment(autoescape=True)
        tmpl = env.from_string('{{ x is escaped }}|{{ y is escaped }}')
        assert tmpl.render(x='foo', y=Markup('foo')) == 'False|True'
def suite():
    """Collect this module's test cases into a unittest suite."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(TestsTestCase))
    return tests
| apache-2.0 |
androidarmv6/android_external_chromium | testing/gmock/scripts/generator/cpp/tokenize.py | 679 | 9703 | #!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenize C++ source code."""
__author__ = 'nnorwitz@google.com (Neal Norwitz)'
try:
# Python 3.x
import builtins
except ImportError:
# Python 2.x
import __builtin__ as builtins
import sys
from cpp import utils
if not hasattr(builtins, 'set'):
# Nominal support for Python 2.3.
from sets import Set as set
# Add $ as a valid identifier char since so much code uses it.
_letters = 'abcdefghijklmnopqrstuvwxyz'
VALID_IDENTIFIER_CHARS = set(_letters + _letters.upper() + '_0123456789$')
HEX_DIGITS = set('0123456789abcdefABCDEF')
INT_OR_FLOAT_DIGITS = set('01234567890eE-+')
# C++0x string preffixes.
_STR_PREFIXES = set(('R', 'u8', 'u8R', 'u', 'uR', 'U', 'UR', 'L', 'LR'))
# Token types.
UNKNOWN = 'UNKNOWN'
SYNTAX = 'SYNTAX'
CONSTANT = 'CONSTANT'
NAME = 'NAME'
PREPROCESSOR = 'PREPROCESSOR'
# Where the token originated from. This can be used for backtracking.
# It is always set to WHENCE_STREAM in this code.
WHENCE_STREAM, WHENCE_QUEUE = range(2)
class Token(object):
    """Data container to represent a C++ token.

    Tokens can be identifiers, syntax char(s), constants, or
    pre-processor directives.

    start contains the index of the first char of the token in the source
    end contains the index of the last char of the token in the source
    """

    def __init__(self, token_type, name, start, end):
        # One of UNKNOWN/SYNTAX/CONSTANT/NAME/PREPROCESSOR.
        self.token_type = token_type
        # The token's text as it appears in the source.
        self.name = name
        self.start = start
        self.end = end
        # Origin marker; always WHENCE_STREAM here, consumers may rewrite it
        # when queueing tokens for backtracking.
        self.whence = WHENCE_STREAM

    def __str__(self):
        # Keep the representation short unless debugging is enabled.
        if not utils.DEBUG:
            return 'Token(%r)' % self.name
        return 'Token(%r, %s, %s)' % (self.name, self.start, self.end)

    __repr__ = __str__
def _GetString(source, start, i):
i = source.find('"', i+1)
while source[i-1] == '\\':
# Count the trailing backslashes.
backslash_count = 1
j = i - 2
while source[j] == '\\':
backslash_count += 1
j -= 1
# When trailing backslashes are even, they escape each other.
if (backslash_count % 2) == 0:
break
i = source.find('"', i+1)
return i + 1
def _GetChar(source, start, i):
# NOTE(nnorwitz): may not be quite correct, should be good enough.
i = source.find("'", i+1)
while source[i-1] == '\\':
# Need to special case '\\'.
if (i - 2) > start and source[i-2] == '\\':
break
i = source.find("'", i+1)
# Try to handle unterminated single quotes (in a #if 0 block).
if i < 0:
i = start
return i + 1
def GetTokens(source):
    """Returns a sequence of Tokens.

    Args:
      source: string of C++ source code.

    Yields:
      Token that represents the next token in the source.
    """
    # Cache various valid character sets for speed.
    valid_identifier_chars = VALID_IDENTIFIER_CHARS
    hex_digits = HEX_DIGITS
    int_or_float_digits = INT_OR_FLOAT_DIGITS
    int_or_float_digits2 = int_or_float_digits | set('.')

    # Only ignore errors while in a #if 0 block.
    ignore_errors = False
    count_ifs = 0

    i = 0
    end = len(source)

    while i < end:
        # Skip whitespace.
        while i < end and source[i].isspace():
            i += 1
        if i >= end:
            return

        token_type = UNKNOWN
        start = i
        c = source[i]
        if c.isalpha() or c == '_':              # Find a string token.
            token_type = NAME
            while source[i] in valid_identifier_chars:
                i += 1
            # String and character constants can look like a name if
            # they are something like L"".
            if (source[i] == "'" and (i - start) == 1 and
                source[start:i] in 'uUL'):
                # u, U, and L are valid C++0x character prefixes.
                token_type = CONSTANT
                i = _GetChar(source, start, i)
            elif source[i] == "'" and source[start:i] in _STR_PREFIXES:
                token_type = CONSTANT
                i = _GetString(source, start, i)
        elif c == '/' and source[i+1] == '/':    # Find // comments.
            i = source.find('\n', i)
            if i == -1:  # Handle EOF.
                i = end
            continue
        elif c == '/' and source[i+1] == '*':    # Find /* comments. */
            i = source.find('*/', i) + 2
            continue
        elif c in ':+-<>&|*=':                   # : or :: (plus other chars).
            token_type = SYNTAX
            i += 1
            new_ch = source[i]
            # Fold two-char operators (::, ++, ->, <=, ...) into one token.
            if new_ch == c:
                i += 1
            elif c == '-' and new_ch == '>':
                i += 1
            elif new_ch == '=':
                i += 1
        elif c in '()[]{}~!?^%;/.,':             # Handle single char tokens.
            token_type = SYNTAX
            i += 1
            if c == '.' and source[i].isdigit():
                # A leading '.' followed by a digit is a float literal.
                token_type = CONSTANT
                i += 1
                while source[i] in int_or_float_digits:
                    i += 1
                # Handle float suffixes.
                for suffix in ('l', 'f'):
                    if suffix == source[i:i+1].lower():
                        i += 1
                        break
        elif c.isdigit():                        # Find integer.
            token_type = CONSTANT
            if c == '0' and source[i+1] in 'xX':
                # Handle hex digits.
                i += 2
                while source[i] in hex_digits:
                    i += 1
            else:
                while source[i] in int_or_float_digits2:
                    i += 1
            # Handle integer (and float) suffixes.
            for suffix in ('ull', 'll', 'ul', 'l', 'f', 'u'):
                size = len(suffix)
                if suffix == source[i:i+size].lower():
                    i += size
                    break
        elif c == '"':                           # Find string.
            token_type = CONSTANT
            i = _GetString(source, start, i)
        elif c == "'":                           # Find char.
            token_type = CONSTANT
            i = _GetChar(source, start, i)
        elif c == '#':                           # Find pre-processor command.
            token_type = PREPROCESSOR
            got_if = source[i:i+3] == '#if' and source[i+3:i+4].isspace()
            if got_if:
                count_ifs += 1
            elif source[i:i+6] == '#endif':
                count_ifs -= 1
                if count_ifs == 0:
                    ignore_errors = False

            # TODO(nnorwitz): handle preprocessor statements (\ continuations).
            while 1:
                i1 = source.find('\n', i)
                i2 = source.find('//', i)
                i3 = source.find('/*', i)
                i4 = source.find('"', i)
                # NOTE(nnorwitz): doesn't handle comments in #define macros.
                # Get the first important symbol (newline, comment, EOF/end).
                i = min([x for x in (i1, i2, i3, i4, end) if x != -1])

                # Handle #include "dir//foo.h" properly.
                if source[i] == '"':
                    i = source.find('"', i+1) + 1
                    assert i > 0
                    continue
                # Keep going if end of the line and the line ends with \.
                if not (i == i1 and source[i-1] == '\\'):
                    if got_if:
                        # Entering a '#if 0'-style block: suppress the
                        # unexpected-token error until the matching #endif.
                        condition = source[start+4:i].lstrip()
                        if (condition.startswith('0') or
                            condition.startswith('(0)')):
                            ignore_errors = True
                    break
                i += 1
        elif c == '\\':                          # Handle \ in code.
            # This is different from the pre-processor \ handling.
            i += 1
            continue
        elif ignore_errors:
            # The tokenizer seems to be in pretty good shape.  This
            # raise is conditionally disabled so that bogus code
            # in an #if 0 block can be handled.  Since we will ignore
            # it anyways, this is probably fine.  So disable the
            # exception and return the bogus char.
            i += 1
        else:
            sys.stderr.write('Got invalid token in %s @ %d token:%s: %r\n' %
                             ('?', i, c, source[i-10:i+10]))
            raise RuntimeError('unexpected token')

        if i <= 0:
            print('Invalid index, exiting now.')
            return
        yield Token(token_type, source[start:i], start, i)
if __name__ == '__main__':
    def main(argv):
        """Driver mostly for testing purposes."""
        # Tokenize each file named on the command line and dump its tokens.
        for filename in argv[1:]:
            source = utils.ReadFile(filename)
            if source is None:
                continue
            for token in GetTokens(source):
                print('%-12s: %s' % (token.token_type, token.name))
                # print('\r%6.2f%%' % (100.0 * index / token.end),)
            sys.stdout.write('\n')

    main(sys.argv)
| bsd-3-clause |
YuxuanLing/trunk | trunk/code/study/python/Fluent-Python-example-code/16-coroutine/coro_finally_demo.py | 1 | 1644 | """
Second coroutine closing demonstration::
>>> fin_coro = demo_finally()
>>> next(fin_coro)
-> coroutine started
>>> fin_coro.send(11)
-> coroutine received: 11
>>> fin_coro.send(22)
-> coroutine received: 22
>>> fin_coro.close()
-> coroutine ending
Second coroutine not handling exception::
>>> fin_coro = demo_finally()
>>> next(fin_coro)
-> coroutine started
>>> fin_coro.send(11)
-> coroutine received: 11
>>> fin_coro.throw(ZeroDivisionError) # doctest: +SKIP
-> coroutine ending
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "coro_exception_demos.py", line 109, in demo_finally
print('-> coroutine received: {!r}'.format(x))
ZeroDivisionError
The last test above must be skipped because the output '-> coroutine ending'
is not detected by doctest, which raises a false error. However, if you
run this file as shown below, you'll see that output "leak" into standard
output::
$ python3 -m doctest coro_exception_demos.py
-> coroutine ending
"""
# BEGIN EX_CORO_FINALLY
# Thrown into the coroutine by the doctests in the module docstring.
class DemoException(Exception):
    """An exception type for the demonstration."""
def demo_finally():
    # Coroutine that survives DemoException thrown into it, but always
    # announces its termination via the finally block.
    print('-> coroutine started')
    try:
        while True:
            try:
                x = yield
            except DemoException:
                print('*** DemoException handled. Continuing...')
            else:
                print('-> coroutine received: {!r}'.format(x))
    finally:
        # Runs on close()/unhandled exception as well as normal exit.
        print('-> coroutine ending')
# END EX_CORO_FINALLY
| gpl-3.0 |
jaimeMF/youtube-dl | youtube_dl/extractor/wrzuta.py | 156 | 2652 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
qualities,
)
class WrzutaIE(InfoExtractor):
    """Extractor for audio and video pages hosted on wrzuta.pl."""
    IE_NAME = 'wrzuta.pl'
    _VALID_URL = r'https?://(?P<uploader>[0-9a-zA-Z]+)\.wrzuta\.pl/(?P<typ>film|audio)/(?P<id>[0-9a-zA-Z]+)'

    _TESTS = [{
        'url': 'http://laboratoriumdextera.wrzuta.pl/film/aq4hIZWrkBu/nike_football_the_last_game',
        'md5': '9e67e05bed7c03b82488d87233a9efe7',
        'info_dict': {
            'id': 'aq4hIZWrkBu',
            'ext': 'mp4',
            'title': 'Nike Football: The Last Game',
            'duration': 307,
            'uploader_id': 'laboratoriumdextera',
            'description': 'md5:7fb5ef3c21c5893375fda51d9b15d9cd',
        },
    }, {
        'url': 'http://jolka85.wrzuta.pl/audio/063jOPX5ue2/liber_natalia_szroeder_-_teraz_ty',
        'md5': 'bc78077859bea7bcfe4295d7d7fc9025',
        'info_dict': {
            'id': '063jOPX5ue2',
            'ext': 'ogg',
            'title': 'Liber & Natalia Szroeder - Teraz Ty',
            'duration': 203,
            'uploader_id': 'jolka85',
            'description': 'md5:2d2b6340f9188c8c4cd891580e481096',
        },
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        # 'film' (video) or 'audio', taken from the URL path.
        typ = mobj.group('typ')
        uploader = mobj.group('uploader')

        webpage = self._download_webpage(url, video_id)

        # Preference order for format sorting, worst to best.
        quality = qualities(['SD', 'MQ', 'HQ', 'HD'])

        # For audio pages, map the reported container to the audio
        # extension.  NOTE(review): '???' presumably covers an unknown
        # container reported by the site -- confirm.
        audio_table = {'flv': 'mp3', 'webm': 'ogg', '???': 'mp3'}

        embedpage = self._download_json('http://www.wrzuta.pl/npp/embed/%s/%s' % (uploader, video_id), video_id)

        formats = []
        for media in embedpage['url']:
            # media['type'] has the shape '<container>@...'; keep the container.
            fmt = media['type'].split('@')[0]
            if typ == 'audio':
                ext = audio_table.get(fmt, fmt)
            else:
                ext = fmt

            formats.append({
                'format_id': '%s_%s' % (ext, media['quality'].lower()),
                'url': media['url'],
                'ext': ext,
                'quality': quality(media['quality']),
            })
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': self._og_search_title(webpage),
            'thumbnail': self._og_search_thumbnail(webpage),
            'formats': formats,
            'duration': int_or_none(embedpage['duration']),
            'uploader_id': uploader,
            'description': self._og_search_description(webpage),
            'age_limit': embedpage.get('minimalAge', 0),
        }
| unlicense |
Workday/OpenFrame | tools/telemetry/telemetry/testing/decorators_unittest.py | 69 | 1511 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry import decorators
# Module-level counter shared by every Foo instance; bumped on each
# non-memoized call to GetCountCached.
_counter = 0


class Foo(object):
  @decorators.Cache
  def GetCountCached(self, _):
    """Increment the shared counter and return it (decorated for caching)."""
    global _counter
    _counter += 1
    return _counter
def CreateFooUncached(_):
  """Build a brand-new Foo on every call; the argument is ignored."""
  return Foo()
@decorators.Cache
def CreateFooCached(_):
  """Build a Foo, with results memoized per argument by decorators.Cache."""
  return Foo()
class DecoratorsUnitTest(unittest.TestCase):
  """Tests for the decorators.Cache memoizing decorator."""

  # pylint: disable=C0102
  def testCacheDecorator(self):
    # Note: assertEqual/assertNotEqual replace the deprecated
    # assertEquals/assertNotEquals aliases (removed in Python 3.12).
    # Different arguments must never share a cached result.
    self.assertNotEqual(CreateFooUncached(1), CreateFooUncached(2))
    self.assertNotEqual(CreateFooCached(1), CreateFooCached(2))
    # Same argument: the uncached factory builds a fresh Foo each time,
    # while the cached factory returns the memoized instance.
    self.assertNotEqual(CreateFooUncached(1), CreateFooUncached(1))
    self.assertEqual(CreateFooCached(1), CreateFooCached(1))

  def testCacheableMemberCachesOnlyForSameArgs(self):
    foo = Foo()
    value_of_one = foo.GetCountCached(1)
    self.assertEqual(value_of_one, foo.GetCountCached(1))
    self.assertNotEqual(value_of_one, foo.GetCountCached(2))

  def testCacheableMemberHasSeparateCachesForSiblingInstances(self):
    foo = Foo()
    sibling_foo = Foo()
    self.assertNotEqual(foo.GetCountCached(1), sibling_foo.GetCountCached(1))

  def testCacheableMemberHasSeparateCachesForNextGenerationInstances(self):
    foo = Foo()
    last_generation_count = foo.GetCountCached(1)
    # Drop the only reference so the cache entry keyed on this instance
    # does not leak into the replacement instance.
    foo = None
    foo = Foo()
    self.assertNotEqual(last_generation_count, foo.GetCountCached(1))
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.