text
stringlengths 2
999k
|
|---|
"""This file is auto-generated by setup.py, please do not alter."""
__version__ = "0.0.3"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python version: 3.6
import yaml
import time
from core.test import test_img
from utils.Fed import FedAvg, FedAvgGradient
from models.SvrgUpdate import LocalUpdate
from utils.options import args_parser
from utils.dataset_normal import load_data
from models.ModelBuilder import build_model
from core.mm_fmnist.ClientManage_fm import ClientManageMM
from utils.my_logging import Logger
from core.function import assign_hyper_gradient
from torch.optim import SGD
import torch
import sys
import numpy as np
import copy
import os
from pathlib import Path
import random
start_time = int(time.time())
if __name__ == '__main__':
    # parse args
    args = args_parser()
    # Entry point is hard-wired to the Fashion-MNIST min-max experiment.
    args.model="fmnist_cnn"
    args.dataset = "fmnist"
    dataset_train, dataset_test, dict_users, args.img_size, dataset_train_real = load_data(args)
    torch.manual_seed(1)
    net_glob = build_model(args)
    #print(net_glob.x, net_glob.y_header)
    # copy weights
    w_glob = net_glob.state_dict()
    # Default log path encodes the full hyper-parameter configuration.
    if args.output == None:
        os.makedirs("./save",exist_ok=True)
        logs = Logger(f'./save/minmax_fmnist_{args.optim}_{args.dataset}\
_{args.model}_{args.epochs}_C{args.frac}_iid{args.iid}_\
{args.lr}_blo{not args.no_blo}_\
IE{args.inner_ep}_N{args.neumann}_HLR{args.hlr}_{args.hvp_method}_{start_time}.yaml')
    else:
        p=Path(args.output)
        if not os.path.exists(p.parent):
            os.makedirs(p.parent,exist_ok=True)
        logs = Logger(args.output)
    # Split trainable tensors by naming convention: "outer" tensors are the
    # hyper-parameters updated by the outer (hyper-gradient) loop, "inner"
    # tensors are the ordinary model weights.
    hyper_param= [k for n,k in net_glob.named_parameters() if "outer" in n]
    param= [k for n,k in net_glob.named_parameters() if "inner" in n]
    #print(hyper_param)
    comm_round=0
    # lr=1 because the effective step size is presumably baked into the
    # hyper-gradient itself (assign_hyper_gradient) -- TODO confirm.
    hyper_optimizer=SGD(hyper_param, lr=1)
    for iter in range(args.epochs):
        # Number of clients sampled per round (at least one).
        m = max(int(args.frac * args.num_users), 1)
        for _ in range(args.inner_ep):
            client_idx = np.random.choice(range(args.num_users), m, replace=False)
            client_manage=ClientManageMM(args,net_glob,client_idx, dataset_train, dict_users,hyper_param)
            if args.hvp_method=='joint':
                # Joint mode: one federated pass returns both the new weights
                # and the hyper-gradient; outer update happens immediately.
                w_glob,loss_avg, hg_glob, r = client_manage.fed_joint()
                assign_hyper_gradient(hyper_param, hg_glob)
                hyper_optimizer.step()
                # Disable the separate outer step below (already done here).
                args.no_blo=True
            else:
                # Inner-only pass; sign flip because the inner problem is a
                # maximization in the min-max formulation -- TODO confirm.
                w_glob, loss_avg = client_manage.fed_in()
                loss_avg=-loss_avg
            # SVRG needs two communications per inner step.
            if args.optim == 'svrg':
                comm_round+=2
            else:
                comm_round+=1
            net_glob.load_state_dict(w_glob)
        # Separate outer (hyper-gradient) update, once per epoch, when
        # bilevel optimization is enabled. NOTE(review): placement at epoch
        # level reconstructed from context -- verify against the original.
        if args.no_blo== False:
            client_idx = np.random.choice(range(args.num_users), m, replace=False)
            client_manage=ClientManageMM(args,net_glob,client_idx, dataset_train, dict_users,hyper_param)
            hg_glob, r = client_manage.fed_out()
            assign_hyper_gradient(hyper_param, hg_glob)
            hyper_optimizer.step()
            comm_round+=r
        # print loss
        print('Round {:3d}, Average loss {:.3f}, t-values {}'.format(iter, loss_avg, np.round(net_glob.t_inner.detach().cpu().numpy(),3)))
        # testing
        net_glob.eval()
        acc_train, loss_train = test_img(net_glob, dataset_train_real, args)
        acc_test, loss_test = test_img(net_glob, dataset_test, args)
        print("Test acc/loss: {:.2f} {:.2f}".format(acc_test, loss_test),
              "Train acc/loss: {:.2f} {:.2f}".format(acc_train, loss_train),
              f"Comm round: {comm_round}")
        logs.logging(client_idx, acc_test, acc_train, loss_test, loss_train, comm_round)
        logs.save()
        # Stop once the communication budget is exhausted (0 = unlimited).
        if args.round>0 and comm_round>args.round:
            break
    logs.save()
|
"""
Simple dynamic model of a LI battery.
"""
from __future__ import print_function, division, absolute_import
import numpy as np
from scipy.interpolate import Akima1DInterpolator
from openmdao.api import ExplicitComponent
# Data for open circuit voltage model.
train_SOC = np.array([0., 0.1, 0.25, 0.5, 0.75, 0.9, 1.0])
train_V_oc = np.array([3.5, 3.55, 3.65, 3.75, 3.9, 4.1, 4.2])
class Battery(ExplicitComponent):
    """
    Model of a Lithium Ion battery.

    Open-circuit voltage V_oc is interpolated from tabulated SOC data with an
    Akima spline; terminal voltage subtracts the ohmic drop across the
    internal resistance R_0. Pack-level quantities scale the per-cell values
    by the series/parallel cell counts.
    """

    def initialize(self):
        """Declare component options (cell counts, capacity, resistance)."""
        self.options.declare('num_nodes', default=1)
        self.options.declare('n_series', default=1, desc='number of cells in series')
        self.options.declare('n_parallel', default=3, desc='number of cells in parallel')
        self.options.declare('Q_max', default=1.05,
                             desc='Max Energy Capacity of a battery cell in A*h')
        self.options.declare('R_0', default=.025,
                             desc='Internal resistance of the battery (ohms)')

    def setup(self):
        """Declare inputs, outputs and the sparsity of the partials."""
        num_nodes = self.options['num_nodes']

        # Inputs
        self.add_input('I_Li', val=np.ones(num_nodes), units='A',
                       desc='Current demanded per cell')

        # State Variables
        self.add_input('SOC', val=np.ones(num_nodes), units=None, desc='State of charge')

        # Outputs
        self.add_output('V_L',
                        val=np.ones(num_nodes),
                        units='V',
                        desc='Terminal voltage of the battery')
        self.add_output('dXdt:SOC',
                        val=np.ones(num_nodes),
                        units='1/s',
                        desc='Time derivative of state of charge')
        self.add_output('V_oc', val=np.ones(num_nodes), units='V',
                        desc='Open Circuit Voltage')
        self.add_output('I_pack', val=0.1*np.ones(num_nodes), units='A',
                        desc='Total Pack Current')
        self.add_output('V_pack', val=9.0*np.ones(num_nodes), units='V',
                        desc='Total Pack Voltage')
        self.add_output('P_pack', val=1.0*np.ones(num_nodes), units='W',
                        desc='Total Pack Power')

        # Derivatives: everything is node-local, so partials are diagonal.
        row_col = np.arange(num_nodes)
        self.declare_partials(of='V_oc', wrt=['SOC'], rows=row_col, cols=row_col)
        self.declare_partials(of='V_L', wrt=['SOC'], rows=row_col, cols=row_col)
        self.declare_partials(of='V_L', wrt=['I_Li'], rows=row_col, cols=row_col)
        self.declare_partials(of='dXdt:SOC', wrt=['I_Li'], rows=row_col, cols=row_col)
        self.declare_partials(of='I_pack', wrt=['I_Li'], rows=row_col, cols=row_col)
        self.declare_partials(of='V_pack', wrt=['SOC', 'I_Li'], rows=row_col, cols=row_col)
        self.declare_partials(of='P_pack', wrt=['SOC', 'I_Li'], rows=row_col, cols=row_col)

        # V_oc(SOC) spline and its analytic derivative, built once.
        self.voltage_model = Akima1DInterpolator(train_SOC, train_V_oc)
        self.voltage_model_derivative = self.voltage_model.derivative()

    def compute(self, inputs, outputs):
        """Evaluate cell voltages, SOC rate, and pack-level outputs."""
        opt = self.options
        I_Li = inputs['I_Li']
        SOC = inputs['SOC']

        V_oc = self.voltage_model(SOC, extrapolate=True)

        outputs['V_oc'] = V_oc
        outputs['V_L'] = V_oc - (I_Li * opt['R_0'])
        # 3600 converts A*h capacity to A*s.
        outputs['dXdt:SOC'] = -I_Li / (3600.0 * opt['Q_max'])

        outputs['I_pack'] = I_Li * opt['n_parallel']
        outputs['V_pack'] = outputs['V_L'] * opt['n_series']
        outputs['P_pack'] = outputs['I_pack'] * outputs['V_pack']

    def compute_partials(self, inputs, partials):
        """Analytic partial derivatives matching compute()."""
        opt = self.options
        I_Li = inputs['I_Li']
        SOC = inputs['SOC']

        dV_dSOC = self.voltage_model_derivative(SOC, extrapolate=True)
        partials['V_oc', 'SOC'] = dV_dSOC
        partials['V_L', 'SOC'] = dV_dSOC
        partials['V_L', 'I_Li'] = -opt['R_0']
        partials['dXdt:SOC', 'I_Li'] = -1./(3600.0*opt['Q_max'])

        n_parallel = opt['n_parallel']
        n_series = opt['n_series']
        V_oc = self.voltage_model(SOC, extrapolate=True)
        V_L = V_oc - (I_Li * opt['R_0'])

        partials['I_pack', 'I_Li'] = n_parallel
        # BUG FIX: V_pack = n_series * V_L, so dV_pack/dI_Li = -n_series * R_0;
        # the n_series factor was missing (only correct for n_series == 1).
        partials['V_pack', 'I_Li'] = -n_series * opt['R_0']
        partials['V_pack', 'SOC'] = n_series * dV_dSOC
        # P = (n_parallel*I)*(n_series*V_L): product rule gives the extra -I*R_0 term.
        partials['P_pack', 'I_Li'] = n_parallel * n_series * (V_L - I_Li * opt['R_0'])
        partials['P_pack', 'SOC'] = n_parallel * I_Li * n_series * dV_dSOC
if __name__ == '__main__':
    from openmdao.api import Problem, IndepVarComp

    # Stand-alone sanity check: build a one-node Battery and compare its
    # analytic partials against finite differences.
    node_count = 1
    problem = Problem(model=Battery(num_nodes=node_count))
    battery_model = problem.model

    problem.setup()
    problem.set_solver_print(level=2)
    problem.run_model()

    partial_data = problem.check_partials(compact_print=True)
    print('done')
|
# Copyright 2022 The EvoJAX Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from functools import partial
from typing import Union
import jax
import jax.numpy as jnp
import numpy as np
@partial(jax.jit, static_argnums=(1,))
def process_scores(x: Union[np.ndarray, jnp.ndarray], use_ranking: bool) -> jnp.ndarray:
    """Convert fitness scores to rank if necessary.

    With ``use_ranking`` the scores are replaced by their ascending ranks,
    normalized to the interval [-0.5, 0.5]; otherwise the scores are returned
    unchanged (as a jnp array).
    """
    scores = jnp.array(x)
    if not use_ranking:
        return scores
    order = scores.argsort()
    ranks = jnp.zeros(scores.size, dtype=int).at[order].set(jnp.arange(scores.size))
    ranks = ranks.reshape(scores.shape)
    return ranks / ranks.max() - 0.5
class NEAlgorithm(ABC):
    """Interface of all Neuro-evolution algorithms in EvoJAX."""

    # Number of parameter vectors produced by each call to ask().
    pop_size: int

    @abstractmethod
    def ask(self) -> jnp.ndarray:
        """Ask the algorithm for a population of parameters.

        Returns
            A Jax array of shape (population_size, param_size).
        """
        raise NotImplementedError()

    @abstractmethod
    def tell(self, fitness: Union[np.ndarray, jnp.ndarray]) -> None:
        """Report the fitness of the population to the algorithm.

        Args:
            fitness - The fitness scores array.
        """
        raise NotImplementedError()

    @property
    def best_params(self) -> jnp.ndarray:
        # Best parameters found so far; concrete algorithms must override.
        raise NotImplementedError()

    @best_params.setter
    def best_params(self, params: Union[np.ndarray, jnp.ndarray]) -> None:
        raise NotImplementedError()
|
# correct version.py is written by setup.py
# this is a backup in case that one isn't written
version = "Unknown"
full_version = \
"Unknown: Incorrect installation. Use pip or setup.py to install"
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import struct
import socket
from .util import get_random_id, validate_int, validate_bytes, disconnect
from .packet import AlfredVersion, AlfredPacketType
from .exceptions import *
class AlfredClient(object):
    """Client for the A.L.F.R.E.D. daemon over its unix stream socket.

    Connects lazily on first send. The ``@disconnect`` decorator (from .util)
    presumably closes the socket after each public call -- TODO confirm.
    """

    def __init__(self, sock='/var/run/alfred.sock'):
        self.sock_path = sock
        self._connected = False
        # Source MAC used in PUSH_DATA payloads; all-zero by default.
        self.src_mac = bytes(6)

    def _connect(self):
        # Open a stream connection to the local alfred daemon socket.
        self.sock = socket.socket(socket.AF_UNIX)
        self.sock.connect(self.sock_path)
        self._connected = True

    def _disconnect(self):
        self.sock.close()
        self._connected = False

    def _send(self, data):
        # Lazily (re)connect before writing.
        if not self._connected:
            self._connect()
        self.sock.sendall(bytes(data))

    def _send_recv(self, data, tx_id):
        """Send a request and collect PUSH_DATA replies.

        Returns a dict mapping 'aa:bb:cc:dd:ee:ff' source MAC strings to the
        raw data bytes each server reported.
        """
        self._send(data)
        # Each reply starts with a 4-byte TLV header:
        # type(1) | version(1) | big-endian length(2).
        tlv_hdr = bytes(self.sock.recv(4))
        ret_data = {}
        while tlv_hdr:
            tlv_type = tlv_hdr[0]
            tlv_ver = tlv_hdr[1]
            tlv_len = struct.unpack('!H', tlv_hdr[2:4])[0]
            # Transaction header: 16-bit transaction id + 16-bit sequence no.
            trans_hdr = bytes(self.sock.recv(4))
            trans_id = struct.unpack('!H', trans_hdr[0:2])[0]
            trans_seq = struct.unpack('!H', trans_hdr[2:4])[0]
            if tlv_type == AlfredPacketType.STATUS_TXEND:
                if trans_seq == 1:
                    raise AlfredError('Error received from server')
                # NOTE(review): a TXEND with seq != 1 falls through and is
                # parsed as a data record below -- confirm this matches the
                # alfred wire protocol.
            elif tlv_type != AlfredPacketType.PUSH_DATA or trans_id != tx_id:
                raise AlfredInvalidResponse(
                    'Invalid response received from server')
            # Remaining record (tlv_len minus the 4-byte transaction header):
            # src MAC(6) | data type(1) | data version(1) | data length(2) | data.
            recv_data = bytes(self.sock.recv(tlv_len-4))
            src_mac = recv_data[0:6]
            data_type = recv_data[6]
            data_ver = recv_data[7]
            data_len = struct.unpack('!H', recv_data[8:10])[0]
            data = recv_data[10:]
            if len(data) != data_len:
                # NOTE(review): recv() may legitimately return fewer bytes
                # than requested; a short read is treated as a hard error here.
                raise AlfredDataError('Failed to receive all data from server. '
                                      'Received {} bytes. Should have received {}'
                                      .format(len(data), data_len))
            src_mac = ':'.join(['{:02x}'.format(x) for x in src_mac])
            ret_data[src_mac] = data
            # Next TLV header; an empty read terminates the loop.
            tlv_hdr = bytes(self.sock.recv(4))
        return ret_data

    @disconnect
    def request_data(self, data_type):
        """
        Request data from the Alfred server of the given data type

        Params:
            data_type : integer of the type of data requested (0-255)
        """
        data_type = validate_int(data_type)
        # 7-byte request: type(1) | version(1) | length(2) | data_type(1) | tx_id(2)
        request = bytearray([0 for _ in range(7)])
        tx_id = get_random_id()
        struct.pack_into('!B', request, 0, AlfredPacketType.REQUEST)
        struct.pack_into('!B', request, 1, AlfredVersion.v0)
        struct.pack_into('!H', request, 2, 3)  # payload length after TLV header
        struct.pack_into('!B', request, 4, data_type)
        struct.pack_into('!H', request, 5, tx_id)
        return self._send_recv(request, tx_id)

    @disconnect
    def send_data(self, data_type, data, version=0):
        """
        Set data in the Alfred cloud for the given data type

        Params:
            data_type : integer of the type of data to be set (0-255)
            data : byte string of the data
            version : optional version to set for this data (default = 0)
        """
        data_type = validate_int(data_type)
        data = validate_bytes(data)
        data_len = len(data)
        # 8-byte header: type(1) | version(1) | length(2) | tx_id(2) | seq(2)
        update = bytearray([0 for _ in range(8)])
        tx_id = get_random_id()
        seq_num = 0
        struct.pack_into('!B', update, 0, AlfredPacketType.PUSH_DATA)
        struct.pack_into('!B', update, 1, AlfredVersion.v0)
        # Declared length: 14 fixed payload/header bytes plus the data itself.
        struct.pack_into('!H', update, 2, 14+data_len)
        struct.pack_into('!H', update, 4, tx_id)
        struct.pack_into('!H', update, 6, seq_num)
        # Payload: source MAC, data type, data version, data length, data.
        payload = self.src_mac
        payload += struct.pack('!B', data_type)
        payload += struct.pack('!B', version)
        payload += struct.pack('!H', data_len)
        payload += data
        push_data = bytes(update) + payload
        self._send(push_data)
|
#!/usr/bin/python
from .elements import Outputs, Parameters, Summary
def new_parameters(parms_dict):
    """Build a Parameters element directly from a plain dict."""
    return Parameters(parms=parms_dict)


def parameters(boto3_session, stack_name):
    """Return the Parameters element of the named CloudFormation stack."""
    stack = _get_stack(boto3_session, stack_name)
    return Parameters(stack)


def outputs(boto3_session, stack_name):
    """Return the Outputs element of the named CloudFormation stack."""
    stack = _get_stack(boto3_session, stack_name)
    return Outputs(stack)


def summary(boto3_session, stack_name):
    """Return a Summary element for the named CloudFormation stack."""
    return Summary(boto3_session, stack_name)


def _get_stack(session, name):
    """Resolve a boto3 CloudFormation Stack resource by name."""
    cloudformation = session.resource('cloudformation')
    return cloudformation.Stack(name)
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['RouteTable']
class RouteTable(pulumi.CustomResource):
    # Auto-generated by the Pulumi Terraform Bridge (tfgen); structural edits
    # belong in the generator, not here.
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 propagating_vgws: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteTableRouteArgs']]]]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Provides a resource to create a VPC routing table.

        > **NOTE on Route Tables and Routes:** This provider currently
        provides both a standalone Route resource and a Route Table resource with routes
        defined in-line. At this time you cannot use a Route Table with in-line routes
        in conjunction with any Route resources. Doing so will cause
        a conflict of rule settings and will overwrite rules.

        > **NOTE on `gateway_id` and `nat_gateway_id`:** The AWS API is very forgiving with these two
        attributes and the `ec2.RouteTable` resource can be created with a NAT ID specified as a Gateway ID attribute.
        This _will_ lead to a permanent diff between your configuration and statefile, as the API returns the correct
        parameters in the returned route table. If you're experiencing constant diffs in your `ec2.RouteTable` resources,
        the first thing to check is whether or not you're specifying a NAT ID instead of a Gateway ID, or vice-versa.

        > **NOTE on `propagating_vgws` and the `ec2.VpnGatewayRoutePropagation` resource:**
        If the `propagating_vgws` argument is present, it's not supported to _also_
        define route propagations using `ec2.VpnGatewayRoutePropagation`, since
        this resource will delete any propagating gateways not explicitly listed in
        `propagating_vgws`. Omit this argument when defining route propagation using
        the separate resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        route_table = aws.ec2.RouteTable("routeTable",
            vpc_id=aws_vpc["default"]["id"],
            routes=[
                aws.ec2.RouteTableRouteArgs(
                    cidr_block="10.0.1.0/24",
                    gateway_id=aws_internet_gateway["main"]["id"],
                ),
                aws.ec2.RouteTableRouteArgs(
                    ipv6_cidr_block="::/0",
                    egress_only_gateway_id=aws_egress_only_internet_gateway["foo"]["id"],
                ),
            ],
            tags={
                "Name": "main",
            })
        ```

        ## Import

        Route Tables can be imported using the route table `id`. For example, to import route table `rtb-4e616f6d69`, use this command

        ```sh
        $ pulumi import aws:ec2/routeTable:RouteTable public_rt rtb-4e616f6d69
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] propagating_vgws: A list of virtual gateways for propagation.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteTableRouteArgs']]]] routes: A list of route objects. Their keys are documented below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] vpc_id: The VPC ID.
        """
        # Legacy positional arguments: warn and fold into the modern ones.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # No opts.id means we are creating a new resource (vs looking one up).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['propagating_vgws'] = propagating_vgws
            __props__['routes'] = routes
            __props__['tags'] = tags
            if vpc_id is None and not opts.urn:
                raise TypeError("Missing required property 'vpc_id'")
            __props__['vpc_id'] = vpc_id
            # Output-only property; populated by the provider after creation.
            __props__['owner_id'] = None
        super(RouteTable, __self__).__init__(
            'aws:ec2/routeTable:RouteTable',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            owner_id: Optional[pulumi.Input[str]] = None,
            propagating_vgws: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteTableRouteArgs']]]]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            vpc_id: Optional[pulumi.Input[str]] = None) -> 'RouteTable':
        """
        Get an existing RouteTable resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] owner_id: The ID of the AWS account that owns the route table.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] propagating_vgws: A list of virtual gateways for propagation.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteTableRouteArgs']]]] routes: A list of route objects. Their keys are documented below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] vpc_id: The VPC ID.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["owner_id"] = owner_id
        __props__["propagating_vgws"] = propagating_vgws
        __props__["routes"] = routes
        __props__["tags"] = tags
        __props__["vpc_id"] = vpc_id
        return RouteTable(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="ownerId")
    def owner_id(self) -> pulumi.Output[str]:
        """
        The ID of the AWS account that owns the route table.
        """
        return pulumi.get(self, "owner_id")

    @property
    @pulumi.getter(name="propagatingVgws")
    def propagating_vgws(self) -> pulumi.Output[Sequence[str]]:
        """
        A list of virtual gateways for propagation.
        """
        return pulumi.get(self, "propagating_vgws")

    @property
    @pulumi.getter
    def routes(self) -> pulumi.Output[Sequence['outputs.RouteTableRoute']]:
        """
        A list of route objects. Their keys are documented below.
        """
        return pulumi.get(self, "routes")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> pulumi.Output[str]:
        """
        The VPC ID.
        """
        return pulumi.get(self, "vpc_id")

    def translate_output_property(self, prop):
        # Provider camelCase -> Python snake_case (identity when unmapped).
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        # Python snake_case -> provider camelCase (identity when unmapped).
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
"""ApiGatewayV2Backend class with methods for supported APIs."""
import random
import string
import yaml
from moto.core import BaseBackend, BaseModel
from moto.core.utils import BackendDict, unix_time
from moto.utilities.tagging_service import TaggingService
from .exceptions import (
ApiNotFound,
AuthorizerNotFound,
BadRequestException,
ModelNotFound,
RouteResponseNotFound,
IntegrationNotFound,
IntegrationResponseNotFound,
RouteNotFound,
VpcLinkNotFound,
)
class Authorizer(BaseModel):
    """In-memory model of an ApiGatewayV2 authorizer."""

    def __init__(
        self,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_type,
        authorizer_uri,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        # Random 8-char lowercase id, mimicking AWS-generated authorizer ids.
        self.id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.auth_creds_arn = auth_creds_arn
        self.auth_payload_format_version = auth_payload_format_version
        self.auth_result_ttl = auth_result_ttl
        self.authorizer_type = authorizer_type
        self.authorizer_uri = authorizer_uri
        self.enable_simple_response = enable_simple_response
        self.identity_source = identity_source
        self.identity_validation_expr = identity_validation_expr
        self.jwt_config = jwt_config
        self.name = name

    def update(
        self,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_type,
        authorizer_uri,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        """Apply a partial update; only non-None arguments overwrite fields."""
        if auth_creds_arn is not None:
            self.auth_creds_arn = auth_creds_arn
        if auth_payload_format_version is not None:
            self.auth_payload_format_version = auth_payload_format_version
        if auth_result_ttl is not None:
            self.auth_result_ttl = auth_result_ttl
        if authorizer_type is not None:
            # BUG FIX: was `self.authorizer_type is authorizer_type`, an
            # identity comparison with a discarded result (a no-op), so the
            # authorizer type was never actually updated. Assign instead.
            self.authorizer_type = authorizer_type
        if authorizer_uri is not None:
            self.authorizer_uri = authorizer_uri
        if enable_simple_response is not None:
            self.enable_simple_response = enable_simple_response
        if identity_source is not None:
            self.identity_source = identity_source
        if identity_validation_expr is not None:
            self.identity_validation_expr = identity_validation_expr
        if jwt_config is not None:
            self.jwt_config = jwt_config
        if name is not None:
            self.name = name

    def to_json(self):
        """Serialize to the camelCase wire format used by the API responses."""
        return {
            "authorizerId": self.id,
            "authorizerCredentialsArn": self.auth_creds_arn,
            "authorizerPayloadFormatVersion": self.auth_payload_format_version,
            "authorizerResultTtlInSeconds": self.auth_result_ttl,
            "authorizerType": self.authorizer_type,
            "authorizerUri": self.authorizer_uri,
            "enableSimpleResponses": self.enable_simple_response,
            "identitySource": self.identity_source,
            "identityValidationExpression": self.identity_validation_expr,
            "jwtConfiguration": self.jwt_config,
            "name": self.name,
        }
class Integration(BaseModel):
    """In-memory model of an ApiGatewayV2 integration, holding its responses."""

    def __init__(
        self,
        connection_id,
        connection_type,
        content_handling_strategy,
        credentials_arn,
        description,
        integration_method,
        integration_type,
        integration_uri,
        passthrough_behavior,
        payload_format_version,
        integration_subtype,
        request_parameters,
        request_templates,
        response_parameters,
        template_selection_expression,
        timeout_in_millis,
        tls_config,
    ):
        # Random 8-char lowercase id, mimicking AWS-generated integration ids.
        self.id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.connection_id = connection_id
        self.connection_type = connection_type
        self.content_handling_strategy = content_handling_strategy
        self.credentials_arn = credentials_arn
        self.description = description
        self.integration_method = integration_method
        self.integration_response_selection_expression = None
        self.integration_type = integration_type
        self.integration_subtype = integration_subtype
        self.integration_uri = integration_uri
        self.passthrough_behavior = passthrough_behavior
        self.payload_format_version = payload_format_version
        self.request_parameters = request_parameters
        self.request_templates = request_templates
        self.response_parameters = response_parameters
        self.template_selection_expression = template_selection_expression
        self.timeout_in_millis = timeout_in_millis
        self.tls_config = tls_config

        # Default the response selection expression by integration type.
        if self.integration_type in ["MOCK", "HTTP"]:
            self.integration_response_selection_expression = (
                "${integration.response.statuscode}"
            )
        elif self.integration_type in ["AWS"]:
            self.integration_response_selection_expression = (
                "${integration.response.body.errorMessage}"
            )
        # Legacy (non-proxy) integration types default to WHEN_NO_MATCH.
        if (
            self.integration_type in ["AWS", "MOCK", "HTTP"]
            and self.passthrough_behavior is None
        ):
            self.passthrough_behavior = "WHEN_NO_MATCH"
        # A URI without an explicit method implies POST.
        if self.integration_uri is not None and self.integration_method is None:
            self.integration_method = "POST"
        # Default timeouts differ between AWS/MOCK and proxy-style types.
        if self.integration_type in ["AWS", "MOCK"]:
            self.timeout_in_millis = self.timeout_in_millis or 29000
        else:
            self.timeout_in_millis = self.timeout_in_millis or 30000

        # integration_response_id -> IntegrationResponse
        self.responses = dict()

    def create_response(
        self,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Create and register an IntegrationResponse; returns it."""
        response = IntegrationResponse(
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
        self.responses[response.id] = response
        return response

    def delete_response(self, integration_response_id):
        # Raises KeyError if the id is unknown (no default supplied).
        self.responses.pop(integration_response_id)

    def get_response(self, integration_response_id):
        """Return the response with the given id or raise IntegrationResponseNotFound."""
        if integration_response_id not in self.responses:
            raise IntegrationResponseNotFound(integration_response_id)
        return self.responses[integration_response_id]

    def get_responses(self):
        # View over all registered IntegrationResponse objects.
        return self.responses.values()

    def update_response(
        self,
        integration_response_id,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Partially update an existing response; returns the updated object."""
        int_response = self.responses[integration_response_id]
        int_response.update(
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
        return int_response

    def update(
        self,
        connection_id,
        connection_type,
        content_handling_strategy,
        credentials_arn,
        description,
        integration_method,
        integration_type,
        integration_uri,
        passthrough_behavior,
        payload_format_version,
        integration_subtype,
        request_parameters,
        request_templates,
        response_parameters,
        template_selection_expression,
        timeout_in_millis,
        tls_config,
    ):
        """Apply a partial update; only non-None arguments overwrite fields."""
        if connection_id is not None:
            self.connection_id = connection_id
        if connection_type is not None:
            self.connection_type = connection_type
        if content_handling_strategy is not None:
            self.content_handling_strategy = content_handling_strategy
        if credentials_arn is not None:
            self.credentials_arn = credentials_arn
        if description is not None:
            self.description = description
        if integration_method is not None:
            self.integration_method = integration_method
        if integration_type is not None:
            self.integration_type = integration_type
        if integration_uri is not None:
            self.integration_uri = integration_uri
        if passthrough_behavior is not None:
            self.passthrough_behavior = passthrough_behavior
        if payload_format_version is not None:
            self.payload_format_version = payload_format_version
        if integration_subtype is not None:
            self.integration_subtype = integration_subtype
        if request_parameters is not None:
            # Skip parameters with an empty value
            req_params = {
                key: value for (key, value) in request_parameters.items() if value
            }
            self.request_parameters = req_params
        if request_templates is not None:
            self.request_templates = request_templates
        if response_parameters is not None:
            self.response_parameters = response_parameters
        if template_selection_expression is not None:
            self.template_selection_expression = template_selection_expression
        if timeout_in_millis is not None:
            self.timeout_in_millis = timeout_in_millis
        if tls_config is not None:
            self.tls_config = tls_config

    def to_json(self):
        """Serialize to the camelCase wire format used by the API responses."""
        return {
            "connectionId": self.connection_id,
            "connectionType": self.connection_type,
            "contentHandlingStrategy": self.content_handling_strategy,
            "credentialsArn": self.credentials_arn,
            "description": self.description,
            "integrationId": self.id,
            "integrationMethod": self.integration_method,
            "integrationResponseSelectionExpression": self.integration_response_selection_expression,
            "integrationType": self.integration_type,
            "integrationSubtype": self.integration_subtype,
            "integrationUri": self.integration_uri,
            "passthroughBehavior": self.passthrough_behavior,
            "payloadFormatVersion": self.payload_format_version,
            "requestParameters": self.request_parameters,
            "requestTemplates": self.request_templates,
            "responseParameters": self.response_parameters,
            "templateSelectionExpression": self.template_selection_expression,
            "timeoutInMillis": self.timeout_in_millis,
            "tlsConfig": self.tls_config,
        }
class IntegrationResponse(BaseModel):
    """A single integration response belonging to an Integration."""

    def __init__(
        self,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        # AWS-style random 8-character lowercase identifier.
        self.id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.content_handling_strategy = content_handling_strategy
        self.integration_response_key = integration_response_key
        self.response_parameters = response_parameters
        self.response_templates = response_templates
        self.template_selection_expression = template_selection_expression

    def update(
        self,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Overwrite each attribute whose replacement value is not None."""
        replacements = {
            "content_handling_strategy": content_handling_strategy,
            "integration_response_key": integration_response_key,
            "response_parameters": response_parameters,
            "response_templates": response_templates,
            "template_selection_expression": template_selection_expression,
        }
        for attr, value in replacements.items():
            if value is not None:
                setattr(self, attr, value)

    def to_json(self):
        """Serialize to the camelCase wire format."""
        return {
            "integrationResponseId": self.id,
            "integrationResponseKey": self.integration_response_key,
            "contentHandlingStrategy": self.content_handling_strategy,
            "responseParameters": self.response_parameters,
            "responseTemplates": self.response_templates,
            "templateSelectionExpression": self.template_selection_expression,
        }
class Model(BaseModel):
    """An ApiGatewayV2 model: a named schema with a content type."""

    def __init__(self, content_type, description, name, schema):
        # AWS-style random 8-character lowercase identifier.
        self.id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.content_type = content_type
        self.description = description
        self.name = name
        self.schema = schema

    def update(self, content_type, description, name, schema):
        """Overwrite each attribute whose replacement value is not None."""
        replacements = {
            "content_type": content_type,
            "description": description,
            "name": name,
            "schema": schema,
        }
        for attr, value in replacements.items():
            if value is not None:
                setattr(self, attr, value)

    def to_json(self):
        """Serialize to the camelCase wire format."""
        return {
            "modelId": self.id,
            "contentType": self.content_type,
            "description": self.description,
            "name": self.name,
            "schema": self.schema,
        }
class RouteResponse(BaseModel):
    """A response definition attached to a Route."""

    def __init__(self, route_response_key, model_selection_expression, response_models):
        # AWS-style random 8-character lowercase identifier.
        id_chars = [random.choice(string.ascii_lowercase) for _ in range(8)]
        self.id = "".join(id_chars)
        self.route_response_key = route_response_key
        self.model_selection_expression = model_selection_expression
        self.response_models = response_models

    def to_json(self):
        """Serialize to the camelCase wire format."""
        return {
            "modelSelectionExpression": self.model_selection_expression,
            "responseModels": self.response_models,
            "routeResponseId": self.id,
            "routeResponseKey": self.route_response_key,
        }
class Route(BaseModel):
    def __init__(
        self,
        api_key_required,
        authorization_scopes,
        authorization_type,
        authorizer_id,
        model_selection_expression,
        operation_name,
        request_models,
        request_parameters,
        route_key,
        route_response_selection_expression,
        target,
    ):
        # Random 8-char lowercase route id, mimicking AWS-generated ids.
        self.route_id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.api_key_required = api_key_required
        self.authorization_scopes = authorization_scopes
        self.authorization_type = authorization_type
        self.authorizer_id = authorizer_id
        self.model_selection_expression = model_selection_expression
        self.operation_name = operation_name
        self.request_models = request_models
        # Default to an empty mapping so parameter deletes/lookups never
        # hit None.
        self.request_parameters = request_parameters or {}
        self.route_key = route_key
        self.route_response_selection_expression = route_response_selection_expression
        self.target = target
        # route_response_id -> RouteResponse
        self.route_responses = dict()
def create_route_response(
self, route_response_key, model_selection_expression, response_models
):
route_response = RouteResponse(
route_response_key,
model_selection_expression=model_selection_expression,
response_models=response_models,
)
self.route_responses[route_response.id] = route_response
return route_response
def get_route_response(self, route_response_id):
if route_response_id not in self.route_responses:
raise RouteResponseNotFound(route_response_id)
return self.route_responses[route_response_id]
def delete_route_response(self, route_response_id):
self.route_responses.pop(route_response_id, None)
def delete_route_request_parameter(self, request_param):
del self.request_parameters[request_param]
def update(
self,
api_key_required,
authorization_scopes,
authorization_type,
authorizer_id,
model_selection_expression,
operation_name,
request_models,
request_parameters,
route_key,
route_response_selection_expression,
target,
):
if api_key_required is not None:
self.api_key_required = api_key_required
if authorization_scopes:
self.authorization_scopes = authorization_scopes
if authorization_type:
self.authorization_type = authorization_type
if authorizer_id is not None:
self.authorizer_id = authorizer_id
if model_selection_expression:
self.model_selection_expression = model_selection_expression
if operation_name is not None:
self.operation_name = operation_name
if request_models:
self.request_models = request_models
if request_parameters:
self.request_parameters = request_parameters
if route_key:
self.route_key = route_key
if route_response_selection_expression is not None:
self.route_response_selection_expression = (
route_response_selection_expression
)
if target:
self.target = target
def to_json(self):
return {
"apiKeyRequired": self.api_key_required,
"authorizationScopes": self.authorization_scopes,
"authorizationType": self.authorization_type,
"authorizerId": self.authorizer_id,
"modelSelectionExpression": self.model_selection_expression,
"operationName": self.operation_name,
"requestModels": self.request_models,
"requestParameters": self.request_parameters,
"routeId": self.route_id,
"routeKey": self.route_key,
"routeResponseSelectionExpression": self.route_response_selection_expression,
"target": self.target,
}
class Api(BaseModel):
    """A single API and its child resources.

    Owns authorizers, integrations, models and routes, each stored in a dict
    keyed by the child's random id.  Tags are stored via the owning backend's
    TaggingService.  Most create/get/update/delete helpers below simply
    manage or delegate to those child dicts.
    """
    def __init__(
        self,
        region,
        name,
        api_key_selection_expression,
        cors_configuration,
        description,
        disable_execute_api_endpoint,
        disable_schema_validation,
        protocol_type,
        route_selection_expression,
        tags,
        version,
        backend,
    ):
        """Create an API with a random 8-letter id and register its tags."""
        self.api_id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.api_endpoint = f"https://{self.api_id}.execute-api.{region}.amazonaws.com"
        self.backend = backend
        self.name = name
        # Fall back to the AWS default selection expressions when not given
        self.api_key_selection_expression = (
            api_key_selection_expression or "$request.header.x-api-key"
        )
        self.created_date = unix_time()
        self.cors_configuration = cors_configuration
        self.description = description
        self.disable_execute_api_endpoint = disable_execute_api_endpoint or False
        self.disable_schema_validation = disable_schema_validation
        self.protocol_type = protocol_type
        self.route_selection_expression = (
            route_selection_expression or "$request.method $request.path"
        )
        self.version = version
        self.authorizers = dict()
        self.integrations = dict()
        self.models = dict()
        self.routes = dict()
        self.arn = f"arn:aws:apigateway:{region}::/apis/{self.api_id}"
        self.backend.tag_resource(self.arn, tags)
    def clear(self):
        """Drop all child resources (used when re-importing the API)."""
        self.authorizers = dict()
        self.integrations = dict()
        self.models = dict()
        self.routes = dict()
    def delete_cors_configuration(self):
        """Remove the CORS configuration entirely."""
        self.cors_configuration = None
    def create_authorizer(
        self,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_type,
        authorizer_uri,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        """Create and register an Authorizer; returns the new object."""
        authorizer = Authorizer(
            auth_creds_arn=auth_creds_arn,
            auth_payload_format_version=auth_payload_format_version,
            auth_result_ttl=auth_result_ttl,
            authorizer_type=authorizer_type,
            authorizer_uri=authorizer_uri,
            enable_simple_response=enable_simple_response,
            identity_source=identity_source,
            identity_validation_expr=identity_validation_expr,
            jwt_config=jwt_config,
            name=name,
        )
        self.authorizers[authorizer.id] = authorizer
        return authorizer
    def delete_authorizer(self, authorizer_id):
        """Remove an authorizer; silently ignores unknown ids."""
        self.authorizers.pop(authorizer_id, None)
    def get_authorizer(self, authorizer_id):
        """Return the authorizer or raise AuthorizerNotFound."""
        if authorizer_id not in self.authorizers:
            raise AuthorizerNotFound(authorizer_id)
        return self.authorizers[authorizer_id]
    def update_authorizer(
        self,
        authorizer_id,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_type,
        authorizer_uri,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        """Update an existing authorizer in place and return it.

        NOTE(review): unlike get_authorizer, an unknown id raises KeyError
        here rather than AuthorizerNotFound -- confirm intended.
        """
        authorizer = self.authorizers[authorizer_id]
        authorizer.update(
            auth_creds_arn=auth_creds_arn,
            auth_payload_format_version=auth_payload_format_version,
            auth_result_ttl=auth_result_ttl,
            authorizer_type=authorizer_type,
            authorizer_uri=authorizer_uri,
            enable_simple_response=enable_simple_response,
            identity_source=identity_source,
            identity_validation_expr=identity_validation_expr,
            jwt_config=jwt_config,
            name=name,
        )
        return authorizer
    def create_model(self, content_type, description, name, schema):
        """Create and register a Model; returns the new object."""
        model = Model(content_type, description, name, schema)
        self.models[model.id] = model
        return model
    def delete_model(self, model_id):
        """Remove a model; silently ignores unknown ids."""
        self.models.pop(model_id, None)
    def get_model(self, model_id):
        """Return the model or raise ModelNotFound."""
        if model_id not in self.models:
            raise ModelNotFound(model_id)
        return self.models[model_id]
    def update_model(self, model_id, content_type, description, name, schema):
        """Update an existing model in place and return it."""
        model = self.models[model_id]
        model.update(content_type, description, name, schema)
        return model
    def import_api(self, body, fail_on_warnings):
        """Replace this API's resources from an OpenAPI (YAML) document.

        Clears the existing children, turns x-amazon-apigateway-integration
        sections into integrations plus routes, and copies info.title,
        info.version and x-amazon-apigateway-cors onto the API.  When
        fail_on_warnings is set, a response schema referencing an unknown
        model aborts the import with BadRequestException.
        """
        self.clear()
        body = yaml.safe_load(body)
        for path, path_details in body.get("paths", {}).items():
            for method, method_details in path_details.items():
                route_key = f"{method.upper()} {path}"
                for int_type, type_details in method_details.items():
                    if int_type == "responses":
                        for status_code, response_details in type_details.items():
                            content = response_details.get("content", {})
                            for content_type in content.values():
                                # presumably the schema mapping's values are
                                # model references -- verify against callers
                                for ref in content_type.get("schema", {}).values():
                                    if ref not in self.models and fail_on_warnings:
                                        attr = f"paths.'{path}'({method}).{int_type}.{status_code}.content.schema.{ref}"
                                        raise BadRequestException(
                                            f"Warnings found during import:\n\tParse issue: attribute {attr} is missing"
                                        )
                    if int_type == "x-amazon-apigateway-integration":
                        integration = self.create_integration(
                            connection_type="INTERNET",
                            description="AutoCreate from OpenAPI Import",
                            integration_type=type_details.get("type"),
                            integration_method=type_details.get("httpMethod"),
                            payload_format_version=type_details.get(
                                "payloadFormatVersion"
                            ),
                            integration_uri=type_details.get("uri"),
                        )
                        self.create_route(
                            api_key_required=False,
                            authorization_scopes=[],
                            route_key=route_key,
                            target=f"integrations/{integration.id}",
                        )
        if "title" in body.get("info", {}):
            self.name = body["info"]["title"]
        if "version" in body.get("info", {}):
            self.version = str(body["info"]["version"])
        if "x-amazon-apigateway-cors" in body:
            self.cors_configuration = body["x-amazon-apigateway-cors"]
    def update(
        self,
        api_key_selection_expression,
        cors_configuration,
        description,
        disable_schema_validation,
        disable_execute_api_endpoint,
        name,
        route_selection_expression,
        version,
    ):
        """Partially update the API: only non-None arguments are applied."""
        if api_key_selection_expression is not None:
            self.api_key_selection_expression = api_key_selection_expression
        if cors_configuration is not None:
            self.cors_configuration = cors_configuration
        if description is not None:
            self.description = description
        if disable_execute_api_endpoint is not None:
            self.disable_execute_api_endpoint = disable_execute_api_endpoint
        if disable_schema_validation is not None:
            self.disable_schema_validation = disable_schema_validation
        if name is not None:
            self.name = name
        if route_selection_expression is not None:
            self.route_selection_expression = route_selection_expression
        if version is not None:
            self.version = version
    def create_integration(
        self,
        connection_type,
        description,
        integration_method,
        integration_type,
        integration_uri,
        connection_id=None,
        content_handling_strategy=None,
        credentials_arn=None,
        passthrough_behavior=None,
        payload_format_version=None,
        integration_subtype=None,
        request_parameters=None,
        request_templates=None,
        response_parameters=None,
        template_selection_expression=None,
        timeout_in_millis=None,
        tls_config=None,
    ):
        """Create and register an Integration; returns the new object."""
        integration = Integration(
            connection_id=connection_id,
            connection_type=connection_type,
            content_handling_strategy=content_handling_strategy,
            credentials_arn=credentials_arn,
            description=description,
            integration_method=integration_method,
            integration_type=integration_type,
            integration_uri=integration_uri,
            passthrough_behavior=passthrough_behavior,
            payload_format_version=payload_format_version,
            integration_subtype=integration_subtype,
            request_parameters=request_parameters,
            request_templates=request_templates,
            response_parameters=response_parameters,
            template_selection_expression=template_selection_expression,
            timeout_in_millis=timeout_in_millis,
            tls_config=tls_config,
        )
        self.integrations[integration.id] = integration
        return integration
    def delete_integration(self, integration_id):
        """Remove an integration; silently ignores unknown ids."""
        self.integrations.pop(integration_id, None)
    def get_integration(self, integration_id):
        """Return the integration or raise IntegrationNotFound."""
        if integration_id not in self.integrations:
            raise IntegrationNotFound(integration_id)
        return self.integrations[integration_id]
    def get_integrations(self):
        """Return all integrations (dict view, unordered by id)."""
        return self.integrations.values()
    def update_integration(
        self,
        integration_id,
        connection_id,
        connection_type,
        content_handling_strategy,
        credentials_arn,
        description,
        integration_method,
        integration_type,
        integration_uri,
        passthrough_behavior,
        payload_format_version,
        integration_subtype,
        request_parameters,
        request_templates,
        response_parameters,
        template_selection_expression,
        timeout_in_millis,
        tls_config,
    ):
        """Update an existing integration in place and return it."""
        integration = self.integrations[integration_id]
        integration.update(
            connection_id=connection_id,
            connection_type=connection_type,
            content_handling_strategy=content_handling_strategy,
            credentials_arn=credentials_arn,
            description=description,
            integration_method=integration_method,
            integration_type=integration_type,
            integration_uri=integration_uri,
            passthrough_behavior=passthrough_behavior,
            payload_format_version=payload_format_version,
            integration_subtype=integration_subtype,
            request_parameters=request_parameters,
            request_templates=request_templates,
            response_parameters=response_parameters,
            template_selection_expression=template_selection_expression,
            timeout_in_millis=timeout_in_millis,
            tls_config=tls_config,
        )
        return integration
    def create_integration_response(
        self,
        integration_id,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Delegate response creation to the target integration."""
        integration = self.get_integration(integration_id)
        return integration.create_response(
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
    def delete_integration_response(self, integration_id, integration_response_id):
        """Delegate response deletion to the target integration."""
        integration = self.get_integration(integration_id)
        integration.delete_response(integration_response_id)
    def get_integration_response(self, integration_id, integration_response_id):
        """Delegate response lookup to the target integration."""
        integration = self.get_integration(integration_id)
        return integration.get_response(integration_response_id)
    def get_integration_responses(self, integration_id):
        """Delegate response listing to the target integration."""
        integration = self.get_integration(integration_id)
        return integration.get_responses()
    def update_integration_response(
        self,
        integration_id,
        integration_response_id,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Delegate response update to the target integration."""
        integration = self.get_integration(integration_id)
        return integration.update_response(
            integration_response_id=integration_response_id,
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
    def create_route(
        self,
        api_key_required,
        authorization_scopes,
        route_key,
        target,
        authorization_type=None,
        authorizer_id=None,
        model_selection_expression=None,
        operation_name=None,
        request_models=None,
        request_parameters=None,
        route_response_selection_expression=None,
    ):
        """Create and register a Route; returns the new object."""
        route = Route(
            api_key_required=api_key_required,
            authorization_scopes=authorization_scopes,
            authorization_type=authorization_type,
            authorizer_id=authorizer_id,
            model_selection_expression=model_selection_expression,
            operation_name=operation_name,
            request_models=request_models,
            request_parameters=request_parameters,
            route_key=route_key,
            route_response_selection_expression=route_response_selection_expression,
            target=target,
        )
        self.routes[route.route_id] = route
        return route
    def delete_route(self, route_id):
        """Remove a route; silently ignores unknown ids."""
        self.routes.pop(route_id, None)
    def delete_route_request_parameter(self, route_id, request_param):
        """Delete a request parameter on the given route."""
        route = self.get_route(route_id)
        route.delete_route_request_parameter(request_param)
    def get_route(self, route_id):
        """Return the route or raise RouteNotFound."""
        if route_id not in self.routes:
            raise RouteNotFound(route_id)
        return self.routes[route_id]
    def get_routes(self):
        """Return all routes (dict view, unordered by id)."""
        return self.routes.values()
    def update_route(
        self,
        route_id,
        api_key_required,
        authorization_scopes,
        authorization_type,
        authorizer_id,
        model_selection_expression,
        operation_name,
        request_models,
        request_parameters,
        route_key,
        route_response_selection_expression,
        target,
    ):
        """Partially update a route and return it."""
        route = self.get_route(route_id)
        route.update(
            api_key_required=api_key_required,
            authorization_scopes=authorization_scopes,
            authorization_type=authorization_type,
            authorizer_id=authorizer_id,
            model_selection_expression=model_selection_expression,
            operation_name=operation_name,
            request_models=request_models,
            request_parameters=request_parameters,
            route_key=route_key,
            route_response_selection_expression=route_response_selection_expression,
            target=target,
        )
        return route
    def create_route_response(
        self, route_id, route_response_key, model_selection_expression, response_models
    ):
        """Delegate route-response creation to the target route."""
        route = self.get_route(route_id)
        return route.create_route_response(
            route_response_key,
            model_selection_expression=model_selection_expression,
            response_models=response_models,
        )
    def delete_route_response(self, route_id, route_response_id):
        """Delegate route-response deletion to the target route."""
        route = self.get_route(route_id)
        route.delete_route_response(route_response_id)
    def get_route_response(self, route_id, route_response_id):
        """Delegate route-response lookup to the target route."""
        route = self.get_route(route_id)
        return route.get_route_response(route_response_id)
    def to_json(self):
        """Serialize to the GetApi wire shape (tags fetched from backend)."""
        return {
            "apiId": self.api_id,
            "apiEndpoint": self.api_endpoint,
            "apiKeySelectionExpression": self.api_key_selection_expression,
            "createdDate": self.created_date,
            "corsConfiguration": self.cors_configuration,
            "description": self.description,
            "disableExecuteApiEndpoint": self.disable_execute_api_endpoint,
            "disableSchemaValidation": self.disable_schema_validation,
            "name": self.name,
            "protocolType": self.protocol_type,
            "routeSelectionExpression": self.route_selection_expression,
            "tags": self.backend.get_tags(self.arn),
            "version": self.version,
        }
class VpcLink(BaseModel):
    """A VPC link resource; its tags live in the backend's TaggingService."""
    def __init__(self, name, sg_ids, subnet_ids, tags, backend):
        self.created = unix_time()
        self.id = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        self.name = name
        self.sg_ids = sg_ids
        self.subnet_ids = subnet_ids
        self.backend = backend
        self.arn = f"arn:aws:apigateway:{backend.region_name}::/vpclinks/{self.id}"
        self.backend.tag_resource(self.arn, tags)
    def update(self, name):
        """Rename the VPC link."""
        self.name = name
    def to_json(self):
        """Serialize to the GetVpcLink wire shape."""
        return dict(
            vpcLinkId=self.id,
            name=self.name,
            securityGroupIds=self.sg_ids,
            subnetIds=self.subnet_ids,
            createdDate=self.created,
            tags=self.backend.get_tags(self.arn),
            vpcLinkStatus="AVAILABLE",
            vpcLinkVersion="V2",
        )
class ApiGatewayV2Backend(BaseBackend):
    """Implementation of ApiGatewayV2 APIs.

    Holds the per-region state: APIs and VPC links keyed by their random
    ids, plus the TaggingService.  Most methods below simply resolve the
    target Api (raising ApiNotFound for unknown ids) and delegate.
    """
    def __init__(self, region_name=None):
        self.region_name = region_name
        self.apis = dict()
        self.vpc_links = dict()
        self.tagger = TaggingService()
    def reset(self):
        """Re-initialize all attributes for this instance."""
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)
    def create_api(
        self,
        api_key_selection_expression,
        cors_configuration,
        credentials_arn,
        description,
        disable_schema_validation,
        disable_execute_api_endpoint,
        name,
        protocol_type,
        route_key,
        route_selection_expression,
        tags,
        target,
        version,
    ):
        """
        The following parameters are not yet implemented:
        CredentialsArn, RouteKey, Tags, Target
        """
        api = Api(
            region=self.region_name,
            cors_configuration=cors_configuration,
            description=description,
            name=name,
            api_key_selection_expression=api_key_selection_expression,
            disable_execute_api_endpoint=disable_execute_api_endpoint,
            disable_schema_validation=disable_schema_validation,
            protocol_type=protocol_type,
            route_selection_expression=route_selection_expression,
            tags=tags,
            version=version,
            backend=self,
        )
        self.apis[api.api_id] = api
        return api
    def delete_api(self, api_id):
        """Remove an API; silently ignores unknown ids."""
        self.apis.pop(api_id, None)
    def get_api(self, api_id):
        """Return the Api or raise ApiNotFound."""
        if api_id not in self.apis:
            raise ApiNotFound(api_id)
        return self.apis[api_id]
    def get_apis(self):
        """
        Pagination is not yet implemented
        """
        return self.apis.values()
    def update_api(
        self,
        api_id,
        api_key_selection_expression,
        cors_configuration,
        description,
        disable_schema_validation,
        disable_execute_api_endpoint,
        name,
        route_selection_expression,
        version,
    ):
        """
        The following parameters have not yet been implemented: CredentialsArn, RouteKey, Target
        """
        api = self.get_api(api_id)
        api.update(
            api_key_selection_expression=api_key_selection_expression,
            cors_configuration=cors_configuration,
            description=description,
            disable_schema_validation=disable_schema_validation,
            disable_execute_api_endpoint=disable_execute_api_endpoint,
            name=name,
            route_selection_expression=route_selection_expression,
            version=version,
        )
        return api
    def reimport_api(self, api_id, body, fail_on_warnings):
        """
        Only YAML is supported at the moment. Full OpenAPI-support is not guaranteed. Only limited validation is implemented
        """
        api = self.get_api(api_id)
        api.import_api(body, fail_on_warnings)
        return api
    def delete_cors_configuration(self, api_id):
        """Remove the CORS configuration of the given API."""
        api = self.get_api(api_id)
        api.delete_cors_configuration()
    def create_authorizer(
        self,
        api_id,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_uri,
        authorizer_type,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        """Delegate authorizer creation to the target API."""
        api = self.get_api(api_id)
        authorizer = api.create_authorizer(
            auth_creds_arn=auth_creds_arn,
            auth_payload_format_version=auth_payload_format_version,
            auth_result_ttl=auth_result_ttl,
            authorizer_type=authorizer_type,
            authorizer_uri=authorizer_uri,
            enable_simple_response=enable_simple_response,
            identity_source=identity_source,
            identity_validation_expr=identity_validation_expr,
            jwt_config=jwt_config,
            name=name,
        )
        return authorizer
    def delete_authorizer(self, api_id, authorizer_id):
        """Delegate authorizer deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_authorizer(authorizer_id=authorizer_id)
    def get_authorizer(self, api_id, authorizer_id):
        """Delegate authorizer lookup to the target API."""
        api = self.get_api(api_id)
        authorizer = api.get_authorizer(authorizer_id=authorizer_id)
        return authorizer
    def update_authorizer(
        self,
        api_id,
        authorizer_id,
        auth_creds_arn,
        auth_payload_format_version,
        auth_result_ttl,
        authorizer_uri,
        authorizer_type,
        enable_simple_response,
        identity_source,
        identity_validation_expr,
        jwt_config,
        name,
    ):
        """Delegate authorizer update to the target API."""
        api = self.get_api(api_id)
        authorizer = api.update_authorizer(
            authorizer_id=authorizer_id,
            auth_creds_arn=auth_creds_arn,
            auth_payload_format_version=auth_payload_format_version,
            auth_result_ttl=auth_result_ttl,
            authorizer_type=authorizer_type,
            authorizer_uri=authorizer_uri,
            enable_simple_response=enable_simple_response,
            identity_source=identity_source,
            identity_validation_expr=identity_validation_expr,
            jwt_config=jwt_config,
            name=name,
        )
        return authorizer
    def create_model(self, api_id, content_type, description, name, schema):
        """Delegate model creation to the target API."""
        api = self.get_api(api_id)
        model = api.create_model(
            content_type=content_type, description=description, name=name, schema=schema
        )
        return model
    def delete_model(self, api_id, model_id):
        """Delegate model deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_model(model_id=model_id)
    def get_model(self, api_id, model_id):
        """Delegate model lookup to the target API."""
        api = self.get_api(api_id)
        return api.get_model(model_id)
    def update_model(self, api_id, model_id, content_type, description, name, schema):
        """Delegate model update to the target API."""
        api = self.get_api(api_id)
        return api.update_model(model_id, content_type, description, name, schema)
    def get_tags(self, resource_id):
        """Return the tag dict stored for the given resource ARN."""
        return self.tagger.get_tag_dict_for_resource(resource_id)
    def tag_resource(self, resource_arn, tags):
        """Attach tags (plain dict, may be None) to a resource ARN."""
        tags = TaggingService.convert_dict_to_tags_input(tags or {})
        self.tagger.tag_resource(resource_arn, tags)
    def untag_resource(self, resource_arn, tag_keys):
        """Remove the named tag keys from a resource ARN."""
        self.tagger.untag_resource_using_names(resource_arn, tag_keys)
    def create_route(
        self,
        api_id,
        api_key_required,
        authorization_scopes,
        authorization_type,
        authorizer_id,
        model_selection_expression,
        operation_name,
        request_models,
        request_parameters,
        route_key,
        route_response_selection_expression,
        target,
    ):
        """Delegate route creation to the target API."""
        api = self.get_api(api_id)
        route = api.create_route(
            api_key_required=api_key_required,
            authorization_scopes=authorization_scopes,
            authorization_type=authorization_type,
            authorizer_id=authorizer_id,
            model_selection_expression=model_selection_expression,
            operation_name=operation_name,
            request_models=request_models,
            request_parameters=request_parameters,
            route_key=route_key,
            route_response_selection_expression=route_response_selection_expression,
            target=target,
        )
        return route
    def delete_route(self, api_id, route_id):
        """Delegate route deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_route(route_id)
    def delete_route_request_parameter(self, api_id, route_id, request_param):
        """Delegate request-parameter deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_route_request_parameter(route_id, request_param)
    def get_route(self, api_id, route_id):
        """Delegate route lookup to the target API."""
        api = self.get_api(api_id)
        return api.get_route(route_id)
    def get_routes(self, api_id):
        """
        Pagination is not yet implemented
        """
        api = self.get_api(api_id)
        return api.get_routes()
    def update_route(
        self,
        api_id,
        api_key_required,
        authorization_scopes,
        authorization_type,
        authorizer_id,
        model_selection_expression,
        operation_name,
        request_models,
        request_parameters,
        route_id,
        route_key,
        route_response_selection_expression,
        target,
    ):
        """Delegate route update to the target API."""
        api = self.get_api(api_id)
        route = api.update_route(
            route_id=route_id,
            api_key_required=api_key_required,
            authorization_scopes=authorization_scopes,
            authorization_type=authorization_type,
            authorizer_id=authorizer_id,
            model_selection_expression=model_selection_expression,
            operation_name=operation_name,
            request_models=request_models,
            request_parameters=request_parameters,
            route_key=route_key,
            route_response_selection_expression=route_response_selection_expression,
            target=target,
        )
        return route
    def create_route_response(
        self,
        api_id,
        route_id,
        route_response_key,
        model_selection_expression,
        response_models,
    ):
        """
        The following parameters are not yet implemented: ResponseModels, ResponseParameters
        """
        api = self.get_api(api_id)
        return api.create_route_response(
            route_id,
            route_response_key,
            model_selection_expression=model_selection_expression,
            response_models=response_models,
        )
    def delete_route_response(self, api_id, route_id, route_response_id):
        """Delegate route-response deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_route_response(route_id, route_response_id)
    def get_route_response(self, api_id, route_id, route_response_id):
        """Delegate route-response lookup to the target API."""
        api = self.get_api(api_id)
        return api.get_route_response(route_id, route_response_id)
    def create_integration(
        self,
        api_id,
        connection_id,
        connection_type,
        content_handling_strategy,
        credentials_arn,
        description,
        integration_method,
        integration_subtype,
        integration_type,
        integration_uri,
        passthrough_behavior,
        payload_format_version,
        request_parameters,
        request_templates,
        response_parameters,
        template_selection_expression,
        timeout_in_millis,
        tls_config,
    ):
        """Delegate integration creation to the target API."""
        api = self.get_api(api_id)
        integration = api.create_integration(
            connection_id=connection_id,
            connection_type=connection_type,
            content_handling_strategy=content_handling_strategy,
            credentials_arn=credentials_arn,
            description=description,
            integration_method=integration_method,
            integration_type=integration_type,
            integration_uri=integration_uri,
            passthrough_behavior=passthrough_behavior,
            payload_format_version=payload_format_version,
            integration_subtype=integration_subtype,
            request_parameters=request_parameters,
            request_templates=request_templates,
            response_parameters=response_parameters,
            template_selection_expression=template_selection_expression,
            timeout_in_millis=timeout_in_millis,
            tls_config=tls_config,
        )
        return integration
    def get_integration(self, api_id, integration_id):
        """Delegate integration lookup to the target API."""
        api = self.get_api(api_id)
        integration = api.get_integration(integration_id)
        return integration
    def get_integrations(self, api_id):
        """
        Pagination is not yet implemented
        """
        api = self.get_api(api_id)
        return api.get_integrations()
    def delete_integration(self, api_id, integration_id):
        """Delegate integration deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_integration(integration_id)
    def update_integration(
        self,
        api_id,
        connection_id,
        connection_type,
        content_handling_strategy,
        credentials_arn,
        description,
        integration_id,
        integration_method,
        integration_subtype,
        integration_type,
        integration_uri,
        passthrough_behavior,
        payload_format_version,
        request_parameters,
        request_templates,
        response_parameters,
        template_selection_expression,
        timeout_in_millis,
        tls_config,
    ):
        """Delegate integration update to the target API."""
        api = self.get_api(api_id)
        integration = api.update_integration(
            integration_id=integration_id,
            connection_id=connection_id,
            connection_type=connection_type,
            content_handling_strategy=content_handling_strategy,
            credentials_arn=credentials_arn,
            description=description,
            integration_method=integration_method,
            integration_type=integration_type,
            integration_uri=integration_uri,
            passthrough_behavior=passthrough_behavior,
            payload_format_version=payload_format_version,
            integration_subtype=integration_subtype,
            request_parameters=request_parameters,
            request_templates=request_templates,
            response_parameters=response_parameters,
            template_selection_expression=template_selection_expression,
            timeout_in_millis=timeout_in_millis,
            tls_config=tls_config,
        )
        return integration
    def create_integration_response(
        self,
        api_id,
        integration_id,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Delegate integration-response creation to the target API."""
        api = self.get_api(api_id)
        integration_response = api.create_integration_response(
            integration_id=integration_id,
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
        return integration_response
    def delete_integration_response(
        self, api_id, integration_id, integration_response_id
    ):
        """Delegate integration-response deletion to the target API."""
        api = self.get_api(api_id)
        api.delete_integration_response(
            integration_id, integration_response_id=integration_response_id
        )
    def get_integration_response(self, api_id, integration_id, integration_response_id):
        """Delegate integration-response lookup to the target API."""
        api = self.get_api(api_id)
        return api.get_integration_response(
            integration_id, integration_response_id=integration_response_id
        )
    def get_integration_responses(self, api_id, integration_id):
        """Delegate integration-response listing to the target API."""
        api = self.get_api(api_id)
        return api.get_integration_responses(integration_id)
    def update_integration_response(
        self,
        api_id,
        integration_id,
        integration_response_id,
        content_handling_strategy,
        integration_response_key,
        response_parameters,
        response_templates,
        template_selection_expression,
    ):
        """Delegate integration-response update to the target API."""
        api = self.get_api(api_id)
        integration_response = api.update_integration_response(
            integration_id=integration_id,
            integration_response_id=integration_response_id,
            content_handling_strategy=content_handling_strategy,
            integration_response_key=integration_response_key,
            response_parameters=response_parameters,
            response_templates=response_templates,
            template_selection_expression=template_selection_expression,
        )
        return integration_response
    def create_vpc_link(self, name, sg_ids, subnet_ids, tags):
        """Create and register a VpcLink; returns the new object."""
        vpc_link = VpcLink(
            name, sg_ids=sg_ids, subnet_ids=subnet_ids, tags=tags, backend=self
        )
        self.vpc_links[vpc_link.id] = vpc_link
        return vpc_link
    def get_vpc_link(self, vpc_link_id):
        """Return the VpcLink or raise VpcLinkNotFound."""
        if vpc_link_id not in self.vpc_links:
            raise VpcLinkNotFound(vpc_link_id)
        return self.vpc_links[vpc_link_id]
    def delete_vpc_link(self, vpc_link_id):
        """Remove a VPC link; silently ignores unknown ids."""
        self.vpc_links.pop(vpc_link_id, None)
    def get_vpc_links(self):
        """Return all VPC links (dict view, unordered by id)."""
        return self.vpc_links.values()
    def update_vpc_link(self, vpc_link_id, name):
        """Rename a VPC link and return it."""
        vpc_link = self.get_vpc_link(vpc_link_id)
        vpc_link.update(name)
        return vpc_link
# Region -> ApiGatewayV2Backend registry, lazily instantiated per region.
apigatewayv2_backends = BackendDict(ApiGatewayV2Backend, "apigatewayv2")
|
# Copyright (C) 2013,2014 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013,2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
RFC 4271 BGP-4
"""
# todo
# - notify data
# - RFC 4364 BGP/MPLS IP Virtual Private Networks (VPNs)
import abc
import six
import struct
import copy
import netaddr
import numbers
try:
# Python 3
from functools import reduce
except ImportError:
# Python 2
pass
from ryu.lib.stringify import StringifyMixin
from ryu.lib.packet import afi as addr_family
from ryu.lib.packet import safi as subaddr_family
from ryu.lib.packet import packet_base
from ryu.lib.packet import stream_parser
from ryu.lib import addrconv
from ryu.lib.pack_utils import msg_pack_into
# BGP message type codes (RFC 4271 4.1)
BGP_MSG_OPEN = 1
BGP_MSG_UPDATE = 2
BGP_MSG_NOTIFICATION = 3
BGP_MSG_KEEPALIVE = 4
BGP_MSG_ROUTE_REFRESH = 5  # RFC 2918
_VERSION = 4
# 16-octet all-ones header marker (RFC 4271 4.1)
# NOTE(review): on Python 3 this is a 16-char str, not bytes -- confirm the
# serializer encodes it before packing.
_MARKER = 16 * '\xff'
BGP_OPT_CAPABILITY = 2  # RFC 5492
BGP_CAP_MULTIPROTOCOL = 1  # RFC 4760
BGP_CAP_ROUTE_REFRESH = 2  # RFC 2918
BGP_CAP_CARRYING_LABEL_INFO = 4  # RFC 3107
BGP_CAP_GRACEFUL_RESTART = 64  # RFC 4724
BGP_CAP_FOUR_OCTET_AS_NUMBER = 65  # RFC 4893
BGP_CAP_ENHANCED_ROUTE_REFRESH = 70  # https://tools.ietf.org/html/\
# draft-ietf-idr-bgp-enhanced-route-refresh-05
BGP_CAP_ROUTE_REFRESH_CISCO = 128  # in cisco routers, there are two\
# route refresh code: one using the capability code of 128 (old),
# another using the capability code of 2 (new).
# Path-attribute flag bits (RFC 4271 4.3)
BGP_ATTR_FLAG_OPTIONAL = 1 << 7
BGP_ATTR_FLAG_TRANSITIVE = 1 << 6
BGP_ATTR_FLAG_PARTIAL = 1 << 5
BGP_ATTR_FLAG_EXTENDED_LENGTH = 1 << 4
# Path-attribute type codes
BGP_ATTR_TYPE_ORIGIN = 1  # 0,1,2 (1 byte)
BGP_ATTR_TYPE_AS_PATH = 2  # a list of AS_SET/AS_SEQUENCE  eg. {1 2 3} 4 5
BGP_ATTR_TYPE_NEXT_HOP = 3  # an IPv4 address
BGP_ATTR_TYPE_MULTI_EXIT_DISC = 4  # uint32 metric
BGP_ATTR_TYPE_LOCAL_PREF = 5  # uint32
BGP_ATTR_TYPE_ATOMIC_AGGREGATE = 6  # 0 bytes
BGP_ATTR_TYPE_AGGREGATOR = 7  # AS number and IPv4 address
BGP_ATTR_TYPE_COMMUNITIES = 8  # RFC 1997
BGP_ATTR_TYPE_ORIGINATOR_ID = 9  # RFC 4456
BGP_ATTR_TYPE_CLUSTER_LIST = 10  # RFC 4456
BGP_ATTR_TYPE_MP_REACH_NLRI = 14  # RFC 4760
BGP_ATTR_TYPE_MP_UNREACH_NLRI = 15  # RFC 4760
BGP_ATTR_TYPE_EXTENDED_COMMUNITIES = 16  # RFC 4360
BGP_ATTR_TYPE_AS4_PATH = 17  # RFC 4893
BGP_ATTR_TYPE_AS4_AGGREGATOR = 18  # RFC 4893
# ORIGIN attribute values
BGP_ATTR_ORIGIN_IGP = 0x00
BGP_ATTR_ORIGIN_EGP = 0x01
BGP_ATTR_ORIGIN_INCOMPLETE = 0x02
AS_TRANS = 23456  # RFC 4893
# Well known commmunities  (RFC 1997)
BGP_COMMUNITY_NO_EXPORT = 0xffffff01
BGP_COMMUNITY_NO_ADVERTISE = 0xffffff02
BGP_COMMUNITY_NO_EXPORT_SUBCONFED = 0xffffff03
# RFC 4360
# The low-order octet of Type field (subtype)
BGP_EXTENDED_COMMUNITY_ROUTE_TARGET = 0x02
BGP_EXTENDED_COMMUNITY_ROUTE_ORIGIN = 0x03
# NOTIFICATION Error Code and SubCode
# Note: 0 is a valid SubCode.  (Unspecific)
# NOTIFICATION Error Code  RFC 4271 4.5.
BGP_ERROR_MESSAGE_HEADER_ERROR = 1
BGP_ERROR_OPEN_MESSAGE_ERROR = 2
BGP_ERROR_UPDATE_MESSAGE_ERROR = 3
BGP_ERROR_HOLD_TIMER_EXPIRED = 4
BGP_ERROR_FSM_ERROR = 5
BGP_ERROR_CEASE = 6
# NOTIFICATION Error Subcode for BGP_ERROR_MESSAGE_HEADER_ERROR
BGP_ERROR_SUB_CONNECTION_NOT_SYNCHRONIZED = 1
BGP_ERROR_SUB_BAD_MESSAGE_LENGTH = 2  # Data: the erroneous Length field
BGP_ERROR_SUB_BAD_MESSAGE_TYPE = 3  # Data: the erroneous Type field
# NOTIFICATION Error Subcode for BGP_ERROR_OPEN_MESSAGE_ERROR
BGP_ERROR_SUB_UNSUPPORTED_VERSION_NUMBER = 1  # Data: 2 octet version number
BGP_ERROR_SUB_BAD_PEER_AS = 2
BGP_ERROR_SUB_BAD_BGP_IDENTIFIER = 3
BGP_ERROR_SUB_UNSUPPORTED_OPTIONAL_PARAMETER = 4
BGP_ERROR_SUB_AUTHENTICATION_FAILURE = 5  # deprecated RFC 1771
BGP_ERROR_SUB_UNACCEPTABLE_HOLD_TIME = 6
# NOTIFICATION Error Subcode for BGP_ERROR_UPDATE_MESSAGE_ERROR
BGP_ERROR_SUB_MALFORMED_ATTRIBUTE_LIST = 1
BGP_ERROR_SUB_UNRECOGNIZED_WELL_KNOWN_ATTRIBUTE = 2  # Data: type of the attr
BGP_ERROR_SUB_MISSING_WELL_KNOWN_ATTRIBUTE = 3  # Data: ditto
BGP_ERROR_SUB_ATTRIBUTE_FLAGS_ERROR = 4  # Data: the attr (type, len, value)
BGP_ERROR_SUB_ATTRIBUTE_LENGTH_ERROR = 5  # Data: ditto
BGP_ERROR_SUB_INVALID_ORIGIN_ATTRIBUTE = 6  # Data: ditto
BGP_ERROR_SUB_ROUTING_LOOP = 7  # deprecated RFC 1771 AS Routing Loop
BGP_ERROR_SUB_INVALID_NEXT_HOP_ATTRIBUTE = 8  # Data: ditto
BGP_ERROR_SUB_OPTIONAL_ATTRIBUTE_ERROR = 9  # Data: ditto
BGP_ERROR_SUB_INVALID_NETWORK_FIELD = 10
BGP_ERROR_SUB_MALFORMED_AS_PATH = 11
# NOTIFICATION Error Subcode for BGP_ERROR_HOLD_TIMER_EXPIRED
BGP_ERROR_SUB_HOLD_TIMER_EXPIRED = 1
# NOTIFICATION Error Subcode for BGP_ERROR_FSM_ERROR
BGP_ERROR_SUB_FSM_ERROR = 1
# NOTIFICATION Error Subcode for BGP_ERROR_CEASE  (RFC 4486)
BGP_ERROR_SUB_MAXIMUM_NUMBER_OF_PREFIXES_REACHED = 1  # Data: optional
BGP_ERROR_SUB_ADMINISTRATIVE_SHUTDOWN = 2
BGP_ERROR_SUB_PEER_DECONFIGURED = 3
BGP_ERROR_SUB_ADMINISTRATIVE_RESET = 4
BGP_ERROR_SUB_CONNECTION_RESET = 5
BGP_ERROR_SUB_OTHER_CONFIGURATION_CHANGE = 6
BGP_ERROR_SUB_CONNECTION_COLLISION_RESOLUTION = 7
BGP_ERROR_SUB_OUT_OF_RESOURCES = 8
class _Value(object):
    """Mix-in for TLV-style classes whose payload is one fixed struct.

    Subclasses set ``_VALUE_PACK_STR`` (struct format) and
    ``_VALUE_FIELDS`` (attribute names matching that format, in order).
    """
    _VALUE_PACK_STR = None
    _VALUE_FIELDS = ['value']

    @staticmethod
    def do_init(cls, self, kwargs, **extra_kwargs):
        """Pop this class's value fields out of *kwargs*, forward the rest
        up the MRO, then attach the fields to the instance.

        ``cls`` is passed explicitly so ``super(cls, self)`` continues the
        cooperative __init__ chain from the correct level.
        """
        ourfields = {}
        for f in cls._VALUE_FIELDS:
            v = kwargs[f]
            del kwargs[f]
            ourfields[f] = v
        kwargs.update(extra_kwargs)
        super(cls, self).__init__(**kwargs)
        self.__dict__.update(ourfields)

    @classmethod
    def parse_value(cls, buf):
        """Unpack *buf* per _VALUE_PACK_STR into a field-name -> value dict."""
        # buffer() is Python-2-only; it lets struct read from str and
        # bytearray alike.
        values = struct.unpack_from(cls._VALUE_PACK_STR, buffer(buf))
        return dict(zip(cls._VALUE_FIELDS, values))

    def serialize_value(self):
        """Pack the value fields back into wire format (a bytearray)."""
        args = []
        for f in self._VALUE_FIELDS:
            args.append(getattr(self, f))
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, *args)
        return buf
class _TypeDisp(object):
_TYPES = {}
_REV_TYPES = None
_UNKNOWN_TYPE = None
@classmethod
def register_unknown_type(cls):
def _register_type(subcls):
cls._UNKNOWN_TYPE = subcls
return subcls
return _register_type
@classmethod
def register_type(cls, type_):
cls._TYPES = cls._TYPES.copy()
def _register_type(subcls):
cls._TYPES[type_] = subcls
cls._REV_TYPES = None
return subcls
return _register_type
@classmethod
def _lookup_type(cls, type_):
try:
return cls._TYPES[type_]
except KeyError:
return cls._UNKNOWN_TYPE
@classmethod
def _rev_lookup_type(cls, targ_cls):
if cls._REV_TYPES is None:
rev = dict((v, k) for k, v in cls._TYPES.iteritems())
cls._REV_TYPES = rev
return cls._REV_TYPES[targ_cls]
class BgpExc(Exception):
    """Base class for all BGP protocol exceptions.

    ``data`` carries the payload for the NOTIFICATION Data field.
    """

    # BGP error code.
    CODE = 0
    # BGP error sub-code.
    SUB_CODE = 0
    # When True, a NOTIFICATION message should be sent to the peer.
    SEND_ERROR = True

    def __init__(self, data=''):
        self.data = data

    def __str__(self):
        return '<{0} {1!r}>'.format(type(self).__name__, self.data)
class BadNotification(BgpExc):
    """Raised for a malformed received NOTIFICATION message.

    SEND_ERROR is False -- presumably to avoid answering a broken
    NOTIFICATION with another NOTIFICATION (error loop); confirm intent.
    """
    SEND_ERROR = False
# ============================================================================
# Message Header Errors
# ============================================================================
class NotSync(BgpExc):
    """Connection Not Synchronized message header error (RFC 4271)."""
    CODE = BGP_ERROR_MESSAGE_HEADER_ERROR
    SUB_CODE = BGP_ERROR_SUB_CONNECTION_NOT_SYNCHRONIZED
class BadLen(BgpExc):
    """Bad Message Length header error.

    ``data`` carries the erroneous length as a 2-octet big-endian value,
    ready for the NOTIFICATION Data field.
    """
    CODE = BGP_ERROR_MESSAGE_HEADER_ERROR
    SUB_CODE = BGP_ERROR_SUB_BAD_MESSAGE_LENGTH

    def __init__(self, msg_type_code, message_length):
        self.msg_type_code = msg_type_code
        self.length = message_length
        self.data = struct.pack('!H', message_length)

    def __str__(self):
        return '<BadLen {0:d} msgtype={1:d}>'.format(self.length,
                                                     self.msg_type_code)
class BadMsg(BgpExc):
    """Error to indicate an unrecognized message type.

    RFC says: If the Type field of the message header is not recognized,
    then the Error Subcode MUST be set to Bad Message Type. The Data field
    MUST contain the erroneous Type field.
    """
    CODE = BGP_ERROR_MESSAGE_HEADER_ERROR
    SUB_CODE = BGP_ERROR_SUB_BAD_MESSAGE_TYPE

    def __init__(self, msg_type):
        self.msg_type = msg_type
        self.data = struct.pack('B', msg_type)

    def __str__(self):
        return '<BadMsg {0:d}>'.format(self.msg_type)
# ============================================================================
# OPEN Message Errors
# ============================================================================
class MalformedOptionalParam(BgpExc):
    """If recognized optional parameters are malformed.

    RFC says: If one of the Optional Parameters in the OPEN message is
    recognized, but is malformed, then the Error Subcode MUST be set to 0
    (Unspecific).
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = 0  # "Unspecific" -- intentionally not a named constant
class UnsupportedVersion(BgpExc):
    """Error to indicate an unsupported BGP version number.

    RFC says: If the version number in the Version field of the received OPEN
    message is not supported, then the Error Subcode MUST be set to Unsupported
    Version Number. The Data field is a 2-octet unsigned integer, which
    indicates the largest, locally-supported version number less than the
    version the remote BGP peer bid (as indicated in the received OPEN
    message), or if the smallest, locally-supported version number is greater
    than the version the remote BGP peer bid, then the smallest, locally-
    supported version number.
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_UNSUPPORTED_VERSION_NUMBER

    def __init__(self, locally_support_version):
        # Pack in network byte order ('!H'): BGP multi-octet fields are
        # big-endian (cf. BadLen above).  The bare 'H' used previously
        # emitted host byte order, producing wrong data on little-endian
        # machines.
        self.data = struct.pack('!H', locally_support_version)
class BadPeerAs(BgpExc):
    """Error to indicate open message has incorrect AS number.

    RFC says: If the Autonomous System field of the OPEN message is
    unacceptable, then the Error Subcode MUST be set to Bad Peer AS. The
    determination of acceptable Autonomous System numbers is configure peer AS.
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_BAD_PEER_AS


class BadBgpId(BgpExc):
    """Error to indicate incorrect BGP Identifier.

    RFC says: If the BGP Identifier field of the OPEN message is syntactically
    incorrect, then the Error Subcode MUST be set to Bad BGP Identifier.
    Syntactic correctness means that the BGP Identifier field represents a
    valid unicast IP host address.
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_BAD_BGP_IDENTIFIER


class UnsupportedOptParam(BgpExc):
    """Error to indicate unsupported optional parameters.

    RFC says: If one of the Optional Parameters in the OPEN message is not
    recognized, then the Error Subcode MUST be set to Unsupported Optional
    Parameters.
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_UNSUPPORTED_OPTIONAL_PARAMETER


class AuthFailure(BgpExc):
    """Authentication Failure OPEN error (deprecated by RFC 1771)."""
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_AUTHENTICATION_FAILURE


class UnacceptableHoldTime(BgpExc):
    """Error to indicate Unacceptable Hold Time in open message.

    RFC says: If the Hold Time field of the OPEN message is unacceptable, then
    the Error Subcode MUST be set to Unacceptable Hold Time.
    """
    CODE = BGP_ERROR_OPEN_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_UNACCEPTABLE_HOLD_TIME
# ============================================================================
# UPDATE message related errors
# ============================================================================
class MalformedAttrList(BgpExc):
    """Error to indicate UPDATE message is malformed.

    RFC says: Error checking of an UPDATE message begins by examining the path
    attributes. If the Withdrawn Routes Length or Total Attribute Length is
    too large (i.e., if Withdrawn Routes Length + Total Attribute Length + 23
    exceeds the message Length), then the Error Subcode MUST be set to
    Malformed Attribute List.
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_MALFORMED_ATTRIBUTE_LIST


class UnRegWellKnowAttr(BgpExc):
    """Unrecognized Well-known Attribute UPDATE error."""
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_UNRECOGNIZED_WELL_KNOWN_ATTRIBUTE


class MissingWellKnown(BgpExc):
    """Error to indicate missing well-known attribute.

    RFC says: If any of the well-known mandatory attributes are not present,
    then the Error Subcode MUST be set to Missing Well-known Attribute. The
    Data field MUST contain the Attribute Type Code of the missing, well-known
    attribute.
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_MISSING_WELL_KNOWN_ATTRIBUTE

    def __init__(self, pattr_type_code):
        self.pattr_type_code = pattr_type_code
        # Single octet: the missing attribute's type code.
        self.data = struct.pack('B', pattr_type_code)
class AttrFlagError(BgpExc):
    """Error to indicate recognized path attributes have incorrect flags.

    RFC says: If any recognized attribute has Attribute Flags that conflict
    with the Attribute Type Code, then the Error Subcode MUST be set to
    Attribute Flags Error. The Data field MUST contain the erroneous attribute
    (type, length, and value).
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_ATTRIBUTE_FLAGS_ERROR


class AttrLenError(BgpExc):
    """Attribute Length Error UPDATE error."""
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_ATTRIBUTE_LENGTH_ERROR


class InvalidOriginError(BgpExc):
    """Error indicates undefined Origin attribute value.

    RFC says: If the ORIGIN attribute has an undefined value, then the Error
    Sub- code MUST be set to Invalid Origin Attribute. The Data field MUST
    contain the unrecognized attribute (type, length, and value).
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_INVALID_ORIGIN_ATTRIBUTE


class RoutingLoop(BgpExc):
    """AS Routing Loop UPDATE error (deprecated by RFC 1771)."""
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_ROUTING_LOOP


class InvalidNextHop(BgpExc):
    """Invalid NEXT_HOP Attribute UPDATE error."""
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_INVALID_NEXT_HOP_ATTRIBUTE


class OptAttrError(BgpExc):
    """Error indicates Optional Attribute is malformed.

    RFC says: If an optional attribute is recognized, then the value of this
    attribute MUST be checked. If an error is detected, the attribute MUST be
    discarded, and the Error Subcode MUST be set to Optional Attribute Error.
    The Data field MUST contain the attribute (type, length, and value).
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_OPTIONAL_ATTRIBUTE_ERROR


class InvalidNetworkField(BgpExc):
    """Invalid Network Field UPDATE error."""
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_INVALID_NETWORK_FIELD


class MalformedAsPath(BgpExc):
    """Error to indicate if AP_PATH attribute is syntactically incorrect.

    RFC says: The AS_PATH attribute is checked for syntactic correctness. If
    the path is syntactically incorrect, then the Error Subcode MUST be set to
    Malformed AS_PATH.
    """
    CODE = BGP_ERROR_UPDATE_MESSAGE_ERROR
    SUB_CODE = BGP_ERROR_SUB_MALFORMED_AS_PATH
# ============================================================================
# Hold Timer Expired
# ============================================================================
class HoldTimerExpired(BgpExc):
    """Error to indicate Hold Timer expired.

    RFC says: If a system does not receive successive KEEPALIVE, UPDATE, and/or
    NOTIFICATION messages within the period specified in the Hold Time field of
    the OPEN message, then the NOTIFICATION message with the Hold Timer Expired
    Error Code is sent and the BGP connection is closed.
    """
    CODE = BGP_ERROR_HOLD_TIMER_EXPIRED
    SUB_CODE = BGP_ERROR_SUB_HOLD_TIMER_EXPIRED
# ============================================================================
# Finite State Machine Error
# ============================================================================
class FiniteStateMachineError(BgpExc):
    """Error to indicate any Finite State Machine Error.

    RFC says: Any error detected by the BGP Finite State Machine (e.g., receipt
    of an unexpected event) is indicated by sending the NOTIFICATION message
    with the Error Code Finite State Machine Error.
    """
    CODE = BGP_ERROR_FSM_ERROR
    SUB_CODE = BGP_ERROR_SUB_FSM_ERROR
# ============================================================================
# Cease Errors
# ============================================================================
class MaxPrefixReached(BgpExc):
    """Cease: Maximum Number of Prefixes Reached (RFC 4486)."""
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_MAXIMUM_NUMBER_OF_PREFIXES_REACHED


class AdminShutdown(BgpExc):
    """Error to indicate Administrative shutdown.

    RFC says: If a BGP speaker decides to administratively shut down its
    peering with a neighbor, then the speaker SHOULD send a NOTIFICATION
    message with the Error Code Cease and the Error Subcode 'Administrative
    Shutdown'.
    """
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_ADMINISTRATIVE_SHUTDOWN


class PeerDeConfig(BgpExc):
    """Cease: Peer De-configured (RFC 4486)."""
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_PEER_DECONFIGURED


class AdminReset(BgpExc):
    """Cease: Administrative Reset (RFC 4486)."""
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_ADMINISTRATIVE_RESET


class ConnRejected(BgpExc):
    """Error to indicate Connection Rejected.

    RFC says: If a BGP speaker decides to disallow a BGP connection (e.g., the
    peer is not configured locally) after the speaker accepts a transport
    protocol connection, then the BGP speaker SHOULD send a NOTIFICATION
    message with the Error Code Cease and the Error Subcode "Connection
    Rejected".
    """
    CODE = BGP_ERROR_CEASE
    # The constant is (mis)named CONNECTION_RESET but its value (5) is the
    # RFC 4486 "Connection Rejected" subcode used here.
    SUB_CODE = BGP_ERROR_SUB_CONNECTION_RESET


class OtherConfChange(BgpExc):
    """Cease: Other Configuration Change (RFC 4486)."""
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_OTHER_CONFIGURATION_CHANGE


class CollisionResolution(BgpExc):
    """Error to indicate Connection Collision Resolution.

    RFC says: If a BGP speaker decides to send a NOTIFICATION message with the
    Error Code Cease as a result of the collision resolution procedure (as
    described in [BGP-4]), then the subcode SHOULD be set to "Connection
    Collision Resolution".
    """
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_CONNECTION_COLLISION_RESOLUTION


class OutOfResource(BgpExc):
    """Cease: Out of Resources (RFC 4486)."""
    CODE = BGP_ERROR_CEASE
    SUB_CODE = BGP_ERROR_SUB_OUT_OF_RESOURCES
class RouteFamily(StringifyMixin):
    """An (AFI, SAFI) pair identifying a BGP address/route family."""

    def __init__(self, afi, safi):
        self.afi = afi
        self.safi = safi

    def __cmp__(self, other):
        # Python-2-only ordering hook (cmp/__cmp__ are gone in Python 3).
        # NOTE(review): operands are passed as (other, self), which yields
        # a reversed (descending) ordering -- confirm this is intentional.
        return cmp((other.afi, other.safi), (self.afi, self.safi))
# Route Family Singleton
# Shared instances: code elsewhere may compare these by identity, so always
# go through get_rf()/_rf_map rather than constructing new RouteFamily objects.
RF_IPv4_UC = RouteFamily(addr_family.IP, subaddr_family.UNICAST)
RF_IPv6_UC = RouteFamily(addr_family.IP6, subaddr_family.UNICAST)
RF_IPv4_VPN = RouteFamily(addr_family.IP, subaddr_family.MPLS_VPN)
RF_IPv6_VPN = RouteFamily(addr_family.IP6, subaddr_family.MPLS_VPN)
RF_IPv4_MPLS = RouteFamily(addr_family.IP, subaddr_family.MPLS_LABEL)
RF_IPv6_MPLS = RouteFamily(addr_family.IP6, subaddr_family.MPLS_LABEL)
RF_RTC_UC = RouteFamily(addr_family.IP,
                        subaddr_family.ROUTE_TARGET_CONSTRTAINS)

# (afi, safi) -> singleton lookup table for get_rf().
_rf_map = {
    (addr_family.IP, subaddr_family.UNICAST): RF_IPv4_UC,
    (addr_family.IP6, subaddr_family.UNICAST): RF_IPv6_UC,
    (addr_family.IP, subaddr_family.MPLS_VPN): RF_IPv4_VPN,
    (addr_family.IP6, subaddr_family.MPLS_VPN): RF_IPv6_VPN,
    (addr_family.IP, subaddr_family.MPLS_LABEL): RF_IPv4_MPLS,
    (addr_family.IP6, subaddr_family.MPLS_LABEL): RF_IPv6_MPLS,
    (addr_family.IP, subaddr_family.ROUTE_TARGET_CONSTRTAINS): RF_RTC_UC
}


def get_rf(afi, safi):
    """Return the RouteFamily singleton for (afi, safi).

    Raises KeyError for unsupported combinations.
    """
    return _rf_map[(afi, safi)]
def pad(bin, len_):
    """Return *bin* right-padded with NUL characters to length *len_*.

    *bin* must not already be longer than *len_*.
    """
    shortfall = len_ - len(bin)
    assert shortfall >= 0
    return bin + '\0' * shortfall
class _RouteDistinguisher(StringifyMixin, _TypeDisp, _Value):
    """Base class for VPN Route Distinguishers (RFC 4364 Section 4.2).

    A RD is 8 octets on the wire: a 2-octet type followed by a 6-octet
    value whose admin:assigned layout depends on the type.
    """
    _PACK_STR = '!H'  # type field
    TWO_OCTET_AS = 0
    IPV4_ADDRESS = 1
    FOUR_OCTET_AS = 2

    def __init__(self, type_, admin=0, assigned=0):
        self.type = type_
        self.admin = admin
        self.assigned = assigned

    @classmethod
    def parser(cls, buf):
        """Parse an 8-octet RD, dispatching on the type field."""
        assert len(buf) == 8
        (type_,) = struct.unpack_from(cls._PACK_STR, buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        subcls = cls._lookup_type(type_)
        return subcls(type_=type_, **subcls.parse_value(rest))

    @classmethod
    def from_str(cls, str_):
        """Build a RD from its 'admin:assigned' string form.

        A dotted admin part selects the IPv4-address type; otherwise the
        AS-number width picks the two- or four-octet type.
        """
        assert isinstance(str_, str)
        first, second = str_.split(':')
        if '.' in first:
            type_ = cls.IPV4_ADDRESS
        elif int(first) > 0xffff:
            # ASNs above 65535 need the four-octet encoding.  The previous
            # test (> 1 << 16) misclassified exactly 65536 as two-octet,
            # which cannot be packed into '!H'.
            type_ = cls.FOUR_OCTET_AS
            first = int(first)
        else:
            type_ = cls.TWO_OCTET_AS
            first = int(first)
        subcls = cls._lookup_type(type_)
        return subcls(type_=type_, admin=first, assigned=int(second))

    def serialize(self):
        """Return the wire form: packed type followed by the value."""
        value = self.serialize_value()
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type)
        return buf + value

    @property
    def formatted_str(self):
        return "%s:%s" % (str(self.admin), str(self.assigned))
@_RouteDistinguisher.register_type(_RouteDistinguisher.TWO_OCTET_AS)
class BGPTwoOctetAsRD(_RouteDistinguisher):
    """Type 0 RD: 2-octet AS number admin, 4-octet assigned number."""
    _VALUE_PACK_STR = '!HI'
    _VALUE_FIELDS = ['admin', 'assigned']

    def __init__(self, type_=_RouteDistinguisher.TWO_OCTET_AS, **kwargs):
        self.do_init(BGPTwoOctetAsRD, self, kwargs, type_=type_)
@_RouteDistinguisher.register_type(_RouteDistinguisher.IPV4_ADDRESS)
class BGPIPv4AddressRD(_RouteDistinguisher):
    """Type 1 RD: dotted-quad IPv4 admin, 2-octet assigned number.

    ``admin`` is kept as a text address; converted to/from 4 raw bytes at
    the wire boundary below.
    """
    _VALUE_PACK_STR = '!4sH'
    _VALUE_FIELDS = ['admin', 'assigned']
    _TYPE = {
        'ascii': [
            'admin'
        ]
    }

    def __init__(self, type_=_RouteDistinguisher.IPV4_ADDRESS, **kwargs):
        self.do_init(BGPIPv4AddressRD, self, kwargs, type_=type_)

    @classmethod
    def parse_value(cls, buf):
        d_ = super(BGPIPv4AddressRD, cls).parse_value(buf)
        # Replace the raw 4-byte admin field with its text form.
        d_['admin'] = addrconv.ipv4.bin_to_text(d_['admin'])
        return d_

    def serialize_value(self):
        args = []
        for f in self._VALUE_FIELDS:
            v = getattr(self, f)
            if f == 'admin':
                # Text address back to 4 raw bytes for the '4s' field.
                v = bytes(addrconv.ipv4.text_to_bin(v))
            args.append(v)
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, *args)
        return buf
@_RouteDistinguisher.register_type(_RouteDistinguisher.FOUR_OCTET_AS)
class BGPFourOctetAsRD(_RouteDistinguisher):
    """Type 2 RD: 4-octet AS number admin, 2-octet assigned number."""
    _VALUE_PACK_STR = '!IH'
    _VALUE_FIELDS = ['admin', 'assigned']

    def __init__(self, type_=_RouteDistinguisher.FOUR_OCTET_AS,
                 **kwargs):
        self.do_init(BGPFourOctetAsRD, self, kwargs, type_=type_)
@six.add_metaclass(abc.ABCMeta)
class _AddrPrefix(StringifyMixin):
    """Abstract NLRI prefix: an on-wire bit length plus an address.

    Subclasses provide _to_bin/_from_bin to convert ``addr`` between its
    object form and the packed wire form.
    """
    _PACK_STR = '!B'  # length

    def __init__(self, length, addr, prefixes=None):
        # length is on-wire bit length of prefixes+addr.
        assert prefixes != ()
        if isinstance(addr, tuple):
            # for _AddrPrefix.parser
            # also for _VPNAddrPrefix.__init__ etc
            (addr,) = addr
        self.length = length
        if prefixes:
            addr = prefixes + (addr,)
        self.addr = addr

    @staticmethod
    @abc.abstractmethod
    def _to_bin(addr):
        pass

    @staticmethod
    @abc.abstractmethod
    def _from_bin(addr):
        pass

    @classmethod
    def parser(cls, buf):
        (length, ) = struct.unpack_from(cls._PACK_STR, buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        # Floor division: identical to '/' for Python 2 ints, but '/' would
        # yield a float (unusable as a slice bound) on Python 3.
        byte_length = (length + 7) // 8
        addr = cls._from_bin(rest[:byte_length])
        rest = rest[byte_length:]
        return cls(length=length, addr=addr), rest

    def serialize(self):
        # fixup
        byte_length = (self.length + 7) // 8
        bin_addr = self._to_bin(self.addr)
        if (self.length % 8) == 0:
            bin_addr = bin_addr[:byte_length]
        else:
            # clear trailing bits in the last octet.
            # rfc doesn't require this.
            mask = 0xff00 >> (self.length % 8)
            last_byte = chr(ord(bin_addr[byte_length - 1]) & mask)
            bin_addr = bin_addr[:byte_length - 1] + last_byte
        self.addr = self._from_bin(bin_addr)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.length)
        return buf + bytes(bin_addr)
class _BinAddrPrefix(_AddrPrefix):
    """Fallback prefix for unknown route families: addr stays raw bytes."""
    @staticmethod
    def _to_bin(addr):
        return addr

    @staticmethod
    def _from_bin(addr):
        return addr
class _LabelledAddrPrefix(_AddrPrefix):
    """Address prefix carrying an MPLS label stack.

    ``addr`` is stored as (labels, ..., host_addr) where labels is a list
    of 20-bit label values; on the wire each label occupies 3 octets with
    the low bit of the last octet marking bottom-of-stack.
    """
    _LABEL_PACK_STR = '!3B'

    def __init__(self, length, addr, labels=None, **kwargs):
        # None default instead of the previous shared mutable [] default.
        if labels is None:
            labels = []
        assert isinstance(labels, list)
        is_tuple = isinstance(addr, tuple)
        if is_tuple:
            # for _AddrPrefix.parser: labels travel inside the addr tuple.
            assert not labels
            labels = addr[0]
            addr = addr[1:]
        else:
            # Account for the labels in the on-wire bit length.
            length += struct.calcsize(self._LABEL_PACK_STR) * 8 * len(labels)
        assert length > struct.calcsize(self._LABEL_PACK_STR) * 8 * len(labels)
        prefixes = (labels,)
        super(_LabelledAddrPrefix, self).__init__(prefixes=prefixes,
                                                  length=length,
                                                  addr=addr,
                                                  **kwargs)

    @classmethod
    def _label_to_bin(cls, label):
        """Pack one 24-bit label field (big-endian, 3 octets)."""
        buf = bytearray()
        msg_pack_into(cls._LABEL_PACK_STR, buf, 0,
                      (label & 0xff0000) >> 16,
                      (label & 0x00ff00) >> 8,
                      (label & 0x0000ff) >> 0)
        return buf

    @classmethod
    def _label_from_bin(cls, bin):
        """Unpack one 24-bit label field; return (value, remaining_bytes)."""
        (b1, b2, b3) = struct.unpack_from(cls._LABEL_PACK_STR, buffer(bin))
        rest = bin[struct.calcsize(cls._LABEL_PACK_STR):]
        return (b1 << 16) | (b2 << 8) | b3, rest

    @classmethod
    def _to_bin(cls, addr):
        labels = addr[0]
        rest = addr[1:]
        # List comprehensions instead of Python 2 map(): same result on
        # Python 2, and still an indexable list on Python 3.
        labels = [x << 4 for x in labels]
        if labels:
            labels[-1] |= 1  # bottom of stack
        bin_labels = [cls._label_to_bin(label) for label in labels]
        acc = bytearray()
        for bin_label in bin_labels:
            acc += bin_label
        return bytes(acc + cls._prefix_to_bin(rest))

    @classmethod
    def _has_no_label(cls, bin_):
        """Heuristic: True if *bin_* does not start with a label stack."""
        try:
            length = len(bin_)
            labels = []
            while True:
                (label, bin_) = cls._label_from_bin(bin_)
                labels.append(label)
                if label & 1:  # bottom of stack
                    break
            # A real label stack must leave address bytes after it.
            assert length > struct.calcsize(cls._LABEL_PACK_STR) * len(labels)
        except struct.error:
            return True
        except AssertionError:
            return True
        return False

    @classmethod
    def _from_bin(cls, addr):
        rest = addr
        labels = []
        if cls._has_no_label(rest):
            return ([],) + cls._prefix_from_bin(rest)
        while True:
            (label, rest) = cls._label_from_bin(rest)
            labels.append(label >> 4)
            if label & 1:  # bottom of stack
                break
        return (labels,) + cls._prefix_from_bin(rest)
class _UnlabelledAddrPrefix(_AddrPrefix):
    """Prefix without labels: addr is just the host address itself."""
    @classmethod
    def _to_bin(cls, addr):
        return cls._prefix_to_bin((addr,))

    @classmethod
    def _from_bin(cls, binaddr):
        (addr,) = cls._prefix_from_bin(binaddr)
        return addr
class _IPAddrPrefix(_AddrPrefix):
    """IPv4 text <-> packed conversion mix-in for prefixes."""
    @staticmethod
    def _prefix_to_bin(addr):
        (addr,) = addr
        return addrconv.ipv4.text_to_bin(addr)

    @staticmethod
    def _prefix_from_bin(addr):
        # Pad to 4 octets: the wire form only carries significant bytes.
        return (addrconv.ipv4.bin_to_text(pad(addr, 4)),)


class _IP6AddrPrefix(_AddrPrefix):
    """IPv6 text <-> packed conversion mix-in for prefixes."""
    @staticmethod
    def _prefix_to_bin(addr):
        (addr,) = addr
        return addrconv.ipv6.text_to_bin(addr)

    @staticmethod
    def _prefix_from_bin(addr):
        # Pad to 16 octets: the wire form only carries significant bytes.
        return (addrconv.ipv6.bin_to_text(pad(addr, 16)),)
class _VPNAddrPrefix(_AddrPrefix):
    """Prefix prefixed by an 8-octet Route Distinguisher (VPN NLRI)."""
    _RD_PACK_STR = '!Q'

    def __init__(self, length, addr, prefixes=(), route_dist=0):
        if isinstance(addr, tuple):
            # for _AddrPrefix.parser: the RD travels inside the addr tuple.
            assert not route_dist
            assert length > struct.calcsize(self._RD_PACK_STR) * 8
            route_dist = addr[0]
            addr = addr[1:]
        else:
            # Account for the RD in the on-wire bit length.
            length += struct.calcsize(self._RD_PACK_STR) * 8
        if isinstance(route_dist, str):
            route_dist = _RouteDistinguisher.from_str(route_dist)
        prefixes = prefixes + (route_dist,)
        super(_VPNAddrPrefix, self).__init__(prefixes=prefixes,
                                             length=length,
                                             addr=addr)

    @classmethod
    def _prefix_to_bin(cls, addr):
        rd = addr[0]
        rest = addr[1:]
        binrd = rd.serialize()
        return binrd + super(_VPNAddrPrefix, cls)._prefix_to_bin(rest)

    @classmethod
    def _prefix_from_bin(cls, binaddr):
        # First 8 octets are the RD; the rest is the inner prefix.
        binrd = binaddr[:8]
        binrest = binaddr[8:]
        rd = _RouteDistinguisher.parser(binrd)
        return (rd,) + super(_VPNAddrPrefix, cls)._prefix_from_bin(binrest)
class IPAddrPrefix(_UnlabelledAddrPrefix, _IPAddrPrefix):
    """IPv4 unicast NLRI prefix."""
    ROUTE_FAMILY = RF_IPv4_UC
    _TYPE = {
        'ascii': [
            'addr'
        ]
    }

    @property
    def prefix(self):
        """CIDR string form, e.g. '10.0.0.0/8'."""
        return self.addr + '/' + str(self.length)

    @property
    def formatted_nlri_str(self):
        return self.prefix
class IP6AddrPrefix(_UnlabelledAddrPrefix, _IP6AddrPrefix):
    """IPv6 unicast NLRI prefix."""
    ROUTE_FAMILY = RF_IPv6_UC
    _TYPE = {
        'ascii': [
            'addr'
        ]
    }

    @property
    def prefix(self):
        """CIDR string form, e.g. '2001:db8::/32'."""
        return self.addr + '/' + str(self.length)

    @property
    def formatted_nlri_str(self):
        return self.prefix
class LabelledIPAddrPrefix(_LabelledAddrPrefix, _IPAddrPrefix):
    """IPv4 prefix with an MPLS label stack."""
    ROUTE_FAMILY = RF_IPv4_MPLS


class LabelledIP6AddrPrefix(_LabelledAddrPrefix, _IP6AddrPrefix):
    """IPv6 prefix with an MPLS label stack."""
    ROUTE_FAMILY = RF_IPv6_MPLS
class LabelledVPNIPAddrPrefix(_LabelledAddrPrefix, _VPNAddrPrefix,
                              _IPAddrPrefix):
    """Labelled IPv4 VPN NLRI.

    ``addr`` layout from the mix-ins: (labels, rd, host_addr), so
    addr[0] is the label list, addr[-2] the RD, addr[-1] the address.
    """
    ROUTE_FAMILY = RF_IPv4_VPN

    @property
    def prefix(self):
        # ``length`` covers RD + labels + host bits; subtract the RD and
        # label-stack bits to recover the real mask length.
        masklen = self.length - struct.calcsize(self._RD_PACK_STR) * 8 \
            - struct.calcsize(self._LABEL_PACK_STR) * 8 * len(self.addr[:-2])
        return self.addr[-1] + '/{0}'.format(masklen)

    @property
    def route_dist(self):
        return self.addr[-2].formatted_str

    @property
    def label_list(self):
        return self.addr[0]

    @property
    def formatted_nlri_str(self):
        return "%s:%s" % (self.route_dist, self.prefix)
class LabelledVPNIP6AddrPrefix(_LabelledAddrPrefix, _VPNAddrPrefix,
                               _IP6AddrPrefix):
    """Labelled IPv6 VPN NLRI; see LabelledVPNIPAddrPrefix for layout."""
    ROUTE_FAMILY = RF_IPv6_VPN

    @property
    def prefix(self):
        # Subtract RD and label-stack bits from the on-wire length to
        # recover the real mask length.
        masklen = self.length - struct.calcsize(self._RD_PACK_STR) * 8 \
            - struct.calcsize(self._LABEL_PACK_STR) * 8 * len(self.addr[:-2])
        return self.addr[-1] + '/{0}'.format(masklen)

    @property
    def route_dist(self):
        return self.addr[-2].formatted_str

    @property
    def label_list(self):
        return self.addr[0]

    @property
    def formatted_nlri_str(self):
        return "%s:%s" % (self.route_dist, self.prefix)
class RouteTargetMembershipNLRI(StringifyMixin):
    """Route Target Membership NLRI.

    Route Target membership NLRI is advertised in BGP UPDATE messages using
    the MP_REACH_NLRI and MP_UNREACH_NLRI attributes.
    """
    ROUTE_FAMILY = RF_RTC_UC
    DEFAULT_AS = '0:0'
    DEFAULT_RT = '0:0'

    def __init__(self, origin_as, route_target):
        # If given is not default_as and default_rt.
        # (Identity checks on purpose: the class-attribute strings act as
        # sentinels -- callers pass DEFAULT_AS/DEFAULT_RT themselves to
        # request the default NLRI and skip validation.)
        if not (origin_as is self.DEFAULT_AS and
                route_target is self.DEFAULT_RT):
            # We validate them
            if (not self._is_valid_old_asn(origin_as) or
                    not self._is_valid_ext_comm_attr(route_target)):
                raise ValueError('Invalid params.')
        self.origin_as = origin_as
        self.route_target = route_target

    def _is_valid_old_asn(self, asn):
        """Returns True if given asn is a 16 bit number.

        Old AS numbers are 16 bit unsigned numbers.
        """
        valid = True
        # AS number should be a 16 bit number
        if (not isinstance(asn, numbers.Integral) or (asn < 0) or
                (asn > ((2 ** 16) - 1))):
            valid = False
        return valid

    def _is_valid_ext_comm_attr(self, attr):
        """Validates *attr* as string representation of RT or SOO.

        Returns True if *attr* is as per our convention of RT or SOO, else
        False. Our convention is to represent RT/SOO is a string with format:
        *global_admin_part:local_admin_path*
        """
        is_valid = True
        if not isinstance(attr, str):
            is_valid = False
        else:
            first, second = attr.split(':')
            try:
                if '.' in first:
                    socket.inet_aton(first)
                else:
                    int(first)
                int(second)
            except (ValueError, socket.error):
                is_valid = False
        return is_valid

    @property
    def formatted_nlri_str(self):
        return "%s:%s" % (self.origin_as, self.route_target)

    def is_default_rtnlri(self):
        # Fix: __init__ only ever assigns ``origin_as``/``route_target``;
        # the previously used ``_``-prefixed names were never set and
        # always raised AttributeError here.
        if (self.origin_as is self.DEFAULT_AS and
                self.route_target is self.DEFAULT_RT):
            return True
        return False

    def __cmp__(self, other):
        # Same attribute-name fix as is_default_rtnlri.  (Python-2-only
        # ordering hook; cmp()/__cmp__ do not exist on Python 3.)
        return cmp(
            (self.origin_as, self.route_target),
            (other.origin_as, other.route_target),
        )

    @classmethod
    def parser(cls, buf):
        idx = 0
        # Extract origin AS.
        origin_as, = struct.unpack_from('!I', buf, idx)
        idx += 4
        # Extract route target.
        route_target = _ExtendedCommunity(buf[idx:])
        return cls(origin_as, route_target)

    def serialize(self):
        rt_nlri = ''
        if not self.is_default_rtnlri():
            rt_nlri += struct.pack('!I', self.origin_as)
            # Encode route target
            rt_nlri += self.route_target.serialize()
        # RT NLRI is 12 octets; the leading octet carries its bit length
        # (8 * 12 = 96).
        return struct.pack('B', (8 * 12)) + rt_nlri
def _addr_class_key(route_family):
    """Dict key for _ADDR_CLASSES: the (afi, safi) pair of *route_family*.

    A named def instead of the previous lambda-assigned-to-name (PEP 8
    E731); behavior is identical.
    """
    return route_family.afi, route_family.safi


# Route family -> NLRI prefix class used by _get_addr_class().
_ADDR_CLASSES = {
    _addr_class_key(RF_IPv4_UC): IPAddrPrefix,
    _addr_class_key(RF_IPv6_UC): IP6AddrPrefix,
    _addr_class_key(RF_IPv4_MPLS): LabelledIPAddrPrefix,
    _addr_class_key(RF_IPv6_MPLS): LabelledIP6AddrPrefix,
    _addr_class_key(RF_IPv4_VPN): LabelledVPNIPAddrPrefix,
    _addr_class_key(RF_IPv6_VPN): LabelledVPNIP6AddrPrefix,
    _addr_class_key(RF_RTC_UC): RouteTargetMembershipNLRI,
}
def _get_addr_class(afi, safi):
    """Return the NLRI class for (afi, safi); _BinAddrPrefix if unknown."""
    return _ADDR_CLASSES.get((afi, safi), _BinAddrPrefix)
class _OptParam(StringifyMixin, _TypeDisp, _Value):
    """OPEN message Optional Parameter TLV (type, length, value)."""
    _PACK_STR = '!BB'  # type, length

    def __init__(self, type_, value=None, length=None):
        if type_ is None:
            type_ = self._rev_lookup_type(self.__class__)
        self.type = type_
        self.length = length
        if value is not None:
            self.value = value

    @classmethod
    def parser(cls, buf):
        """Parse one optional parameter; return ([instances], rest).

        A list is returned because a capability parameter may hold several
        capabilities; subclasses signal "already a list" by returning one
        from parse_value.
        """
        (type_, length) = struct.unpack_from(cls._PACK_STR, buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        value = bytes(rest[:length])
        rest = rest[length:]
        subcls = cls._lookup_type(type_)
        caps = subcls.parse_value(value)
        # isinstance instead of ``type(caps) != list``: same outcome here
        # (parse_value yields either a list or a (dict, cls) tuple), and
        # the idiomatic type test.
        if not isinstance(caps, list):
            caps = [subcls(type_=type_, length=length, **caps[0])]
        return caps, rest

    def serialize(self):
        # fixup: recompute length from the serialized value.
        value = self.serialize_value()
        self.length = len(value)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.length)
        return buf + value
@_OptParam.register_unknown_type()
class BGPOptParamUnknown(_OptParam):
    """Fallback for unrecognized optional parameters: value kept raw."""
    @classmethod
    def parse_value(cls, buf):
        # Returns a (dict, cls) tuple (not a list) so _OptParam.parser
        # wraps it into a single instance.
        return {
            'value': buf
        }, cls

    def serialize_value(self):
        return self.value
@_OptParam.register_type(BGP_OPT_CAPABILITY)
class _OptParamCapability(_OptParam, _TypeDisp):
    """Capability optional parameter (RFC 5492).

    One optional parameter may carry several capability TLVs; parse_value
    therefore returns a list of capability instances.
    """
    _CAP_HDR_PACK_STR = '!BB'  # capability code, capability length

    def __init__(self, cap_code=None, cap_value=None, cap_length=None,
                 type_=None, length=None):
        super(_OptParamCapability, self).__init__(type_=BGP_OPT_CAPABILITY,
                                                  length=length)
        if cap_code is None:
            cap_code = self._rev_lookup_type(self.__class__)
        self.cap_code = cap_code
        if cap_value is not None:
            self.cap_value = cap_value
        if cap_length is not None:
            self.cap_length = cap_length

    @classmethod
    def parse_value(cls, buf):
        caps = []
        hdr_len = struct.calcsize(cls._CAP_HDR_PACK_STR)
        while len(buf) > 0:
            (code, length) = struct.unpack_from(cls._CAP_HDR_PACK_STR,
                                                buffer(buf))
            # Slice the value to this capability's declared length.
            # Previously the whole remaining buffer was handed to
            # parse_cap_value, letting variable-length capabilities (e.g.
            # Graceful Restart, which loops until its buffer is empty)
            # consume the following capability's bytes.
            value = buf[hdr_len:hdr_len + length]
            buf = buf[hdr_len + length:]
            kwargs = {
                'cap_code': code,
                'cap_length': length,
            }
            subcls = cls._lookup_type(code)
            kwargs.update(subcls.parse_cap_value(value))
            caps.append(subcls(type_=BGP_OPT_CAPABILITY,
                               length=hdr_len + length,
                               **kwargs))
        return caps

    def serialize_value(self):
        # fixup: recompute cap_length from the serialized capability value.
        cap_value = self.serialize_cap_value()
        self.cap_length = len(cap_value)
        buf = bytearray()
        msg_pack_into(self._CAP_HDR_PACK_STR, buf, 0, self.cap_code,
                      self.cap_length)
        return buf + cap_value
class _OptParamEmptyCapability(_OptParamCapability):
    """Base for capabilities whose value field is empty."""
    @classmethod
    def parse_cap_value(cls, buf):
        return {}

    def serialize_cap_value(self):
        return bytearray()
@_OptParamCapability.register_unknown_type()
class BGPOptParamCapabilityUnknown(_OptParamCapability):
    """Fallback for unrecognized capability codes: value kept raw."""
    @classmethod
    def parse_cap_value(cls, buf):
        return {'cap_value': buf}

    def serialize_cap_value(self):
        return self.cap_value
@_OptParamCapability.register_type(BGP_CAP_ROUTE_REFRESH)
class BGPOptParamCapabilityRouteRefresh(_OptParamEmptyCapability):
    """Route Refresh capability (empty value)."""
    pass


@_OptParamCapability.register_type(BGP_CAP_ROUTE_REFRESH_CISCO)
class BGPOptParamCapabilityCiscoRouteRefresh(_OptParamEmptyCapability):
    """Cisco's pre-standard Route Refresh capability (empty value)."""
    pass


@_OptParamCapability.register_type(BGP_CAP_ENHANCED_ROUTE_REFRESH)
class BGPOptParamCapabilityEnhancedRouteRefresh(_OptParamEmptyCapability):
    """Enhanced Route Refresh capability (empty value)."""
    pass
@_OptParamCapability.register_type(BGP_CAP_GRACEFUL_RESTART)
class BGPOptParamCapabilityGracefulRestart(_OptParamCapability):
    """Graceful Restart capability.

    Value layout: 4-bit flags and 12-bit restart time in one 16-bit word,
    followed by zero or more (afi, safi, flags) 4-octet tuples.
    """
    _CAP_PACK_STR = "!H"

    def __init__(self, flags, time, tuples, **kwargs):
        super(BGPOptParamCapabilityGracefulRestart, self).__init__(**kwargs)
        self.flags = flags
        self.time = time
        self.tuples = tuples

    @classmethod
    def parse_cap_value(cls, buf):
        (restart, ) = struct.unpack_from(cls._CAP_PACK_STR, buffer(buf))
        buf = buf[2:]
        afi_safi_list = []
        while len(buf) > 0:
            afi_safi_list.append(struct.unpack_from("!HBB", buffer(buf)))
            buf = buf[4:]
        # Top 4 bits are flags, bottom 12 the restart time (seconds).
        return {'flags': restart >> 12, 'time': restart & 0xfff,
                'tuples': afi_safi_list}

    def serialize_cap_value(self):
        buf = bytearray()
        msg_pack_into(self._CAP_PACK_STR, buf, 0, self.flags << 12 | self.time)
        # (Removed dead locals ``tuples = self.tuples`` and the pre-loop
        # ``i = 0`` left over from an older indexed loop.)
        offset = 2
        for afi, safi, flags in self.tuples:
            msg_pack_into("!HBB", buf, offset, afi, safi, flags)
            offset += 4
        return buf
@_OptParamCapability.register_type(BGP_CAP_FOUR_OCTET_AS_NUMBER)
class BGPOptParamCapabilityFourOctetAsNumber(_OptParamCapability):
    """Four-octet AS number capability: value is the speaker's 32-bit AS."""
    _CAP_PACK_STR = '!I'

    def __init__(self, as_number, **kwargs):
        super(BGPOptParamCapabilityFourOctetAsNumber, self).__init__(**kwargs)
        self.as_number = as_number

    @classmethod
    def parse_cap_value(cls, buf):
        (as_number, ) = struct.unpack_from(cls._CAP_PACK_STR, buffer(buf))
        return {'as_number': as_number}

    def serialize_cap_value(self):
        buf = bytearray()
        msg_pack_into(self._CAP_PACK_STR, buf, 0, self.as_number)
        return buf
@_OptParamCapability.register_type(BGP_CAP_MULTIPROTOCOL)
class BGPOptParamCapabilityMultiprotocol(_OptParamCapability):
    """Multiprotocol Extensions capability: (afi, reserved, safi)."""
    _CAP_PACK_STR = '!HBB'  # afi, reserved, safi

    def __init__(self, afi, safi, reserved=0, **kwargs):
        super(BGPOptParamCapabilityMultiprotocol, self).__init__(**kwargs)
        self.afi = afi
        self.reserved = reserved
        self.safi = safi

    @classmethod
    def parse_cap_value(cls, buf):
        (afi, reserved, safi,) = struct.unpack_from(cls._CAP_PACK_STR,
                                                    buffer(buf))
        return {
            'afi': afi,
            'reserved': reserved,
            'safi': safi,
        }

    def serialize_cap_value(self):
        # fixup: the reserved octet must be zero on the wire.
        self.reserved = 0
        buf = bytearray()
        msg_pack_into(self._CAP_PACK_STR, buf, 0,
                      self.afi, self.reserved, self.safi)
        return buf
@_OptParamCapability.register_type(BGP_CAP_CARRYING_LABEL_INFO)
class BGPOptParamCapabilityCarryingLabelInfo(_OptParamEmptyCapability):
    """Carrying Label Information capability (empty value)."""
    pass


class BGPWithdrawnRoute(IPAddrPrefix):
    """Withdrawn route entry in an UPDATE; same wire form as IPAddrPrefix."""
    pass
class _PathAttribute(StringifyMixin, _TypeDisp, _Value):
    """UPDATE path attribute: flags, type, (possibly extended) length, value."""
    _PACK_STR = '!BB'  # flags, type
    _PACK_STR_LEN = '!B'  # length
    _PACK_STR_EXT_LEN = '!H'  # length w/ BGP_ATTR_FLAG_EXTENDED_LENGTH
    # When set by a subclass, serialize() forces these OPTIONAL/TRANSITIVE
    # flag bits regardless of what the instance carries.
    _ATTR_FLAGS = None

    def __init__(self, value=None, flags=0, type_=None, length=None):
        if type_ is None:
            type_ = self._rev_lookup_type(self.__class__)
        self.flags = flags
        self.type = type_
        self.length = length
        if value is not None:
            self.value = value

    @classmethod
    def parser(cls, buf):
        """Parse one path attribute; return (instance, rest_of_buffer)."""
        (flags, type_) = struct.unpack_from(cls._PACK_STR, buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        # The Extended Length flag selects a 2-octet length field.
        if (flags & BGP_ATTR_FLAG_EXTENDED_LENGTH) != 0:
            len_pack_str = cls._PACK_STR_EXT_LEN
        else:
            len_pack_str = cls._PACK_STR_LEN
        (length,) = struct.unpack_from(len_pack_str, buffer(rest))
        rest = rest[struct.calcsize(len_pack_str):]
        value = bytes(rest[:length])
        rest = rest[length:]
        subcls = cls._lookup_type(type_)
        return subcls(flags=flags, type_=type_, length=length,
                      **subcls.parse_value(value)), rest

    def serialize(self):
        # fixup: normalize class-mandated flag bits, recompute the length,
        # and pick the extended length form when the value exceeds 255.
        if self._ATTR_FLAGS is not None:
            self.flags = self.flags \
                & ~(BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE) \
                | self._ATTR_FLAGS
        value = self.serialize_value()
        self.length = len(value)
        if self.length > 255:
            self.flags |= BGP_ATTR_FLAG_EXTENDED_LENGTH
            len_pack_str = self._PACK_STR_EXT_LEN
        else:
            self.flags &= ~BGP_ATTR_FLAG_EXTENDED_LENGTH
            len_pack_str = self._PACK_STR_LEN
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.flags, self.type)
        msg_pack_into(len_pack_str, buf, len(buf), self.length)
        return buf + value
@_PathAttribute.register_unknown_type()
class BGPPathAttributeUnknown(_PathAttribute):
    """Fallback attribute that keeps the raw bytes of unrecognized types."""

    @classmethod
    def parse_value(cls, buf):
        # No interpretation: retain the opaque payload verbatim.
        return {'value': buf}

    def serialize_value(self):
        # Emit the stored bytes unchanged.
        return self.value
class _PathAttributeUint32(_PathAttribute):
    # Base for attributes whose value is a single 32 bit unsigned integer.
    _VALUE_PACK_STR = '!I'
@_PathAttribute.register_type(BGP_ATTR_TYPE_ORIGIN)
class BGPPathAttributeOrigin(_PathAttribute):
    """ORIGIN path attribute: a single-octet origin code."""
    _VALUE_PACK_STR = '!B'
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE
class _BGPPathAttributeAsPathCommon(_PathAttribute):
    """Common logic for the AS_PATH and AS4_PATH attributes.

    ``value`` is a list of path segments: an AS_SET segment is stored
    as a ``set`` of AS numbers, an AS_SEQUENCE segment as a ``list``.
    """
    _AS_SET = 1
    _AS_SEQUENCE = 2
    _SEG_HDR_PACK_STR = '!BB'  # segment type, number of AS numbers
    _AS_PACK_STR = None  # set by subclass: '!H' (16 bit) or '!I' (32 bit)
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE

    def __init__(self, value, as_pack_str=None, flags=0, type_=None,
                 length=None):
        super(_BGPPathAttributeAsPathCommon, self).__init__(value=value,
                                                            flags=flags,
                                                            type_=type_,
                                                            length=length)
        if as_pack_str:
            # Remember the AS number width detected while parsing so that
            # re-serialization uses the same format.
            self._AS_PACK_STR = as_pack_str

    @property
    def path_seg_list(self):
        # Deep copy so callers cannot mutate the parsed segments.
        return copy.deepcopy(self.value)

    def get_as_path_len(self):
        """Return the AS path length: each AS in an AS_SEQUENCE counts,
        while a whole AS_SET counts as one."""
        count = 0
        for seg in self.value:
            if isinstance(seg, list):
                # Segment type 2 (AS_SEQUENCE): all ASes counted.
                count += len(seg)
            else:
                # Segment type 1 (AS_SET): counts as one.
                count += 1
        return count

    def has_local_as(self, local_as):
        """Check if *local_as* is already present on path list."""
        return any(local_as in as_path_seg for as_path_seg in self.value)

    def has_matching_leftmost(self, remote_as):
        """Check if leftmost AS matches *remote_as*."""
        if not self.value or not remote_as:
            return False
        leftmost_seg = self.path_seg_list[0]
        if leftmost_seg and leftmost_seg[0] == remote_as:
            return True
        return False

    @classmethod
    def _is_valid_16bit_as_path(cls, buf):
        """Return True if *buf* parses cleanly as 16 bit AS numbers."""
        two_byte_as_size = struct.calcsize('!H')
        while buf:
            (type_, num_as) = struct.unpack_from(cls._SEG_HDR_PACK_STR,
                                                 buffer(buf))
            # Use value equality; the original identity check (``is not``)
            # only worked because of CPython's small-int caching.
            if type_ != cls._AS_SET and type_ != cls._AS_SEQUENCE:
                return False
            buf = buf[struct.calcsize(cls._SEG_HDR_PACK_STR):]
            if len(buf) < num_as * two_byte_as_size:
                return False
            buf = buf[num_as * two_byte_as_size:]
        return True

    @classmethod
    def parse_value(cls, buf):
        """Parse segments from *buf*, auto-detecting the AS number width."""
        result = []
        # Try 16 bit first; fall back to 32 bit AS numbers (see the note
        # on BGPPathAttributeAsPath about the inherent ambiguity).
        if cls._is_valid_16bit_as_path(buf):
            as_pack_str = '!H'
        else:
            as_pack_str = '!I'
        while buf:
            (type_, num_as) = struct.unpack_from(cls._SEG_HDR_PACK_STR,
                                                 buffer(buf))
            buf = buf[struct.calcsize(cls._SEG_HDR_PACK_STR):]
            as_numbers = []
            for _ in range(num_as):
                (as_number,) = struct.unpack_from(as_pack_str,
                                                  buffer(buf))
                buf = buf[struct.calcsize(as_pack_str):]
                as_numbers.append(as_number)
            if type_ == cls._AS_SET:
                result.append(set(as_numbers))
            elif type_ == cls._AS_SEQUENCE:
                result.append(as_numbers)
            else:
                # Protocol error.  Raise instead of ``assert(0)``, which
                # is silently stripped when run with -O.
                raise ValueError('unknown AS path segment type %d' % type_)
        return {
            'value': result,
            'as_pack_str': as_pack_str,
        }

    def serialize_value(self):
        buf = bytearray()
        offset = 0
        for seg in self.value:
            if isinstance(seg, set):
                type_ = self._AS_SET
            elif isinstance(seg, list):
                type_ = self._AS_SEQUENCE
            else:
                # Previously fell through and crashed with an
                # UnboundLocalError; fail with a clear message instead.
                raise ValueError('invalid AS path segment %r' % (seg,))
            as_numbers = list(seg)
            num_as = len(as_numbers)
            if num_as == 0:
                # Empty segments are not emitted at all.
                continue
            msg_pack_into(self._SEG_HDR_PACK_STR, buf, offset, type_, num_as)
            offset += struct.calcsize(self._SEG_HDR_PACK_STR)
            for as_number in as_numbers:
                msg_pack_into(self._AS_PACK_STR, buf, offset, as_number)
                offset += struct.calcsize(self._AS_PACK_STR)
        return buf
@_PathAttribute.register_type(BGP_ATTR_TYPE_AS_PATH)
class BGPPathAttributeAsPath(_BGPPathAttributeAsPathCommon):
    """AS_PATH attribute; AS number width is guessed while parsing."""
    # XXX depends on negotiated capability, AS numbers can be 32 bit.
    # while wireshark seems to attempt auto-detect, it seems that
    # there's no way to detect it reliably. for example, the
    # following byte sequence can be interpreted in two ways.
    #   01 02 99 88 77 66 02 01 55 44
    #   AS_SET num=2 9988 7766 AS_SEQUENCE num=1 5544
    #   AS_SET num=2 99887766 02015544
    # we first check whether AS path can be parsed in 16bit format and if
    # it fails, we try to parse as 32bit
    _AS_PACK_STR = '!H'
@_PathAttribute.register_type(BGP_ATTR_TYPE_AS4_PATH)
class BGPPathAttributeAs4Path(_BGPPathAttributeAsPathCommon):
    """AS4_PATH attribute: AS numbers are always 4 octets wide."""
    _AS_PACK_STR = '!I'
    @classmethod
    def _is_valid_16bit_as_path(cls, buf):
        # AS4_PATH never uses 16 bit AS numbers, so disable the heuristic.
        return False
@_PathAttribute.register_type(BGP_ATTR_TYPE_NEXT_HOP)
class BGPPathAttributeNextHop(_PathAttribute):
    """NEXT_HOP attribute: an IPv4 address in dotted-quad text form."""
    _VALUE_PACK_STR = '!4s'
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE
    _TYPE = {
        'ascii': [
            'value'
        ]
    }
    @classmethod
    def parse_value(cls, buf):
        (ip_addr,) = struct.unpack_from(cls._VALUE_PACK_STR, buffer(buf))
        # Stored as text ('a.b.c.d'), not raw bytes.
        return {
            'value': addrconv.ipv4.bin_to_text(ip_addr),
        }
    def serialize_value(self):
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0,
                      addrconv.ipv4.text_to_bin(self.value))
        return buf
@_PathAttribute.register_type(BGP_ATTR_TYPE_MULTI_EXIT_DISC)
class BGPPathAttributeMultiExitDisc(_PathAttributeUint32):
    """MULTI_EXIT_DISC (MED) attribute: a 32 bit metric."""
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL
@_PathAttribute.register_type(BGP_ATTR_TYPE_LOCAL_PREF)
class BGPPathAttributeLocalPref(_PathAttributeUint32):
    """LOCAL_PREF attribute: a 32 bit degree of preference."""
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE
@_PathAttribute.register_type(BGP_ATTR_TYPE_ATOMIC_AGGREGATE)
class BGPPathAttributeAtomicAggregate(_PathAttribute):
    """ATOMIC_AGGREGATE attribute: present/absent flag with no value."""
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE
    @classmethod
    def parse_value(cls, buf):
        # Zero-length value; nothing to extract.
        return {}
    def serialize_value(self):
        # Zero-length value (empty str; this module targets Python 2).
        return ''
class _BGPPathAttributeAggregatorCommon(_PathAttribute):
    """Common logic for AGGREGATOR/AS4_AGGREGATOR: AS number + IPv4 addr.

    Subclasses set _VALUE_PACK_STR to fix the AS number width.
    """
    _VALUE_PACK_STR = None
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE
    _TYPE = {
        'ascii': [
            'addr'
        ]
    }
    def __init__(self, as_number, addr, flags=0, type_=None, length=None):
        super(_BGPPathAttributeAggregatorCommon, self).__init__(flags=flags,
                                                                type_=type_,
                                                                length=length)
        self.as_number = as_number
        self.addr = addr
    @classmethod
    def parse_value(cls, buf):
        (as_number, addr) = struct.unpack_from(cls._VALUE_PACK_STR,
                                               buffer(buf))
        return {
            'as_number': as_number,
            'addr': addrconv.ipv4.bin_to_text(addr),
        }
    def serialize_value(self):
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, self.as_number,
                      addrconv.ipv4.text_to_bin(self.addr))
        return buf
@_PathAttribute.register_type(BGP_ATTR_TYPE_AGGREGATOR)
class BGPPathAttributeAggregator(_BGPPathAttributeAggregatorCommon):
    """AGGREGATOR attribute with a 2-octet AS number."""
    # XXX currently this implementation assumes 16 bit AS numbers.
    _VALUE_PACK_STR = '!H4s'
@_PathAttribute.register_type(BGP_ATTR_TYPE_AS4_AGGREGATOR)
class BGPPathAttributeAs4Aggregator(_BGPPathAttributeAggregatorCommon):
    """AS4_AGGREGATOR attribute with a 4-octet AS number."""
    _VALUE_PACK_STR = '!I4s'
@_PathAttribute.register_type(BGP_ATTR_TYPE_COMMUNITIES)
class BGPPathAttributeCommunities(_PathAttribute):
    """COMMUNITIES attribute: a list of 32 bit community values."""
    _VALUE_PACK_STR = '!I'
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE

    # Well-known community values (plain hex literals instead of the
    # old int('0x...', 16) round-trip).
    NO_EXPORT = 0xFFFFFF01
    NO_ADVERTISE = 0xFFFFFF02
    NO_EXPORT_SUBCONFED = 0xFFFFFF03
    # NOTE: name kept as-is (sic) for backward compatibility.
    WELL_KNOW_COMMUNITIES = (NO_EXPORT, NO_ADVERTISE, NO_EXPORT_SUBCONFED)

    def __init__(self, communities,
                 flags=0, type_=None, length=None):
        super(BGPPathAttributeCommunities, self).__init__(flags=flags,
                                                          type_=type_,
                                                          length=length)
        self.communities = communities

    @classmethod
    def parse_value(cls, buf):
        """Parse as many whole 4-octet values as *buf* contains."""
        rest = buf
        communities = []
        elem_size = struct.calcsize(cls._VALUE_PACK_STR)
        while len(rest) >= elem_size:
            (comm, ) = struct.unpack_from(cls._VALUE_PACK_STR, buffer(rest))
            communities.append(comm)
            rest = rest[elem_size:]
        return {
            'communities': communities,
        }

    def serialize_value(self):
        buf = bytearray()
        for comm in self.communities:
            # Pack each community directly at the tail of the buffer
            # (previously a one-element bytearray was built per value).
            msg_pack_into(self._VALUE_PACK_STR, buf, len(buf), comm)
        return buf

    @staticmethod
    def is_no_export(comm_attr):
        """Returns True if given value matches well-known community NO_EXPORT
        attribute value.
        """
        return comm_attr == BGPPathAttributeCommunities.NO_EXPORT

    @staticmethod
    def is_no_advertise(comm_attr):
        """Returns True if given value matches well-known community
        NO_ADVERTISE attribute value.
        """
        return comm_attr == BGPPathAttributeCommunities.NO_ADVERTISE

    @staticmethod
    def is_no_export_subconfed(comm_attr):
        """Returns True if given value matches well-known community
        NO_EXPORT_SUBCONFED attribute value.
        """
        return comm_attr == BGPPathAttributeCommunities.NO_EXPORT_SUBCONFED

    def has_comm_attr(self, attr):
        """Returns True if given community attribute is present."""
        return attr in self.communities
@_PathAttribute.register_type(BGP_ATTR_TYPE_ORIGINATOR_ID)
class BGPPathAttributeOriginatorId(_PathAttribute):
    """ORIGINATOR_ID attribute: the originating router's IPv4 id."""
    # ORIGINATOR_ID is a new optional, non-transitive BGP attribute of Type
    # code 9. This attribute is 4 bytes long and it will be created by an
    # RR in reflecting a route.
    _VALUE_PACK_STR = '!4s'
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL
    _TYPE = {
        'ascii': [
            'value'
        ]
    }
    @classmethod
    def parse_value(cls, buf):
        (originator_id,) = struct.unpack_from(cls._VALUE_PACK_STR, buffer(buf))
        # Stored as dotted-quad text.
        return {
            'value': addrconv.ipv4.bin_to_text(originator_id),
        }
    def serialize_value(self):
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0,
                      addrconv.ipv4.text_to_bin(self.value))
        return buf
@_PathAttribute.register_type(BGP_ATTR_TYPE_CLUSTER_LIST)
class BGPPathAttributeClusterList(_PathAttribute):
    """CLUSTER_LIST attribute: sequence of route-reflection CLUSTER_IDs."""
    # CLUSTER_LIST is a new, optional, non-transitive BGP attribute of Type
    # code 10. It is a sequence of CLUSTER_ID values representing the
    # reflection path that the route has passed.
    _VALUE_PACK_STR = '!4s'
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL
    _TYPE = {
        'ascii': [
            'value'
        ]
    }
    @classmethod
    def parse_value(cls, buf):
        # Consume as many whole 4-octet ids as the buffer holds.
        rest = buf
        cluster_list = []
        elem_size = struct.calcsize(cls._VALUE_PACK_STR)
        while len(rest) >= elem_size:
            (cluster_id, ) = struct.unpack_from(
                cls._VALUE_PACK_STR, buffer(rest))
            cluster_list.append(addrconv.ipv4.bin_to_text(cluster_id))
            rest = rest[elem_size:]
        return {
            'value': cluster_list,
        }
    def serialize_value(self):
        buf = bytearray()
        offset = 0
        for cluster_id in self.value:
            msg_pack_into(
                self._VALUE_PACK_STR,
                buf,
                offset,
                addrconv.ipv4.text_to_bin(cluster_id))
            offset += struct.calcsize(self._VALUE_PACK_STR)
        return buf
# Extended Communities
# RFC 4360
# RFC 5668
# IANA registry:
# https://www.iana.org/assignments/bgp-extended-communities/
# bgp-extended-communities.xml
#
# type
# high low
# 00 sub-type Two-Octet AS Specific Extended Community (transitive)
# 40 sub-type Two-Octet AS Specific Extended Community
# payload:
# 2 byte Global Administrator (AS number)
# 4 byte Local Administrator (defined by sub-type)
# 01 sub-type IPv4 Address Specific Extended Community (transitive)
# 41 sub-type IPv4 Address Specific Extended Community
# payload:
# 4 byte Global Administrator (IPv4 address)
# 2 byte Local Administrator (defined by sub-type)
# 03 sub-type Opaque Extended Community (transitive)
# 43 sub-type Opaque Extended Community
# payload:
# 6 byte opaque value (defined by sub-type)
#
# 00 02 Route Target Community (two-octet AS specific)
# 01 02 Route Target Community (IPv4 address specific)
# 02 02 Route Target Community (four-octet AS specific, RFC 5668)
# 00 03 Route Origin Community (two-octet AS specific)
# 01 03 Route Origin Community (IPv4 address specific)
# 02 03 Route Origin Community (four-octet AS specific, RFC 5668)
@_PathAttribute.register_type(BGP_ATTR_TYPE_EXTENDED_COMMUNITIES)
class BGPPathAttributeExtendedCommunities(_PathAttribute):
    """EXTENDED_COMMUNITIES attribute: list of _ExtendedCommunity objects."""
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE
    _class_prefixes = ['BGP']
    def __init__(self, communities,
                 flags=0, type_=None, length=None):
        super(BGPPathAttributeExtendedCommunities,
              self).__init__(flags=flags,
                             type_=type_,
                             length=length)
        self.communities = communities
    @classmethod
    def parse_value(cls, buf):
        # Each extended community is a fixed 8-octet unit; parse them all.
        rest = buf
        communities = []
        while rest:
            comm, rest = _ExtendedCommunity.parse(rest)
            communities.append(comm)
        return {
            'communities': communities,
        }
    def serialize_value(self):
        buf = bytearray()
        for comm in self.communities:
            buf += comm.serialize()
        return buf
    def _community_list(self, subtype):
        """Render communities of *subtype* as 'admin:local' strings."""
        _list = []
        for comm in (c for c in self.communities
                     if hasattr(c, "subtype") and c.subtype == subtype):
            # Type 0/2: AS-number administrator; type 1: IPv4 administrator.
            if comm.type == 0 or comm.type == 2:
                _list.append('%d:%d' % (comm.as_number,
                                        comm.local_administrator))
            elif comm.type == 1:
                _list.append('%s:%d' % (comm.ipv4_address,
                                        comm.local_administrator))
        return _list
    @property
    def rt_list(self):
        # Route Target communities (subtype 2).
        return self._community_list(2)
    @property
    def soo_list(self):
        # Site of Origin (Route Origin) communities (subtype 3).
        return self._community_list(3)
class _ExtendedCommunity(StringifyMixin, _TypeDisp, _Value):
    """Base class for 8-octet BGP extended communities (RFC 4360)."""
    _PACK_STR = '!B7s'  # type high (+ type low) + value
    IANA_AUTHORITY = 0x80
    TRANSITIVE = 0x40
    # Mask that clears only the TRANSITIVE bit for subclass dispatch.
    _TYPE_HIGH_MASK = ~TRANSITIVE
    TWO_OCTET_AS_SPECIFIC = 0x00
    IPV4_ADDRESS_SPECIFIC = 0x01
    FOUR_OCTET_AS_SPECIFIC = 0x02
    OPAQUE = 0x03
    def __init__(self, type_):
        self.type = type_
    @classmethod
    def parse(cls, buf):
        """Parse one extended community; return (community, rest)."""
        (type_high, payload) = struct.unpack_from(cls._PACK_STR, buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        # Dispatch ignores the transitive bit but keeps the full type_high
        # value on the instance.
        type_ = type_high & cls._TYPE_HIGH_MASK
        subcls = cls._lookup_type(type_)
        return subcls(type_=type_high,
                      **subcls.parse_value(payload)), rest
    def serialize(self):
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type,
                      bytes(self.serialize_value()))
        return buf
@_ExtendedCommunity.register_type(_ExtendedCommunity.TWO_OCTET_AS_SPECIFIC)
class BGPTwoOctetAsSpecificExtendedCommunity(_ExtendedCommunity):
    """Two-octet-AS specific extended community (RFC 4360 3.1)."""
    _VALUE_PACK_STR = '!BHI'  # sub type, as number, local adm
    _VALUE_FIELDS = ['subtype', 'as_number', 'local_administrator']
    def __init__(self, type_=_ExtendedCommunity.TWO_OCTET_AS_SPECIFIC,
                 **kwargs):
        self.do_init(BGPTwoOctetAsSpecificExtendedCommunity, self, kwargs,
                     type_=type_)
@_ExtendedCommunity.register_type(_ExtendedCommunity.IPV4_ADDRESS_SPECIFIC)
class BGPIPv4AddressSpecificExtendedCommunity(_ExtendedCommunity):
    """IPv4-address specific extended community (RFC 4360 3.2)."""
    _VALUE_PACK_STR = '!B4sH'  # sub type, IPv4 address, local adm
    _VALUE_FIELDS = ['subtype', 'ipv4_address', 'local_administrator']
    _TYPE = {
        'ascii': [
            'ipv4_address'
        ]
    }
    def __init__(self, type_=_ExtendedCommunity.IPV4_ADDRESS_SPECIFIC,
                 **kwargs):
        self.do_init(BGPIPv4AddressSpecificExtendedCommunity, self, kwargs,
                     type_=type_)
    @classmethod
    def parse_value(cls, buf):
        # Post-process the generic parse: convert the raw address bytes
        # to dotted-quad text.
        d_ = super(BGPIPv4AddressSpecificExtendedCommunity,
                   cls).parse_value(buf)
        d_['ipv4_address'] = addrconv.ipv4.bin_to_text(d_['ipv4_address'])
        return d_
    def serialize_value(self):
        # Convert the text address back to bytes before packing.
        args = []
        for f in self._VALUE_FIELDS:
            v = getattr(self, f)
            if f == 'ipv4_address':
                v = bytes(addrconv.ipv4.text_to_bin(v))
            args.append(v)
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, *args)
        return buf
@_ExtendedCommunity.register_type(_ExtendedCommunity.FOUR_OCTET_AS_SPECIFIC)
class BGPFourOctetAsSpecificExtendedCommunity(_ExtendedCommunity):
    """Four-octet-AS specific extended community (RFC 5668)."""
    _VALUE_PACK_STR = '!BIH'  # sub type, as number, local adm
    _VALUE_FIELDS = ['subtype', 'as_number', 'local_administrator']
    def __init__(self, type_=_ExtendedCommunity.FOUR_OCTET_AS_SPECIFIC,
                 **kwargs):
        self.do_init(BGPFourOctetAsSpecificExtendedCommunity, self, kwargs,
                     type_=type_)
@_ExtendedCommunity.register_type(_ExtendedCommunity.OPAQUE)
class BGPOpaqueExtendedCommunity(_ExtendedCommunity):
    """Opaque extended community: 7 octets of uninterpreted value."""
    _VALUE_PACK_STR = '!7s'  # opaque value
    _VALUE_FIELDS = ['opaque']
    def __init__(self, type_=_ExtendedCommunity.OPAQUE,
                 **kwargs):
        self.do_init(BGPOpaqueExtendedCommunity, self, kwargs,
                     type_=type_)
@_ExtendedCommunity.register_unknown_type()
class BGPUnknownExtendedCommunity(_ExtendedCommunity):
    """Fallback for unregistered extended community types."""
    _VALUE_PACK_STR = '!7s'  # opaque value
    def __init__(self, **kwargs):
        self.do_init(BGPUnknownExtendedCommunity, self, kwargs)
@_PathAttribute.register_type(BGP_ATTR_TYPE_MP_REACH_NLRI)
class BGPPathAttributeMpReachNLRI(_PathAttribute):
    """MP_REACH_NLRI attribute (RFC 4760): multiprotocol reachable routes.

    Wire format: afi (2), safi (1), next hop length (1), next hop bytes,
    one reserved octet, then NLRI entries of the afi/safi-specific class.
    """
    _VALUE_PACK_STR = '!HBB'  # afi, safi, next hop len
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL
    _class_suffixes = ['AddrPrefix']
    # Length in octets of a Route Distinguisher prefixed to VPN next hops.
    _rd_length = 8
    _TYPE = {
        'ascii': [
            'next_hop'
        ]
    }
    def __init__(self, afi, safi, next_hop, nlri,
                 next_hop_len=0, reserved='\0',
                 flags=0, type_=None, length=None):
        super(BGPPathAttributeMpReachNLRI, self).__init__(flags=flags,
                                                          type_=type_,
                                                          length=length)
        self.afi = afi
        self.safi = safi
        self.next_hop_len = next_hop_len
        self.next_hop = next_hop
        # Pre-convert the textual next hop to bytes; only IPv4/IPv6
        # address families are supported.
        if afi == addr_family.IP:
            self._next_hop_bin = addrconv.ipv4.text_to_bin(next_hop)
        elif afi == addr_family.IP6:
            self._next_hop_bin = addrconv.ipv6.text_to_bin(next_hop)
        else:
            raise ValueError('Invalid address familly(%d)' % afi)
        self._reserved = reserved
        self.nlri = nlri
        # Every NLRI entry must be of the class matching (afi, safi).
        addr_cls = _get_addr_class(afi, safi)
        for i in nlri:
            assert isinstance(i, addr_cls)
    @classmethod
    def parse_value(cls, buf):
        (afi, safi, next_hop_len,) = struct.unpack_from(cls._VALUE_PACK_STR,
                                                        buffer(buf))
        rest = buf[struct.calcsize(cls._VALUE_PACK_STR):]
        next_hop_bin = rest[:next_hop_len]
        rest = rest[next_hop_len:]
        reserved = rest[:1]
        assert reserved == '\0'
        binnlri = rest[1:]
        addr_cls = _get_addr_class(afi, safi)
        nlri = []
        while binnlri:
            n, binnlri = addr_cls.parser(binnlri)
            nlri.append(n)
        rf = RouteFamily(afi, safi)
        # VPN families carry an 8-octet Route Distinguisher before the
        # next hop address; strip it off here.
        if rf == RF_IPv6_VPN:
            next_hop = addrconv.ipv6.bin_to_text(next_hop_bin[cls._rd_length:])
            next_hop_len -= cls._rd_length
        elif rf == RF_IPv4_VPN:
            next_hop = addrconv.ipv4.bin_to_text(next_hop_bin[cls._rd_length:])
            next_hop_len -= cls._rd_length
        elif afi == addr_family.IP:
            next_hop = addrconv.ipv4.bin_to_text(next_hop_bin)
        elif afi == addr_family.IP6:
            # next_hop_bin can include global address and link-local address
            # according to RFC2545. Since a link-local address isn't needed in
            # Ryu BGPSpeaker, we ignore it if both addresses were sent.
            # The link-local address is supposed to follow after
            # a global address and next_hop_len will be 32 bytes,
            # so we use the first 16 bytes, which is a global address,
            # as a next_hop and change the next_hop_len to 16.
            if next_hop_len == 32:
                next_hop_bin = next_hop_bin[:16]
                next_hop_len = 16
            next_hop = addrconv.ipv6.bin_to_text(next_hop_bin)
        else:
            raise ValueError('Invalid address familly(%d)' % afi)
        return {
            'afi': afi,
            'safi': safi,
            'next_hop_len': next_hop_len,
            'next_hop': next_hop,
            'reserved': reserved,
            'nlri': nlri,
        }
    def serialize_value(self):
        # fixup
        self.next_hop_len = len(self._next_hop_bin)
        # VPN families re-add an all-zero Route Distinguisher in front of
        # the next hop (the reverse of what parse_value strips).
        if RouteFamily(self.afi, self.safi) in (RF_IPv4_VPN, RF_IPv6_VPN):
            empty_label_stack = '\x00' * self._rd_length
            next_hop_len = len(self._next_hop_bin) + len(empty_label_stack)
            next_hop_bin = empty_label_stack
            next_hop_bin += self._next_hop_bin
        else:
            next_hop_len = self.next_hop_len
            next_hop_bin = self._next_hop_bin
        self._reserved = '\0'
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, self.afi,
                      self.safi, next_hop_len)
        buf += next_hop_bin
        buf += self._reserved
        binnlri = bytearray()
        for n in self.nlri:
            binnlri += n.serialize()
        buf += binnlri
        return buf
    @property
    def route_family(self):
        # The RouteFamily object registered for this (afi, safi) pair.
        return _rf_map[(self.afi, self.safi)]
@_PathAttribute.register_type(BGP_ATTR_TYPE_MP_UNREACH_NLRI)
class BGPPathAttributeMpUnreachNLRI(_PathAttribute):
    """MP_UNREACH_NLRI attribute (RFC 4760): multiprotocol withdrawals.

    Wire format: afi (2), safi (1), then withdrawn NLRI entries.
    """
    _VALUE_PACK_STR = '!HB'  # afi, safi
    _ATTR_FLAGS = BGP_ATTR_FLAG_OPTIONAL
    _class_suffixes = ['AddrPrefix']
    def __init__(self, afi, safi, withdrawn_routes,
                 flags=0, type_=None, length=None):
        super(BGPPathAttributeMpUnreachNLRI, self).__init__(flags=flags,
                                                            type_=type_,
                                                            length=length)
        self.afi = afi
        self.safi = safi
        self.withdrawn_routes = withdrawn_routes
        # Every withdrawn route must be of the class matching (afi, safi).
        addr_cls = _get_addr_class(afi, safi)
        for i in withdrawn_routes:
            assert isinstance(i, addr_cls)
    @classmethod
    def parse_value(cls, buf):
        (afi, safi,) = struct.unpack_from(cls._VALUE_PACK_STR, buffer(buf))
        binnlri = buf[struct.calcsize(cls._VALUE_PACK_STR):]
        addr_cls = _get_addr_class(afi, safi)
        nlri = []
        while binnlri:
            n, binnlri = addr_cls.parser(binnlri)
            nlri.append(n)
        return {
            'afi': afi,
            'safi': safi,
            'withdrawn_routes': nlri,
        }
    def serialize_value(self):
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, self.afi, self.safi)
        binnlri = bytearray()
        for n in self.withdrawn_routes:
            binnlri += n.serialize()
        buf += binnlri
        return buf
    @property
    def route_family(self):
        # The RouteFamily object registered for this (afi, safi) pair.
        return _rf_map[(self.afi, self.safi)]
class BGPNLRI(IPAddrPrefix):
    """NLRI entry in a BGP UPDATE; same wire format as IPAddrPrefix."""
    pass
class BGPMessage(packet_base.PacketBase, _TypeDisp):
    """Base class for BGP-4 messages.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  one of BGP\_MSG\_ constants.
    ========================== ===============================================
    """
    _HDR_PACK_STR = '!16sHB'  # marker, len, type
    _HDR_LEN = struct.calcsize(_HDR_PACK_STR)
    _class_prefixes = ['BGP']
    def __init__(self, type_, len_=None, marker=None):
        if marker is None:
            # Use the standard all-ones marker when none is supplied.
            self._marker = _MARKER
        else:
            self._marker = marker
        self.len = len_
        self.type = type_
    @classmethod
    def parser(cls, buf):
        """Parse one message from *buf*; return (message, rest).

        Raises stream_parser.StreamParser.TooSmallException when *buf*
        does not yet hold a complete message.
        """
        if len(buf) < cls._HDR_LEN:
            raise stream_parser.StreamParser.TooSmallException(
                '%d < %d' % (len(buf), cls._HDR_LEN))
        (marker, len_, type_) = struct.unpack_from(cls._HDR_PACK_STR,
                                                   buffer(buf))
        msglen = len_
        if len(buf) < msglen:
            raise stream_parser.StreamParser.TooSmallException(
                '%d < %d' % (len(buf), msglen))
        binmsg = buf[cls._HDR_LEN:msglen]
        rest = buf[msglen:]
        # Dispatch the body to the subclass registered for this type.
        subcls = cls._lookup_type(type_)
        kwargs = subcls.parser(binmsg)
        return subcls(marker=marker, len_=len_, type_=type_, **kwargs), rest
    def serialize(self):
        # fixup
        self._marker = _MARKER
        tail = self.serialize_tail()
        self.len = self._HDR_LEN + len(tail)
        hdr = bytearray(struct.pack(self._HDR_PACK_STR, self._marker,
                                    self.len, self.type))
        return hdr + tail
    def __len__(self):
        # XXX destructive
        # serialize() has the side effect of fixing up self.len/_marker.
        buf = self.serialize()
        return len(buf)
@BGPMessage.register_type(BGP_MSG_OPEN)
class BGPOpen(BGPMessage):
    """BGP-4 OPEN Message encoder/decoder class.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  The default is BGP_MSG_OPEN.
    version                    Version field.  The default is 4.
    my_as                      My Autonomous System field.  2 octet unsigned
                               integer.
    hold_time                  Hold Time field.  2 octet unsigned integer.
                               The default is 0.
    bgp_identifier             BGP Identifier field.  An IPv4 address.
                               For example, '192.0.2.1'
    opt_param_len              Optional Parameters Length field.
                               Ignored when encoding.
    opt_param                  Optional Parameters field.  A list of
                               BGPOptParam instances.  The default is [].
    ========================== ===============================================
    """
    _PACK_STR = '!BHH4sB'
    _MIN_LEN = BGPMessage._HDR_LEN + struct.calcsize(_PACK_STR)
    _TYPE = {
        'ascii': [
            'bgp_identifier'
        ]
    }

    def __init__(self, my_as, bgp_identifier, type_=BGP_MSG_OPEN,
                 opt_param_len=0, opt_param=None,
                 version=_VERSION, hold_time=0, len_=None, marker=None):
        super(BGPOpen, self).__init__(marker=marker, len_=len_, type_=type_)
        self.version = version
        self.my_as = my_as
        self.bgp_identifier = bgp_identifier
        self.hold_time = hold_time
        self.opt_param_len = opt_param_len
        # Fresh list per instance; the old default of opt_param=[] was a
        # shared mutable default argument.
        self.opt_param = opt_param if opt_param is not None else []

    @classmethod
    def parser(cls, buf):
        """Parse the OPEN body; return kwargs for __init__."""
        (version, my_as, hold_time,
         bgp_identifier, opt_param_len) = struct.unpack_from(cls._PACK_STR,
                                                             buffer(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        binopts = rest[:opt_param_len]
        opt_param = []
        while binopts:
            opt, binopts = _OptParam.parser(binopts)
            # NOTE(review): extend() implies _OptParam.parser returns a
            # list of parameters here -- confirm against _OptParam.
            opt_param.extend(opt)
        return {
            "version": version,
            "my_as": my_as,
            "hold_time": hold_time,
            "bgp_identifier": addrconv.ipv4.bin_to_text(bgp_identifier),
            "opt_param_len": opt_param_len,
            "opt_param": opt_param,
        }

    def serialize_tail(self):
        # fixup: the version and opt_param_len fields are recomputed from
        # the instance state before packing.
        self.version = _VERSION
        binopts = bytearray()
        for opt in self.opt_param:
            binopts += opt.serialize()
        self.opt_param_len = len(binopts)
        msg = bytearray(struct.pack(self._PACK_STR,
                                    self.version,
                                    self.my_as,
                                    self.hold_time,
                                    addrconv.ipv4.text_to_bin(
                                        self.bgp_identifier),
                                    self.opt_param_len))
        msg += binopts
        return msg
@BGPMessage.register_type(BGP_MSG_UPDATE)
class BGPUpdate(BGPMessage):
    """BGP-4 UPDATE Message encoder/decoder class.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.
    .. tabularcolumns:: |l|L|
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  The default is BGP_MSG_UPDATE.
    withdrawn_routes_len       Withdrawn Routes Length field.
                               Ignored when encoding.
    withdrawn_routes           Withdrawn Routes field.  A list of
                               BGPWithdrawnRoute instances.
                               The default is [].
    total_path_attribute_len   Total Path Attribute Length field.
                               Ignored when encoding.
    path_attributes            Path Attributes field.  A list of
                               BGPPathAttribute instances.
                               The default is [].
    nlri                       Network Layer Reachability Information field.
                               A list of BGPNLRI instances.
                               The default is [].
    ========================== ===============================================
    """
    _MIN_LEN = BGPMessage._HDR_LEN

    def __init__(self, type_=BGP_MSG_UPDATE,
                 withdrawn_routes_len=None,
                 withdrawn_routes=None,
                 total_path_attribute_len=None,
                 path_attributes=None,
                 nlri=None,
                 len_=None, marker=None):
        super(BGPUpdate, self).__init__(marker=marker, len_=len_, type_=type_)
        # Fresh lists per instance; the old defaults of [] were shared
        # mutable default arguments.
        withdrawn_routes = withdrawn_routes if withdrawn_routes is not None \
            else []
        path_attributes = path_attributes if path_attributes is not None \
            else []
        nlri = nlri if nlri is not None else []
        self.withdrawn_routes_len = withdrawn_routes_len
        self.withdrawn_routes = withdrawn_routes
        self.total_path_attribute_len = total_path_attribute_len
        self.path_attributes = path_attributes
        # Index the attributes by type code for fast lookup.
        self._pathattr_map = {}
        for attr in path_attributes:
            self._pathattr_map[attr.type] = attr
        self.nlri = nlri

    @property
    def pathattr_map(self):
        return self._pathattr_map

    def get_path_attr(self, attr_name):
        """Return the path attribute with the given type code, or None."""
        return self._pathattr_map.get(attr_name)

    @classmethod
    def parser(cls, buf):
        """Parse the UPDATE body; return kwargs for __init__."""
        offset = 0
        (withdrawn_routes_len,) = struct.unpack_from('!H', buffer(buf), offset)
        binroutes = buffer(buf[offset + 2:
                               offset + 2 + withdrawn_routes_len])
        offset += 2 + withdrawn_routes_len
        (total_path_attribute_len,) = struct.unpack_from('!H', buffer(buf),
                                                         offset)
        binpathattrs = buffer(buf[offset + 2:
                                  offset + 2 + total_path_attribute_len])
        # Everything after the path attributes is NLRI.
        binnlri = buffer(buf[offset + 2 + total_path_attribute_len:])
        withdrawn_routes = []
        while binroutes:
            r, binroutes = BGPWithdrawnRoute.parser(binroutes)
            withdrawn_routes.append(r)
        path_attributes = []
        while binpathattrs:
            pa, binpathattrs = _PathAttribute.parser(binpathattrs)
            path_attributes.append(pa)
        offset += 2 + total_path_attribute_len
        nlri = []
        while binnlri:
            n, binnlri = BGPNLRI.parser(binnlri)
            nlri.append(n)
        return {
            "withdrawn_routes_len": withdrawn_routes_len,
            "withdrawn_routes": withdrawn_routes,
            "total_path_attribute_len": total_path_attribute_len,
            "path_attributes": path_attributes,
            "nlri": nlri,
        }

    def serialize_tail(self):
        # fixup: both length fields are recomputed from the lists before
        # being packed.
        binroutes = bytearray()
        for r in self.withdrawn_routes:
            binroutes += r.serialize()
        self.withdrawn_routes_len = len(binroutes)
        binpathattrs = bytearray()
        for pa in self.path_attributes:
            binpathattrs += pa.serialize()
        self.total_path_attribute_len = len(binpathattrs)
        binnlri = bytearray()
        for n in self.nlri:
            binnlri += n.serialize()
        msg = bytearray()
        offset = 0
        msg_pack_into('!H', msg, offset, self.withdrawn_routes_len)
        msg += binroutes
        offset += 2 + self.withdrawn_routes_len
        msg_pack_into('!H', msg, offset, self.total_path_attribute_len)
        msg += binpathattrs
        offset += 2 + self.total_path_attribute_len
        msg += binnlri
        return msg
@BGPMessage.register_type(BGP_MSG_KEEPALIVE)
class BGPKeepAlive(BGPMessage):
    """BGP-4 KEEPALIVE Message encoder/decoder class.
    A KEEPALIVE message consists of the common BGP header only.
    An instance has the following attributes at least; they mirror the
    on-wire fields but are kept in host byte order.
    __init__ takes the corresponding args in this order.
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  The default is BGP_MSG_KEEPALIVE.
    ========================== ===============================================
    """
    _MIN_LEN = BGPMessage._HDR_LEN

    def __init__(self, type_=BGP_MSG_KEEPALIVE, len_=None, marker=None):
        super(BGPKeepAlive, self).__init__(marker=marker, len_=len_,
                                           type_=type_)

    @classmethod
    def parser(cls, buf):
        # A KEEPALIVE carries no body, so there is nothing to extract.
        return {}

    def serialize_tail(self):
        # No body to emit after the common header.
        return bytearray()
@BGPMessage.register_type(BGP_MSG_NOTIFICATION)
class BGPNotification(BGPMessage):
    """BGP-4 NOTIFICATION Message encoder/decoder class.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  The default is
                               BGP_MSG_NOTIFICATION.
    error_code                 Error code field.
    error_subcode              Error subcode field.
    data                       Data field.  The default is ''.
    ========================== ===============================================
    """
    _PACK_STR = '!BB'
    _MIN_LEN = BGPMessage._HDR_LEN + struct.calcsize(_PACK_STR)
    # Human-readable reasons keyed by (error_code, error_subcode).
    _REASONS = {
        (1, 1): 'Message Header Error: not synchronised',
        (1, 2): 'Message Header Error: bad message len',
        (1, 3): 'Message Header Error: bad message type',
        (2, 1): 'Open Message Error: unsupported version',
        (2, 2): 'Open Message Error: bad peer AS',
        (2, 3): 'Open Message Error: bad BGP identifier',
        (2, 4): 'Open Message Error: unsupported optional param',
        (2, 5): 'Open Message Error: authentication failure',
        (2, 6): 'Open Message Error: unacceptable hold time',
        (2, 7): 'Open Message Error: Unsupported Capability',
        (2, 8): 'Open Message Error: Unassigned',
        (3, 1): 'Update Message Error: malformed attribute list',
        (3, 2): 'Update Message Error: unrecognized well-known attr',
        (3, 3): 'Update Message Error: missing well-known attr',
        (3, 4): 'Update Message Error: attribute flags error',
        (3, 5): 'Update Message Error: attribute length error',
        (3, 6): 'Update Message Error: invalid origin attr',
        (3, 7): 'Update Message Error: as routing loop',
        (3, 8): 'Update Message Error: invalid next hop attr',
        (3, 9): 'Update Message Error: optional attribute error',
        (3, 10): 'Update Message Error: invalid network field',
        (3, 11): 'Update Message Error: malformed AS_PATH',
        (4, 1): 'Hold Timer Expired',
        (5, 1): 'Finite State Machine Error',
        (6, 1): 'Cease: Maximum Number of Prefixes Reached',
        (6, 2): 'Cease: Administrative Shutdown',
        (6, 3): 'Cease: Peer De-configured',
        (6, 4): 'Cease: Administrative Reset',
        (6, 5): 'Cease: Connection Rejected',
        (6, 6): 'Cease: Other Configuration Change',
        (6, 7): 'Cease: Connection Collision Resolution',
        (6, 8): 'Cease: Out of Resources',
    }
    def __init__(self,
                 error_code,
                 error_subcode,
                 data='',
                 type_=BGP_MSG_NOTIFICATION, len_=None, marker=None):
        super(BGPNotification, self).__init__(marker=marker, len_=len_,
                                              type_=type_)
        self.error_code = error_code
        self.error_subcode = error_subcode
        self.data = data
    @classmethod
    def parser(cls, buf):
        """Parse the NOTIFICATION body; return kwargs for __init__."""
        (error_code, error_subcode,) = struct.unpack_from(cls._PACK_STR,
                                                          buffer(buf))
        # Everything after the two code octets is diagnostic data.
        data = bytes(buf[2:])
        return {
            "error_code": error_code,
            "error_subcode": error_subcode,
            "data": data,
        }
    def serialize_tail(self):
        msg = bytearray(struct.pack(self._PACK_STR, self.error_code,
                                    self.error_subcode))
        msg += self.data
        return msg
    @property
    def reason(self):
        # Human-readable reason, or None for unknown code pairs.
        return self._REASONS.get((self.error_code, self.error_subcode))
@BGPMessage.register_type(BGP_MSG_ROUTE_REFRESH)
class BGPRouteRefresh(BGPMessage):
    """BGP-4 ROUTE REFRESH Message (RFC 2918) encoder/decoder class.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.
    ========================== ===============================================
    Attribute                  Description
    ========================== ===============================================
    marker                     Marker field.  Ignored when encoding.
    len                        Length field.  Ignored when encoding.
    type                       Type field.  The default is
                               BGP_MSG_ROUTE_REFRESH.
    afi                        Address Family Identifier
    safi                       Subsequent Address Family Identifier
    ========================== ===============================================
    """
    _PACK_STR = '!HBB'  # afi, demarcation (reserved octet), safi
    _MIN_LEN = BGPMessage._HDR_LEN + struct.calcsize(_PACK_STR)
    def __init__(self,
                 afi, safi, demarcation=0,
                 type_=BGP_MSG_ROUTE_REFRESH, len_=None, marker=None):
        super(BGPRouteRefresh, self).__init__(marker=marker, len_=len_,
                                              type_=type_)
        self.afi = afi
        self.safi = safi
        # The octet between afi and safi on the wire.
        self.demarcation = demarcation
    @classmethod
    def parser(cls, buf):
        """Parse the ROUTE REFRESH body; return kwargs for __init__."""
        (afi, demarcation, safi,) = struct.unpack_from(cls._PACK_STR,
                                                       buffer(buf))
        return {
            "afi": afi,
            "safi": safi,
            "demarcation": demarcation,
        }
    def serialize_tail(self):
        return bytearray(struct.pack(self._PACK_STR, self.afi,
                                     self.demarcation, self.safi))
class StreamParser(stream_parser.StreamParser):
    """Streaming parser for BGP-4 messages.

    This is a subclass of ryu.lib.packet.stream_parser.StreamParser.
    Its parse method returns a list of BGPMessage subclass instances.
    """

    def try_parse(self, data):
        # Delegate a single-message parse attempt to BGPMessage; the
        # base class drives incremental feeding of buffered data.
        return BGPMessage.parser(data)
|
from datetime import timedelta
from enum import Enum
import re
import os
from typing import Any, Dict, List, Optional, Union
import yaml
from .utils import resolve_type, substitute_env_vars
"""
This implements a simple semi-declarative configuration system based on type
annotations. Configuration objects derive from BaseConfig, and configuration fields
are found by looking at the annotated attributes of the class.
Validity checks and special handling are implemented by overriding the __init__
method of the configuration class.
Example:
::
class MyConfig(BaseConfig):
x: int = 42 # field with a default
y: str # required field
z: str = configfield(skip=True)
def __init__(self, lookup):
super().__init__(lookup)
# Validation
if self.x > 100:
raise ConfigError("x must be less than 100")
# Special handling
z = lookup.get_str("z", default=None)
if z is None:
z = lookup.get_str("oldZ")
self.z = z
my_config = MyConfig.from_yaml("config.yaml")
"""
class ConfigError(Exception):
    """Raised when a configuration value is missing or has the wrong type."""
class Defaults(Enum):
    """Sentinel values used for field defaults."""

    # Marks a field with no fallback: the key must appear in the config.
    REQUIRED = 1


class ConfigField:
    """Carrier for per-field options created by :func:`configfield`."""

    def __init__(self, *, skip, default, extra):
        self.default = default
        self.extra = extra
        self.skip = skip


def configfield(*, skip=False, default=Defaults.REQUIRED, **kwargs) -> Any:
    """Declare non-default handling for a BaseConfig field.

    Use ``skip=True`` for fields that the subclass __init__ resolves
    manually; any extra keyword arguments are forwarded to the typed
    getter used for the field.
    """
    return ConfigField(skip=skip, default=default, extra=kwargs)
class Lookup:
    """Typed accessor over a (possibly nested) dict of raw config values.

    ``path`` records where in the config tree this lookup is rooted, so
    error messages can name the full slash-separated key path.
    """

    def __init__(self, attrs: Dict[str, Any], path: Optional[str] = None):
        self.path = path
        self.attrs = attrs

    def _get_path(self, key: str):
        # Full slash-separated location of `key`, used in error messages.
        if self.path is not None:
            return self.path + '/' + key
        else:
            return key

    def sublookup(self, parent_key: str):
        """Return a Lookup rooted at the sub-mapping under ``parent_key``."""
        attrs = self.attrs.get(parent_key, {})
        # Use the full path (not just parent_key) so that errors raised
        # from doubly-nested configs report their complete location.
        return Lookup(attrs, self._get_path(parent_key))

    def iterate_objects(self, parent_key: str):
        """Yield ``(name, Lookup)`` for each named child of ``parent_key``."""
        objects = self.attrs.get(parent_key)
        if not objects:
            return
        for name, attrs in objects.items():
            yield name, Lookup(attrs, self._get_path(parent_key) + '/' + name)

    def _get(self, key: str, default: Any):
        # Defaults.REQUIRED is the sentinel for "no default": a missing
        # key is then a configuration error rather than a fallback.
        if default is Defaults.REQUIRED:
            try:
                return self.attrs[key]
            except KeyError:
                raise ConfigError("A value is required for {}".format(self._get_path(key))) \
                    from None
        else:
            return self.attrs.get(key, default)

    def get_str(
        self, key: str,
        default: Union[str, None, Defaults] = Defaults.REQUIRED,
        force_trailing_slash: bool = False
    ) -> Optional[str]:
        """Fetch a string value, with environment-variable substitution."""
        val = self._get(key, default)
        if val is None:
            return None
        if not isinstance(val, str):
            raise ConfigError("{} must be a string".format(self._get_path(key)))
        val = substitute_env_vars(val)
        if force_trailing_slash and not val.endswith('/'):
            val += '/'
        return val

    def get_bool(self, key: str, default: Union[bool, Defaults] = Defaults.REQUIRED) -> bool:
        """Fetch a boolean value."""
        val = self._get(key, default)
        if not isinstance(val, bool):
            raise ConfigError("{} must be a boolean".format(self._get_path(key)))
        return val

    def get_int(self, key: str, default: Union[int, Defaults] = Defaults.REQUIRED) -> int:
        """Fetch an integer value (booleans are rejected)."""
        val = self._get(key, default)
        # bool is a subclass of int, so it must be excluded explicitly:
        # YAML `yes`/`no` parse as booleans and must not pass as integers.
        if isinstance(val, bool) or not isinstance(val, int):
            raise ConfigError("{} must be an integer".format(self._get_path(key)))
        return val

    def get_str_list(
        self, key: str, default: Union[List[str], Defaults] = Defaults.REQUIRED
    ) -> List[str]:
        """Fetch a list of strings, substituting env vars in each element."""
        val = self._get(key, default)
        if not isinstance(val, list) or not all(isinstance(v, str) for v in val):
            raise ConfigError("{} must be a list of strings".format(self._get_path(key)))
        return [substitute_env_vars(v) for v in val]

    def get_str_dict(
        self, key: str, default: Union[Dict[str, str], Defaults] = Defaults.REQUIRED
    ) -> Dict[str, str]:
        """Fetch a str->str mapping, substituting env vars in keys and values."""
        val = self._get(key, default)
        if not isinstance(val, dict) or not all(isinstance(v, str) for v in val.values()):
            raise ConfigError("{} must be a mapping with string values".format(self._get_path(key)))
        return {substitute_env_vars(k): substitute_env_vars(v) for k, v in val.items()}

    def get_timedelta(
        self, key: str,
        default: Union[timedelta, None, Defaults] = Defaults.REQUIRED,
        force_suffix: bool = True
    ) -> Optional[timedelta]:
        """Fetch a duration such as ``"30s"``, ``"2h"`` or (optionally) bare seconds."""
        val = self._get(key, default=default)
        if val is None:
            return None
        if isinstance(val, timedelta):  # the default
            return val
        # Exclude booleans here too (bool is an int subclass).
        if isinstance(val, int) and not isinstance(val, bool) and not force_suffix:
            return timedelta(seconds=val)
        if isinstance(val, str):
            m = re.match(r'^(\d+)([dhms])$', val)
            if m:
                if m.group(2) == "d":
                    return timedelta(days=int(m.group(1)))
                elif m.group(2) == "h":
                    return timedelta(hours=int(m.group(1)))
                elif m.group(2) == "m":
                    return timedelta(minutes=int(m.group(1)))
                else:
                    return timedelta(seconds=int(m.group(1)))
        raise ConfigError("{} should be a time interval of the form <digits>[dhms]"
                          .format(self._get_path(key)))
class BaseConfig:
    """Base class for declarative configuration objects.

    Subclasses declare fields via class-level type annotations; the
    constructor walks those annotations and resolves each value from the
    given ``lookup``, dispatching on the annotated type.
    """

    def __init__(self, lookup: Lookup):
        annotations = getattr(self, '__annotations__', None)
        if not annotations:
            return
        for name, annotation in annotations.items():
            resolved, args, optional = resolve_type(annotation)
            class_default = getattr(self, name, Defaults.REQUIRED)
            if isinstance(class_default, ConfigField):
                if class_default.skip:
                    # Field is resolved manually in the subclass __init__.
                    continue
                kwargs = {"default": class_default.default, **class_default.extra}
            else:
                kwargs = {"default": class_default}
            # Dispatch on the resolved annotation type; the BaseConfig
            # check must stay last since it is the only subclass test.
            if resolved == str:
                value: Any = lookup.get_str(name, **kwargs)
            elif resolved == bool:
                value = lookup.get_bool(name, **kwargs)
            elif resolved == int:
                value = lookup.get_int(name, **kwargs)
            elif resolved == list and args[0] == str:
                value = lookup.get_str_list(name, **kwargs)
            elif resolved == dict and args[0] == str and args[1] == str:
                value = lookup.get_str_dict(name, **kwargs)
            elif resolved == timedelta:
                value = lookup.get_timedelta(name, **kwargs)
            elif issubclass(resolved, BaseConfig):
                value = resolved(lookup.sublookup(name))
            else:
                raise RuntimeError(f"Don't know how to handle config field of type {annotation}")
            setattr(self, name, value)

    @classmethod
    def from_path(cls, path: str):
        """Load, parse and validate a config of this class from a YAML file."""
        with open(path, 'r') as f:
            parsed = yaml.safe_load(f)
        if not isinstance(parsed, dict):
            raise ConfigError(f"Top level of {os.path.basename(path)} must be an object with keys")
        return cls(Lookup(parsed))
|
from textwrap import dedent
from numbers import Number
import warnings
from colorsys import rgb_to_hls
from functools import partial
import numpy as np
import pandas as pd
try:
from scipy.stats import gaussian_kde
_no_scipy = False
except ImportError:
from .external.kde import gaussian_kde
_no_scipy = True
import matplotlib as mpl
from matplotlib.collections import PatchCollection
import matplotlib.patches as Patches
import matplotlib.pyplot as plt
from ._core import (
VectorPlotter,
variable_type,
infer_orient,
categorical_order,
)
from . import utils
from .utils import remove_na, _normal_quantile_func, _draw_figure, _default_color
from .algorithms import bootstrap
from .palettes import color_palette, husl_palette, light_palette, dark_palette
from .axisgrid import FacetGrid, _facet_docs
from ._decorators import _deprecate_positional_args
__all__ = [
"catplot", "factorplot",
"stripplot", "swarmplot",
"boxplot", "violinplot", "boxenplot",
"pointplot", "barplot", "countplot",
]
class _CategoricalPlotterNew(VectorPlotter):
    """Variable-oriented plotter for categorical plots (post-refactor path).

    Extends VectorPlotter with the notion of a categorical "orientation":
    one axis holds discrete category positions, the other holds values.
    """

    semantics = "x", "y", "hue", "units"

    wide_structure = {"x": "@columns", "y": "@values", "hue": "@columns"}
    flat_structure = {"x": "@index", "y": "@values"}

    def __init__(
        self,
        data=None,
        variables={},  # NOTE(review): mutable default; appears never mutated here
        order=None,
        orient=None,
        require_numeric=False,
    ):
        """Assign variables, infer orientation, and set up category levels."""

        super().__init__(data=data, variables=variables)

        # This method takes care of some bookkeeping that is necessary because the
        # original categorical plots (prior to the 2021 refactor) had some rules that
        # don't fit exactly into the logic of _core. It may be wise to have a second
        # round of refactoring that moves the logic deeper, but this will keep things
        # relatively sensible for now.

        # The concept of an "orientation" is important to the original categorical
        # plots, but there's no provision for it in _core, so we need to do it here.
        # Note that it could be useful for the other functions in at least two ways
        # (orienting a univariate distribution plot from long-form data and selecting
        # the aggregation axis in lineplot), so we may want to eventually refactor it.
        self.orient = infer_orient(
            x=self.plot_data.get("x", None),
            y=self.plot_data.get("y", None),
            orient=orient,
            require_numeric=require_numeric,
        )

        # Short-circuit in the case of an empty plot
        if not self.has_xy_data:
            return

        # For wide data, orient determines assignment to x/y differently from the
        # wide_structure rules in _core. If we do decide to make orient part of the
        # _core variable assignment, we'll want to figure out how to express that.
        if self.input_format == "wide" and self.orient == "h":
            # Swap x/y in the data, the variable names, and the inferred types.
            self.plot_data = self.plot_data.rename(columns={"x": "y", "y": "x"})
            orig_x, orig_y = self.variables["x"], self.variables["y"]
            self.variables.update({"x": orig_y, "y": orig_x})
            orig_x_type, orig_y_type = self.var_types["x"], self.var_types["y"]
            self.var_types.update({"x": orig_y_type, "y": orig_x_type})

        # Categorical plots can be "univariate" in which case they get an anonymous
        # category label on the opposite axis. Note: this duplicates code in the core
        # scale_categorical function. We need to do it here because of the next line.
        if self.cat_axis not in self.variables:
            self.variables[self.cat_axis] = None
            self.var_types[self.cat_axis] = "categorical"
            self.plot_data[self.cat_axis] = ""

        # Categorical variables have discrete levels that we need to track
        cat_levels = categorical_order(self.plot_data[self.cat_axis], order)
        self.var_levels[self.cat_axis] = cat_levels

    def _hue_backcompat(self, color, palette, hue_order, force_hue=False):
        """Implement backwards compatibility for hue parametrization.

        Note: the force_hue parameter is used so that functions can be shown to
        pass existing tests during refactoring and then tested for new behavior.
        It can be removed after completion of the work.
        """
        # The original categorical functions applied a palette to the categorical axis
        # by default. We want to require an explicit hue mapping, to be more consistent
        # with how things work elsewhere now. I don't think there's any good way to
        # do this gently -- because it's triggered by the default value of hue=None,
        # users would always get a warning, unless we introduce some sentinel "default"
        # argument for this change. That's possible, but asking users to set `hue=None`
        # on every call is annoying.
        # We are keeping the logic for implementing the old behavior in with the current
        # system so that (a) we can punt on that decision and (b) we can ensure that
        # refactored code passes old tests.
        default_behavior = color is None or palette is not None
        if force_hue and "hue" not in self.variables and default_behavior:
            self._redundant_hue = True
            # Mirror the categorical axis variable into hue.
            self.plot_data["hue"] = self.plot_data[self.cat_axis]
            self.variables["hue"] = self.variables[self.cat_axis]
            self.var_types["hue"] = "categorical"
            hue_order = self.var_levels[self.cat_axis]

            # Because we convert the categorical axis variable to string,
            # we need to update a dictionary palette too
            if isinstance(palette, dict):
                palette = {str(k): v for k, v in palette.items()}

        else:
            self._redundant_hue = False

        # Previously, categorical plots had a trick where color= could seed the palette.
        # Because that's an explicit parameterization, we are going to give it one
        # release cycle with a warning before removing.
        if "hue" in self.variables and palette is None and color is not None:
            if not isinstance(color, str):
                color = mpl.colors.to_hex(color)
            palette = f"dark:{color}"
            msg = (
                "Setting a gradient palette using color= is deprecated and will be "
                f"removed in version 0.13. Set `palette='{palette}'` for same effect."
            )
            warnings.warn(msg, FutureWarning)

        return palette, hue_order

    @property
    def cat_axis(self):
        """Name of the axis ("x" or "y") that carries the categories."""
        return {"v": "x", "h": "y"}[self.orient]

    def _get_gray(self, colors):
        """Get a grayscale value that looks good with color."""
        if not len(colors):
            return None
        unique_colors = np.unique(colors, axis=0)
        # Derive gray from the darkest color's lightness (HLS L channel).
        light_vals = [rgb_to_hls(*rgb[:3])[1] for rgb in unique_colors]
        lum = min(light_vals) * .6
        return (lum, lum, lum)

    def _adjust_cat_axis(self, ax, axis):
        """Set ticks and limits for a categorical variable."""
        # Note: in theory, this could happen in _attach for all categorical axes
        # But two reasons not to do that:
        #   - If it happens before plotting, autoscaling messes up the plot limits
        #   - It would change existing plots from other seaborn functions
        if self.var_types[axis] != "categorical":
            return

        # We can infer the total number of categories (including those from previous
        # plots that are not part of the plot we are currently making) from the number
        # of ticks, which matplotlib sets up while doing unit conversion. This feels
        # slightly risky, as if we are relying on something that may be a matplotlib
        # implementation detail. But I cannot think of a better way to keep track of
        # the state from previous categorical calls (see GH2516 for context)
        n = len(getattr(ax, f"get_{axis}ticks")())

        if axis == "x":
            ax.xaxis.grid(False)
            ax.set_xlim(-.5, n - .5, auto=None)
        else:
            ax.yaxis.grid(False)
            # Note limits that correspond to previously-inverted y axis
            ax.set_ylim(n - .5, -.5, auto=None)

    @property
    def _native_width(self):
        """Return unit of width separating categories on native numeric scale."""
        unique_values = np.unique(self.comp_data[self.cat_axis])
        if len(unique_values) > 1:
            native_width = np.nanmin(np.diff(unique_values))
        else:
            native_width = 1
        return native_width

    def _nested_offsets(self, width, dodge):
        """Return offsets for each hue level for dodged plots."""
        offsets = None
        if "hue" in self.variables:
            n_levels = len(self._hue_map.levels)
            if dodge:
                # Centered, evenly spaced positions within the group width.
                each_width = width / n_levels
                offsets = np.linspace(0, width - each_width, n_levels)
                offsets -= offsets.mean()
            else:
                offsets = np.zeros(n_levels)
        return offsets

    # Note that the plotting methods here aim (in most cases) to produce the
    # exact same artists as the original (pre 0.12) version of the code, so
    # there is some weirdness that might not otherwise be clean or make sense in
    # this context, such as adding empty artists for combinations of variables
    # with no observations

    def plot_strips(
        self,
        jitter,
        dodge,
        color,
        edgecolor,
        plot_kws,
    ):
        """Draw jittered categorical scatter (stripplot) for each data subset."""

        width = .8 * self._native_width
        offsets = self._nested_offsets(width, dodge)

        # jitter=True picks the conventional amount; otherwise it is a scale.
        if jitter is True:
            jlim = 0.1
        else:
            jlim = float(jitter)
        if "hue" in self.variables and dodge:
            jlim /= len(self._hue_map.levels)
        jlim *= self._native_width
        jitterer = partial(np.random.uniform, low=-jlim, high=+jlim)

        iter_vars = [self.cat_axis]
        if dodge:
            iter_vars.append("hue")

        ax = self.ax
        dodge_move = jitter_move = 0

        for sub_vars, sub_data in self.iter_data(iter_vars,
                                                 from_comp_data=True,
                                                 allow_empty=True):

            if offsets is not None:
                dodge_move = offsets[sub_data["hue"].map(self._hue_map.levels.index)]

            jitter_move = jitterer(size=len(sub_data)) if len(sub_data) > 1 else 0

            adjusted_data = sub_data[self.cat_axis] + dodge_move + jitter_move
            sub_data.loc[:, self.cat_axis] = adjusted_data

            # Positions were computed in scale units; map back if log-scaled.
            for var in "xy":
                if self._log_scaled(var):
                    sub_data[var] = np.power(10, sub_data[var])

            ax = self._get_axes(sub_vars)
            points = ax.scatter(sub_data["x"], sub_data["y"], color=color, **plot_kws)

            if "hue" in self.variables:
                points.set_facecolors(self._hue_map(sub_data["hue"]))

            if edgecolor == "gray":  # XXX TODO change to "auto"
                points.set_edgecolors(self._get_gray(points.get_facecolors()))
            else:
                points.set_edgecolors(edgecolor)

        # TODO XXX fully implement legend
        show_legend = not self._redundant_hue and self.input_format != "wide"
        if "hue" in self.variables and show_legend:
            for level in self._hue_map.levels:
                color = self._hue_map(level)
                ax.scatter([], [], s=60, color=mpl.colors.rgb2hex(color), label=level)
            ax.legend(loc="best", title=self.variables["hue"])

    def plot_swarms(
        self,
        dodge,
        color,
        edgecolor,
        warn_thresh,
        plot_kws,
    ):
        """Draw non-overlapping categorical scatter (swarmplot) for each subset.

        Point positions are finalized lazily at draw time by patching each
        collection's draw method with a Beeswarm adjustment.
        """

        width = .8 * self._native_width
        offsets = self._nested_offsets(width, dodge)

        iter_vars = [self.cat_axis]
        if dodge:
            iter_vars.append("hue")

        ax = self.ax
        point_collections = {}
        dodge_move = 0

        for sub_vars, sub_data in self.iter_data(iter_vars,
                                                 from_comp_data=True,
                                                 allow_empty=True):

            if offsets is not None:
                dodge_move = offsets[sub_data["hue"].map(self._hue_map.levels.index)]

            if not sub_data.empty:
                sub_data.loc[:, self.cat_axis] = sub_data[self.cat_axis] + dodge_move

            # Positions were computed in scale units; map back if log-scaled.
            for var in "xy":
                if self._log_scaled(var):
                    sub_data[var] = np.power(10, sub_data[var])

            ax = self._get_axes(sub_vars)
            points = ax.scatter(sub_data["x"], sub_data["y"], color=color, **plot_kws)

            if "hue" in self.variables:
                points.set_facecolors(self._hue_map(sub_data["hue"]))

            if edgecolor == "gray":  # XXX TODO change to "auto"
                points.set_edgecolors(self._get_gray(points.get_facecolors()))
            else:
                points.set_edgecolors(edgecolor)

            if not sub_data.empty:
                point_collections[sub_data[self.cat_axis].iloc[0]] = points

        beeswarm = Beeswarm(
            width=width, orient=self.orient, warn_thresh=warn_thresh,
        )
        for center, points in point_collections.items():
            if points.get_offsets().shape[0] > 1:

                def draw(points, renderer, *, center=center):
                    # Adjust point positions only when actually rendering,
                    # so the final axes size/limits are known.

                    beeswarm(points, center)

                    ax = points.axes
                    if self.orient == "h":
                        scalex = False
                        scaley = ax.get_autoscaley_on()
                    else:
                        scalex = ax.get_autoscalex_on()
                        scaley = False

                    # This prevents us from undoing the nice categorical axis limits
                    # set in _adjust_cat_axis, because that method currently leave
                    # the autoscale flag in its original setting. It may be better
                    # to disable autoscaling there to avoid needing to do this.
                    fixed_scale = self.var_types[self.cat_axis] == "categorical"
                    ax.update_datalim(points.get_datalim(ax.transData))
                    if not fixed_scale and (scalex or scaley):
                        ax.autoscale_view(scalex=scalex, scaley=scaley)

                    super(points.__class__, points).draw(renderer)

                # Bind the patched draw as a method on this collection.
                points.draw = draw.__get__(points)

        _draw_figure(ax.figure)

        # TODO XXX fully implement legend
        show_legend = not self._redundant_hue and self.input_format != "wide"
        if "hue" in self.variables and show_legend:  # TODO and legend:
            for level in self._hue_map.levels:
                color = self._hue_map(level)
                ax.scatter([], [], s=60, color=mpl.colors.rgb2hex(color), label=level)
            ax.legend(loc="best", title=self.variables["hue"])
class _CategoricalFacetPlotter(_CategoricalPlotterNew):
    """Categorical plotter that also understands col/row facet semantics."""
    semantics = _CategoricalPlotterNew.semantics + ("col", "row")
class _CategoricalPlotter(object):
    """Legacy (pre-0.12) base plotter for categorical plots.

    Converts wide- or long-form inputs into a common representation (a
    list of value arrays, one per category) plus bookkeeping attributes
    that subclasses use to draw boxes, violins, bars, etc.
    """

    width = .8
    default_palette = "light"
    require_numeric = True

    def establish_variables(self, x=None, y=None, hue=None, data=None,
                            orient=None, order=None, hue_order=None,
                            units=None):
        """Convert input specification into a common representation."""

        # Option 1:
        # We are plotting a wide-form dataset
        # -----------------------------------
        if x is None and y is None:

            # Do a sanity check on the inputs
            if hue is not None:
                error = "Cannot use `hue` without `x` and `y`"
                raise ValueError(error)

            # No hue grouping with wide inputs
            plot_hues = None
            hue_title = None
            hue_names = None

            # No statistical units with wide inputs
            plot_units = None

            # We also won't get a axes labels here
            value_label = None
            group_label = None

            # Option 1a:
            # The input data is a Pandas DataFrame
            # ------------------------------------

            if isinstance(data, pd.DataFrame):

                # Order the data correctly
                if order is None:
                    order = []
                    # Reduce to just numeric columns
                    for col in data:
                        if variable_type(data[col]) == "numeric":
                            order.append(col)
                plot_data = data[order]
                group_names = order
                group_label = data.columns.name

                # Convert to a list of arrays, the common representation
                # (`items` replaces `iteritems`, which pandas 2.0 removed)
                iter_data = plot_data.items()
                plot_data = [np.asarray(s, float) for k, s in iter_data]

            # Option 1b:
            # The input data is an array or list
            # ----------------------------------

            else:

                # We can't reorder the data
                if order is not None:
                    error = "Input data must be a pandas object to reorder"
                    raise ValueError(error)

                # The input data is an array
                if hasattr(data, "shape"):
                    if len(data.shape) == 1:
                        if np.isscalar(data[0]):
                            plot_data = [data]
                        else:
                            plot_data = list(data)
                    elif len(data.shape) == 2:
                        nr, nc = data.shape
                        if nr == 1 or nc == 1:
                            plot_data = [data.ravel()]
                        else:
                            plot_data = [data[:, i] for i in range(nc)]
                    else:
                        error = ("Input `data` can have no "
                                 "more than 2 dimensions")
                        raise ValueError(error)

                # Check if `data` is None to let us bail out here (for testing)
                elif data is None:
                    plot_data = [[]]

                # The input data is a flat list
                elif np.isscalar(data[0]):
                    plot_data = [data]

                # The input data is a nested list
                # This will catch some things that might fail later
                # but exhaustive checks are hard
                else:
                    plot_data = data

                # Convert to a list of arrays, the common representation
                plot_data = [np.asarray(d, float) for d in plot_data]

                # The group names will just be numeric indices
                group_names = list(range(len(plot_data)))

            # Figure out the plotting orientation
            orient = "h" if str(orient).startswith("h") else "v"

        # Option 2:
        # We are plotting a long-form dataset
        # -----------------------------------

        else:

            # See if we need to get variables from `data`
            if data is not None:
                x = data.get(x, x)
                y = data.get(y, y)
                hue = data.get(hue, hue)
                units = data.get(units, units)

            # Validate the inputs
            for var in [x, y, hue, units]:
                if isinstance(var, str):
                    err = "Could not interpret input '{}'".format(var)
                    raise ValueError(err)

            # Figure out the plotting orientation
            orient = infer_orient(
                x, y, orient, require_numeric=self.require_numeric
            )

            # Option 2a:
            # We are plotting a single set of data
            # ------------------------------------
            if x is None or y is None:

                # Determine where the data are
                vals = y if x is None else x

                # Put them into the common representation
                plot_data = [np.asarray(vals)]

                # Get a label for the value axis
                if hasattr(vals, "name"):
                    value_label = vals.name
                else:
                    value_label = None

                # This plot will not have group labels or hue nesting
                groups = None
                group_label = None
                group_names = []
                plot_hues = None
                hue_names = None
                hue_title = None
                plot_units = None

            # Option 2b:
            # We are grouping the data values by another variable
            # ---------------------------------------------------
            else:

                # Determine which role each variable will play
                if orient == "v":
                    vals, groups = y, x
                else:
                    vals, groups = x, y

                # Get the categorical axis label
                group_label = None
                if hasattr(groups, "name"):
                    group_label = groups.name

                # Get the order on the categorical axis
                group_names = categorical_order(groups, order)

                # Group the numeric data
                plot_data, value_label = self._group_longform(vals, groups,
                                                              group_names)

                # Now handle the hue levels for nested ordering
                if hue is None:
                    plot_hues = None
                    hue_title = None
                    hue_names = None
                else:

                    # Get the order of the hue levels
                    hue_names = categorical_order(hue, hue_order)

                    # Group the hue data
                    plot_hues, hue_title = self._group_longform(hue, groups,
                                                                group_names)

                # Now handle the units for nested observations
                if units is None:
                    plot_units = None
                else:
                    plot_units, _ = self._group_longform(units, groups,
                                                         group_names)

        # Assign object attributes
        # ------------------------
        self.orient = orient
        self.plot_data = plot_data
        self.group_label = group_label
        self.value_label = value_label
        self.group_names = group_names
        self.plot_hues = plot_hues
        self.hue_title = hue_title
        self.hue_names = hue_names
        self.plot_units = plot_units

    def _group_longform(self, vals, grouper, order):
        """Group a long-form variable by another with correct order."""

        # Ensure that the groupby will work
        if not isinstance(vals, pd.Series):
            if isinstance(grouper, pd.Series):
                index = grouper.index
            else:
                index = None
            vals = pd.Series(vals, index=index)

        # Group the val data
        grouped_vals = vals.groupby(grouper)
        out_data = []
        for g in order:
            try:
                g_vals = grouped_vals.get_group(g)
            except KeyError:
                # No observations for this level: represent with empty array
                g_vals = np.array([])
            out_data.append(g_vals)

        # Get the vals axis label
        label = vals.name

        return out_data, label

    def establish_colors(self, color, palette, saturation):
        """Get a list of colors for the main component of the plots."""
        if self.hue_names is None:
            n_colors = len(self.plot_data)
        else:
            n_colors = len(self.hue_names)

        # Determine the main colors
        if color is None and palette is None:
            # Determine whether the current palette will have enough values
            # If not, we'll default to the husl palette so each is distinct
            current_palette = utils.get_color_cycle()
            if n_colors <= len(current_palette):
                colors = color_palette(n_colors=n_colors)
            else:
                colors = husl_palette(n_colors, l=.7)  # noqa

        elif palette is None:
            # When passing a specific color, the interpretation depends
            # on whether there is a hue variable or not.
            # If so, we will make a blend palette so that the different
            # levels have some amount of variation.
            if self.hue_names is None:
                colors = [color] * n_colors
            else:
                if self.default_palette == "light":
                    colors = light_palette(color, n_colors)
                elif self.default_palette == "dark":
                    colors = dark_palette(color, n_colors)
                else:
                    raise RuntimeError("No default palette specified")
        else:

            # Let `palette` be a dict mapping level to color
            if isinstance(palette, dict):
                if self.hue_names is None:
                    levels = self.group_names
                else:
                    levels = self.hue_names
                palette = [palette[level] for level in levels]

            colors = color_palette(palette, n_colors)

        # Desaturate a bit because these are patches
        if saturation < 1:
            colors = color_palette(colors, desat=saturation)

        # Convert the colors to a common representations
        rgb_colors = color_palette(colors)

        # Determine the gray color to use for the lines framing the plot
        light_vals = [rgb_to_hls(*c)[1] for c in rgb_colors]
        lum = min(light_vals) * .6
        gray = mpl.colors.rgb2hex((lum, lum, lum))

        # Assign object attributes
        self.colors = rgb_colors
        self.gray = gray

    @property
    def hue_offsets(self):
        """A list of center positions for plots when hue nesting is used."""
        n_levels = len(self.hue_names)
        if self.dodge:
            each_width = self.width / n_levels
            offsets = np.linspace(0, self.width - each_width, n_levels)
            offsets -= offsets.mean()
        else:
            offsets = np.zeros(n_levels)

        return offsets

    @property
    def nested_width(self):
        """A float with the width of plot elements when hue nesting is used."""
        if self.dodge:
            width = self.width / len(self.hue_names) * .98
        else:
            width = self.width
        return width

    def annotate_axes(self, ax):
        """Add descriptive labels to an Axes object."""
        if self.orient == "v":
            xlabel, ylabel = self.group_label, self.value_label
        else:
            xlabel, ylabel = self.value_label, self.group_label

        if xlabel is not None:
            ax.set_xlabel(xlabel)
        if ylabel is not None:
            ax.set_ylabel(ylabel)

        group_names = self.group_names
        if not group_names:
            group_names = ["" for _ in range(len(self.plot_data))]

        if self.orient == "v":
            ax.set_xticks(np.arange(len(self.plot_data)))
            ax.set_xticklabels(group_names)
        else:
            ax.set_yticks(np.arange(len(self.plot_data)))
            ax.set_yticklabels(group_names)

        if self.orient == "v":
            ax.xaxis.grid(False)
            ax.set_xlim(-.5, len(self.plot_data) - .5, auto=None)
        else:
            ax.yaxis.grid(False)
            ax.set_ylim(-.5, len(self.plot_data) - .5, auto=None)

        if self.hue_names is not None:
            ax.legend(loc="best", title=self.hue_title)

    def add_legend_data(self, ax, color, label):
        """Add a dummy patch object so we can get legend data."""
        rect = plt.Rectangle([0, 0], 0, 0,
                             linewidth=self.linewidth / 2,
                             edgecolor=self.gray,
                             facecolor=color,
                             label=label)
        ax.add_patch(rect)
class _BoxPlotter(_CategoricalPlotter):
    """Legacy plotter behind :func:`boxplot` (pre-0.12 code path)."""

    def __init__(self, x, y, hue, data, order, hue_order,
                 orient, color, palette, saturation,
                 width, dodge, fliersize, linewidth):
        # Resolve the data layout and the color list up front;
        # actual drawing happens later in plot().
        self.establish_variables(x, y, hue, data, orient, order, hue_order)
        self.establish_colors(color, palette, saturation)

        self.dodge = dodge
        self.width = width
        self.fliersize = fliersize

        if linewidth is None:
            # Fall back to the matplotlib default line width.
            linewidth = mpl.rcParams["lines.linewidth"]
        self.linewidth = linewidth

    def draw_boxplot(self, ax, kws):
        """Use matplotlib to draw a boxplot on an Axes."""
        vert = self.orient == "v"

        props = {}
        for obj in ["box", "whisker", "cap", "median", "flier"]:
            # Per-artist style overrides, e.g. boxprops / whiskerprops.
            props[obj] = kws.pop(obj + "props", {})

        for i, group_data in enumerate(self.plot_data):

            if self.plot_hues is None:

                # Handle case where there is data at this level
                if group_data.size == 0:
                    continue

                # Draw a single box or a set of boxes
                # with a single level of grouping
                box_data = np.asarray(remove_na(group_data))

                # Handle case where there is no non-null data
                if box_data.size == 0:
                    continue

                artist_dict = ax.boxplot(box_data,
                                         vert=vert,
                                         patch_artist=True,
                                         positions=[i],
                                         widths=self.width,
                                         **kws)
                color = self.colors[i]
                self.restyle_boxplot(artist_dict, color, props)
            else:
                # Draw nested groups of boxes
                offsets = self.hue_offsets
                for j, hue_level in enumerate(self.hue_names):

                    # Add a legend for this hue level
                    if not i:
                        self.add_legend_data(ax, self.colors[j], hue_level)

                    # Handle case where there is data at this level
                    if group_data.size == 0:
                        continue

                    hue_mask = self.plot_hues[i] == hue_level
                    box_data = np.asarray(remove_na(group_data[hue_mask]))

                    # Handle case where there is no non-null data
                    if box_data.size == 0:
                        continue

                    # Dodge this box sideways within the category slot.
                    center = i + offsets[j]
                    artist_dict = ax.boxplot(box_data,
                                             vert=vert,
                                             patch_artist=True,
                                             positions=[center],
                                             widths=self.nested_width,
                                             **kws)
                    self.restyle_boxplot(artist_dict, self.colors[j], props)
                    # Add legend data, but just for one set of boxes

    def restyle_boxplot(self, artist_dict, color, props):
        """Take a drawn matplotlib boxplot and make it look nice."""
        for box in artist_dict["boxes"]:
            box.update(dict(facecolor=color,
                            zorder=.9,
                            edgecolor=self.gray,
                            linewidth=self.linewidth))
            box.update(props["box"])
        for whisk in artist_dict["whiskers"]:
            whisk.update(dict(color=self.gray,
                              linewidth=self.linewidth,
                              linestyle="-"))
            whisk.update(props["whisker"])
        for cap in artist_dict["caps"]:
            cap.update(dict(color=self.gray,
                            linewidth=self.linewidth))
            cap.update(props["cap"])
        for med in artist_dict["medians"]:
            med.update(dict(color=self.gray,
                            linewidth=self.linewidth))
            med.update(props["median"])
        for fly in artist_dict["fliers"]:
            fly.update(dict(markerfacecolor=self.gray,
                            marker="d",
                            markeredgecolor=self.gray,
                            markersize=self.fliersize))
            fly.update(props["flier"])

    def plot(self, ax, boxplot_kws):
        """Make the plot."""
        self.draw_boxplot(ax, boxplot_kws)
        self.annotate_axes(ax)
        if self.orient == "h":
            # Flip so the first category appears at the top.
            ax.invert_yaxis()
class _ViolinPlotter(_CategoricalPlotter):
def __init__(self, x, y, hue, data, order, hue_order,
bw, cut, scale, scale_hue, gridsize,
width, inner, split, dodge, orient, linewidth,
color, palette, saturation):
self.establish_variables(x, y, hue, data, orient, order, hue_order)
self.establish_colors(color, palette, saturation)
self.estimate_densities(bw, cut, scale, scale_hue, gridsize)
self.gridsize = gridsize
self.width = width
self.dodge = dodge
if inner is not None:
if not any([inner.startswith("quart"),
inner.startswith("box"),
inner.startswith("stick"),
inner.startswith("point")]):
err = "Inner style '{}' not recognized".format(inner)
raise ValueError(err)
self.inner = inner
if split and self.hue_names is not None and len(self.hue_names) != 2:
msg = "There must be exactly two hue levels to use `split`.'"
raise ValueError(msg)
self.split = split
if linewidth is None:
linewidth = mpl.rcParams["lines.linewidth"]
self.linewidth = linewidth
    def estimate_densities(self, bw, cut, scale, scale_hue, gridsize):
        """Find the support and density for all of the data.

        Populates ``self.support`` and ``self.density`` (parallel to
        ``self.plot_data``, nested one level deeper when hue nesting is
        used).  Groups with no data get an empty support and a density of
        ``[1.]``; groups with one unique value get that single value as
        support.  Densities are then rescaled according to ``scale``.
        """
        # Initialize data structures to keep track of plotting data
        if self.hue_names is None:
            support = []
            density = []
            counts = np.zeros(len(self.plot_data))
            max_density = np.zeros(len(self.plot_data))
        else:
            support = [[] for _ in self.plot_data]
            density = [[] for _ in self.plot_data]
            size = len(self.group_names), len(self.hue_names)
            counts = np.zeros(size)
            max_density = np.zeros(size)
        for i, group_data in enumerate(self.plot_data):
            # Option 1: we have a single level of grouping
            # --------------------------------------------
            if self.plot_hues is None:
                # Strip missing datapoints
                kde_data = remove_na(group_data)
                # Handle special case of no data at this level
                if kde_data.size == 0:
                    support.append(np.array([]))
                    density.append(np.array([1.]))
                    counts[i] = 0
                    max_density[i] = 0
                    continue
                # Handle special case of a single unique datapoint
                elif np.unique(kde_data).size == 1:
                    support.append(np.unique(kde_data))
                    density.append(np.array([1.]))
                    counts[i] = 1
                    max_density[i] = 0
                    continue
                # Fit the KDE and get the used bandwidth size
                kde, bw_used = self.fit_kde(kde_data, bw)
                # Determine the support grid and get the density over it
                support_i = self.kde_support(kde_data, bw_used, cut, gridsize)
                density_i = kde.evaluate(support_i)
                # Update the data structures with these results
                support.append(support_i)
                density.append(density_i)
                counts[i] = kde_data.size
                max_density[i] = density_i.max()
            # Option 2: we have nested grouping by a hue variable
            # ---------------------------------------------------
            else:
                for j, hue_level in enumerate(self.hue_names):
                    # Handle special case of no data at this category level
                    if not group_data.size:
                        support[i].append(np.array([]))
                        density[i].append(np.array([1.]))
                        counts[i, j] = 0
                        max_density[i, j] = 0
                        continue
                    # Select out the observations for this hue level
                    hue_mask = self.plot_hues[i] == hue_level
                    # Strip missing datapoints
                    kde_data = remove_na(group_data[hue_mask])
                    # Handle special case of no data at this level
                    if kde_data.size == 0:
                        support[i].append(np.array([]))
                        density[i].append(np.array([1.]))
                        counts[i, j] = 0
                        max_density[i, j] = 0
                        continue
                    # Handle special case of a single unique datapoint
                    elif np.unique(kde_data).size == 1:
                        support[i].append(np.unique(kde_data))
                        density[i].append(np.array([1.]))
                        counts[i, j] = 1
                        max_density[i, j] = 0
                        continue
                    # Fit the KDE and get the used bandwidth size
                    kde, bw_used = self.fit_kde(kde_data, bw)
                    # Determine the support grid and get the density over it
                    support_ij = self.kde_support(kde_data, bw_used,
                                                  cut, gridsize)
                    density_ij = kde.evaluate(support_ij)
                    # Update the data structures with these results
                    support[i].append(support_ij)
                    density[i].append(density_ij)
                    counts[i, j] = kde_data.size
                    max_density[i, j] = density_ij.max()
        # Scale the height of the density curve.
        # For a violinplot the density is non-quantitative.
        # The objective here is to scale the curves relative to 1 so that
        # they can be multiplied by the width parameter during plotting.
        if scale == "area":
            self.scale_area(density, max_density, scale_hue)
        elif scale == "width":
            self.scale_width(density)
        elif scale == "count":
            self.scale_count(density, counts, scale_hue)
        else:
            raise ValueError("scale method '{}' not recognized".format(scale))
        # Set object attributes that will be used while plotting
        self.support = support
        self.density = density
def fit_kde(self, x, bw):
"""Estimate a KDE for a vector of data with flexible bandwidth."""
kde = gaussian_kde(x, bw)
# Extract the numeric bandwidth from the KDE object
bw_used = kde.factor
# At this point, bw will be a numeric scale factor.
# To get the actual bandwidth of the kernel, we multiple by the
# unbiased standard deviation of the data, which we will use
# elsewhere to compute the range of the support.
bw_used = bw_used * x.std(ddof=1)
return kde, bw_used
def kde_support(self, x, bw, cut, gridsize):
"""Define a grid of support for the violin."""
support_min = x.min() - bw * cut
support_max = x.max() + bw * cut
return np.linspace(support_min, support_max, gridsize)
def scale_area(self, density, max_density, scale_hue):
"""Scale the relative area under the KDE curve.
This essentially preserves the "standard" KDE scaling, but the
resulting maximum density will be 1 so that the curve can be
properly multiplied by the violin width.
"""
if self.hue_names is None:
for d in density:
if d.size > 1:
d /= max_density.max()
else:
for i, group in enumerate(density):
for d in group:
if scale_hue:
max = max_density[i].max()
else:
max = max_density.max()
if d.size > 1:
d /= max
def scale_width(self, density):
"""Scale each density curve to the same height."""
if self.hue_names is None:
for d in density:
d /= d.max()
else:
for group in density:
for d in group:
d /= d.max()
def scale_count(self, density, counts, scale_hue):
"""Scale each density curve by the number of observations."""
if self.hue_names is None:
if counts.max() == 0:
d = 0
else:
for count, d in zip(counts, density):
d /= d.max()
d *= count / counts.max()
else:
for i, group in enumerate(density):
for j, d in enumerate(group):
if counts[i].max() == 0:
d = 0
else:
count = counts[i, j]
if scale_hue:
scaler = count / counts[i].max()
else:
scaler = count / counts.max()
d /= d.max()
d *= scaler
@property
def dwidth(self):
if self.hue_names is None or not self.dodge:
return self.width / 2
elif self.split:
return self.width / 2
else:
return self.width / (2 * len(self.hue_names))
    def draw_violins(self, ax):
        """Draw the violins onto `ax`.

        Handles three layouts: a single level of grouping, split violins
        (two hue levels sharing one shape), and fully nested (dodged)
        violins.  Empty groups are skipped and single observations are
        drawn as short horizontal/vertical marks.
        """
        # The fill direction depends on orientation: vertical violins fill
        # between x-values at each y (fill_betweenx), and vice versa.
        fill_func = ax.fill_betweenx if self.orient == "v" else ax.fill_between
        for i, group_data in enumerate(self.plot_data):
            kws = dict(edgecolor=self.gray, linewidth=self.linewidth)
            # Option 1: we have a single level of grouping
            # --------------------------------------------
            if self.plot_hues is None:
                support, density = self.support[i], self.density[i]
                # Handle special case of no observations in this bin
                if support.size == 0:
                    continue
                # Handle special case of a single observation
                elif support.size == 1:
                    val = support.item()
                    d = density.item()
                    self.draw_single_observation(ax, i, val, d)
                    continue
                # Draw the violin for this group
                grid = np.ones(self.gridsize) * i
                fill_func(support,
                          grid - density * self.dwidth,
                          grid + density * self.dwidth,
                          facecolor=self.colors[i],
                          **kws)
                # Draw the interior representation of the data
                if self.inner is None:
                    continue
                # Get a nan-free vector of datapoints
                violin_data = remove_na(group_data)
                # Draw box and whisker information
                if self.inner.startswith("box"):
                    self.draw_box_lines(ax, violin_data, support, density, i)
                # Draw quartile lines
                elif self.inner.startswith("quart"):
                    self.draw_quartiles(ax, violin_data, support, density, i)
                # Draw stick observations
                elif self.inner.startswith("stick"):
                    self.draw_stick_lines(ax, violin_data, support, density, i)
                # Draw point observations
                elif self.inner.startswith("point"):
                    self.draw_points(ax, violin_data, i)
            # Option 2: we have nested grouping by a hue variable
            # ---------------------------------------------------
            else:
                offsets = self.hue_offsets
                for j, hue_level in enumerate(self.hue_names):
                    support, density = self.support[i][j], self.density[i][j]
                    kws["facecolor"] = self.colors[j]
                    # Add legend data, but just for one set of violins
                    if not i:
                        self.add_legend_data(ax, self.colors[j], hue_level)
                    # Handle the special case where we have no observations
                    if support.size == 0:
                        continue
                    # Handle the special case where we have one observation
                    elif support.size == 1:
                        val = support.item()
                        d = density.item()
                        # Each half of a split violin only gets half the mark.
                        if self.split:
                            d = d / 2
                        at_group = i + offsets[j]
                        self.draw_single_observation(ax, at_group, val, d)
                        continue
                    # Option 2a: we are drawing a single split violin
                    # -----------------------------------------------
                    if self.split:
                        grid = np.ones(self.gridsize) * i
                        # j == 0 draws the left/bottom half, j == 1 the other.
                        if j:
                            fill_func(support,
                                      grid,
                                      grid + density * self.dwidth,
                                      **kws)
                        else:
                            fill_func(support,
                                      grid - density * self.dwidth,
                                      grid,
                                      **kws)
                        # Draw the interior representation of the data
                        if self.inner is None:
                            continue
                        # Get a nan-free vector of datapoints
                        hue_mask = self.plot_hues[i] == hue_level
                        violin_data = remove_na(group_data[hue_mask])
                        # Draw quartile lines
                        if self.inner.startswith("quart"):
                            self.draw_quartiles(ax, violin_data,
                                                support, density, i,
                                                ["left", "right"][j])
                        # Draw stick observations
                        elif self.inner.startswith("stick"):
                            self.draw_stick_lines(ax, violin_data,
                                                  support, density, i,
                                                  ["left", "right"][j])
                        # The box and point interior plots are drawn for
                        # all data at the group level, so we just do that once
                        if not j:
                            continue
                        # Get the whole vector for this group level
                        violin_data = remove_na(group_data)
                        # Draw box and whisker information
                        if self.inner.startswith("box"):
                            self.draw_box_lines(ax, violin_data,
                                                support, density, i)
                        # Draw point observations
                        elif self.inner.startswith("point"):
                            self.draw_points(ax, violin_data, i)
                    # Option 2b: we are drawing full nested violins
                    # -----------------------------------------------
                    else:
                        grid = np.ones(self.gridsize) * (i + offsets[j])
                        fill_func(support,
                                  grid - density * self.dwidth,
                                  grid + density * self.dwidth,
                                  **kws)
                        # Draw the interior representation
                        if self.inner is None:
                            continue
                        # Get a nan-free vector of datapoints
                        hue_mask = self.plot_hues[i] == hue_level
                        violin_data = remove_na(group_data[hue_mask])
                        # Draw box and whisker information
                        if self.inner.startswith("box"):
                            self.draw_box_lines(ax, violin_data,
                                                support, density,
                                                i + offsets[j])
                        # Draw quartile lines
                        elif self.inner.startswith("quart"):
                            self.draw_quartiles(ax, violin_data,
                                                support, density,
                                                i + offsets[j])
                        # Draw stick observations
                        elif self.inner.startswith("stick"):
                            self.draw_stick_lines(ax, violin_data,
                                                  support, density,
                                                  i + offsets[j])
                        # Draw point observations
                        elif self.inner.startswith("point"):
                            self.draw_points(ax, violin_data, i + offsets[j])
def draw_single_observation(self, ax, at_group, at_quant, density):
"""Draw a line to mark a single observation."""
d_width = density * self.dwidth
if self.orient == "v":
ax.plot([at_group - d_width, at_group + d_width],
[at_quant, at_quant],
color=self.gray,
linewidth=self.linewidth)
else:
ax.plot([at_quant, at_quant],
[at_group - d_width, at_group + d_width],
color=self.gray,
linewidth=self.linewidth)
def draw_box_lines(self, ax, data, support, density, center):
"""Draw boxplot information at center of the density."""
# Compute the boxplot statistics
q25, q50, q75 = np.percentile(data, [25, 50, 75])
whisker_lim = 1.5 * (q75 - q25)
h1 = np.min(data[data >= (q25 - whisker_lim)])
h2 = np.max(data[data <= (q75 + whisker_lim)])
# Draw a boxplot using lines and a point
if self.orient == "v":
ax.plot([center, center], [h1, h2],
linewidth=self.linewidth,
color=self.gray)
ax.plot([center, center], [q25, q75],
linewidth=self.linewidth * 3,
color=self.gray)
ax.scatter(center, q50,
zorder=3,
color="white",
edgecolor=self.gray,
s=np.square(self.linewidth * 2))
else:
ax.plot([h1, h2], [center, center],
linewidth=self.linewidth,
color=self.gray)
ax.plot([q25, q75], [center, center],
linewidth=self.linewidth * 3,
color=self.gray)
ax.scatter(q50, center,
zorder=3,
color="white",
edgecolor=self.gray,
s=np.square(self.linewidth * 2))
def draw_quartiles(self, ax, data, support, density, center, split=False):
"""Draw the quartiles as lines at width of density."""
q25, q50, q75 = np.percentile(data, [25, 50, 75])
self.draw_to_density(ax, center, q25, support, density, split,
linewidth=self.linewidth,
dashes=[self.linewidth * 1.5] * 2)
self.draw_to_density(ax, center, q50, support, density, split,
linewidth=self.linewidth,
dashes=[self.linewidth * 3] * 2)
self.draw_to_density(ax, center, q75, support, density, split,
linewidth=self.linewidth,
dashes=[self.linewidth * 1.5] * 2)
def draw_points(self, ax, data, center):
"""Draw individual observations as points at middle of the violin."""
kws = dict(s=np.square(self.linewidth * 2),
color=self.gray,
edgecolor=self.gray)
grid = np.ones(len(data)) * center
if self.orient == "v":
ax.scatter(grid, data, **kws)
else:
ax.scatter(data, grid, **kws)
def draw_stick_lines(self, ax, data, support, density,
center, split=False):
"""Draw individual observations as sticks at width of density."""
for val in data:
self.draw_to_density(ax, center, val, support, density, split,
linewidth=self.linewidth * .5)
def draw_to_density(self, ax, center, val, support, density, split, **kws):
"""Draw a line orthogonal to the value axis at width of density."""
idx = np.argmin(np.abs(support - val))
width = self.dwidth * density[idx] * .99
kws["color"] = self.gray
if self.orient == "v":
if split == "left":
ax.plot([center - width, center], [val, val], **kws)
elif split == "right":
ax.plot([center, center + width], [val, val], **kws)
else:
ax.plot([center - width, center + width], [val, val], **kws)
else:
if split == "left":
ax.plot([val, val], [center - width, center], **kws)
elif split == "right":
ax.plot([val, val], [center, center + width], **kws)
else:
ax.plot([val, val], [center - width, center + width], **kws)
    def plot(self, ax):
        """Draw the violins onto ``ax``, then label and orient the axes."""
        self.draw_violins(ax)
        self.annotate_axes(ax)
        # Horizontal plots put the first group at the top by flipping y.
        if self.orient == "h":
            ax.invert_yaxis()
class _CategoricalStatPlotter(_CategoricalPlotter):
    """Base class for plotters that show a statistical estimate per group."""
    # Statistical estimators require numeric data on the value axis
    # (presumably consumed by establish_variables in the base class —
    # confirm there).
    require_numeric = True
    @property
    def nested_width(self):
        """A float with the width of plot elements when hue nesting is used."""
        # When dodging, the group slot is shared equally by the hue levels;
        # otherwise every hue level spans the full slot width.
        if self.dodge:
            width = self.width / len(self.hue_names)
        else:
            width = self.width
        return width
    def estimate_statistic(self, estimator, ci, n_boot, seed):
        """Compute the point estimate and confidence interval per group.

        Populates ``self.statistic`` and ``self.confint`` as arrays
        parallel to ``self.plot_data`` (one extra dimension under hue
        nesting).  ``ci`` may be None (no interval), ``"sd"`` (estimate
        +/- one standard deviation), or a percentile width passed to the
        bootstrap.  Empty groups yield NaN.
        """
        if self.hue_names is None:
            statistic = []
            confint = []
        else:
            statistic = [[] for _ in self.plot_data]
            confint = [[] for _ in self.plot_data]
        for i, group_data in enumerate(self.plot_data):
            # Option 1: we have a single layer of grouping
            # --------------------------------------------
            if self.plot_hues is None:
                if self.plot_units is None:
                    stat_data = remove_na(group_data)
                    unit_data = None
                else:
                    # Keep only rows where both the value and its sampling
                    # unit are non-null, so they stay aligned.
                    unit_data = self.plot_units[i]
                    have = pd.notnull(np.c_[group_data, unit_data]).all(axis=1)
                    stat_data = group_data[have]
                    unit_data = unit_data[have]
                # Estimate a statistic from the vector of data
                if not stat_data.size:
                    statistic.append(np.nan)
                else:
                    statistic.append(estimator(stat_data))
                # Get a confidence interval for this estimate
                if ci is not None:
                    # An interval needs at least two observations.
                    if stat_data.size < 2:
                        confint.append([np.nan, np.nan])
                        continue
                    if ci == "sd":
                        estimate = estimator(stat_data)
                        sd = np.std(stat_data)
                        confint.append((estimate - sd, estimate + sd))
                    else:
                        boots = bootstrap(stat_data, func=estimator,
                                          n_boot=n_boot,
                                          units=unit_data,
                                          seed=seed)
                        confint.append(utils.ci(boots, ci))
            # Option 2: we are grouping by a hue layer
            # ----------------------------------------
            else:
                for j, hue_level in enumerate(self.hue_names):
                    if not self.plot_hues[i].size:
                        statistic[i].append(np.nan)
                        if ci is not None:
                            confint[i].append((np.nan, np.nan))
                        continue
                    hue_mask = self.plot_hues[i] == hue_level
                    if self.plot_units is None:
                        stat_data = remove_na(group_data[hue_mask])
                        unit_data = None
                    else:
                        group_units = self.plot_units[i]
                        have = pd.notnull(
                            np.c_[group_data, group_units]
                        ).all(axis=1)
                        stat_data = group_data[hue_mask & have]
                        unit_data = group_units[hue_mask & have]
                    # Estimate a statistic from the vector of data
                    if not stat_data.size:
                        statistic[i].append(np.nan)
                    else:
                        statistic[i].append(estimator(stat_data))
                    # Get a confidence interval for this estimate
                    if ci is not None:
                        if stat_data.size < 2:
                            confint[i].append([np.nan, np.nan])
                            continue
                        if ci == "sd":
                            estimate = estimator(stat_data)
                            sd = np.std(stat_data)
                            confint[i].append((estimate - sd, estimate + sd))
                        else:
                            boots = bootstrap(stat_data, func=estimator,
                                              n_boot=n_boot,
                                              units=unit_data,
                                              seed=seed)
                            confint[i].append(utils.ci(boots, ci))
        # Save the resulting values for plotting
        self.statistic = np.array(statistic)
        self.confint = np.array(confint)
def draw_confints(self, ax, at_group, confint, colors,
errwidth=None, capsize=None, **kws):
if errwidth is not None:
kws.setdefault("lw", errwidth)
else:
kws.setdefault("lw", mpl.rcParams["lines.linewidth"] * 1.8)
for at, (ci_low, ci_high), color in zip(at_group,
confint,
colors):
if self.orient == "v":
ax.plot([at, at], [ci_low, ci_high], color=color, **kws)
if capsize is not None:
ax.plot([at - capsize / 2, at + capsize / 2],
[ci_low, ci_low], color=color, **kws)
ax.plot([at - capsize / 2, at + capsize / 2],
[ci_high, ci_high], color=color, **kws)
else:
ax.plot([ci_low, ci_high], [at, at], color=color, **kws)
if capsize is not None:
ax.plot([ci_low, ci_low],
[at - capsize / 2, at + capsize / 2],
color=color, **kws)
ax.plot([ci_high, ci_high],
[at - capsize / 2, at + capsize / 2],
color=color, **kws)
class _BarPlotter(_CategoricalStatPlotter):
    """Show point estimates and confidence intervals with bars."""
    def __init__(self, x, y, hue, data, order, hue_order,
                 estimator, ci, n_boot, units, seed,
                 orient, color, palette, saturation, errcolor,
                 errwidth, capsize, dodge):
        """Initialize the plotter.

        Resolves the data/grouping and colors, then precomputes the
        per-group statistic and confidence interval used by draw_bars.
        """
        self.establish_variables(x, y, hue, data, orient,
                                 order, hue_order, units)
        self.establish_colors(color, palette, saturation)
        self.estimate_statistic(estimator, ci, n_boot, seed)
        self.dodge = dodge
        # Error-bar appearance options.
        self.errcolor = errcolor
        self.errwidth = errwidth
        self.capsize = capsize
    def draw_bars(self, ax, kws):
        """Draw the bars onto `ax`.

        NOTE(review): unlike the other draw_* methods this one builds a
        plotly figure via ``plotly.express`` and the ``bornly`` helpers, so
        ``ax`` here appears to be a bornly callable wrapper rather than a
        matplotlib Axes (it is called as ``ax(fig)`` below) — confirm
        against the bornly axes implementation.
        """
        # Get the right matplotlib function depending on the orientation
        import plotly.express as px
        from bornly import _get_colors, _cartesian
        # One row per bar; "y" holds the estimated statistic.
        data = pd.DataFrame({"y": self.statistic.flatten()})
        plotting_kwargs = dict(
            data_frame=data,
            x="x",
            y="y",
            color_discrete_sequence=_get_colors(-1, 1, self.colors),
        )
        if self.plot_hues is None:
            data["x"] = self.group_names
            # Error amount = distance from the estimate to the upper bound
            # (plotly's error_y is symmetric by default).
            data["err"] = self.confint[:, 1] - data["y"]
            plotting_kwargs["color"] = "x"
        else:
            # Cross hue levels with group names to label each bar.
            data[["hue", "x"]] = _cartesian(self.hue_names, self.group_names)
            data["err"] = self.confint[:, :, 1].flatten() - data["y"]
            plotting_kwargs["color"] = "hue"
            plotting_kwargs["barmode"] = "group"
        plotting_kwargs["category_orders"] = {"x": self.group_names}
        # Only request error bars when at least one interval is defined.
        if not np.isnan(data["err"]).all():
            plotting_kwargs["error_y"] = "err"
        if self.orient == "h":
            # Horizontal bars: swap the axes and move errors to error_x.
            plotting_kwargs["x"], plotting_kwargs["y"] = (
                plotting_kwargs["y"],
                plotting_kwargs["x"],
            )
            plotting_kwargs["orientation"] = "h"
            if "error_y" in plotting_kwargs:
                plotting_kwargs["error_x"] = plotting_kwargs.pop("error_y")
        fig = px.bar(**plotting_kwargs)
        ax(fig)
        if self.orient == "v":
            xlabel, ylabel = self.group_label, self.value_label
        else:
            xlabel, ylabel = self.value_label, self.group_label
        ax.set(xlabel=xlabel, ylabel=ylabel)
        if self.hue_names is not None:
            ax.figure.layout.legend.title = self.hue_title
        else:
            ax.figure.update_layout(showlegend=False)
    def plot(self, ax, bar_kws):
        """Make the plot by delegating to draw_bars."""
        self.draw_bars(ax, bar_kws)
class _PointPlotter(_CategoricalStatPlotter):
    default_palette = "dark"
    # NOTE(review): this string literal is placed after the class attribute,
    # so it is a no-op expression rather than the class docstring.
    """Show point estimates and confidence intervals with (joined) points."""
    def __init__(self, x, y, hue, data, order, hue_order,
                 estimator, ci, n_boot, units, seed,
                 markers, linestyles, dodge, join, scale,
                 orient, color, palette, errwidth=None, capsize=None):
        """Initialize the plotter."""
        self.establish_variables(x, y, hue, data, orient,
                                 order, hue_order, units)
        self.establish_colors(color, palette, 1)
        self.estimate_statistic(estimator, ci, n_boot, seed)
        # Override the default palette for single-color plots
        if hue is None and color is None and palette is None:
            self.colors = [color_palette()[0]] * len(self.colors)
        # Don't join single-layer plots with different colors
        if hue is None and palette is not None:
            join = False
        # Use a good default for `dodge=True`
        if dodge is True and self.hue_names is not None:
            dodge = .025 * len(self.hue_names)
        # Make sure we have a marker for each hue level
        if isinstance(markers, str):
            markers = [markers] * len(self.colors)
        self.markers = markers
        # Make sure we have a line style for each hue level
        if isinstance(linestyles, str):
            linestyles = [linestyles] * len(self.colors)
        self.linestyles = linestyles
        # Set the other plot components
        self.dodge = dodge
        self.join = join
        self.scale = scale
        self.errwidth = errwidth
        self.capsize = capsize
@property
def hue_offsets(self):
"""Offsets relative to the center position for each hue level."""
if self.dodge:
offset = np.linspace(0, self.dodge, len(self.hue_names))
offset -= offset.mean()
else:
offset = np.zeros(len(self.hue_names))
return offset
    def draw_points(self, ax):
        """Draw the main data components of the plot.

        Optionally joins the estimates with a line, draws the confidence
        intervals, then scatters the estimate points — once overall, or
        once per hue level when nesting.
        """
        # Get the center positions on the categorical axis
        pointpos = np.arange(len(self.statistic))
        # Get the size of the plot elements
        lw = mpl.rcParams["lines.linewidth"] * 1.8 * self.scale
        mew = lw * .75
        markersize = np.pi * np.square(lw) * 2
        if self.plot_hues is None:
            # Draw lines joining each estimate point
            if self.join:
                color = self.colors[0]
                ls = self.linestyles[0]
                if self.orient == "h":
                    ax.plot(self.statistic, pointpos,
                            color=color, ls=ls, lw=lw)
                else:
                    ax.plot(pointpos, self.statistic,
                            color=color, ls=ls, lw=lw)
            # Draw the confidence intervals
            self.draw_confints(ax, pointpos, self.confint, self.colors,
                               self.errwidth, self.capsize)
            # Draw the estimate points
            marker = self.markers[0]
            colors = [mpl.colors.colorConverter.to_rgb(c) for c in self.colors]
            if self.orient == "h":
                x, y = self.statistic, pointpos
            else:
                x, y = pointpos, self.statistic
            ax.scatter(x, y,
                       linewidth=mew, marker=marker, s=markersize,
                       facecolor=colors, edgecolor=colors)
        else:
            offsets = self.hue_offsets
            for j, hue_level in enumerate(self.hue_names):
                # Determine the values to plot for this level
                statistic = self.statistic[:, j]
                # Determine the position on the categorical and z axes
                offpos = pointpos + offsets[j]
                # Stack successive hue levels above earlier ones.
                z = j + 1
                # Draw lines joining each estimate point
                if self.join:
                    color = self.colors[j]
                    ls = self.linestyles[j]
                    if self.orient == "h":
                        ax.plot(statistic, offpos, color=color,
                                zorder=z, ls=ls, lw=lw)
                    else:
                        ax.plot(offpos, statistic, color=color,
                                zorder=z, ls=ls, lw=lw)
                # Draw the confidence intervals
                if self.confint.size:
                    confint = self.confint[:, j]
                    errcolors = [self.colors[j]] * len(offpos)
                    self.draw_confints(ax, offpos, confint, errcolors,
                                       self.errwidth, self.capsize,
                                       zorder=z)
                # Draw the estimate points
                n_points = len(remove_na(offpos))
                marker = self.markers[j]
                color = mpl.colors.colorConverter.to_rgb(self.colors[j])
                if self.orient == "h":
                    x, y = statistic, offpos
                else:
                    x, y = offpos, statistic
                # All-NaN levels still get a scatter call (with NaNs) so the
                # legend entry for the hue level is created.
                if not len(remove_na(statistic)):
                    x = y = [np.nan] * n_points
                ax.scatter(x, y, label=hue_level,
                           facecolor=color, edgecolor=color,
                           linewidth=mew, marker=marker, s=markersize,
                           zorder=z)
    def plot(self, ax):
        """Draw the points onto ``ax``, then label and orient the axes."""
        self.draw_points(ax)
        self.annotate_axes(ax)
        # Horizontal plots put the first group at the top by flipping y.
        if self.orient == "h":
            ax.invert_yaxis()
class _CountPlotter(_BarPlotter):
    """Bar plotter for observation counts; the value axis is derived, so
    the input data does not need to be numeric."""
    require_numeric = False
class _LVPlotter(_CategoricalPlotter):
    """Plotter for letter-value ("boxen") plots."""
    def __init__(self, x, y, hue, data, order, hue_order,
                 orient, color, palette, saturation,
                 width, dodge, k_depth, linewidth, scale, outlier_prop,
                 trust_alpha, showfliers=True):
        self.width = width
        self.dodge = dodge
        self.saturation = saturation
        # ``k_depth`` chooses how many letter-value boxes to draw: one of
        # the named methods, or an explicit numeric depth.
        k_depth_methods = ['proportion', 'tukey', 'trustworthy', 'full']
        if not (k_depth in k_depth_methods or isinstance(k_depth, Number)):
            msg = (f'k_depth must be one of {k_depth_methods} or a number, '
                   f'but {k_depth} was passed.')
            raise ValueError(msg)
        self.k_depth = k_depth
        # Fall back to the matplotlib default line width.
        if linewidth is None:
            linewidth = mpl.rcParams["lines.linewidth"]
        self.linewidth = linewidth
        scales = ['linear', 'exponential', 'area']
        if scale not in scales:
            msg = f'scale must be one of {scales}, but {scale} was passed.'
            raise ValueError(msg)
        self.scale = scale
        # Proportion of points treated as outliers (used by 'proportion').
        if ((outlier_prop > 1) or (outlier_prop <= 0)):
            msg = f'outlier_prop {outlier_prop} not in range (0, 1]'
            raise ValueError(msg)
        self.outlier_prop = outlier_prop
        # Confidence level used by the 'trustworthy' depth method.
        if not 0 < trust_alpha < 1:
            msg = f'trust_alpha {trust_alpha} not in range (0, 1)'
            raise ValueError(msg)
        self.trust_alpha = trust_alpha
        self.showfliers = showfliers
        self.establish_variables(x, y, hue, data, orient, order, hue_order)
        self.establish_colors(color, palette, saturation)
def _lv_box_ends(self, vals):
"""Get the number of data points and calculate `depth` of
letter-value plot."""
vals = np.asarray(vals)
# Remove infinite values while handling a 'object' dtype
# that can come from pd.Float64Dtype() input
with pd.option_context('mode.use_inf_as_null', True):
vals = vals[~pd.isnull(vals)]
n = len(vals)
p = self.outlier_prop
# Select the depth, i.e. number of boxes to draw, based on the method
if self.k_depth == 'full':
# extend boxes to 100% of the data
k = int(np.log2(n)) + 1
elif self.k_depth == 'tukey':
# This results with 5-8 points in each tail
k = int(np.log2(n)) - 3
elif self.k_depth == 'proportion':
k = int(np.log2(n)) - int(np.log2(n * p)) + 1
elif self.k_depth == 'trustworthy':
point_conf = 2 * _normal_quantile_func((1 - self.trust_alpha / 2)) ** 2
k = int(np.log2(n / point_conf)) + 1
else:
k = int(self.k_depth) # allow having k as input
# If the number happens to be less than 1, set k to 1
if k < 1:
k = 1
# Calculate the upper end for each of the k boxes
upper = [100 * (1 - 0.5 ** (i + 1)) for i in range(k, 0, -1)]
# Calculate the lower end for each of the k boxes
lower = [100 * (0.5 ** (i + 1)) for i in range(k, 0, -1)]
# Stitch the box ends together
percentile_ends = [(i, j) for i, j in zip(lower, upper)]
box_ends = [np.percentile(vals, q) for q in percentile_ends]
return box_ends, k
def _lv_outliers(self, vals, k):
"""Find the outliers based on the letter value depth."""
box_edge = 0.5 ** (k + 1)
perc_ends = (100 * box_edge, 100 * (1 - box_edge))
edges = np.percentile(vals, perc_ends)
lower_out = vals[np.where(vals < edges[0])[0]]
upper_out = vals[np.where(vals > edges[1])[0]]
return np.concatenate((lower_out, upper_out))
def _width_functions(self, width_func):
# Dictionary of functions for computing the width of the boxes
width_functions = {'linear': lambda h, i, k: (i + 1.) / k,
'exponential': lambda h, i, k: 2**(-k + i - 1),
'area': lambda h, i, k: (1 - 2**(-k + i - 2)) / h}
return width_functions[width_func]
    def _lvplot(self, box_data, positions,
                color=[255. / 256., 185. / 256., 0.],
                widths=1, ax=None, **kws):
        """Draw one letter-value glyph (boxes, median, outliers) on ``ax``.

        NOTE(review): ``color`` uses a mutable (list) default argument; it
        is never mutated here, but a tuple default would be safer.
        """
        vert = self.orient == "v"
        x = positions[0]
        box_data = np.asarray(box_data)
        # If we only have one data point, plot a line
        if len(box_data) == 1:
            kws.update({
                'color': self.gray, 'linestyle': '-', 'linewidth': self.linewidth
            })
            ys = [box_data[0], box_data[0]]
            xs = [x - widths / 2, x + widths / 2]
            if vert:
                xx, yy = xs, ys
            else:
                xx, yy = ys, xs
            ax.plot(xx, yy, **kws)
        else:
            # Get the number of data points and calculate "depth" of
            # letter-value plot
            box_ends, k = self._lv_box_ends(box_data)
            # Anonymous functions for calculating the width and height
            # of the letter value boxes
            width = self._width_functions(self.scale)
            # Function to find height of boxes
            def height(b):
                return b[1] - b[0]
            # Functions to construct the letter value boxes
            def vert_perc_box(x, b, i, k, w):
                rect = Patches.Rectangle((x - widths * w / 2, b[0]),
                                         widths * w,
                                         height(b), fill=True)
                return rect
            def horz_perc_box(x, b, i, k, w):
                rect = Patches.Rectangle((b[0], x - widths * w / 2),
                                         height(b), widths * w,
                                         fill=True)
                return rect
            # Scale the width of the boxes so the biggest starts at 1
            w_area = np.array([width(height(b), i, k)
                               for i, b in enumerate(box_ends)])
            w_area = w_area / np.max(w_area)
            # Calculate the medians
            y = np.median(box_data)
            # Calculate the outliers and plot (only if showfliers == True)
            outliers = []
            if self.showfliers:
                outliers = self._lv_outliers(box_data, k)
            hex_color = mpl.colors.rgb2hex(color)
            if vert:
                box_func = vert_perc_box
                xs_median = [x - widths / 2, x + widths / 2]
                ys_median = [y, y]
                xs_outliers = np.full(len(outliers), x)
                ys_outliers = outliers
            else:
                box_func = horz_perc_box
                xs_median = [y, y]
                ys_median = [x - widths / 2, x + widths / 2]
                xs_outliers = outliers
                ys_outliers = np.full(len(outliers), x)
            # b is a (percentile-pair, width) tuple from the zip below.
            boxes = [box_func(x, b[0], i, k, b[1])
                     for i, b in enumerate(zip(box_ends, w_area))]
            # Plot the medians
            ax.plot(
                xs_median,
                ys_median,
                c=".15",
                alpha=0.45,
                solid_capstyle="butt",
                linewidth=self.linewidth,
                **kws
            )
            # Plot outliers (if any)
            if len(outliers) > 0:
                ax.scatter(xs_outliers, ys_outliers, marker='d',
                           c=self.gray, **kws)
            # Construct a color map from the input color
            rgb = [hex_color, (1, 1, 1)]
            cmap = mpl.colors.LinearSegmentedColormap.from_list('new_map', rgb)
            # Make sure that the last boxes contain hue and are not pure white
            rgb = [hex_color, cmap(.85)]
            cmap = mpl.colors.LinearSegmentedColormap.from_list('new_map', rgb)
            collection = PatchCollection(
                boxes, cmap=cmap, edgecolor=self.gray, linewidth=self.linewidth
            )
            # Set the color gradation, first box will have color=hex_color
            collection.set_array(np.array(np.linspace(1, 0, len(boxes))))
            # Plot the boxes
            ax.add_collection(collection)
    def draw_letter_value_plot(self, ax, kws):
        """Use matplotlib to draw a letter value plot on an Axes.

        Draws one glyph per group (or per group/hue combination when
        nesting), skipping groups with no non-null data.
        """
        for i, group_data in enumerate(self.plot_data):
            if self.plot_hues is None:
                # Handle case where there is data at this level
                if group_data.size == 0:
                    continue
                # Draw a single box or a set of boxes
                # with a single level of grouping
                box_data = remove_na(group_data)
                # Handle case where there is no non-null data
                if box_data.size == 0:
                    continue
                color = self.colors[i]
                self._lvplot(box_data,
                             positions=[i],
                             color=color,
                             widths=self.width,
                             ax=ax,
                             **kws)
            else:
                # Draw nested groups of boxes
                offsets = self.hue_offsets
                for j, hue_level in enumerate(self.hue_names):
                    # Add a legend for this hue level (first group only).
                    if not i:
                        self.add_legend_data(ax, self.colors[j], hue_level)
                    # Handle case where there is data at this level
                    if group_data.size == 0:
                        continue
                    hue_mask = self.plot_hues[i] == hue_level
                    box_data = remove_na(group_data[hue_mask])
                    # Handle case where there is no non-null data
                    if box_data.size == 0:
                        continue
                    color = self.colors[j]
                    center = i + offsets[j]
                    self._lvplot(box_data,
                                 positions=[center],
                                 color=color,
                                 widths=self.nested_width,
                                 ax=ax,
                                 **kws)
        # Autoscale the values axis to make sure all patches are visible
        ax.autoscale_view(scalex=self.orient == "h", scaley=self.orient == "v")
def plot(self, ax, boxplot_kws):
"""Make the plot."""
self.draw_letter_value_plot(ax, boxplot_kws)
self.annotate_axes(ax)
if self.orient == "h":
ax.invert_yaxis()
_categorical_docs = dict(
# Shared narrative docs
categorical_narrative=dedent("""\
This function always treats one of the variables as categorical and
draws data at ordinal positions (0, 1, ... n) on the relevant axis, even
when the data has a numeric or date type.
See the :ref:`tutorial <categorical_tutorial>` for more information.\
"""),
main_api_narrative=dedent("""\
Input data can be passed in a variety of formats, including:
- Vectors of data represented as lists, numpy arrays, or pandas Series
objects passed directly to the ``x``, ``y``, and/or ``hue`` parameters.
- A "long-form" DataFrame, in which case the ``x``, ``y``, and ``hue``
variables will determine how the data are plotted.
- A "wide-form" DataFrame, such that each numeric column will be plotted.
- An array or list of vectors.
In most cases, it is possible to use numpy or Python objects, but pandas
objects are preferable because the associated names will be used to
annotate the axes. Additionally, you can use Categorical types for the
grouping variables to control the order of plot elements.\
"""),
# Shared function parameters
input_params=dedent("""\
x, y, hue : names of variables in ``data`` or vector data, optional
Inputs for plotting long-form data. See examples for interpretation.\
"""),
string_input_params=dedent("""\
x, y, hue : names of variables in ``data``
Inputs for plotting long-form data. See examples for interpretation.\
"""),
categorical_data=dedent("""\
data : DataFrame, array, or list of arrays, optional
Dataset for plotting. If ``x`` and ``y`` are absent, this is
interpreted as wide-form. Otherwise it is expected to be long-form.\
"""),
long_form_data=dedent("""\
data : DataFrame
Long-form (tidy) dataset for plotting. Each column should correspond
to a variable, and each row should correspond to an observation.\
"""),
order_vars=dedent("""\
order, hue_order : lists of strings, optional
Order to plot the categorical levels in, otherwise the levels are
inferred from the data objects.\
"""),
stat_api_params=dedent("""\
estimator : callable that maps vector -> scalar, optional
Statistical function to estimate within each categorical bin.
ci : float or "sd" or None, optional
Size of confidence intervals to draw around estimated values. If
"sd", skip bootstrapping and draw the standard deviation of the
observations. If ``None``, no bootstrapping will be performed, and
error bars will not be drawn.
n_boot : int, optional
Number of bootstrap iterations to use when computing confidence
intervals.
units : name of variable in ``data`` or vector data, optional
Identifier of sampling units, which will be used to perform a
multilevel bootstrap and account for repeated measures design.
seed : int, numpy.random.Generator, or numpy.random.RandomState, optional
Seed or random number generator for reproducible bootstrapping.\
"""),
orient=dedent("""\
orient : "v" | "h", optional
Orientation of the plot (vertical or horizontal). This is usually
inferred based on the type of the input variables, but it can be used
to resolve ambiguity when both `x` and `y` are numeric or when
plotting wide-form data.\
"""),
color=dedent("""\
color : matplotlib color, optional
Color for all of the elements, or seed for a gradient palette.\
"""),
palette=dedent("""\
palette : palette name, list, or dict, optional
Color palette that maps either the grouping variable or the hue
variable. If the palette is a dictionary, keys should be names of
levels and values should be matplotlib colors.\
"""),
saturation=dedent("""\
saturation : float, optional
Proportion of the original saturation to draw colors at. Large patches
often look better with slightly desaturated colors, but set this to
``1`` if you want the plot colors to perfectly match the input color
spec.\
"""),
capsize=dedent("""\
capsize : float, optional
Width of the "caps" on error bars.
"""),
errwidth=dedent("""\
errwidth : float, optional
Thickness of error bar lines (and caps).\
"""),
width=dedent("""\
width : float, optional
Width of a full element when not using hue nesting, or width of all the
elements for one level of the major grouping variable.\
"""),
dodge=dedent("""\
dodge : bool, optional
When hue nesting is used, whether elements should be shifted along the
categorical axis.\
"""),
linewidth=dedent("""\
linewidth : float, optional
Width of the gray lines that frame the plot elements.\
"""),
ax_in=dedent("""\
ax : matplotlib Axes, optional
Axes object to draw the plot onto, otherwise uses the current Axes.\
"""),
ax_out=dedent("""\
ax : matplotlib Axes
Returns the Axes object with the plot drawn onto it.\
"""),
# Shared see also
boxplot=dedent("""\
boxplot : A traditional box-and-whisker plot with a similar API.\
"""),
violinplot=dedent("""\
violinplot : A combination of boxplot and kernel density estimation.\
"""),
stripplot=dedent("""\
stripplot : A scatterplot where one variable is categorical. Can be used
in conjunction with other plots to show each observation.\
"""),
swarmplot=dedent("""\
swarmplot : A categorical scatterplot where the points do not overlap. Can
be used with other plots to show each observation.\
"""),
barplot=dedent("""\
barplot : Show point estimates and confidence intervals using bars.\
"""),
countplot=dedent("""\
countplot : Show the counts of observations in each categorical bin.\
"""),
pointplot=dedent("""\
pointplot : Show point estimates and confidence intervals using scatterplot
glyphs.\
"""),
catplot=dedent("""\
catplot : Combine a categorical plot with a :class:`FacetGrid`.\
"""),
boxenplot=dedent("""\
boxenplot : An enhanced boxplot for larger datasets.\
"""),
)
# Make the shared facet-grid parameter docs available to the same templates.
_categorical_docs.update(_facet_docs)
@_deprecate_positional_args
def boxplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    orient=None, color=None, palette=None, saturation=.75,
    width=.8, dodge=True, fliersize=5, linewidth=None,
    whis=1.5, ax=None,
    **kwargs
):
    # The plotter object owns all data wrangling and drawing logic.
    box_plotter = _BoxPlotter(x, y, hue, data, order, hue_order,
                              orient, color, palette, saturation,
                              width, dodge, fliersize, linewidth)
    # Fall back to the current Axes when none was provided.
    ax = plt.gca() if ax is None else ax
    # Forward the whisker length to matplotlib's boxplot with the other kwargs.
    kwargs["whis"] = whis
    box_plotter.plot(ax, kwargs)
    return ax
boxplot.__doc__ = dedent("""\
Draw a box plot to show distributions with respect to categories.
A box plot (or box-and-whisker plot) shows the distribution of quantitative
data in a way that facilitates comparisons between variables or across
levels of a categorical variable. The box shows the quartiles of the
dataset while the whiskers extend to show the rest of the distribution,
except for points that are determined to be "outliers" using a method
that is a function of the inter-quartile range.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
{orient}
{color}
{palette}
{saturation}
{width}
{dodge}
fliersize : float, optional
Size of the markers used to indicate outlier observations.
{linewidth}
whis : float, optional
Maximum length of the plot whiskers as proportion of the
interquartile range. Whiskers extend to the furthest datapoint
within that range. More extreme points are marked as outliers.
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.boxplot`.
Returns
-------
{ax_out}
See Also
--------
{violinplot}
{stripplot}
{swarmplot}
{catplot}
Examples
--------
Draw a single horizontal boxplot:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="whitegrid")
>>> tips = sns.load_dataset("tips")
>>> ax = sns.boxplot(x=tips["total_bill"])
Draw a vertical boxplot grouped by a categorical variable:
.. plot::
:context: close-figs
>>> ax = sns.boxplot(x="day", y="total_bill", data=tips)
Draw a boxplot with nested grouping by two categorical variables:
.. plot::
:context: close-figs
>>> ax = sns.boxplot(x="day", y="total_bill", hue="smoker",
... data=tips, palette="Set3")
Draw a boxplot with nested grouping when some bins are empty:
.. plot::
:context: close-figs
>>> ax = sns.boxplot(x="day", y="total_bill", hue="time",
... data=tips, linewidth=2.5)
Control box order by passing an explicit order:
.. plot::
:context: close-figs
>>> ax = sns.boxplot(x="time", y="tip", data=tips,
... order=["Dinner", "Lunch"])
Draw a boxplot for each numeric variable in a DataFrame:
.. plot::
:context: close-figs
>>> iris = sns.load_dataset("iris")
>>> ax = sns.boxplot(data=iris, orient="h", palette="Set2")
Use ``hue`` without changing box position or width:
.. plot::
:context: close-figs
>>> tips["weekend"] = tips["day"].isin(["Sat", "Sun"])
>>> ax = sns.boxplot(x="day", y="total_bill", hue="weekend",
... data=tips, dodge=False)
Use :func:`swarmplot` to show the datapoints on top of the boxes:
.. plot::
:context: close-figs
>>> ax = sns.boxplot(x="day", y="total_bill", data=tips)
>>> ax = sns.swarmplot(x="day", y="total_bill", data=tips, color=".25")
Use :func:`catplot` to combine a :func:`boxplot` and a
:class:`FacetGrid`. This allows grouping within additional categorical
variables. Using :func:`catplot` is safer than using :class:`FacetGrid`
directly, as it ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="sex", y="total_bill",
... hue="smoker", col="time",
... data=tips, kind="box",
... height=4, aspect=.7);
""").format(**_categorical_docs)
@_deprecate_positional_args
def violinplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    bw="scott", cut=2, scale="area", scale_hue=True, gridsize=100,
    width=.8, inner="box", split=False, dodge=True, orient=None,
    linewidth=None, color=None, palette=None, saturation=.75,
    ax=None, **kwargs,
):
    # Density estimation and drawing both live inside the plotter object.
    violin_plotter = _ViolinPlotter(x, y, hue, data, order, hue_order,
                                    bw, cut, scale, scale_hue, gridsize,
                                    width, inner, split, dodge, orient, linewidth,
                                    color, palette, saturation)
    # Fall back to the current Axes when none was provided.
    ax = plt.gca() if ax is None else ax
    # NOTE(review): **kwargs is accepted but not forwarded to the plotter —
    # confirm this matches the intended API.
    violin_plotter.plot(ax)
    return ax
violinplot.__doc__ = dedent("""\
Draw a combination of boxplot and kernel density estimate.
A violin plot plays a similar role as a box and whisker plot. It shows the
distribution of quantitative data across several levels of one (or more)
categorical variables such that those distributions can be compared. Unlike
a box plot, in which all of the plot components correspond to actual
datapoints, the violin plot features a kernel density estimation of the
underlying distribution.
This can be an effective and attractive way to show multiple distributions
of data at once, but keep in mind that the estimation procedure is
influenced by the sample size, and violins for relatively small samples
might look misleadingly smooth.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
bw : {{'scott', 'silverman', float}}, optional
Either the name of a reference rule or the scale factor to use when
computing the kernel bandwidth. The actual kernel size will be
determined by multiplying the scale factor by the standard deviation of
the data within each bin.
cut : float, optional
Distance, in units of bandwidth size, to extend the density past the
extreme datapoints. Set to 0 to limit the violin range within the range
of the observed data (i.e., to have the same effect as ``trim=True`` in
``ggplot``.
scale : {{"area", "count", "width"}}, optional
The method used to scale the width of each violin. If ``area``, each
violin will have the same area. If ``count``, the width of the violins
will be scaled by the number of observations in that bin. If ``width``,
each violin will have the same width.
scale_hue : bool, optional
When nesting violins using a ``hue`` variable, this parameter
determines whether the scaling is computed within each level of the
major grouping variable (``scale_hue=True``) or across all the violins
on the plot (``scale_hue=False``).
gridsize : int, optional
Number of points in the discrete grid used to compute the kernel
density estimate.
{width}
inner : {{"box", "quartile", "point", "stick", None}}, optional
Representation of the datapoints in the violin interior. If ``box``,
draw a miniature boxplot. If ``quartiles``, draw the quartiles of the
distribution. If ``point`` or ``stick``, show each underlying
datapoint. Using ``None`` will draw unadorned violins.
split : bool, optional
When using hue nesting with a variable that takes two levels, setting
``split`` to True will draw half of a violin for each level. This can
make it easier to directly compare the distributions.
{dodge}
{orient}
{linewidth}
{color}
{palette}
{saturation}
{ax_in}
Returns
-------
{ax_out}
See Also
--------
{boxplot}
{stripplot}
{swarmplot}
{catplot}
Examples
--------
Draw a single horizontal violinplot:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="whitegrid")
>>> tips = sns.load_dataset("tips")
>>> ax = sns.violinplot(x=tips["total_bill"])
Draw a vertical violinplot grouped by a categorical variable:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", data=tips)
Draw a violinplot with nested grouping by two categorical variables:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="smoker",
... data=tips, palette="muted")
Draw split violins to compare the across the hue variable:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="smoker",
... data=tips, palette="muted", split=True)
Control violin order by passing an explicit order:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="time", y="tip", data=tips,
... order=["Dinner", "Lunch"])
Scale the violin width by the number of observations in each bin:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="sex",
... data=tips, palette="Set2", split=True,
... scale="count")
Draw the quartiles as horizontal lines instead of a mini-box:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="sex",
... data=tips, palette="Set2", split=True,
... scale="count", inner="quartile")
Show each observation with a stick inside the violin:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="sex",
... data=tips, palette="Set2", split=True,
... scale="count", inner="stick")
Scale the density relative to the counts across all bins:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="sex",
... data=tips, palette="Set2", split=True,
... scale="count", inner="stick", scale_hue=False)
Use a narrow bandwidth to reduce the amount of smoothing:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="day", y="total_bill", hue="sex",
... data=tips, palette="Set2", split=True,
... scale="count", inner="stick",
... scale_hue=False, bw=.2)
Draw horizontal violins:
.. plot::
:context: close-figs
>>> planets = sns.load_dataset("planets")
>>> ax = sns.violinplot(x="orbital_period", y="method",
... data=planets[planets.orbital_period < 1000],
... scale="width", palette="Set3")
Don't let density extend past extreme values in the data:
.. plot::
:context: close-figs
>>> ax = sns.violinplot(x="orbital_period", y="method",
... data=planets[planets.orbital_period < 1000],
... cut=0, scale="width", palette="Set3")
Use ``hue`` without changing violin position or width:
.. plot::
:context: close-figs
>>> tips["weekend"] = tips["day"].isin(["Sat", "Sun"])
>>> ax = sns.violinplot(x="day", y="total_bill", hue="weekend",
... data=tips, dodge=False)
Use :func:`catplot` to combine a :func:`violinplot` and a
:class:`FacetGrid`. This allows grouping within additional categorical
variables. Using :func:`catplot` is safer than using :class:`FacetGrid`
directly, as it ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="sex", y="total_bill",
... hue="smoker", col="time",
... data=tips, kind="violin", split=True,
... height=4, aspect=.7);
""").format(**_categorical_docs)
@_deprecate_positional_args
def boxenplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    orient=None, color=None, palette=None, saturation=.75,
    width=.8, dodge=True, k_depth='tukey', linewidth=None,
    scale='exponential', outlier_prop=0.007, trust_alpha=0.05, showfliers=True,
    ax=None, **kwargs
):
    # The letter-value plotter handles box computation and rendering.
    lv_plotter = _LVPlotter(x, y, hue, data, order, hue_order,
                            orient, color, palette, saturation,
                            width, dodge, k_depth, linewidth, scale,
                            outlier_prop, trust_alpha, showfliers)
    # Fall back to the current Axes when none was provided.
    ax = plt.gca() if ax is None else ax
    # Remaining keyword arguments are passed through to matplotlib.
    lv_plotter.plot(ax, kwargs)
    return ax
boxenplot.__doc__ = dedent("""\
Draw an enhanced box plot for larger datasets.
This style of plot was originally named a "letter value" plot because it
shows a large number of quantiles that are defined as "letter values". It
is similar to a box plot in plotting a nonparametric representation of a
distribution in which all features correspond to actual observations. By
plotting more quantiles, it provides more information about the shape of
the distribution, particularly in the tails. For a more extensive
explanation, you can read the paper that introduced the plot:
https://vita.had.co.nz/papers/letter-value-plot.html
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
{orient}
{color}
{palette}
{saturation}
{width}
{dodge}
k_depth : {{"tukey", "proportion", "trustworthy", "full"}} or scalar,\
optional
The number of boxes, and by extension number of percentiles, to draw.
All methods are detailed in Wickham's paper. Each makes different
assumptions about the number of outliers and leverages different
statistical properties. If "proportion", draw no more than
`outlier_prop` extreme observations. If "full", draw `log(n)+1` boxes.
{linewidth}
scale : {{"exponential", "linear", "area"}}, optional
Method to use for the width of the letter value boxes. All give similar
results visually. "linear" reduces the width by a constant linear
factor, "exponential" uses the proportion of data not covered, "area"
is proportional to the percentage of data covered.
outlier_prop : float, optional
Proportion of data believed to be outliers. Must be in the range
(0, 1]. Used to determine the number of boxes to plot when
`k_depth="proportion"`.
trust_alpha : float, optional
Confidence level for a box to be plotted. Used to determine the
number of boxes to plot when `k_depth="trustworthy"`. Must be in the
range (0, 1).
showfliers : bool, optional
If False, suppress the plotting of outliers.
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.plot` and
:meth:`matplotlib.axes.Axes.scatter`.
Returns
-------
{ax_out}
See Also
--------
{violinplot}
{boxplot}
{catplot}
Examples
--------
Draw a single horizontal boxen plot:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="whitegrid")
>>> tips = sns.load_dataset("tips")
>>> ax = sns.boxenplot(x=tips["total_bill"])
Draw a vertical boxen plot grouped by a categorical variable:
.. plot::
:context: close-figs
>>> ax = sns.boxenplot(x="day", y="total_bill", data=tips)
Draw a letter value plot with nested grouping by two categorical variables:
.. plot::
:context: close-figs
>>> ax = sns.boxenplot(x="day", y="total_bill", hue="smoker",
... data=tips, palette="Set3")
Draw a boxen plot with nested grouping when some bins are empty:
.. plot::
:context: close-figs
>>> ax = sns.boxenplot(x="day", y="total_bill", hue="time",
... data=tips, linewidth=2.5)
Control box order by passing an explicit order:
.. plot::
:context: close-figs
>>> ax = sns.boxenplot(x="time", y="tip", data=tips,
... order=["Dinner", "Lunch"])
Draw a boxen plot for each numeric variable in a DataFrame:
.. plot::
:context: close-figs
>>> iris = sns.load_dataset("iris")
>>> ax = sns.boxenplot(data=iris, orient="h", palette="Set2")
Use :func:`stripplot` to show the datapoints on top of the boxes:
.. plot::
:context: close-figs
>>> ax = sns.boxenplot(x="day", y="total_bill", data=tips,
... showfliers=False)
>>> ax = sns.stripplot(x="day", y="total_bill", data=tips,
... size=4, color=".26")
Use :func:`catplot` to combine :func:`boxenplot` and a :class:`FacetGrid`.
This allows grouping within additional categorical variables. Using
:func:`catplot` is safer than using :class:`FacetGrid` directly, as it
ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="sex", y="total_bill",
... hue="smoker", col="time",
... data=tips, kind="boxen",
... height=4, aspect=.7);
""").format(**_categorical_docs)
@_deprecate_positional_args
def stripplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    jitter=True, dodge=False, orient=None, color=None, palette=None,
    size=5, edgecolor="gray", linewidth=0, ax=None,
    hue_norm=None, fixed_scale=True, formatter=None,
    **kwargs
):
    # XXX we need to add a legend= param!!!
    # locals() must contain only the signature names here so that
    # get_semantics can pick out the semantic variables.
    plotter = _CategoricalPlotterNew(
        data=data,
        variables=_CategoricalPlotterNew.get_semantics(locals()),
        order=order,
        orient=orient,
        require_numeric=False,
    )
    ax = plt.gca() if ax is None else ax
    # Place categories at fixed ordinal positions unless told otherwise.
    if fixed_scale or plotter.var_types[plotter.cat_axis] == "categorical":
        plotter.scale_categorical(plotter.cat_axis, order=order, formatter=formatter)
    plotter._attach(ax)
    palette, hue_order = plotter._hue_backcompat(color, palette, hue_order)
    color = _default_color(ax.scatter, hue, color, kwargs)
    plotter.map_hue(palette=palette, order=hue_order, norm=hue_norm)
    # XXX Copying possibly bad default decisions from original code for now
    kwargs.setdefault("zorder", 3)
    size = kwargs.get("s", size)
    # matplotlib's scatter takes marker area, so square the radius.
    kwargs["s"] = size ** 2
    kwargs["edgecolor"] = edgecolor
    kwargs["linewidth"] = linewidth
    plotter.plot_strips(
        jitter=jitter,
        dodge=dodge,
        color=color,
        edgecolor=edgecolor,
        plot_kws=kwargs,
    )
    # XXX this happens inside a plotting method in the distribution plots
    # but maybe it's better out here? Alternatively, we have an open issue
    # suggesting that _attach could add default axes labels, which seems smart.
    plotter._add_axis_labels(ax)
    plotter._adjust_cat_axis(ax, axis=plotter.cat_axis)
    return ax
stripplot.__doc__ = dedent("""\
Draw a scatterplot where one variable is categorical.
A strip plot can be drawn on its own, but it is also a good complement
to a box or violin plot in cases where you want to show all observations
along with some representation of the underlying distribution.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
jitter : float, ``True``/``1`` is special-cased, optional
Amount of jitter (only along the categorical axis) to apply. This
can be useful when you have many points and they overlap, so that
it is easier to see the distribution. You can specify the amount
of jitter (half the width of the uniform random variable support),
or just use ``True`` for a good default.
dodge : bool, optional
When using ``hue`` nesting, setting this to ``True`` will separate
the strips for different hue levels along the categorical axis.
Otherwise, the points for each level will be plotted on top of
each other.
{orient}
{color}
{palette}
size : float, optional
Radius of the markers, in points.
edgecolor : matplotlib color, "gray" is special-cased, optional
Color of the lines around each point. If you pass ``"gray"``, the
brightness is determined by the color palette used for the body
of the points.
{linewidth}
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.scatter`.
Returns
-------
{ax_out}
See Also
--------
{swarmplot}
{boxplot}
{violinplot}
{catplot}
Examples
--------
.. include:: ../docstrings/stripplot.rst
""").format(**_categorical_docs)
@_deprecate_positional_args
def swarmplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    dodge=False, orient=None, color=None, palette=None,
    size=5, edgecolor="gray", linewidth=0, ax=None,
    hue_norm=None, fixed_scale=True, formatter=None, warn_thresh=.05,
    **kwargs
):
    # locals() must contain only the signature names here so that
    # get_semantics can pick out the semantic variables.
    plotter = _CategoricalPlotterNew(
        data=data,
        variables=_CategoricalPlotterNew.get_semantics(locals()),
        order=order,
        orient=orient,
        require_numeric=False,
    )
    ax = plt.gca() if ax is None else ax
    # Place categories at fixed ordinal positions unless told otherwise.
    if fixed_scale or plotter.var_types[plotter.cat_axis] == "categorical":
        plotter.scale_categorical(plotter.cat_axis, order=order, formatter=formatter)
    plotter._attach(ax)
    # Nothing to draw without both coordinate variables.
    if not plotter.has_xy_data:
        return ax
    palette, hue_order = plotter._hue_backcompat(color, palette, hue_order)
    color = _default_color(ax.scatter, hue, color, kwargs)
    plotter.map_hue(palette=palette, order=hue_order, norm=hue_norm)
    # XXX Copying possibly bad default decisions from original code for now
    kwargs.setdefault("zorder", 3)
    size = kwargs.get("s", size)
    if linewidth is None:
        linewidth = size / 10
    # matplotlib's scatter takes marker area, so square the radius.
    kwargs["s"] = size ** 2
    kwargs["linewidth"] = linewidth
    plotter.plot_swarms(
        dodge=dodge,
        color=color,
        edgecolor=edgecolor,
        warn_thresh=warn_thresh,
        plot_kws=kwargs,
    )
    # XXX this happens inside a plotting method in the distribution plots
    # but maybe it's better out here? Alternatively, we have an open issue
    # suggesting that _attach could add default axes labels, which seems smart.
    plotter._add_axis_labels(ax)
    plotter._adjust_cat_axis(ax, axis=plotter.cat_axis)
    return ax
swarmplot.__doc__ = dedent("""\
Draw a categorical scatterplot with non-overlapping points.
This function is similar to :func:`stripplot`, but the points are adjusted
(only along the categorical axis) so that they don't overlap. This gives a
better representation of the distribution of values, but it does not scale
well to large numbers of observations. This style of plot is sometimes
called a "beeswarm".
A swarm plot can be drawn on its own, but it is also a good complement
to a box or violin plot in cases where you want to show all observations
along with some representation of the underlying distribution.
Arranging the points properly requires an accurate transformation between
data and point coordinates. This means that non-default axis limits must
be set *before* drawing the plot.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
dodge : bool, optional
When using ``hue`` nesting, setting this to ``True`` will separate
the strips for different hue levels along the categorical axis.
Otherwise, the points for each level will be plotted in one swarm.
{orient}
{color}
{palette}
size : float, optional
Radius of the markers, in points.
edgecolor : matplotlib color, "gray" is special-cased, optional
Color of the lines around each point. If you pass ``"gray"``, the
brightness is determined by the color palette used for the body
of the points.
{linewidth}
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.scatter`.
Returns
-------
{ax_out}
See Also
--------
{boxplot}
{violinplot}
{stripplot}
{catplot}
Examples
--------
.. include:: ../docstrings/swarmplot.rst
""").format(**_categorical_docs)
@_deprecate_positional_args
def barplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    estimator=np.mean, ci=95, n_boot=1000, units=None, seed=None,
    orient=None, color=None, palette=None, saturation=.75,
    errcolor=".26", errwidth=None, capsize=None, dodge=True,
    ax=None,
    **kwargs,
):
    # Estimation (bootstrap CIs etc.) and drawing live in the plotter.
    bar_plotter = _BarPlotter(x, y, hue, data, order, hue_order,
                              estimator, ci, n_boot, units, seed,
                              orient, color, palette, saturation,
                              errcolor, errwidth, capsize, dodge)
    # Fall back to the current Axes when none was provided.
    ax = plt.gca() if ax is None else ax
    # Extra keyword arguments are forwarded to matplotlib's bar artist.
    bar_plotter.plot(ax, kwargs)
    return ax
barplot.__doc__ = dedent("""\
Show point estimates and confidence intervals as rectangular bars.
A bar plot represents an estimate of central tendency for a numeric
variable with the height of each rectangle and provides some indication of
the uncertainty around that estimate using error bars. Bar plots include 0
in the quantitative axis range, and they are a good choice when 0 is a
meaningful value for the quantitative variable, and you want to make
comparisons against it.
For datasets where 0 is not a meaningful value, a point plot will allow you
to focus on differences between levels of one or more categorical
variables.
It is also important to keep in mind that a bar plot shows only the mean
(or other estimator) value, but in many cases it may be more informative to
show the distribution of values at each level of the categorical variables.
In that case, other approaches such as a box or violin plot may be more
appropriate.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
{stat_api_params}
{orient}
{color}
{palette}
{saturation}
errcolor : matplotlib color
Color for the lines that represent the confidence interval.
{errwidth}
{capsize}
{dodge}
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.bar`.
Returns
-------
{ax_out}
See Also
--------
{countplot}
{pointplot}
{catplot}
Examples
--------
Draw a set of vertical bar plots grouped by a categorical variable:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="whitegrid")
>>> tips = sns.load_dataset("tips")
>>> ax = sns.barplot(x="day", y="total_bill", data=tips)
Draw a set of vertical bars with nested grouping by a two variables:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="day", y="total_bill", hue="sex", data=tips)
Draw a set of horizontal bars:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="tip", y="day", data=tips)
Control bar order by passing an explicit order:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="time", y="tip", data=tips,
... order=["Dinner", "Lunch"])
Use median as the estimate of central tendency:
.. plot::
:context: close-figs
>>> from numpy import median
>>> ax = sns.barplot(x="day", y="tip", data=tips, estimator=median)
Show the standard error of the mean with the error bars:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="day", y="tip", data=tips, ci=68)
Show standard deviation of observations instead of a confidence interval:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="day", y="tip", data=tips, ci="sd")
Add "caps" to the error bars:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="day", y="tip", data=tips, capsize=.2)
Use a different color palette for the bars:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="size", y="total_bill", data=tips,
... palette="Blues_d")
Use ``hue`` without changing bar position or width:
.. plot::
:context: close-figs
>>> tips["weekend"] = tips["day"].isin(["Sat", "Sun"])
>>> ax = sns.barplot(x="day", y="total_bill", hue="weekend",
... data=tips, dodge=False)
Plot all bars in a single color:
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="size", y="total_bill", data=tips,
... color="salmon", saturation=.5)
Use :meth:`matplotlib.axes.Axes.bar` parameters to control the style.
.. plot::
:context: close-figs
>>> ax = sns.barplot(x="day", y="total_bill", data=tips,
... linewidth=2.5, facecolor=(1, 1, 1, 0),
... errcolor=".2", edgecolor=".2")
Use :func:`catplot` to combine a :func:`barplot` and a :class:`FacetGrid`.
This allows grouping within additional categorical variables. Using
:func:`catplot` is safer than using :class:`FacetGrid` directly, as it
ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="sex", y="total_bill",
... hue="smoker", col="time",
... data=tips, kind="bar",
... height=4, aspect=.7);
""").format(**_categorical_docs)
@_deprecate_positional_args
def pointplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    estimator=np.mean, ci=95, n_boot=1000, units=None, seed=None,
    markers="o", linestyles="-", dodge=False, join=True, scale=1,
    orient=None, color=None, palette=None, errwidth=None,
    capsize=None, ax=None,
    **kwargs
):
    # Estimation and drawing are delegated to the point plotter.
    point_plotter = _PointPlotter(x, y, hue, data, order, hue_order,
                                  estimator, ci, n_boot, units, seed,
                                  markers, linestyles, dodge, join, scale,
                                  orient, color, palette, errwidth, capsize)
    # Fall back to the current Axes when none was provided.
    ax = plt.gca() if ax is None else ax
    # NOTE(review): **kwargs is accepted but not forwarded to the plotter —
    # confirm this matches the intended API.
    point_plotter.plot(ax)
    return ax
pointplot.__doc__ = dedent("""\
Show point estimates and confidence intervals using scatter plot glyphs.
A point plot represents an estimate of central tendency for a numeric
variable by the position of scatter plot points and provides some
indication of the uncertainty around that estimate using error bars.
Point plots can be more useful than bar plots for focusing comparisons
between different levels of one or more categorical variables. They are
particularly adept at showing interactions: how the relationship between
levels of one categorical variable changes across levels of a second
categorical variable. The lines that join each point from the same ``hue``
level allow interactions to be judged by differences in slope, which is
easier for the eyes than comparing the heights of several groups of points
or bars.
It is important to keep in mind that a point plot shows only the mean (or
other estimator) value, but in many cases it may be more informative to
show the distribution of values at each level of the categorical variables.
In that case, other approaches such as a box or violin plot may be more
appropriate.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
{stat_api_params}
markers : string or list of strings, optional
Markers to use for each of the ``hue`` levels.
linestyles : string or list of strings, optional
Line styles to use for each of the ``hue`` levels.
dodge : bool or float, optional
Amount to separate the points for each level of the ``hue`` variable
along the categorical axis.
join : bool, optional
If ``True``, lines will be drawn between point estimates at the same
``hue`` level.
scale : float, optional
Scale factor for the plot elements.
{orient}
{color}
{palette}
{errwidth}
{capsize}
{ax_in}
Returns
-------
{ax_out}
See Also
--------
{barplot}
{catplot}
Examples
--------
Draw a set of vertical point plots grouped by a categorical variable:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="darkgrid")
>>> tips = sns.load_dataset("tips")
>>> ax = sns.pointplot(x="time", y="total_bill", data=tips)
    Draw a set of vertical points with nested grouping by two variables:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="total_bill", hue="smoker",
... data=tips)
Separate the points for different hue levels along the categorical axis:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="total_bill", hue="smoker",
... data=tips, dodge=True)
Use a different marker and line style for the hue levels:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="total_bill", hue="smoker",
... data=tips,
... markers=["o", "x"],
... linestyles=["-", "--"])
Draw a set of horizontal points:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="tip", y="day", data=tips)
Don't draw a line connecting each point:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="tip", y="day", data=tips, join=False)
Use a different color for a single-layer plot:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="total_bill", data=tips,
... color="#bb3f3f")
Use a different color palette for the points:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="total_bill", hue="smoker",
... data=tips, palette="Set2")
Control point order by passing an explicit order:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="time", y="tip", data=tips,
... order=["Dinner", "Lunch"])
Use median as the estimate of central tendency:
.. plot::
:context: close-figs
>>> from numpy import median
>>> ax = sns.pointplot(x="day", y="tip", data=tips, estimator=median)
Show the standard error of the mean with the error bars:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="day", y="tip", data=tips, ci=68)
Show standard deviation of observations instead of a confidence interval:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="day", y="tip", data=tips, ci="sd")
Add "caps" to the error bars:
.. plot::
:context: close-figs
>>> ax = sns.pointplot(x="day", y="tip", data=tips, capsize=.2)
Use :func:`catplot` to combine a :func:`pointplot` and a
:class:`FacetGrid`. This allows grouping within additional categorical
variables. Using :func:`catplot` is safer than using :class:`FacetGrid`
directly, as it ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="sex", y="total_bill",
... hue="smoker", col="time",
... data=tips, kind="point",
... dodge=True,
... height=4, aspect=.7);
""").format(**_categorical_docs)
@_deprecate_positional_args
def countplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    order=None, hue_order=None,
    orient=None, color=None, palette=None, saturation=.75,
    dodge=True, ax=None, **kwargs
):
    # A count plot is a bar plot whose estimator is ``len`` and which has no
    # error bars, so every statistics-related plotter argument is pinned off.
    if x is not None and y is not None:
        raise ValueError("Cannot pass values for both `x` and `y`")
    if x is None and y is not None:
        # Horizontal bars: mirror the single variable onto both roles.
        orient, x = "h", y
    elif y is None and x is not None:
        # Vertical bars.
        orient, y = "v", x
    plotter = _CountPlotter(
        x, y, hue, data, order, hue_order,
        len, None, 0, None, None,  # estimator=len; ci/n_boot/units/seed disabled
        orient, color, palette, saturation,
        None, None, None, dodge,  # no errcolor/errwidth/capsize
    )
    plotter.value_label = "count"
    # Draw on the currently-active axes unless the caller supplied one.
    target_ax = plt.gca() if ax is None else ax
    plotter.plot(target_ax, kwargs)
    return target_ax
countplot.__doc__ = dedent("""\
Show the counts of observations in each categorical bin using bars.
A count plot can be thought of as a histogram across a categorical, instead
of quantitative, variable. The basic API and options are identical to those
for :func:`barplot`, so you can compare counts across nested variables.
{main_api_narrative}
{categorical_narrative}
Parameters
----------
{input_params}
{categorical_data}
{order_vars}
{orient}
{color}
{palette}
{saturation}
{dodge}
{ax_in}
kwargs : key, value mappings
Other keyword arguments are passed through to
:meth:`matplotlib.axes.Axes.bar`.
Returns
-------
{ax_out}
See Also
--------
{barplot}
{catplot}
Examples
--------
Show value counts for a single categorical variable:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="darkgrid")
>>> titanic = sns.load_dataset("titanic")
>>> ax = sns.countplot(x="class", data=titanic)
Show value counts for two categorical variables:
.. plot::
:context: close-figs
>>> ax = sns.countplot(x="class", hue="who", data=titanic)
Plot the bars horizontally:
.. plot::
:context: close-figs
>>> ax = sns.countplot(y="class", hue="who", data=titanic)
Use a different color palette:
.. plot::
:context: close-figs
>>> ax = sns.countplot(x="who", data=titanic, palette="Set3")
Use :meth:`matplotlib.axes.Axes.bar` parameters to control the style.
.. plot::
:context: close-figs
>>> ax = sns.countplot(x="who", data=titanic,
... facecolor=(0, 0, 0, 0),
... linewidth=5,
... edgecolor=sns.color_palette("dark", 3))
Use :func:`catplot` to combine a :func:`countplot` and a
:class:`FacetGrid`. This allows grouping within additional categorical
variables. Using :func:`catplot` is safer than using :class:`FacetGrid`
directly, as it ensures synchronization of variable order across facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="class", hue="who", col="survived",
... data=titanic, kind="count",
... height=4, aspect=.7);
""").format(**_categorical_docs)
def factorplot(*args, **kwargs):
    """Deprecated; please use `catplot` instead.

    Thin compatibility wrapper: warns about the rename, maps the old
    ``size`` parameter to ``height``, restores the old default
    ``kind="point"``, and forwards everything else to :func:`catplot`.
    """
    msg = (
        "The `factorplot` function has been renamed to `catplot`. The "
        "original name will be removed in a future release. Please update "
        "your code. Note that the default `kind` in `factorplot` (`'point'`) "
        # Bug fix: message previously read "has changed `'strip'`" (missing "to").
        "has changed to `'strip'` in `catplot`."
    )
    # Pass the category explicitly, consistent with the warning below.
    warnings.warn(msg, UserWarning)
    if "size" in kwargs:
        kwargs["height"] = kwargs.pop("size")
        msg = ("The `size` parameter has been renamed to `height`; "
               "please update your code.")
        warnings.warn(msg, UserWarning)
    kwargs.setdefault("kind", "point")
    return catplot(*args, **kwargs)
@_deprecate_positional_args
def catplot(
    *,
    x=None, y=None,
    hue=None, data=None,
    row=None, col=None,  # TODO move in front of data when * is enforced
    col_wrap=None, estimator=np.mean, ci=95, n_boot=1000,
    units=None, seed=None, order=None, hue_order=None, row_order=None,
    col_order=None, kind="strip", height=5, aspect=1,
    orient=None, color=None, palette=None,
    legend=True, legend_out=True, sharex=True, sharey=True,
    margin_titles=False, facet_kws=None,
    hue_norm=None, fixed_scale=True, formatter=None,
    **kwargs
):
    """Figure-level interface for categorical plots (full docs assigned below).

    Dispatches on ``kind`` to an axes-level function and draws the result on
    a :class:`FacetGrid`. The "strip" and "swarm" kinds go through the
    refactored ``_CategoricalFacetPlotter`` path; all other kinds use the
    legacy ``_*Plotter`` classes with ``FacetGrid.map_dataframe``.
    """
    # Handle deprecations
    if "size" in kwargs:
        height = kwargs.pop("size")
        msg = ("The `size` parameter has been renamed to `height`; "
               "please update your code.")
        warnings.warn(msg, UserWarning)
    # Determine the plotting function
    # (kind="bar" resolves to the module-level `barplot`, etc.)
    try:
        plot_func = globals()[kind + "plot"]
    except KeyError:
        err = "Plot kind '{}' is not recognized".format(kind)
        raise ValueError(err)
    # Check for attempt to plot onto specific axes and warn
    if "ax" in kwargs:
        msg = ("catplot is a figure-level function and does not accept "
               f"target axes. You may wish to try {kind}plot")
        warnings.warn(msg, UserWarning)
        kwargs.pop("ax")
    refactored_kinds = [
        "strip", "swarm",
    ]
    if kind in refactored_kinds:
        # NOTE: get_semantics(locals()) reads this function's local variables;
        # the plotter is constructed before any of them are reassigned below.
        p = _CategoricalFacetPlotter(
            data=data,
            variables=_CategoricalFacetPlotter.get_semantics(locals()),
            order=order,
            orient=orient,
            require_numeric=False,
        )
        # XXX Copying a fair amount from displot, which is not ideal
        for var in ["row", "col"]:
            # Handle faceting variables that lack name information
            if var in p.variables and p.variables[var] is None:
                p.variables[var] = f"_{var}_"
        # Adapt the plot_data dataframe for use with FacetGrid
        data = p.plot_data.rename(columns=p.variables)
        data = data.loc[:, ~data.columns.duplicated()]
        col_name = p.variables.get("col", None)
        row_name = p.variables.get("row", None)
        if facet_kws is None:
            facet_kws = {}
        g = FacetGrid(
            data=data, row=row_name, col=col_name,
            col_wrap=col_wrap, row_order=row_order,
            col_order=col_order, height=height,
            sharex=sharex, sharey=sharey,
            aspect=aspect,
            **facet_kws,
        )
        if fixed_scale or p.var_types[p.cat_axis] == "categorical":
            p.scale_categorical(p.cat_axis, order=order, formatter=formatter)
        p._attach(g)
        # Nothing to draw without both coordinate variables.
        if not p.has_xy_data:
            return g
        palette, hue_order = p._hue_backcompat(color, palette, hue_order)
        p.map_hue(palette=palette, order=hue_order, norm=hue_norm)
        if kind == "strip":
            # TODO get these defaults programmatically?
            jitter = kwargs.pop("jitter", True)
            dodge = kwargs.pop("dodge", False)
            edgecolor = kwargs.pop("edgecolor", "gray")  # XXX TODO default
            plot_kws = kwargs.copy()
            # XXX Copying possibly bad default decisions from original code for now
            plot_kws.setdefault("zorder", 3)
            plot_kws.setdefault("s", 25)
            plot_kws.setdefault("linewidth", 0)
            p.plot_strips(
                jitter=jitter,
                dodge=dodge,
                color=color,
                edgecolor=edgecolor,
                plot_kws=plot_kws,
            )
        elif kind == "swarm":
            # TODO get these defaults programmatically?
            dodge = kwargs.pop("dodge", False)
            edgecolor = kwargs.pop("edgecolor", "gray")  # XXX TODO default
            warn_thresh = kwargs.pop("warn_thresh", .05)
            plot_kws = kwargs.copy()
            # XXX Copying possibly bad default decisions from original code for now
            plot_kws.setdefault("zorder", 3)
            plot_kws.setdefault("s", 25)
            if plot_kws.setdefault("linewidth", 0) is None:
                plot_kws["linewidth"] = np.sqrt(plot_kws["s"]) / 10
            p.plot_swarms(
                dodge=dodge,
                color=color,
                edgecolor=edgecolor,
                warn_thresh=warn_thresh,
                plot_kws=plot_kws,
            )
        # XXX best way to do this housekeeping?
        for ax in g.axes.flat:
            p._adjust_cat_axis(ax, axis=p.cat_axis)
        g.set_axis_labels(
            p.variables.get("x", None),
            p.variables.get("y", None),
        )
        g.set_titles()
        g.tight_layout()
        # XXX Hack to get the legend data in the right place
        for ax in g.axes.flat:
            g._update_legend_data(ax)
            ax.legend_ = None
        if legend and (hue is not None) and (hue not in [x, row, col]):
            g.add_legend(title=hue, label_order=hue_order)
        return g
    # ---- Legacy path for box/violin/boxen/bar/point/count kinds ----
    # Alias the input variables to determine categorical order and palette
    # correctly in the case of a count plot
    if kind == "count":
        if x is None and y is not None:
            x_, y_, orient = y, y, "h"
        elif y is None and x is not None:
            x_, y_, orient = x, x, "v"
        else:
            raise ValueError("Either `x` or `y` must be None for kind='count'")
    else:
        x_, y_ = x, y
    # Determine the order for the whole dataset, which will be used in all
    # facets to ensure representation of all data in the final plot
    plotter_class = {
        "box": _BoxPlotter,
        "violin": _ViolinPlotter,
        "boxen": _LVPlotter,
        "bar": _BarPlotter,
        "point": _PointPlotter,
        "count": _CountPlotter,
    }[kind]
    p = _CategoricalPlotter()
    p.require_numeric = plotter_class.require_numeric
    p.establish_variables(x_, y_, hue, data, orient, order, hue_order)
    if (
        order is not None
        or (sharex and p.orient == "v")
        or (sharey and p.orient == "h")
    ):
        # Sync categorical axis between facets to have the same categories
        order = p.group_names
    elif color is None and hue is None:
        msg = (
            "Setting `{}=False` with `color=None` may cause different levels of the "
            "`{}` variable to share colors. This will change in a future version."
        )
        if not sharex and p.orient == "v":
            warnings.warn(msg.format("sharex", "x"), UserWarning)
        if not sharey and p.orient == "h":
            warnings.warn(msg.format("sharey", "y"), UserWarning)
    hue_order = p.hue_names
    # Determine the palette to use
    # (FacetGrid will pass a value for ``color`` to the plotting function
    # so we need to define ``palette`` to get default behavior for the
    # categorical functions
    p.establish_colors(color, palette, 1)
    if (
        (kind != "point" or hue is not None)
        # XXX changing this to temporarily support bad sharex=False behavior where
        # cat variables could take different colors, which we already warned
        # about "breaking" (aka fixing) in the future
        and ((sharex and p.orient == "v") or (sharey and p.orient == "h"))
    ):
        if p.hue_names is None:
            palette = dict(zip(p.group_names, p.colors))
        else:
            palette = dict(zip(p.hue_names, p.colors))
    # Determine keyword arguments for the facets
    facet_kws = {} if facet_kws is None else facet_kws
    facet_kws.update(
        data=data, row=row, col=col,
        row_order=row_order, col_order=col_order,
        col_wrap=col_wrap, height=height, aspect=aspect,
        sharex=sharex, sharey=sharey,
        legend_out=legend_out, margin_titles=margin_titles,
        dropna=False,
    )
    # Determine keyword arguments for the plotting function
    plot_kws = dict(
        order=order, hue_order=hue_order,
        orient=orient, color=color, palette=palette,
    )
    plot_kws.update(kwargs)
    # Only the estimate-based kinds accept the statistical parameters.
    if kind in ["bar", "point"]:
        plot_kws.update(
            estimator=estimator, ci=ci, n_boot=n_boot, units=units, seed=seed,
        )
    # Initialize the facets
    g = FacetGrid(**facet_kws)
    # Draw the plot onto the facets
    g.map_dataframe(plot_func, x=x, y=y, hue=hue, **plot_kws)
    if p.orient == "h":
        g.set_axis_labels(p.value_label, p.group_label)
    else:
        g.set_axis_labels(p.group_label, p.value_label)
    # Special case axis labels for a count type plot
    if kind == "count":
        if x is None:
            g.set_axis_labels(x_var="count")
        if y is None:
            g.set_axis_labels(y_var="count")
    if legend and (hue is not None) and (hue not in [x, row, col]):
        hue_order = list(map(utils.to_utf8, hue_order))
        g.add_legend(title=hue, label_order=hue_order)
    return g
catplot.__doc__ = dedent("""\
Figure-level interface for drawing categorical plots onto a FacetGrid.
This function provides access to several axes-level functions that
show the relationship between a numerical and one or more categorical
variables using one of several visual representations. The ``kind``
parameter selects the underlying axes-level function to use:
Categorical scatterplots:
- :func:`stripplot` (with ``kind="strip"``; the default)
- :func:`swarmplot` (with ``kind="swarm"``)
Categorical distribution plots:
- :func:`boxplot` (with ``kind="box"``)
- :func:`violinplot` (with ``kind="violin"``)
- :func:`boxenplot` (with ``kind="boxen"``)
Categorical estimate plots:
- :func:`pointplot` (with ``kind="point"``)
- :func:`barplot` (with ``kind="bar"``)
- :func:`countplot` (with ``kind="count"``)
Extra keyword arguments are passed to the underlying function, so you
should refer to the documentation for each to see kind-specific options.
Note that unlike when using the axes-level functions directly, data must be
passed in a long-form DataFrame with variables specified by passing strings
to ``x``, ``y``, ``hue``, etc.
As in the case with the underlying plot functions, if variables have a
``categorical`` data type, the levels of the categorical variables, and
their order will be inferred from the objects. Otherwise you may have to
    alter the dataframe sorting or use the function parameters (``orient``,
``order``, ``hue_order``, etc.) to set up the plot correctly.
{categorical_narrative}
After plotting, the :class:`FacetGrid` with the plot is returned and can
be used directly to tweak supporting plot details or add other layers.
Parameters
----------
{string_input_params}
{long_form_data}
row, col : names of variables in ``data``, optional
Categorical variables that will determine the faceting of the grid.
{col_wrap}
{stat_api_params}
{order_vars}
row_order, col_order : lists of strings, optional
Order to organize the rows and/or columns of the grid in, otherwise the
orders are inferred from the data objects.
kind : str, optional
The kind of plot to draw, corresponds to the name of a categorical
axes-level plotting function. Options are: "strip", "swarm", "box", "violin",
"boxen", "point", "bar", or "count".
{height}
{aspect}
{orient}
{color}
{palette}
legend : bool, optional
If ``True`` and there is a ``hue`` variable, draw a legend on the plot.
{legend_out}
{share_xy}
{margin_titles}
facet_kws : dict, optional
Dictionary of other keyword arguments to pass to :class:`FacetGrid`.
kwargs : key, value pairings
Other keyword arguments are passed through to the underlying plotting
function.
Returns
-------
g : :class:`FacetGrid`
Returns the :class:`FacetGrid` object with the plot on it for further
tweaking.
Examples
--------
Draw a single facet to use the :class:`FacetGrid` legend placement:
.. plot::
:context: close-figs
>>> import seaborn as sns
>>> sns.set_theme(style="ticks")
>>> exercise = sns.load_dataset("exercise")
>>> g = sns.catplot(x="time", y="pulse", hue="kind", data=exercise)
Use a different plot kind to visualize the same data:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="time", y="pulse", hue="kind",
... data=exercise, kind="violin")
Facet along the columns to show a third categorical variable:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="time", y="pulse", hue="kind",
... col="diet", data=exercise)
Use a different height and aspect ratio for the facets:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="time", y="pulse", hue="kind",
... col="diet", data=exercise,
... height=5, aspect=.8)
Make many column facets and wrap them into the rows of the grid:
.. plot::
:context: close-figs
>>> titanic = sns.load_dataset("titanic")
>>> g = sns.catplot(x="alive", col="deck", col_wrap=4,
... data=titanic[titanic.deck.notnull()],
... kind="count", height=2.5, aspect=.8)
Plot horizontally and pass other keyword arguments to the plot function:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="age", y="embark_town",
... hue="sex", row="class",
... data=titanic[titanic.embark_town.notnull()],
... orient="h", height=2, aspect=3, palette="Set3",
... kind="violin", dodge=True, cut=0, bw=.2)
Use methods on the returned :class:`FacetGrid` to tweak the presentation:
.. plot::
:context: close-figs
>>> g = sns.catplot(x="who", y="survived", col="class",
... data=titanic, saturation=.5,
... kind="bar", ci=None, aspect=.6)
>>> (g.set_axis_labels("", "Survival Rate")
... .set_xticklabels(["Men", "Women", "Children"])
... .set_titles("{{col_name}} {{col_var}}")
... .set(ylim=(0, 1))
... .despine(left=True)) #doctest: +ELLIPSIS
<seaborn.axisgrid.FacetGrid object at 0x...>
""").format(**_categorical_docs)
class Beeswarm:
    """Modifies a scatterplot artist to show a beeswarm plot."""
    def __init__(self, orient="v", width=0.8, warn_thresh=.05):
        # XXX should we keep the orient parameterization or specify the swarm axis?
        # orient: "v" swarms along x, "h" swarms along y.
        # width: territory (in data units) the swarm may occupy around its center.
        # warn_thresh: fraction of gutter-clipped points that triggers a warning.
        self.orient = orient
        self.width = width
        self.warn_thresh = warn_thresh
    def __call__(self, points, center):
        """Swarm `points`, a PathCollection, around the `center` position."""
        # Convert from point size (area) to diameter
        ax = points.axes
        dpi = ax.figure.dpi
        # Get the original positions of the points
        orig_xy_data = points.get_offsets()
        # Reset the categorical positions to the center line
        # (mutates the offsets array in place).
        cat_idx = 1 if self.orient == "h" else 0
        orig_xy_data[:, cat_idx] = center
        # Transform the data coordinates to point coordinates.
        # We'll figure out the swarm positions in the latter
        # and then convert back to data coordinates and replot
        orig_x_data, orig_y_data = orig_xy_data.T
        orig_xy = ax.transData.transform(orig_xy_data)
        # Order the variables so that x is the categorical axis
        if self.orient == "h":
            orig_xy = orig_xy[:, [1, 0]]
        # Add a column with each point's radius
        sizes = points.get_sizes()
        if sizes.size == 1:
            # A scalar size applies to every point; broadcast it.
            sizes = np.repeat(sizes, orig_xy.shape[0])
        edge = points.get_linewidth().item()
        # dpi / 72 converts typographic points to pixels.
        radii = (np.sqrt(sizes) + edge) / 2 * (dpi / 72)
        orig_xy = np.c_[orig_xy, radii]
        # Sort along the value axis to facilitate the beeswarm
        sorter = np.argsort(orig_xy[:, 1])
        orig_xyr = orig_xy[sorter]
        # Adjust points along the categorical axis to prevent overlaps
        new_xyr = np.empty_like(orig_xyr)
        new_xyr[sorter] = self.beeswarm(orig_xyr)
        # Transform the point coordinates back to data coordinates
        if self.orient == "h":
            new_xy = new_xyr[:, [1, 0]]
        else:
            new_xy = new_xyr[:, :2]
        new_x_data, new_y_data = ax.transData.inverted().transform(new_xy).T
        swarm_axis = {"h": "y", "v": "x"}[self.orient]
        log_scale = getattr(ax, f"get_{swarm_axis}scale")() == "log"
        # Add gutters
        # (clamps coordinates that stray beyond +/- width/2 of the center).
        if self.orient == "h":
            self.add_gutters(new_y_data, center, log_scale=log_scale)
        else:
            self.add_gutters(new_x_data, center, log_scale=log_scale)
        # Reposition the points so they do not overlap
        if self.orient == "h":
            points.set_offsets(np.c_[orig_x_data, new_y_data])
        else:
            points.set_offsets(np.c_[new_x_data, orig_y_data])
    def beeswarm(self, orig_xyr):
        """Adjust x position of points to avoid overlaps."""
        # In this method, `x` is always the categorical axis
        # Center of the swarm, in point coordinates
        midline = orig_xyr[0, 0]
        # Start the swarm with the first point
        swarm = np.atleast_2d(orig_xyr[0])
        # Loop over the remaining points
        for xyr_i in orig_xyr[1:]:
            # Find the points in the swarm that could possibly
            # overlap with the point we are currently placing
            neighbors = self.could_overlap(xyr_i, swarm)
            # Find positions that would be valid individually
            # with respect to each of the swarm neighbors
            candidates = self.position_candidates(xyr_i, neighbors)
            # Sort candidates by their centrality
            offsets = np.abs(candidates[:, 0] - midline)
            candidates = candidates[np.argsort(offsets)]
            # Find the first candidate that does not overlap any neighbors
            new_xyr_i = self.first_non_overlapping_candidate(candidates, neighbors)
            # Place it into the swarm
            swarm = np.vstack([swarm, new_xyr_i])
        return swarm
    def could_overlap(self, xyr_i, swarm):
        """Return a list of all swarm points that could overlap with target."""
        # Because we work backwards through the swarm and can short-circuit,
        # the for-loop is faster than vectorization
        # (points arrive sorted by y, so once the vertical gap exceeds the
        # summed radii no earlier swarm point can overlap either).
        _, y_i, r_i = xyr_i
        neighbors = []
        for xyr_j in reversed(swarm):
            _, y_j, r_j = xyr_j
            if (y_i - y_j) < (r_i + r_j):
                neighbors.append(xyr_j)
            else:
                break
        return np.array(neighbors)[::-1]
    def position_candidates(self, xyr_i, neighbors):
        """Return a list of coordinates that might be valid by adjusting x."""
        candidates = [xyr_i]
        x_i, y_i, r_i = xyr_i
        # Alternate proposing left-of-neighbor and right-of-neighbor slots so
        # the swarm grows symmetrically around the midline.
        left_first = True
        for x_j, y_j, r_j in neighbors:
            dy = y_i - y_j
            # Horizontal offset that exactly achieves tangency, padded by 5%.
            dx = np.sqrt(max((r_i + r_j) ** 2 - dy ** 2, 0)) * 1.05
            cl, cr = (x_j - dx, y_i, r_i), (x_j + dx, y_i, r_i)
            if left_first:
                new_candidates = [cl, cr]
            else:
                new_candidates = [cr, cl]
            candidates.extend(new_candidates)
            left_first = not left_first
        return np.array(candidates)
    def first_non_overlapping_candidate(self, candidates, neighbors):
        """Find the first candidate that does not overlap with the swarm."""
        # If we have no neighbors, all candidates are good.
        if len(neighbors) == 0:
            return candidates[0]
        neighbors_x = neighbors[:, 0]
        neighbors_y = neighbors[:, 1]
        neighbors_r = neighbors[:, 2]
        for xyr_i in candidates:
            x_i, y_i, r_i = xyr_i
            dx = neighbors_x - x_i
            dy = neighbors_y - y_i
            sq_distances = np.square(dx) + np.square(dy)
            sep_needed = np.square(neighbors_r + r_i)
            # Good candidate does not overlap any of neighbors which means that
            # squared distance between candidate and any of the neighbors has
            # to be at least square of the summed radii
            good_candidate = np.all(sq_distances >= sep_needed)
            if good_candidate:
                return xyr_i
        raise RuntimeError(
            "No non-overlapping candidates found. This should not happen."
        )
    def add_gutters(self, points, center, log_scale=False):
        """Stop points from extending beyond their territory."""
        # Clamps `points` in place (and also returns it). On a log axis the
        # half-width is applied in log10 space.
        half_width = self.width / 2
        if log_scale:
            low_gutter = 10 ** (np.log10(center) - half_width)
        else:
            low_gutter = center - half_width
        off_low = points < low_gutter
        if off_low.any():
            points[off_low] = low_gutter
        if log_scale:
            high_gutter = 10 ** (np.log10(center) + half_width)
        else:
            high_gutter = center + half_width
        off_high = points > high_gutter
        if off_high.any():
            points[off_high] = high_gutter
        # Boolean "+" acts as elementwise OR here; warn if too many clipped.
        gutter_prop = (off_high + off_low).sum() / len(points)
        if gutter_prop > self.warn_thresh:
            msg = (
                "{:.1%} of the points cannot be placed; you may want "
                "to decrease the size of the markers or use stripplot."
            ).format(gutter_prop)
            warnings.warn(msg, UserWarning)
        return points
|
import subprocess
import sys
def check_language(language):
    """Validate the user-supplied language name.

    Prints a diagnostic and exits the interpreter when `language` is not one
    of the languages we have a sim checker executable for; returns None
    otherwise.
    """
    # Languages with a matching C:\sim_<lang>.exe checker (see below).
    accepted = ["c", "c++", "java", "pasc", "m2", "lisp", "mira", "8086"]
    if language not in accepted:
        print("language not accepted")
        # Bug fix: quit() exits with status 0 (success) and depends on the
        # `site` module; exit nonzero so callers/shells see the failure.
        sys.exit(1)
################
# Take user input
################
lang=input("Enter language")
check_language(lang)
files=[]
print("Enter name of file")
files.append(input())
##########################################
# Invoke the sim checker exe matching the language
##########################################
sim_string="C:\\sim_"+lang+".exe"  # Windows-only path, e.g. C:\sim_java.exe
temp=subprocess.check_output([sim_string,"-pe",files[0]])
###############################################
# Capture output and extract required percentage
###############################################
# Split the tool's stdout on whitespace, then walk backwards from the end
# collecting tokens until the word 'tokens' is reached; 'material' tokens are
# turned into newlines so each similarity result prints on its own line.
string=temp.decode("utf-8").split()
#print(string)
sim_ptr=len(string)-1
sim_results=''
# NOTE(review): if 'tokens' never appears in the output, sim_ptr walks past
# index 0 and this raises IndexError — confirm the tool always emits it.
while string[sim_ptr]!='tokens':
    if string[sim_ptr]!='material':
        sim_results=string[sim_ptr]+" "+sim_results
    else:
        sim_results='\n'+sim_results
    sim_ptr=sim_ptr-1
print(sim_results)
|
try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from libs.lib import newIcon, labelValidator
BB = QDialogButtonBox
class LabelDialog(QDialog):
    """Modal dialog for entering or picking an object label.

    The line edit autocompletes from ``listItem``; when ``listItem`` is
    non-empty the known labels are also shown in a list widget that can be
    chosen by double-click.
    """
    def __init__(self, text="Enter object label", parent=None, listItem=None):
        super(LabelDialog, self).__init__(parent)
        self.edit = QLineEdit()
        self.edit.setText(text)
        # Restrict input to whatever the shared label validator accepts.
        self.edit.setValidator(labelValidator())
        self.edit.editingFinished.connect(self.postProcess)
        self.listItem = listItem
        # Autocomplete the edit from the known label list.
        model = QStringListModel()
        model.setStringList(listItem)
        completer = QCompleter()
        completer.setModel(model)
        self.edit.setCompleter(completer)
        layout = QVBoxLayout()
        layout.addWidget(self.edit)
        # OK accepts via validate() (rejects empty text); Cancel rejects.
        self.buttonBox = bb = BB(BB.Ok | BB.Cancel, Qt.Horizontal, self)
        bb.button(BB.Ok).setIcon(newIcon('done'))
        bb.button(BB.Cancel).setIcon(newIcon('undo'))
        bb.accepted.connect(self.validate)
        bb.rejected.connect(self.reject)
        layout.addWidget(bb)
        # Optional list of existing labels, double-click to choose one.
        if listItem is not None and len(listItem) > 0:
            self.listWidget = QListWidget(self)
            for item in listItem:
                self.listWidget.addItem(item)
            self.listWidget.itemDoubleClicked.connect(self.listItemClick)
            layout.addWidget(self.listWidget)
        self.setLayout(layout)
    def select(self, index):
        # Highlight the row at `index` in the label list.
        self.listWidget.setCurrentRow(index)
        # print(self.listItem[index],index)
        # self.edit.setText(self.listItem[index])
    def validate(self):
        # Accept the dialog only when the trimmed text is non-empty.
        try:
            if self.edit.text().trimmed():
                self.accept()
        except AttributeError:
            # PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
            if self.edit.text().strip():
                self.accept()
    def postProcess(self):
        # Strip surrounding whitespace (PyQt4 QString.trimmed); under PyQt5
        # text() is a plain str and is re-set unchanged.
        try:
            self.edit.setText(self.edit.text().trimmed())
        except AttributeError:
            # PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
            self.edit.setText(self.edit.text())
    def popUp(self, text='', move=True):
        # Show the dialog (at the cursor position when `move` is True) and
        # return the entered label, or None when the dialog was cancelled.
        self.edit.setText(text)
        self.edit.setSelection(0, len(text))
        self.edit.setFocus(Qt.PopupFocusReason)
        if move:
            self.move(QCursor.pos())
        return self.edit.text() if self.exec_() else None
    def listItemClick(self, tQListWidgetItem):
        # Double-click handler: copy the chosen label into the edit and accept.
        try:
            text = tQListWidgetItem.text().trimmed()
        except AttributeError:
            # PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
            text = tQListWidgetItem.text().strip()
        self.edit.setText(text)
        self.validate()
|
class PartItemController():
    """Wires model-part signals to the matching slots on a part item.

    ``connections`` maps each signal name on the model part to the slot name
    on the part item; connectSignals/disconnectSignals walk that table.
    """
    connections = [
        ('partZDimensionsChangedSignal', 'partZDimensionsChangedSlot'),  # noqa
        ('partParentChangedSignal', 'partParentChangedSlot'),  # noqa
        ('partRemovedSignal', 'partRemovedSlot'),  # noqa
        ('partPropertyChangedSignal', 'partPropertyChangedSlot'),  # noqa
        ('partSelectedChangedSignal', 'partSelectedChangedSlot'),  # noqa
        ('partDocumentSettingChangedSignal', 'partDocumentSettingChangedSlot'),  # noqa
    ]

    def __init__(self, part_item, model_part):
        self._part_item = part_item
        self._model_part = model_part

    def connectSignals(self):
        """Connect every model-part signal to its part-item slot."""
        for signal_name, slot_name in self.connections:
            signal = getattr(self._model_part, signal_name)
            signal.connect(getattr(self._part_item, slot_name))

    def disconnectSignals(self):
        """Disconnect every model-part signal from its part-item slot."""
        for signal_name, slot_name in self.connections:
            signal = getattr(self._model_part, signal_name)
            signal.disconnect(getattr(self._part_item, slot_name))
|
class Solution:
    def romanToInt(self, s: str) -> int:
        """Convert a Roman numeral string to its integer value.

        A symbol written before a larger one is subtracted (e.g. "IV" is 4);
        otherwise symbols are added.
        """
        value_of = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
        total = 0
        for left, right in zip(s, s[1:]):
            if value_of[left] < value_of[right]:
                total -= value_of[left]
            else:
                total += value_of[left]
        # The final symbol is always additive.
        return total + (value_of[s[-1]] if s else 0)
|
import utils, torch, time, os, pickle
import numpy as np
import torch.nn as nn
import torch.optim as optim
from dataloader import dataloader
import copy
class generator(nn.Module):
    """Conditional generator, infoGAN-style (https://arxiv.org/abs/1606.03657).

    Architecture: FC1024_BR-FC7x7x128_BR-(64)4dc2s_BR-(1)4dc2s_S.
    Maps a (noise, label) pair — concatenated along the feature dimension —
    to an image tensor squashed to [-1, 1] by the final Tanh.
    """
    def __init__(self, input_dim=100, output_dim=1, input_size=32, class_num=10):
        super(generator, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.input_size = input_size
        self.class_num = class_num
        # Spatial side of the map fed to the deconv stack; each of the two
        # stride-2 transposed convolutions doubles it back to input_size.
        feat = self.input_size // 4
        self.fc = nn.Sequential(
            nn.Linear(self.input_dim + self.class_num, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(),
            nn.Linear(1024, 128 * feat * feat),
            nn.BatchNorm1d(128 * feat * feat),
            nn.ReLU(),
        )
        self.deconv = nn.Sequential(
            nn.ConvTranspose2d(128, 64, 4, 2, 1),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.ConvTranspose2d(64, self.output_dim, 4, 2, 1),
            nn.Tanh(),
        )
        utils.initialize_weights(self)

    def forward(self, input, label):
        # Condition on the label by concatenating it with the noise vector.
        joint = torch.cat([input, label], 1)
        hidden = self.fc(joint)
        side = self.input_size // 4
        # Reshape the flat features into a 128-channel spatial map.
        hidden = hidden.view(-1, 128, side, side)
        return self.deconv(hidden)
class generator_noise(nn.Module):
    """Lightweight conditional generator, infoGAN-style layout.

    Same structure as ``generator`` (https://arxiv.org/abs/1606.03657) but
    with much smaller widths: 256 FC units and 16/8 deconv channels.
    Output is squashed to [-1, 1] by the final Tanh.
    """
    def __init__(self, input_dim=100, output_dim=1, input_size=32, class_num=10):
        super(generator_noise, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.input_size = input_size
        self.class_num = class_num
        # Spatial side of the map fed to the deconv stack; two stride-2
        # transposed convolutions upsample it back to input_size.
        feat = self.input_size // 4
        self.fc = nn.Sequential(
            nn.Linear(self.input_dim + self.class_num, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(),
            nn.Linear(256, 16 * feat * feat),
            nn.BatchNorm1d(16 * feat * feat),
            nn.ReLU(),
        )
        self.deconv = nn.Sequential(
            nn.ConvTranspose2d(16, 8, 4, 2, 1),
            nn.BatchNorm2d(8),
            nn.ReLU(),
            nn.ConvTranspose2d(8, self.output_dim, 4, 2, 1),
            nn.Tanh(),
        )
        utils.initialize_weights(self)

    def forward(self, input, label):
        # Condition on the label by concatenating it with the noise vector.
        joint = torch.cat([input, label], 1)
        hidden = self.fc(joint)
        side = self.input_size // 4
        hidden = hidden.view(-1, 16, side, side)
        return self.deconv(hidden)
class discriminator(nn.Module):
    """Conditional DCGAN discriminator (infoGAN-style trunk, https://arxiv.org/abs/1606.03657).

    Architecture : (64)4c2s-(128)4c2s_BL-FC1024_BL-FC1_S
    """

    def __init__(self, input_dim=1, output_dim=1, input_size=32, class_num=10):
        super(discriminator, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.input_size = input_size
        self.class_num = class_num

        # Two stride-2 convs shrink each spatial dimension by 4x overall.
        self.conv = nn.Sequential(
            nn.Conv2d(self.input_dim + self.class_num, 64, 4, 2, 1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(64, 128, 4, 2, 1),
            nn.BatchNorm2d(128),
            nn.LeakyReLU(0.2),
        )
        flat_dim = 128 * (self.input_size // 4) * (self.input_size // 4)
        self.fc = nn.Sequential(
            nn.Linear(flat_dim, 1024),
            nn.BatchNorm1d(1024),
            nn.LeakyReLU(0.2),
            nn.Linear(1024, self.output_dim),
            nn.Sigmoid(),
        )
        utils.initialize_weights(self)

    def forward(self, input, label):
        """Score (image, spatially-tiled label) pairs; output lies in (0, 1)."""
        features = self.conv(torch.cat([input, label], 1))
        flat = features.view(-1, 128 * (self.input_size // 4) * (self.input_size // 4))
        return self.fc(flat)
def update_average(model_tgt, model_src, beta=0.999):
    """Exponential moving average of parameters, in place on ``model_tgt``.

    For every parameter: tgt = beta * tgt + (1 - beta) * src. Parameters are
    matched by name; the two models must not share parameter objects.
    """
    with torch.no_grad():
        src_params = dict(model_src.named_parameters())
        for name, p_tgt in model_tgt.named_parameters():
            p_src = src_params[name]
            assert p_src is not p_tgt
            # In-place scaled blend; same arithmetic as beta*tgt + (1-beta)*src.
            p_tgt.mul_(beta).add_(p_src, alpha=1.0 - beta)
class CGAN_DFGAN(object):
    """Conditional GAN trained jointly with an auxiliary "noise" generator.

    The main generator ``G`` produces class-conditional images. ``G_noise``
    produces an additive perturbation (see ``make_fake``), and the
    discriminator ``D`` is fed paired batches [sample+noise, sample] so it
    scores both perturbed and clean samples. A copy of ``G`` (``gen_test``)
    is blended with ``update_average`` during training.
    """

    def __init__(self, args):
        # parameters
        self.epoch = args.epoch
        self.batch_size = args.batch_size
        self.save_dir = args.save_dir
        self.result_dir = args.result_dir
        self.dataset = args.dataset
        self.log_dir = args.log_dir
        self.gpu_mode = args.gpu_mode
        self.model_name = args.gan_type
        self.input_size = args.input_size
        self.z_dim = 62          # latent (noise) vector dimensionality
        self.class_num = 10      # number of condition classes
        self.sample_num = self.class_num ** 2  # fixed preview grid: class_num x class_num

        # load dataset
        self.data_loader = dataloader(self.dataset, self.input_size, self.batch_size)
        # Peek at a single batch only to discover the number of image channels.
        data = self.data_loader.__iter__().__next__()[0]

        # networks init
        self.G = generator(input_dim=self.z_dim, output_dim=data.shape[1], input_size=self.input_size, class_num=self.class_num)
        self.G_noise = generator_noise(input_dim=self.z_dim, output_dim=data.shape[1], input_size=self.input_size, class_num=self.class_num)
        self.D = discriminator(input_dim=data.shape[1], output_dim=1, input_size=self.input_size, class_num=self.class_num)
        self.G_optimizer = optim.Adam(self.G.parameters(), lr=args.lrG, betas=(args.beta1, args.beta2))
        self.D_optimizer = optim.Adam(self.D.parameters(), lr=args.lrD, betas=(args.beta1, args.beta2))
        # NOTE: the noise generator reuses the discriminator learning rate (lrD).
        self.G_n_optimizer = optim.Adam(self.G_noise.parameters(), lr=args.lrD, betas=(args.beta1, args.beta2))

        if self.gpu_mode:
            self.G.cuda()
            self.G_noise.cuda()
            self.D.cuda()
            self.BCE_loss = nn.BCELoss().cuda()
            self.MSE_loss = nn.MSELoss().cuda()
            #self.BCE_sigmoid_Loss = nn.BCEWithLogitsLoss().cuda()
        else:
            # NOTE(review): self.MSE_loss is only defined in the gpu_mode branch,
            # yet train() uses it (and calls .cuda()) unconditionally — CPU mode
            # appears unsupported there; confirm before running without a GPU.
            self.BCE_loss = nn.BCELoss()

        print('---------- Networks architecture -------------')
        utils.print_network(self.G)
        utils.print_network(self.D)
        print('-----------------------------------------------')

        # fixed noise & condition:
        # sample i*class_num + j uses the i-th z vector (shared along the row)
        # with class label j, so each row varies the class under a fixed z.
        self.sample_z_ = torch.zeros((self.sample_num, self.z_dim))
        for i in range(self.class_num):
            self.sample_z_[i*self.class_num] = torch.rand(1, self.z_dim)
            for j in range(1, self.class_num):
                self.sample_z_[i*self.class_num + j] = self.sample_z_[i*self.class_num]

        # Column of class indices 0..class_num-1, tiled over every row block,
        # then one-hot encoded via scatter_.
        temp = torch.zeros((self.class_num, 1))
        for i in range(self.class_num):
            temp[i, 0] = i

        temp_y = torch.zeros((self.sample_num, 1))
        for i in range(self.class_num):
            temp_y[i*self.class_num: (i+1)*self.class_num] = temp

        self.sample_y_ = torch.zeros((self.sample_num, self.class_num)).scatter_(1, temp_y.type(torch.LongTensor), 1)
        if self.gpu_mode:
            self.sample_z_, self.sample_y_ = self.sample_z_.cuda(), self.sample_y_.cuda()

    def make_fake(self, img, noise):
        """Return the image batch additively perturbed by the generated noise."""
        f_img = img+noise
        return f_img

    def train(self):
        """Run the full adversarial training loop over ``self.epoch`` epochs."""
        self.train_hist = {}
        self.train_hist['D_loss'] = []
        self.train_hist['G_loss'] = []
        self.train_hist['per_epoch_time'] = []
        self.train_hist['total_time'] = []

        # Fixed real/fake targets for the BCE losses.
        self.y_real_, self.y_fake_ = torch.ones(self.batch_size, 1), torch.zeros(self.batch_size, 1)
        if self.gpu_mode:
            self.y_real_, self.y_fake_ = self.y_real_.cuda(), self.y_fake_.cuda()

        self.D.train()
        print('training start!!')
        start_time = time.time()
        for epoch in range(self.epoch):
            self.G.train()
            epoch_start_time = time.time()
            for iter, (x_, y_) in enumerate(self.data_loader):
                # Skip the last, possibly smaller, batch so tensor shapes stay fixed.
                if iter == self.data_loader.dataset.__len__() // self.batch_size:
                    break
                # make Noise
                z_ = torch.rand((self.batch_size, self.z_dim))
                # One-hot labels for the generators; spatially-broadcast labels for D.
                y_vec_ = torch.zeros((self.batch_size, self.class_num)).scatter_(1, y_.type(torch.LongTensor).unsqueeze(1), 1)
                y_fill_ = y_vec_.unsqueeze(2).unsqueeze(3).expand(self.batch_size, self.class_num, self.input_size, self.input_size)
                if self.gpu_mode:
                    x_, z_, y_vec_, y_fill_ = x_.cuda(), z_.cuda(), y_vec_.cuda(), y_fill_.cuda()

                # update D network
                self.D_optimizer.zero_grad()
                # Generate Fake and Noise
                G_ = self.G(z_, y_vec_)
                noise_G_ = self.G_noise(z_, y_vec_)
                # batch is [ real+noise, real]; D scores both halves in one pass.
                disc_input_real = torch.cat([self.make_fake(x_, noise_G_), x_], 0)
                disc_input_real_y = torch.cat([y_fill_]*2, 0)
                preds_disc_real = self.D(disc_input_real, disc_input_real_y)
                D_real_n, D_real = preds_disc_real.chunk(2, 0)
                # batch is [ fake+noise, fake]
                disc_input_fake = torch.cat([self.make_fake(G_, noise_G_), G_], 0)
                disc_input_fake_y = torch.cat([y_fill_]*2, 0)
                preds_disc_fake = self.D(disc_input_fake, disc_input_fake_y)
                D_fake_n, D_fake = preds_disc_fake.chunk(2, 0)
                # D should call real samples (noisy or clean) real, and generated
                # samples (noisy or clean) fake.
                D_real_loss = (self.BCE_loss(D_real, self.y_real_) + self.BCE_loss(D_real_n, self.y_real_)) / 2.0
                D_fake_loss = (self.BCE_loss(D_fake, self.y_fake_) + self.BCE_loss(D_fake_n, self.y_fake_)) / 2.0
                D_loss = D_real_loss + D_fake_loss
                # Compute n_loss: G_noise is trained *against* D's confidence on
                # the noisy copies (negated BCE terms).
                loss_real = -0.5 * self.BCE_loss(D_real_n, torch.ones_like(D_real_n).cuda())
                loss_fake = -0.5 * self.BCE_loss(D_fake_n, torch.zeros_like(D_fake_n).cuda())
                # NOTE(review): this term is computed on the *input* z_, which no
                # trainable parameter influences, so it adds a constant to loss_n
                # and contributes no gradient; possibly noise_G_ was intended.
                loss_eps = self.MSE_loss(z_, torch.zeros_like(z_).cuda())
                loss_n = loss_real + loss_fake + loss_eps
                self.train_hist['D_loss'].append(D_loss.item())
                # retain_graph=True: loss_n.backward() reuses the same forward graph.
                # NOTE(review): loss_n.backward() also accumulates gradients into
                # D's parameters before D_optimizer.step(); verify intentional.
                D_loss.backward(retain_graph=True)
                loss_n.backward()
                self.D_optimizer.step()
                self.G_n_optimizer.step()

                # update G network
                # NOTE(review): gen_test is re-created from the pre-step G every
                # iteration and then EMA-blended with the same (pre-step) G, so
                # the running average never accumulates across iterations.
                self.gen_test = copy.deepcopy(self.G)
                self.G_optimizer.zero_grad()
                G_ = self.G(z_, y_vec_)
                G_noise = self.G_noise(z_, y_vec_)
                D_input_fake = torch.cat([self.make_fake(G_, G_noise), G_], 0)
                D_input_fake_y = torch.cat([y_fill_]*2, 0)
                preds_d_fake = self.D(D_input_fake, D_input_fake_y)
                D_fake_n, D_fake = preds_d_fake.chunk(2, 0)
                # G tries to make both its clean and its noisy outputs look real.
                G_loss = (self.BCE_loss(D_fake, torch.ones_like(D_fake)) + self.BCE_loss(D_fake_n, torch.ones_like(D_fake_n))) / 2.0
                self.train_hist['G_loss'].append(G_loss.item())
                G_loss.backward()
                update_average(self.gen_test, self.G)
                self.G_optimizer.step()

                if ((iter + 1) % 50) == 0:
                    print("Epoch: [%2d] [%4d/%4d] D_loss: %.8f, G_loss: %.8f, n_loss: %.8f" %
                          ((epoch + 1), (iter + 1), self.data_loader.dataset.__len__() // self.batch_size, D_loss.item(), G_loss.item(), loss_n.item()))

            self.train_hist['per_epoch_time'].append(time.time() - epoch_start_time)
            with torch.no_grad():
                self.visualize_results((epoch+1))

        self.train_hist['total_time'].append(time.time() - start_time)
        print("Avg one epoch time: %.2f, total %d epochs time: %.2f" % (np.mean(self.train_hist['per_epoch_time']),
              self.epoch, self.train_hist['total_time'][0]))
        print("Training finish!... save training results")

        self.save()
        utils.generate_animation(self.result_dir + '/' + self.dataset + '/' + self.model_name + '/' + self.model_name,
                                 self.epoch)
        utils.loss_plot(self.train_hist, os.path.join(self.save_dir, self.dataset, self.model_name), self.model_name)

    def visualize_results(self, epoch, fix=True):
        """Save a class_num x class_num grid of generated samples for this epoch.

        With ``fix=True`` the fixed (z, label) grid built in __init__ is used,
        so images are comparable across epochs; otherwise fresh random samples
        are drawn.
        """
        self.G.eval()

        if not os.path.exists(self.result_dir + '/' + self.dataset + '/' + self.model_name):
            os.makedirs(self.result_dir + '/' + self.dataset + '/' + self.model_name)

        image_frame_dim = int(np.floor(np.sqrt(self.sample_num)))

        if fix:
            """ fixed noise """
            samples = self.G(self.sample_z_, self.sample_y_)
        else:
            """ random noise """
            # NOTE(review): torch.randint's upper bound is exclusive, so
            # randint(0, class_num - 1) never draws the last class; confirm intent.
            sample_y_ = torch.zeros(self.batch_size, self.class_num).scatter_(1, torch.randint(0, self.class_num - 1, (self.batch_size, 1)).type(torch.LongTensor), 1)
            sample_z_ = torch.rand((self.batch_size, self.z_dim))
            if self.gpu_mode:
                sample_z_, sample_y_ = sample_z_.cuda(), sample_y_.cuda()

            samples = self.G(sample_z_, sample_y_)

        if self.gpu_mode:
            samples = samples.cpu().data.numpy().transpose(0, 2, 3, 1)
        else:
            samples = samples.data.numpy().transpose(0, 2, 3, 1)

        # Map from the Tanh range [-1, 1] to [0, 1] for image saving.
        samples = (samples + 1) / 2
        utils.save_images(samples[:image_frame_dim * image_frame_dim, :, :, :], [image_frame_dim, image_frame_dim],
                          self.result_dir + '/' + self.dataset + '/' + self.model_name + '/' + self.model_name + '_epoch%03d' % epoch + '.png')

    def save(self):
        """Persist G and D weights plus the training history under save_dir."""
        save_dir = os.path.join(self.save_dir, self.dataset, self.model_name)

        if not os.path.exists(save_dir):
            os.makedirs(save_dir)

        torch.save(self.G.state_dict(), os.path.join(save_dir, self.model_name + '_G.pkl'))
        torch.save(self.D.state_dict(), os.path.join(save_dir, self.model_name + '_D.pkl'))

        with open(os.path.join(save_dir, self.model_name + '_history.pkl'), 'wb') as f:
            pickle.dump(self.train_hist, f)

    def load(self):
        """Restore G and D weights previously written by save()."""
        save_dir = os.path.join(self.save_dir, self.dataset, self.model_name)

        self.G.load_state_dict(torch.load(os.path.join(save_dir, self.model_name + '_G.pkl')))
        self.D.load_state_dict(torch.load(os.path.join(save_dir, self.model_name + '_D.pkl')))
|
from django.contrib import admin
# Register your models here.
from .models import bitly
admin.site.register(bitly)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# @Date : 2020/6/29
# @Author : mingming.xu
# @Email : xv44586@gmail.com
from setuptools import setup, find_packages
# Package metadata for PyPI; `description` is the user-facing one-line summary.
setup(
    name='toolkit4nlp',
    version='0.6.0',
    # Fixed grammar in the published summary: "an toolkit" -> "a toolkit".
    description='a toolkit for nlp research',
    long_description='toolkit4nlp: https://github.com/xv44586/toolkit4nlp',
    license='Apache License 2.0',
    url='https://github.com/xv44586/toolkit4nlp',
    author='xv44586',
    author_email='xv44586@gmail.com',
    install_requires=['keras<=2.3.1'],
    packages=find_packages()
)
|
from django.contrib import admin
from ticker_app import models
class ExchangeTickerAdmin(admin.ModelAdmin):
    """Changelist admin that shows every ExchangeTicker field as a column."""

    # Build the column list from the model so new fields appear automatically.
    list_display = [field.name for field in models.ExchangeTicker._meta.fields]
    # FIX: removed the dead inner ``class Meta`` — ModelAdmin does not read an
    # inner Meta (that is a Model/ModelForm convention); the admin is bound to
    # its model by the register() call below.


admin.site.register(models.ExchangeTicker, ExchangeTickerAdmin)
|
from os.path import join
import logging
########################################################################################################################
# Connection/Auth
########################################################################################################################

# API URL.
BASE_URL = "https://testnet.bitmex.com/api/v1/"
# BASE_URL = "https://www.bitmex.com/api/v1/" # Once you're ready, uncomment this.

# The BitMEX API requires permanent API keys. Go to https://testnet.bitmex.com/api/apiKeys to fill these out.
# SECURITY: never commit real API keys/secrets to source control — the values
# below are exposed to anyone with repo access. Load them from environment
# variables or an untracked local settings file, and revoke any key that has
# already been committed.
API_KEY = "PgmeYwvj0Yxg0DLBuxKeIAmK"
API_SECRET = "2lb2WZHiydCDXoOrW4I3dd4vk9SZr6G5lJMdhKc6Z2HbiWwx"

########################################################################################################################
# Target
########################################################################################################################

# Instrument to market make on BitMEX.
SYMBOL = "XBTU18"

########################################################################################################################
# Order Size & Spread
########################################################################################################################

# How many pairs of buy/sell orders to keep open
ORDER_PAIRS = 6

# ORDER_START_SIZE will be the number of contracts submitted on level 1
# Number of contracts from level 1 to ORDER_PAIRS - 1 will follow the function
# [ORDER_START_SIZE + ORDER_STEP_SIZE * (Level - 1)]
ORDER_START_SIZE = 100
ORDER_STEP_SIZE = 100

# Distance between successive orders, as a percentage (example: 0.005 for 0.5%)
INTERVAL = 0.005

# Minimum spread to maintain, in percent, between asks & bids
MIN_SPREAD = 0.01

# If True, market-maker will place orders just inside the existing spread and work the interval % outwards,
# rather than starting in the middle and killing potentially profitable spreads.
MAINTAIN_SPREADS = True

# This number defines how far the price of an existing order can be from a desired order before it is amended.
# This is useful for avoiding unnecessary calls and maintaining your ratelimits.
#
# Further information:
# Each order is designed to be (INTERVAL*n)% away from the spread.
# If the spread changes and the order has moved outside its bound defined as
# abs((desired_order['price'] / order['price']) - 1) > settings.RELIST_INTERVAL)
# it will be resubmitted.
#
# 0.01 == 1%
RELIST_INTERVAL = 0.01

########################################################################################################################
# Trading Behavior
########################################################################################################################

# Position limits - set to True to activate. Values are in contracts.
# If you exceed a position limit, the bot will log and stop quoting that side.
CHECK_POSITION_LIMITS = False
MIN_POSITION = -10000
MAX_POSITION = 10000

# If True, will only send orders that rest in the book (ExecInst: ParticipateDoNotInitiate).
# Use to guarantee a maker rebate.
# However -- orders that would have matched immediately will instead cancel, and you may end up with
# unexpected delta. Be careful.
POST_ONLY = False

########################################################################################################################
# Misc Behavior, Technicals
########################################################################################################################

# If true, don't set up any orders, just say what we would do
# DRY_RUN = True
DRY_RUN = False

# How often to re-check and replace orders.
# Generally, it's safe to make this short because we're fetching from websockets. But if too many
# order amend/replaces are done, you may hit a ratelimit. If so, email BitMEX if you feel you need a higher limit.
LOOP_INTERVAL = 5

# Wait times between orders / errors
API_REST_INTERVAL = 1
API_ERROR_INTERVAL = 10

# Timeout, in seconds, for HTTP requests to the API.
TIMEOUT = 7

# If we're doing a dry run, use these numbers for BTC balances
DRY_BTC = 50

# Available levels: logging.(DEBUG|INFO|WARN|ERROR)
LOG_LEVEL = logging.INFO

# To uniquely identify orders placed by this bot, the bot sends a ClOrdID (Client order ID) that is attached
# to each order so its source can be identified. This keeps the market maker from cancelling orders that are
# manually placed, or orders placed by another bot.
#
# If you are running multiple bots on the same symbol, give them unique ORDERID_PREFIXes - otherwise they will
# cancel each others' orders.
# Max length is 13 characters.
ORDERID_PREFIX = "mm_bitmex_"

# If any of these files (and this file) changes, reload the bot.
WATCHED_FILES = [join('market_maker', 'market_maker.py'), join('market_maker', 'bitmex.py'), 'settings.py']

########################################################################################################################
# BitMEX Portfolio
########################################################################################################################

# Specify the contracts that you hold. These will be used in portfolio calculations.
CONTRACTS = ['XBTUSD']
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2019, 1, 28)
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
    """Return true if the forward compatibility window has expired.

    See [Version
    compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

    Forward-compatibility refers to scenarios where the producer of a TensorFlow
    model (a GraphDef or SavedModel) is compiled against a version of the
    TensorFlow library newer than what the consumer was compiled against. The
    "producer" is typically a Python program that constructs and trains a model
    while the "consumer" is typically another program that loads and serves the
    model.

    TensorFlow has been supporting a 3 week forward-compatibility window for
    programs compiled from source at HEAD.

    For example, consider the case where a new operation `MyNewAwesomeAdd` is
    created with the intent of replacing the implementation of an existing Python
    wrapper - `tf.add`. The Python wrapper implementation should change from
    something like:

    ```python
    def add(inputs, name=None):
      return gen_math_ops.add(inputs, name)
    ```

    to:

    ```python
    from tensorflow.python.compat import compat

    def add(inputs, name=None):
      if compat.forward_compatible(year, month, day):
        # Can use the awesome new implementation.
        return gen_math_ops.my_new_awesome_add(inputs, name)
      # To maintain forward compatibility, use the old implementation.
      return gen_math_ops.add(inputs, name)
    ```

    Where `year`, `month`, and `day` specify the date beyond which binaries
    that consume a model are expected to have been updated to include the
    new operations. This date is typically at least 3 weeks beyond the date
    the code that adds the new operation is committed.

    Args:
      year: A year (e.g., 2018).
      month: A month (1 <= month <= 12) in year.
      day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.

    Returns:
      True if the caller can expect that serialized TensorFlow graphs produced
      can be consumed by programs that are compiled with the TensorFlow library
      source code after (year, month, day).
    """
    # Compatible while the requested date is strictly before the horizon.
    return _FORWARD_COMPATIBILITY_HORIZON > datetime.date(year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
    """Context manager for testing forward compatibility of generated graphs.

    See [Version
    compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

    To ensure forward compatibility of generated graphs (see `forward_compatible`)
    with older binaries, new features can be gated with:

    ```python
    if compat.forward_compatible(year=2018, month=8, day=1):
      generate_graph_with_new_features()
    else:
      generate_graph_so_older_binaries_can_consume_it()
    ```

    However, when adding new features, one may want to unittest it before
    the forward compatibility window expires. This context manager enables
    such tests. For example:

    ```python
    from tensorflow.python.compat import compat

    def testMyNewFeature(self):
      with compat.forward_compatibility_horizon(2018, 8, 2):
        # Test that generate_graph_with_new_features() has an effect
    ```

    Args:
      year: A year (e.g. 2018).
      month: A month (1 <= month <= 12) in year.
      day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.

    Yields:
      Nothing.
    """
    global _FORWARD_COMPATIBILITY_HORIZON
    try:
        # Temporarily move the horizon; restore it even if the body raises.
        old_compat_date = _FORWARD_COMPATIBILITY_HORIZON
        _FORWARD_COMPATIBILITY_HORIZON = datetime.date(year, month, day)
        yield
    finally:
        _FORWARD_COMPATIBILITY_HORIZON = old_compat_date
|
"""
Script that computes the sum of the first N natural numbers using recursion.
(Exercise statement translated from Portuguese.)
"""


def soma(n):
    """Return 1 + 2 + ... + n, computed recursively.

    Returns 0 for n <= 0. The original base case only handled n == 1, so
    soma(0) (or any non-positive n) recursed until RecursionError.
    """
    if n <= 0:
        return 0
    return n + soma(n - 1)


print(soma(3))
|
import numbers
import torch
import torch.nn as nn
class DCGAN_D(nn.Module):
    """DCGAN discriminator: a strided-conv tower mapping an image to one scalar per sample.

    Args:
        isize: input spatial size, an int or an (H, W) tuple; must be repeatedly
            divisible by 2 down to <= 4.
        nz: unused here (kept for signature symmetry with DCGAN_G).
        nc: number of input channels.
        ndf: base number of feature maps, doubled at each downsampling step.
        n_extra_layers: extra stride-1 conv blocks after the initial conv.
        use_batch_norm: whether to insert BatchNorm2d layers.
    """

    def __init__(self, isize, nz, nc, ndf, n_extra_layers=0, use_batch_norm=True):
        super(DCGAN_D, self).__init__()
        if isinstance(isize, numbers.Number):
            isize = (int(isize), int(isize))
        assert len(isize) == 2, "Size has to be a tuple of length 2 or a single integer"
        assert isize[0] % 2 == 0 and isize[1] % 2 == 0, "Bad image size: has to be divisible by 2 enough"
        csize = (isize[0] // 2, isize[1] // 2)
        cndf = ndf
        main = nn.Sequential()
        # FIX: sub-module names previously contained '.', which
        # nn.Module.add_module rejects on modern PyTorch
        # (KeyError: module name can't contain "."); ':' is used instead.
        # input is nc x isize x isize
        main.add_module('initial:{0}-{1}:conv'.format(nc, ndf),
                        nn.Conv2d(nc, ndf, 4, 2, 1, bias=False))
        main.add_module('initial:{0}:LeakyReLU'.format(ndf),
                        nn.LeakyReLU(0.2, inplace=True))
        # Extra layers (stride 1, channel count unchanged)
        for t in range(n_extra_layers):
            main.add_module('extra-layers-{0}:{1}:conv'.format(t, cndf),
                            nn.Conv2d(cndf, cndf, 3, 1, 1, bias=False))
            if use_batch_norm:
                main.add_module('extra-layers-{0}:{1}:batchnorm'.format(t, cndf),
                                nn.BatchNorm2d(cndf))
            main.add_module('extra-layers-{0}:{1}:LeakyReLU'.format(t, cndf),
                            nn.LeakyReLU(0.2, inplace=True))
        # Tower: halve the spatial size and double the channels until <= 4.
        while csize[0] > 4 and csize[1] > 4:
            in_feat = cndf
            out_feat = cndf * 2
            main.add_module('pyramid:{0}-{1}:conv'.format(in_feat, out_feat),
                            nn.Conv2d(in_feat, out_feat, 4, 2, 1, bias=False))
            if use_batch_norm:
                main.add_module('pyramid:{0}:batchnorm'.format(out_feat),
                                nn.BatchNorm2d(out_feat))
            main.add_module('pyramid:{0}:LeakyReLU'.format(out_feat),
                            nn.LeakyReLU(0.2, inplace=True))
            # FIX: check the *current* size before halving — the original
            # re-checked isize, which was already asserted above and could
            # never fail here.
            assert csize[0] % 2 == 0 and csize[1] % 2 == 0, "Bad image size: has to be divisible by 2 enough"
            csize = (csize[0] // 2, csize[1] // 2)
            cndf = cndf * 2
        # Final layer: a conv whose kernel covers the remaining map -> 1x1 output.
        main.add_module('final:{0}-{1}:conv'.format(cndf, 1),
                        nn.Conv2d(cndf, 1, csize, 1, 0, bias=False))
        self.main = main

    def forward(self, input):
        """Return a flat tensor with one raw (unbounded) score per input sample."""
        output = self.main(input)
        return output.view(-1)
class DCGAN_G(nn.Module):
    """DCGAN generator: a transposed-conv tower mapping z (nz x 1 x 1) to an image.

    Args:
        isize: target spatial size, an int or an (H, W) tuple; must be
            repeatedly divisible by 2 down to <= 4.
        nz: latent dimensionality.
        nc: number of output channels.
        ngf: base number of feature maps.
        n_extra_layers: extra stride-1 conv blocks before the final deconv.
        red_portion: if not None (float in [0, 1]), use the separable
            red/green-split conv variants with this portion.
    """

    def __init__(self, isize, nz, nc, ngf, n_extra_layers=0, red_portion=None):
        super(DCGAN_G, self).__init__()
        if isinstance(isize, numbers.Number):
            isize = (int(isize), int(isize))
        assert len(isize) == 2, "Size has to be a tuple of length 2 or a single integer"
        tisize = (isize[0], isize[1])
        cngf = ngf // 2
        # Derive the channel count at the smallest (<= 4) resolution.
        while tisize[0] > 4 and tisize[1] > 4:
            assert tisize[0] % 2 == 0 and tisize[1] % 2 == 0, "Bad image size: has to be divisible by 2 enough"
            tisize = (tisize[0] // 2, tisize[1] // 2)
            cngf = cngf * 2
        is_separable = red_portion is not None
        if is_separable:
            assert isinstance(red_portion, numbers.Number) and 0.0 <= red_portion <= 1.0
            convt_name = 'convt-sep'
            convt = lambda n_ch_in, n_ch_out, size, stride, pad, bias: CnvTranspose2d_separable(n_ch_in, n_ch_out, size, stride, pad, bias=bias, red_portion=red_portion)
            conv_name = 'conv-sep'
            # FIX: red_portion was not forwarded here (it silently fell back to
            # Cnv2d_separable's 0.5 default), unlike the convt factory above.
            conv = lambda n_ch_in, n_ch_out, size, stride, pad, bias: Cnv2d_separable(n_ch_in, n_ch_out, size, stride, pad, bias=bias, red_portion=red_portion)
        else:
            convt_name = 'convt'
            convt = lambda n_ch_in, n_ch_out, size, stride, pad, bias: nn.ConvTranspose2d(n_ch_in, n_ch_out, size, stride, pad, bias=bias)
            conv_name = 'conv'
            conv = lambda n_ch_in, n_ch_out, size, stride, pad, bias: nn.Conv2d(n_ch_in, n_ch_out, size, stride, pad, bias=bias)
        main = nn.Sequential()
        # FIX: sub-module names previously contained '.', which
        # nn.Module.add_module rejects on modern PyTorch
        # (KeyError: module name can't contain "."); ':' is used instead.
        # input is Z, going into a convolution
        main.add_module('initial:{0}-{1}:{2}'.format(nz, cngf, convt_name),
                        convt(nz, cngf, tisize, 1, 0, bias=False))
        main.add_module('initial:{0}:batchnorm'.format(cngf),
                        nn.BatchNorm2d(cngf))
        main.add_module('initial:{0}:ReLU'.format(cngf),
                        nn.ReLU(True))
        # Tower: double the spatial size and halve the channels until isize/2.
        csize = tisize[0]
        while csize < isize[0] // 2:
            main.add_module('pyramid:{0}-{1}:{2}'.format(cngf, cngf // 2, convt_name),
                            convt(cngf, cngf // 2, 4, 2, 1, bias=False))
            main.add_module('pyramid:{0}:batchnorm'.format(cngf // 2),
                            nn.BatchNorm2d(cngf // 2))
            main.add_module('pyramid:{0}:ReLU'.format(cngf // 2),
                            nn.ReLU(True))
            cngf = cngf // 2
            csize = csize * 2
        # Extra layers (stride 1, channel count unchanged)
        for t in range(n_extra_layers):
            main.add_module('extra-layers-{0}:{1}:{2}'.format(t, cngf, conv_name),
                            conv(cngf, cngf, 3, 1, 1, bias=False))
            main.add_module('extra-layers-{0}:{1}:batchnorm'.format(t, cngf),
                            nn.BatchNorm2d(cngf))
            main.add_module('extra-layers-{0}:{1}:ReLU'.format(t, cngf),
                            nn.ReLU(True))
        # Final upsampling deconv to nc channels, squashed to [-1, 1].
        main.add_module('final:{0}-{1}:{2}'.format(cngf, nc, convt_name),
                        convt(cngf, nc, 4, 2, 1, bias=False))
        main.add_module('final:{0}:tanh'.format(nc),
                        nn.Tanh())
        self.main = main

    def forward(self, input):
        output = self.main(input)
        return output
########## Layers for separable DCGAN generator ###########################################
class CnvTranspose2d_separable(nn.Module):
    """Transposed conv split into a "red" branch (fed only the first
    red_portion of the input channels) and a "green" branch (fed all input
    channels); their outputs are concatenated along the channel axis."""

    def __init__(self, n_input_ch, n_output_ch, kernel_size, stride, padding, bias=False, red_portion=0.5):
        super(CnvTranspose2d_separable, self).__init__()
        self.n_input_ch = n_input_ch
        self.n_input_ch_red = int(n_input_ch * red_portion)
        self.n_output_ch = n_output_ch
        self.n_output_ch_red = int(n_output_ch * red_portion)
        self.n_output_ch_green = n_output_ch - self.n_output_ch_red
        # Red branch: restricted input channels -> red output channels.
        self.convt_half = nn.ConvTranspose2d(self.n_input_ch_red, self.n_output_ch_red,
                                             kernel_size, stride, padding, bias=bias)
        # Green branch: full input -> the remaining output channels.
        self.convt_all = nn.ConvTranspose2d(self.n_input_ch, self.n_output_ch_green,
                                            kernel_size, stride, padding, bias=bias)

    def forward(self, input):
        """Run both branches and concatenate red then green outputs."""
        red_out = self.convt_half(input[:, :self.n_input_ch_red, :, :])
        green_out = self.convt_all(input)
        return torch.cat((red_out, green_out), 1)
class Cnv2d_separable(nn.Module):
    """Conv2d split into a "red" branch (fed only the first red_portion of the
    input channels) and a "green" branch (fed all input channels); their
    outputs are concatenated along the channel axis."""

    def __init__(self, n_input_ch, n_output_ch, kernel_size, stride, padding, bias=False, red_portion=0.5):
        super(Cnv2d_separable, self).__init__()
        self.n_input_ch = n_input_ch
        self.n_input_ch_red = int(n_input_ch * red_portion)
        self.n_output_ch = n_output_ch
        self.n_output_ch_red = int(n_output_ch * red_portion)
        self.n_output_ch_green = n_output_ch - self.n_output_ch_red
        # Red branch: restricted input channels -> red output channels.
        self.conv_half = nn.Conv2d(self.n_input_ch_red, self.n_output_ch_red,
                                   kernel_size, stride, padding, bias=bias)
        # Green branch: full input -> the remaining output channels.
        self.conv_all = nn.Conv2d(self.n_input_ch, self.n_output_ch_green,
                                  kernel_size, stride, padding, bias=bias)

    def forward(self, input):
        """Run both branches and concatenate red then green outputs."""
        red_out = self.conv_half(input[:, :self.n_input_ch_red, :, :])
        green_out = self.conv_all(input)
        return torch.cat((red_out, green_out), 1)
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetAvailabilitySetResult',
'AwaitableGetAvailabilitySetResult',
'get_availability_set',
]
# NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
@pulumi.output_type
class GetAvailabilitySetResult:
    """
    Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Manage the availability of virtual machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json). <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual machines in Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json) <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM cannot be added to an availability set.
    """
    def __init__(__self__, location=None, name=None, platform_fault_domain_count=None, platform_update_domain_count=None, sku=None, statuses=None, tags=None, type=None, virtual_machines=None):
        # Each argument is shape-checked against its expected primitive type and
        # stored via pulumi.set so the @pulumi.getter properties can read it.
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if platform_fault_domain_count and not isinstance(platform_fault_domain_count, int):
            raise TypeError("Expected argument 'platform_fault_domain_count' to be a int")
        pulumi.set(__self__, "platform_fault_domain_count", platform_fault_domain_count)
        if platform_update_domain_count and not isinstance(platform_update_domain_count, int):
            raise TypeError("Expected argument 'platform_update_domain_count' to be a int")
        pulumi.set(__self__, "platform_update_domain_count", platform_update_domain_count)
        if sku and not isinstance(sku, dict):
            raise TypeError("Expected argument 'sku' to be a dict")
        pulumi.set(__self__, "sku", sku)
        if statuses and not isinstance(statuses, list):
            raise TypeError("Expected argument 'statuses' to be a list")
        pulumi.set(__self__, "statuses", statuses)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if virtual_machines and not isinstance(virtual_machines, list):
            raise TypeError("Expected argument 'virtual_machines' to be a list")
        pulumi.set(__self__, "virtual_machines", virtual_machines)

    @property
    @pulumi.getter
    def location(self) -> str:
        """
        Resource location
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="platformFaultDomainCount")
    def platform_fault_domain_count(self) -> Optional[int]:
        """
        Fault Domain count.
        """
        return pulumi.get(self, "platform_fault_domain_count")

    @property
    @pulumi.getter(name="platformUpdateDomainCount")
    def platform_update_domain_count(self) -> Optional[int]:
        """
        Update Domain count.
        """
        return pulumi.get(self, "platform_update_domain_count")

    @property
    @pulumi.getter
    def sku(self) -> Optional['outputs.SkuResponse']:
        """
        Sku of the availability set
        """
        return pulumi.get(self, "sku")

    @property
    @pulumi.getter
    def statuses(self) -> Sequence['outputs.InstanceViewStatusResponse']:
        """
        The resource status information.
        """
        return pulumi.get(self, "statuses")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="virtualMachines")
    def virtual_machines(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
        """
        A list of references to all virtual machines in the availability set.
        """
        return pulumi.get(self, "virtual_machines")
class AwaitableGetAvailabilitySetResult(GetAvailabilitySetResult):
    # Makes the result usable with ``await``: the generator-based __await__
    # never actually suspends (the yield is unreachable) and immediately
    # returns a plain GetAvailabilitySetResult copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetAvailabilitySetResult(
            location=self.location,
            name=self.name,
            platform_fault_domain_count=self.platform_fault_domain_count,
            platform_update_domain_count=self.platform_update_domain_count,
            sku=self.sku,
            statuses=self.statuses,
            tags=self.tags,
            type=self.type,
            virtual_machines=self.virtual_machines)
def get_availability_set(availability_set_name: Optional[str] = None,
                         resource_group_name: Optional[str] = None,
                         opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAvailabilitySetResult:
    """
    Retrieve an existing availability set (azure-nextgen compute, API version 2017-12-01).

    :param str availability_set_name: The name of the availability set.
    :param str resource_group_name: The name of the resource group.
    :param pulumi.InvokeOptions opts: Invoke options; a fresh default is created
        (and its version filled in) when not supplied.
    """
    __args__ = dict()
    __args__['availabilitySetName'] = availability_set_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Pin the invoke to this SDK's version when the caller did not choose one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:compute/v20171201:getAvailabilitySet', __args__, opts=opts, typ=GetAvailabilitySetResult).value
    return AwaitableGetAvailabilitySetResult(
        location=__ret__.location,
        name=__ret__.name,
        platform_fault_domain_count=__ret__.platform_fault_domain_count,
        platform_update_domain_count=__ret__.platform_update_domain_count,
        sku=__ret__.sku,
        statuses=__ret__.statuses,
        tags=__ret__.tags,
        type=__ret__.type,
        virtual_machines=__ret__.virtual_machines)
|
import pytest
from optconstruct.types.argument import Argument
from optconstruct.types.toggle import Toggle
from optconstruct.types.dummy import Dummy
from optconstruct.types.prefixed import Prefixed
from optconstruct.optionabstract import OptionAbstract
def test_abstract_generate():
    """The base class's generate() is abstract and must raise NotImplementedError."""
    option = OptionAbstract('', '')
    with pytest.raises(NotImplementedError):
        option.generate({'': ''})
@pytest.mark.parametrize("data, expected", [
    ({'help': True}, True),
    ({}, False),
])
def test_abstract_satisfied(data, expected):
    """satisfied() is True exactly when the option's key appears in the data."""
    option = OptionAbstract('help', '--help')
    assert option.satisfied(data) is expected
@pytest.mark.parametrize("test_input, expected", [
    ("help", "--help"),
    ("link_durable", "--link-durable"),
])
def test_toggle(test_input, expected):
    """A satisfied Toggle renders as its bare flag string."""
    toggle = Toggle(test_input, expected)
    assert toggle.generate({test_input: True}) == expected
@pytest.mark.parametrize("test_input,expected", [
    ("dynamic", ""),
    ("help", ""),
])
def test_dummy(test_input, expected):
    """Dummy options always render to an empty string."""
    dummy = Dummy(test_input)
    assert dummy.generate({test_input: True}) == expected
def test_dummy_satisfied():
    """A Dummy option is never satisfied, regardless of input data."""
    assert Dummy("test").satisfied() is False
@pytest.mark.parametrize("test_input, prefix, expected", [
    ("dynamic", "--dynamic", "False"),
    ("help", "--help", "True"),
])
def test_argument(test_input, prefix, expected):
    """Argument.generate emits only the raw value, without its prefix."""
    argument = Argument(test_input, prefix)
    assert argument.generate({test_input: expected}) == expected
@pytest.mark.parametrize("test_input, prefix, expected", [
    ("duration", "--duration", "--duration 5"),
    ("count", "--count", "--count 5"),
    ("timeout", "--timeout", "--timeout 5"),
])
def test_prefixed(test_input, prefix, expected):
    """Prefixed options render as '<prefix> <value>'."""
    prefixed = Prefixed(test_input, prefix)
    assert prefixed.generate({test_input: 5}) == expected
|
#!/usr/bin/python3
# -*- coding:utf-8 -*-
# @Time : 2018/8/17 0:28
# @Author : Bill Steve
# @Email : billsteve@126.com
# @File : City.py
# @Software : PyCharm
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from .BaseModel import *
Base = declarative_base()
metadata = Base.metadata
class TestModel(Base, BaseModel):
    """ORM model mapped to the ``test`` table."""

    __tablename__ = 'test'

    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    age = Column(Integer)
    is_del = Column(Integer)
    cts = Column(Integer)

    def __init__(self, *arg, **kw):
        # Populate each mapped column from keyword arguments, defaulting
        # to None when a key is absent (same as kw.get(field, None)).
        for field in ('id', 'name', 'age', 'is_del', 'cts'):
            setattr(self, field, kw.get(field))
# Manual smoke test: generate the model's helper functions when run directly.
if __name__ == '__main__':
    createInitFunction(TestModel)
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# Copyright (c) 2009, Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines the concrete implementations of the traits Toolkit interface for
the Pyjamas web application user interface.
"""
# Register this package as a pkg_resources-style namespace package.
__import__('pkg_resources').declare_namespace(__name__)
#------------------------------------------------------------------------------
#  Define the reference to the exported GUIToolkit object:
#------------------------------------------------------------------------------
# NOTE(review): bare ``import toolkit`` is a Python 2 implicit relative
# import; under Python 3 it would resolve to a top-level module instead of
# the sibling ``toolkit`` module -- confirm the intended target before porting.
import toolkit
# Reference to the GUIToolkit object for Pyjamas; rebinding the module name
# to the instance is intentional (the module is no longer needed afterwards).
toolkit = toolkit.GUIToolkit()
# EOF -------------------------------------------------------------------------
|
# -*- coding: utf-8 -*-
# @Author : Yupeng Hou
# @Email : houyupeng@ruc.edu.cn
# @File : sampler.py
# UPDATE
# @Time : 2020/8/17, 2020/8/31, 2020/10/6, 2020/9/18
# @Author : Xingyu Pan, Kaiyuan Li, Yupeng Hou, Yushuo Chen
# @email : panxy@ruc.edu.cn, tsotfsk@outlook.com, houyupeng@ruc.edu.cn, chenyushuo@ruc.edu.cn
"""
recbole.sampler
########################
"""
import random
import copy
import numpy as np
class AbstractSampler(object):
    """:class:`AbstractSampler` is an abstract class; all samplers should inherit from it.

    It supports returning a certain number of random value_ids for an input key_id while
    prohibiting key-value pairs recorded in :attr:`used_ids`.  For efficiency, instead of
    drawing a fresh random number per sample, a pre-shuffled :attr:`random_list` is walked
    cyclically with the pointer :attr:`random_pr`; subclasses must implement
    :meth:`get_random_list` and :meth:`get_used_ids`.

    Args:
        distribution (str): The string of distribution, which is used for subclass.

    Attributes:
        random_list (list or numpy.ndarray): The shuffled result of :meth:`get_random_list`.
        used_ids (numpy.ndarray): The result of :meth:`get_used_ids`.
    """

    def __init__(self, distribution):
        self.distribution = distribution
        # Shuffle once up front; random() then just walks this list cyclically.
        self.random_list = self.get_random_list()
        random.shuffle(self.random_list)
        self.random_pr = 0
        self.random_list_length = len(self.random_list)
        self.used_ids = self.get_used_ids()

    def get_random_list(self):
        """
        Returns:
            np.ndarray or list: Random list of value_id.
        """
        raise NotImplementedError('method [get_random_list] should be implemented')

    def get_used_ids(self):
        """
        Returns:
            np.ndarray: Used ids. Index is key_id, and element is a set of value_ids.
        """
        raise NotImplementedError('method [get_used_ids] should be implemented')

    def random(self):
        """
        Returns:
            value_id (int): Random value_id. Generated by :attr:`random_list`.
        """
        # Cyclic walk over the pre-shuffled list; cheaper than a fresh RNG call.
        value_id = self.random_list[self.random_pr % self.random_list_length]
        self.random_pr += 1
        return value_id

    def sample_by_key_ids(self, key_ids, num, used_ids):
        """Sampling by key_ids.

        Args:
            key_ids (np.ndarray or list): Input key_ids.
            num (int): Number of sampled value_ids for each key_id.
            used_ids (np.ndarray): Used ids. Index is key_id, and element is a set of value_ids.

        Returns:
            np.ndarray: Sampled value_ids.
            value_ids[0], value_ids[len(key_ids)], value_ids[len(key_ids) * 2], ..., value_id[len(key_ids) * (num - 1)]
            is sampled for key_ids[0];
            value_ids[1], value_ids[len(key_ids) + 1], value_ids[len(key_ids) * 2 + 1], ...,
            value_id[len(key_ids) * (num - 1) + 1] is sampled for key_ids[1]; ...; and so on.
        """
        key_num = len(key_ids)
        total_num = key_num * num
        value_ids = np.zeros(total_num, dtype=np.int64)
        # One forbidden-set per output slot: the per-key sets repeated num times
        # (key-major layout documented above).
        used_id_list = np.tile(used_ids, num)
        # NOTE: the loop variable deliberately rebinds the ``used_ids``
        # parameter; each iteration checks against the per-slot forbidden set.
        for i, used_ids in enumerate(used_id_list):
            cur = self.random()
            # Resample until the candidate is not forbidden for this key.
            while cur in used_ids:
                cur = self.random()
            value_ids[i] = cur
        return value_ids
class Sampler(AbstractSampler):
    """:class:`Sampler` is used to sample negative items for each input user.

    To avoid positive items of the train phase being sampled in the valid phase
    (and train/valid positives being sampled in the test phase), the datasets of
    all phases are passed in for pre-processing.  Before using this sampler,
    call :meth:`set_phase` to get the sampler of the corresponding phase.

    Args:
        phases (str or list of str): All the phases of input.
        datasets (Dataset or list of Dataset): All the dataset for each phase.
        distribution (str, optional): Distribution of the negative items. Defaults to 'uniform'.

    Attributes:
        phase (str): the phase of sampler. It will not be set until :meth:`set_phase` is called.
    """

    def __init__(self, phases, datasets, distribution='uniform'):
        if not isinstance(phases, list):
            phases = [phases]
        if not isinstance(datasets, list):
            datasets = [datasets]
        if len(phases) != len(datasets):
            raise ValueError('phases {} and datasets {} should have the same length'.format(phases, datasets))
        self.phases = phases
        self.datasets = datasets

        # Field names and entity counts are assumed identical across all
        # phases' datasets, so the first one is taken as representative.
        self.uid_field = datasets[0].uid_field
        self.iid_field = datasets[0].iid_field
        self.n_users = datasets[0].user_num
        self.n_items = datasets[0].item_num

        super().__init__(distribution=distribution)

    def get_random_list(self):
        """
        Returns:
            np.ndarray or list: Random list of item_id.
        """
        if self.distribution == 'uniform':
            # Item id 0 is excluded (range starts at 1).
            return list(range(1, self.n_items))
        elif self.distribution == 'popularity':
            # Popularity-weighted: each item appears once per interaction.
            random_item_list = []
            for dataset in self.datasets:
                random_item_list.extend(dataset.inter_feat[self.iid_field].values)
            return random_item_list
        else:
            raise NotImplementedError('Distribution [{}] has not been implemented'.format(self.distribution))

    def get_used_ids(self):
        """
        Returns:
            dict: Used item_ids is the same as positive item_ids.
            Key is phase, and value is a np.ndarray which index is user_id, and element is a set of item_ids.
        """
        used_item_id = dict()
        last = [set() for _ in range(self.n_users)]
        for phase, dataset in zip(self.phases, self.datasets):
            # Each phase forbids its own positives plus all earlier phases'
            # (sets are copied so phases don't share mutable state).
            cur = np.array([set(s) for s in last])
            for uid, iid in dataset.inter_feat[[self.uid_field, self.iid_field]].values:
                cur[uid].add(iid)
            last = used_item_id[phase] = cur
        return used_item_id

    def set_phase(self, phase):
        """Get the sampler of corresponding phase.

        Args:
            phase (str): The phase of new sampler.

        Returns:
            Sampler: the copy of this sampler, :attr:`phase` is set the same as input phase, and :attr:`used_ids`
            is set to the value of corresponding phase.
        """
        if phase not in self.phases:
            raise ValueError('phase [{}] not exist'.format(phase))
        new_sampler = copy.copy(self)
        new_sampler.phase = phase
        new_sampler.used_ids = new_sampler.used_ids[phase]
        return new_sampler

    def sample_by_user_ids(self, user_ids, num):
        """Sampling by user_ids.

        Args:
            user_ids (np.ndarray or list): Input user_ids.
            num (int): Number of sampled item_ids for each user_id.

        Returns:
            np.ndarray: Sampled item_ids, in the key-major layout documented in
            :meth:`AbstractSampler.sample_by_key_ids`.

        Raises:
            ValueError: If any user_id is out of range.
        """
        try:
            return self.sample_by_key_ids(user_ids, num, self.used_ids[user_ids])
        except IndexError:
            for user_id in user_ids:
                if user_id < 0 or user_id >= self.n_users:
                    raise ValueError('user_id [{}] not exist'.format(user_id))
            # BUG FIX: previously an IndexError with no out-of-range user_id
            # fell through and silently returned None; re-raise it instead.
            raise
class KGSampler(AbstractSampler):
    """:class:`KGSampler` is used to sample negative entities in a knowledge graph.

    Args:
        dataset (Dataset): The knowledge graph dataset, which contains triplets in a knowledge graph.
        distribution (str, optional): Distribution of the negative entities. Defaults to 'uniform'.
    """

    def __init__(self, dataset, distribution='uniform'):
        self.dataset = dataset
        self.hid_field = dataset.head_entity_field
        self.tid_field = dataset.tail_entity_field
        self.hid_list = dataset.head_entities
        self.tid_list = dataset.tail_entities

        # Set for O(1) membership checks in the error path of
        # sample_by_entity_ids().
        self.head_entities = set(dataset.head_entities)
        self.entity_num = dataset.entity_num

        super().__init__(distribution=distribution)

    def get_random_list(self):
        """
        Returns:
            np.ndarray or list: Random list of entity_id.
        """
        if self.distribution == 'uniform':
            # Entity id 0 is excluded (range starts at 1).
            return list(range(1, self.entity_num))
        elif self.distribution == 'popularity':
            # Popularity-weighted: entities appear once per triplet occurrence.
            return list(self.hid_list) + list(self.tid_list)
        else:
            raise NotImplementedError('Distribution [{}] has not been implemented'.format(self.distribution))

    def get_used_ids(self):
        """
        Returns:
            np.ndarray: Used entity_ids is the same as tail_entity_ids in knowledge graph.
            Index is head_entity_id, and element is a set of tail_entity_ids.
        """
        used_tail_entity_id = np.array([set() for _ in range(self.entity_num)])
        for hid, tid in zip(self.hid_list, self.tid_list):
            used_tail_entity_id[hid].add(tid)
        return used_tail_entity_id

    def sample_by_entity_ids(self, head_entity_ids, num=1):
        """Sampling by head_entity_ids.

        Args:
            head_entity_ids (np.ndarray or list): Input head_entity_ids.
            num (int, optional): Number of sampled entity_ids for each head_entity_id. Defaults to ``1``.

        Returns:
            np.ndarray: Sampled entity_ids, in the key-major layout documented in
            :meth:`AbstractSampler.sample_by_key_ids`.

        Raises:
            ValueError: If any head_entity_id is unknown.
        """
        try:
            return self.sample_by_key_ids(head_entity_ids, num, self.used_ids[head_entity_ids])
        except IndexError:
            for head_entity_id in head_entity_ids:
                if head_entity_id not in self.head_entities:
                    raise ValueError('head_entity_id [{}] not exist'.format(head_entity_id))
            # BUG FIX: an IndexError not explained by an unknown head entity
            # used to be swallowed (silently returning None); re-raise it.
            raise
class RepeatableSampler(AbstractSampler):
    """:class:`RepeatableSampler` is used to sample negative items for each input user.

    The difference from :class:`Sampler` is that no per-phase forbidden sets are
    built (:meth:`get_used_ids` returns empty sets), so previously interacted
    items may be sampled again in any phase.

    Args:
        phases (str or list of str): All the phases of input.
        dataset (Dataset): The union of all datasets for each phase.
        distribution (str, optional): Distribution of the negative items. Defaults to 'uniform'.

    Attributes:
        phase (str): the phase of sampler. It will not be set until :meth:`set_phase` is called.
    """

    def __init__(self, phases, dataset, distribution='uniform'):
        if not isinstance(phases, list):
            phases = [phases]
        self.phases = phases
        self.dataset = dataset

        self.iid_field = dataset.iid_field
        self.user_num = dataset.user_num
        self.item_num = dataset.item_num

        super().__init__(distribution=distribution)

    def get_random_list(self):
        """
        Returns:
            np.ndarray or list: Random list of item_id.
        """
        if self.distribution == 'uniform':
            # Item id 0 is excluded (range starts at 1).
            return list(range(1, self.item_num))
        elif self.distribution == 'popularity':
            return self.dataset.inter_feat[self.iid_field].values
        else:
            raise NotImplementedError('Distribution [{}] has not been implemented'.format(self.distribution))

    def get_used_ids(self):
        """
        Returns:
            np.ndarray: Empty set per user -- nothing is forbidden, so sampled
            items may repeat positives (hence "repeatable").
        """
        return np.array([set() for _ in range(self.user_num)])

    def sample_by_user_ids(self, user_ids, num):
        """Sampling by user_ids.

        Args:
            user_ids (np.ndarray or list): Input user_ids.
            num (int): Number of sampled item_ids for each user_id.

        Returns:
            np.ndarray: Sampled item_ids, in the key-major layout documented in
            :meth:`AbstractSampler.sample_by_key_ids`.

        Raises:
            ValueError: If any user_id is out of range.
        """
        try:
            return self.sample_by_key_ids(user_ids, num, self.used_ids[user_ids])
        except IndexError:
            for user_id in user_ids:
                # BUG FIX: was ``self.n_users``, which this class never sets
                # (the count is stored as ``user_num``), so the error path
                # itself raised AttributeError.
                if user_id < 0 or user_id >= self.user_num:
                    raise ValueError('user_id [{}] not exist'.format(user_id))
            # BUG FIX: re-raise an unexplained IndexError instead of
            # silently returning None.
            raise

    def set_phase(self, phase):
        """Get the sampler of corresponding phase.

        Args:
            phase (str): The phase of new sampler.

        Returns:
            Sampler: the copy of this sampler, and :attr:`phase` is set the same as input phase.
        """
        if phase not in self.phases:
            raise ValueError('phase [{}] not exist'.format(phase))
        new_sampler = copy.copy(self)
        new_sampler.phase = phase
        return new_sampler
|
from flask import abort, jsonify, request, g
from application.misc.query_wrapper import QueryWrapper
from application.auth.required import auth_required
class JobTargetTemplate(QueryWrapper):
    """Authenticated query endpoint for a job's target template."""

    decorators = [auth_required]  # Jobs are bound to a user, so we must authenticate

    def get(self):
        """Handle GET: return JSON for the job identified by the ``job_id``
        query parameter, scoped to the authenticated user.

        Aborts with HTTP 400 when ``job_id`` is missing.
        """
        job_id = request.args.get('job_id')
        if job_id is None:
            abort(400, "Job ID not specified")
        # NOTE(review): assumes ``g.user`` has been populated by the
        # auth_required decorator -- confirm in application.auth.required.
        user_id = g.user.id
        response = self._processor_get(
            job_id=job_id,
            user_id=user_id
        )
        return jsonify(response)
|
from ..graph.node import Node
class Environment(object):
    """Abstract environment wrapping a raw occupancy map."""

    def __init__(self, envMap):
        # Raw map data; subclasses decide how to index and interpret it.
        self.envMap = envMap

    def isValidPoint(self, point):
        """Validity-check hook; subclasses override. Base implementation is a no-op."""
        pass
class GridEnvironment(Environment):
    """2D occupancy-grid environment with an 8-connected neighbourhood.

    Grid cells are identified by integer node ids ``row * cols + col``.
    Cells whose map value is below 50 are treated as obstacles.
    """

    def __init__(self, envMap, rows, cols):
        super(GridEnvironment, self).__init__(envMap)
        self.rows = rows
        self.cols = cols
        self.graph = {}  # nodeId -> Node cache

    def goal(self, node=None):
        """Set the goal node when ``node`` is given, otherwise return it.

        NOTE(review): ``self.goal = node`` shadows this method on the
        instance, so after the first setter call ``env.goal`` is the node
        itself and ``env.goal(...)`` is no longer callable.  Kept as-is for
        caller compatibility; read the goal via the attribute after setting.
        """
        if node:
            self.goal = node
        else:
            return self.goal

    def squaredDistanceBetween(self, start, end):
        """Squared Euclidean distance between two node ids."""
        startPoint, endPoint = map(self.getPointFromId, [start, end])
        return ((startPoint[0] - endPoint[0])**2 +
                (startPoint[1] - endPoint[1])**2)

    def distanceBetweenNodes(self, start, end):
        """Calculates Euclidean distance between two node ids."""
        return self.squaredDistanceBetween(start, end)**.5

    def isValidPoint(self, point):
        """Return True when the cell at (row, col) is traversable (value >= 50)."""
        if self.envMap[point[0], point[1]] < 50:
            return False
        else:
            return True

    def getNeighbours(self, row, col):
        """Returns 8-connected neighbours from the grid, with edge costs.

        Performs bounds and validity checks; orthogonal moves cost 1,
        diagonal moves cost 1.5.
        """
        neighbours = []
        edgeCosts = []
        for i in [-1, 0, 1]:
            for j in [-1, 0, 1]:
                if not (i == 0 and j == 0):
                    r = row + i
                    c = col + j
                    if (r >= 0) and (r < self.rows) and (c >= 0) and (c < self.cols):
                        if self.isValidPoint((r, c)):
                            neighbours.append((r, c))
                            if i == 0 or j == 0:
                                edgeCosts.append(1)
                            else:
                                edgeCosts.append(1.5)
        #print(neighbours)
        return (neighbours, edgeCosts)

    def getChildrenAndCosts(self, node):
        """Return (children Nodes, edge costs) for ``node``, caching in self.graph."""
        # BUG FIX: dict.has_key() was removed in Python 3; use ``in``.
        if node.getNodeId() not in self.graph:
            self.graph[node.getNodeId()] = node
        point = self.getPointFromId(node.getNodeId())
        children, edgeCosts = self.getNeighbours(point[0], point[1])
        childrenNodes = []
        for child in children:
            nodeId = self.getIdFromPoint(child)
            self.addNode(nodeId)
            childrenNodes.append(self.graph[nodeId])
        return (childrenNodes, edgeCosts)

    def getIdFromPoint(self, gridPoint):
        """Map a (row, col) point to its scalar node id."""
        return gridPoint[0]*self.cols + gridPoint[1]

    def getPointFromId(self, Id):
        """Map a scalar node id back to its (row, col) point."""
        return (Id//self.cols, Id%self.cols)

    def ancestoryContainsNode(self, currNode, nodeToFind, thresh):
        """Walk currNode's ancestry (while within ``thresh`` of nodeToFind)
        and report whether nodeToFind is found."""
        node = currNode.getParent()
        while (node != None and self.distanceBetweenNodes(node.getNodeId(),
                nodeToFind.getNodeId()) < thresh and node != nodeToFind):
            node = node.getParent()
        return node == nodeToFind

    def addNode(self, nodeId):
        """Create and cache a Node for ``nodeId`` if not already present."""
        # BUG FIX: dict.has_key() was removed in Python 3; use ``in``.
        if nodeId not in self.graph:
            node = Node(nodeId)
            node.g1, node.h1 = float('inf'), float('inf')
            self.graph[nodeId] = node

    def euclideanHeuristic(self, currNode, goalNode):
        """Straight-line distance between the two nodes' grid points."""
        currPoint = self.getPointFromId(currNode.getNodeId())
        goalPoint = self.getPointFromId(goalNode.getNodeId())
        return ((currPoint[0] - goalPoint[0])**2 +
                (currPoint[1] - goalPoint[1])**2)**.5

    def diagonalHeuristic(self, currNode, goalNode):
        """Octile-style distance with straight cost 1 and diagonal cost 1.5."""
        currPoint = self.getPointFromId(currNode.getNodeId())
        goalPoint = self.getPointFromId(goalNode.getNodeId())
        dr = abs(currPoint[0] - goalPoint[0])
        dc = abs(currPoint[1] - goalPoint[1])
        D = 1
        D2 = 1.5
        return D * (dr + dc) + (D2 - 2 * D) * min(dr, dc)

    def setHeuristicType(self, heuristicType=1):
        """Select 0 for Euclidean or any other value for diagonal heuristic."""
        self.heuristicType = heuristicType

    def setHeuristic(self, heuristic):
        """heuristic takes in currNode and goalNode and returns the heuristic
        cost.  NOTE(review): the assigned attribute shadows the ``heuristic``
        method below on this instance."""
        self.heuristic = heuristic

    def heuristic(self, currNode, goalNode, *args):
        """Dispatch to the heuristic selected via setHeuristicType()."""
        if self.heuristicType == 0:
            return self.euclideanHeuristic(currNode, goalNode, *args)
        else:
            return self.diagonalHeuristic(currNode, goalNode, *args)

    def h(self, node):
        """Heuristic cost from ``node`` to the stored goal.

        BUG FIX: previously referenced undefined globals ``heuristic`` and
        ``goal`` (NameError); uses the instance's heuristic and the goal
        stored by goal(node).
        """
        return self.heuristic(node, self.goal)
|
import numpy as np
from tensorflow.keras.layers import *
from tensorflow.keras import backend as K
import tensorflow as tf
__all__ =["SubpixelLayer2D","conv_up","SubpixelLayer2D_log"]
class SubpixelLayer2D(Layer):
    """Subpixel (pixel-shuffle) upsampling layer: conv -> ReLU ->
    depth_to_space -> average pool, upscaling spatial dims by ``scale``.

    NOTE(review): written against TF 1.x-era APIs (``Dimension.value``,
    ``tf.depth_to_space``) -- confirm the target TF version before reuse.
    """

    def __init__(self, filters=None, ksz=1, scale=2, **kwargs):
        self.scale = scale
        self.out_channels = filters  # None -> inferred from input in build()
        self.ksz = ksz
        super(SubpixelLayer2D, self).__init__(**kwargs)

    def kinit(self, shape, dtype=None, partition_info=None):
        """Kernel initializer: variance scaling, tiled across the scale**2
        subpixel copies."""
        h, w, cin, cout = shape
        # Replicate the kernel across output channels to avoid
        # checkerboard artifacts.
        y = tf.initializers.variance_scaling()(shape=(h, w, cin, cout))
        y = tf.tile(y, [1, 1, 1, self.scale**2])
        sp_weights = tf.Variable(y,
                                 dtype=dtype,
                                 name="kernel")
        return sp_weights

    def build(self, input_shape):
        b, h, w, cin = input_shape
        if self.out_channels == None:
            # Default so that depth_to_space consumes exactly the conv output.
            self.out_channels = (cin.value)//(self.scale**2)
        self.kernel = self.add_weight(shape=(self.ksz, self.ksz, cin.value, self.out_channels),
                                      initializer=self.kinit,
                                      name='kernel')
        super(SubpixelLayer2D, self).build(input_shape)

    def call(self, input):
        # Conv -> ReLU -> pixel shuffle -> smoothing average pool.
        y = K.conv2d(input, self.kernel, strides=(1, 1), padding='same', data_format="channels_last",
                     dilation_rate=(1, 1))
        y = K.relu(y)
        y = tf.depth_to_space(y, self.scale)
        y = K.pool2d(y, pool_size=(self.scale, self.scale), strides=(1, 1), padding='same', data_format="channels_last", pool_mode='avg')
        return y

    def compute_output_shape(self, input_shape):
        # Spatial dims grow by ``scale``; channels become ``out_channels``.
        shape = input_shape
        return (shape[0],
                shape[1] * self.scale,
                shape[2] * self.scale,
                self.out_channels)

    def get_config(self):
        # Serialize constructor args so the layer can be re-created.
        base_config = super(SubpixelLayer2D, self).get_config()
        base_config['filters'] = self.out_channels
        base_config['scale'] = self.scale
        base_config['ksz'] = self.ksz
        return base_config
class conv_up(Layer):
    """Conv2D (ReLU) followed by nearest-neighbour upsampling by ``scale``.

    NOTE(review): uses the TF 1.x ``Dimension.value`` API -- confirm the
    target TF version before reuse.
    """

    def __init__(self, filters=None, ksz=1, scale=2, **kwargs):
        self.scale = scale
        self.out_channels = filters  # None -> keep the input channel count
        self.ksz = ksz
        super(conv_up, self).__init__(**kwargs)

    def build(self, input_shape):
        b, h, w, cin = input_shape
        if self.out_channels == None:
            self.out_channels = (cin.value)
        # NOTE(review): the initializer is passed as the class
        # ``tf.initializers.variance_scaling`` (not an instance), unlike the
        # sibling layers -- confirm this is intentional.
        self.kernel = self.add_weight(shape=(self.ksz, self.ksz, cin.value, self.out_channels),
                                      initializer=tf.initializers.variance_scaling,
                                      name='kernel')
        super(conv_up, self).build(input_shape)

    def call(self, input):
        # Conv -> ReLU -> nearest-neighbour resize by ``scale``.
        y = tf.keras.backend.conv2d(input, self.kernel, strides=(1, 1), padding='same', data_format="channels_last",
                                    dilation_rate=(1, 1))
        y = tf.keras.backend.relu(y)
        y = tf.keras.backend.resize_images(y, height_factor=self.scale, width_factor=self.scale, data_format="channels_last",
                                           interpolation='nearest')
        return y

    def compute_output_shape(self, input_shape):
        # Spatial dims grow by ``scale``; channels become ``out_channels``.
        shape = input_shape
        return (shape[0],
                shape[1] * self.scale,
                shape[2] * self.scale,
                self.out_channels)

    def get_config(self):
        # Serialize constructor args so the layer can be re-created.
        base_config = super(conv_up, self).get_config()
        base_config['filters'] = self.out_channels
        base_config['scale'] = self.scale
        base_config['ksz'] = self.ksz
        return base_config
class SubpixelLayer2D_log(Layer):
    """Subpixel upsampling applied log2(scale) times with factor-2 shuffles.

    Each stage is conv -> ReLU -> depth_to_space(2) -> average pool; the first
    stage uses ``kernel``, subsequent stages use ``loop_kernel[i]``.

    NOTE(review): written against TF 1.x-era APIs (``Dimension.value``,
    ``tf.depth_to_space``); also assumes ``scale`` is a power of two.
    """

    def __init__(self, filters=None, ksz=1, scale=2, **kwargs):
        # Number of extra factor-2 stages beyond the first.
        self.loop = int(np.log2(scale))-1
        self.scale = scale
        self.prime_scale = 2  # per-stage upscale factor
        self.out_channels = filters  # None -> inferred from input in build()
        self.ksz = ksz
        self.loop_kernel = {}
        super(SubpixelLayer2D_log, self).__init__(**kwargs)

    def kinit(self, shape, dtype=None, partition_info=None):
        """Kernel initializer: variance scaling, tiled across the 4 subpixel
        copies of a factor-2 shuffle."""
        h, w, cin, cout = shape
        # Replicate the kernel across output channels to avoid
        # checkerboard artifacts.
        y = tf.initializers.variance_scaling()(shape=(h, w, cin, cout))
        y = tf.tile(y, [1, 1, 1, self.prime_scale**2])
        sp_weights = tf.Variable(y,
                                 dtype=dtype,
                                 name="kernel")
        return sp_weights

    def build(self, input_shape):
        b, h, w, cin = input_shape
        if self.out_channels == None:
            self.out_channels = (cin.value)//(self.prime_scale**2)
        self.kernel = self.add_weight(shape=(self.ksz, self.ksz, cin.value, self.out_channels),
                                      initializer=self.kinit,
                                      name='kernel')
        # One additional kernel per extra factor-2 stage.
        for i in range(self.loop):
            self.loop_kernel[i] = self.add_weight(shape=(self.ksz, self.ksz, self.out_channels, self.out_channels),
                                                  initializer=self.kinit,
                                                  name='loop_kernel%d'%i)
        super(SubpixelLayer2D_log, self).build(input_shape)

    def call(self, input):
        # Apply loop+1 factor-2 subpixel stages in sequence.
        for i in range(self.loop+1):
            kernel = self.kernel if i == 0 else self.loop_kernel[i-1]
            x = input if i == 0 else y
            y = tf.keras.backend.conv2d(x, kernel, strides=(1, 1), padding='same', data_format="channels_last",
                                        dilation_rate=(1, 1))
            y = tf.keras.backend.relu(y)
            y = tf.depth_to_space(y, self.prime_scale)
            y = tf.keras.backend.pool2d(y, pool_size=(self.prime_scale, self.prime_scale), strides=(1, 1), padding='same', data_format="channels_last", pool_mode='avg')
        return y

    def compute_output_shape(self, input_shape):
        # Spatial dims grow by the full ``scale``; channels become ``out_channels``.
        shape = input_shape
        return (shape[0],
                shape[1] * self.scale,
                shape[2] * self.scale,
                self.out_channels)

    def get_config(self):
        # Serialize constructor args so the layer can be re-created.
        base_config = super(SubpixelLayer2D_log, self).get_config()
        base_config['filters'] = self.out_channels
        base_config['scale'] = self.scale
        base_config['ksz'] = self.ksz
        return base_config
|
# (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Ansible lookup: resolve each term's first word as an environment variable."""

    def run(self, terms, variables, **kwargs):
        # The first whitespace-separated token of each term names the
        # variable; unset variables resolve to the empty string.
        return [os.getenv(term.split()[0], '') for term in terms]
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GLES1 import _types as _cs
# End users want this...
from OpenGL.raw.GLES1._types import *
from OpenGL.raw.GLES1 import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GLES1_OES_query_matrix'
def _f( function ):
    # Wrap ``function`` as a GLES1 entry point for this extension, with the
    # standard OpenGL error checker attached.
    return _p.createFunction( function,_p.PLATFORM.GLES1,'GLES1_OES_query_matrix',error_checker=_errors._error_checker)
@_f
@_p.types(_cs.GLbitfield,arrays.GLfixedArray,arrays.GLintArray)
def glQueryMatrixxOES(mantissa,exponent):
    # Stub body: the decorators replace this with the native GL entry point
    # (returns GLbitfield; takes fixed-point mantissa and int exponent arrays).
    pass
|
import RPi.GPIO as GPIO
import time
# Set counter to limit running time
def count(timer):
    """Count down from ``timer - 1`` to 1, sleeping one second per step
    and printing the remaining count after each sleep."""
    for remaining in range(timer - 1, 0, -1):
        time.sleep(1)
        print(remaining)
while True:
    # Setup trigger and echo pins (BOARD numbering) on every pass.
    GPIO.setmode(GPIO.BOARD)
    TRIG = 11
    ECHO = 13
    GPIO.setup(TRIG, GPIO.OUT)
    GPIO.setup(ECHO, GPIO.IN)
    # Fire a 0.1 ms trigger pulse.
    GPIO.output(TRIG, True)
    time.sleep(0.0001)
    GPIO.output(TRIG, False)
    # Time the echo pulse width.
    # NOTE(review): if ECHO is already high when this point is reached,
    # ``start`` is never assigned and ``end-start`` raises NameError --
    # verify behaviour on hardware.
    while GPIO.input(ECHO) == False:
        start = time.time()
    while GPIO.input(ECHO) == True:
        end = time.time()
    sig_time = end - start
    # Convert echo time to distance (0.000058 s -- i.e. 58 us -- per cm).
    distance = sig_time / 0.000058
    dis = round(distance, 0)  # Distance between sensor and closest object
    # NOTE(review): ``dis`` is computed but never used or printed.
    GPIO.cleanup()
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: rax_keypair
short_description: Create a keypair for use with Rackspace Cloud Servers
description:
- Create a keypair for use with Rackspace Cloud Servers
options:
name:
type: str
description:
- Name of keypair
required: true
public_key:
type: str
description:
- Public Key string to upload. Can be a file path or string
state:
type: str
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
author: "Matt Martz (@sivel)"
notes:
- Keypairs cannot be manipulated, only created and deleted. To "update" a
keypair you must first delete and then recreate.
- The ability to specify a file path for the public key was added in 1.7
extends_documentation_fragment:
- community.general.rackspace.openstack
'''
EXAMPLES = '''
- name: Create a keypair
hosts: localhost
gather_facts: False
tasks:
- name: Keypair request
local_action:
module: rax_keypair
credentials: ~/.raxpub
name: my_keypair
region: DFW
register: keypair
- name: Create local public key
local_action:
module: copy
content: "{{ keypair.keypair.public_key }}"
dest: "{{ inventory_dir }}/{{ keypair.keypair.name }}.pub"
- name: Create local private key
local_action:
module: copy
content: "{{ keypair.keypair.private_key }}"
dest: "{{ inventory_dir }}/{{ keypair.keypair.name }}"
- name: Create a keypair
hosts: localhost
gather_facts: False
tasks:
- name: Keypair request
local_action:
module: rax_keypair
credentials: ~/.raxpub
name: my_keypair
public_key: "{{ lookup('file', 'authorized_keys/id_rsa.pub') }}"
region: DFW
register: keypair
'''
import os
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.rax import (rax_argument_spec,
rax_required_together,
rax_to_dict,
setup_rax_module,
)
def rax_keypair(module, name, public_key, state):
    """Ensure the named Rackspace keypair is present or absent, then exit.

    :param module: AnsibleModule used for exit_json/fail_json reporting.
    :param name: Keypair name.
    :param public_key: Public key string, or a path to a file containing it.
    :param state: Desired state, 'present' or 'absent'.
    """
    changed = False

    cs = pyrax.cloudservers

    if cs is None:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    keypair = {}

    if state == 'present':
        # Allow public_key to be a path to a key file on the controller.
        if public_key and os.path.isfile(public_key):
            try:
                # Context manager guarantees the handle is closed on error.
                with open(public_key) as f:
                    public_key = f.read()
            except Exception as e:
                module.fail_json(msg='Failed to load %s' % public_key)

        try:
            keypair = cs.keypairs.find(name=name)
        except cs.exceptions.NotFound:
            try:
                keypair = cs.keypairs.create(name, public_key)
                changed = True
            except Exception as e:
                # BUG FIX: Exception.message does not exist on Python 3;
                # format the exception itself instead.
                module.fail_json(msg='%s' % e)
        except Exception as e:
            module.fail_json(msg='%s' % e)

    elif state == 'absent':
        try:
            keypair = cs.keypairs.find(name=name)
        except Exception:
            # Best effort: a missing keypair simply means nothing to delete.
            pass

        if keypair:
            try:
                keypair.delete()
                changed = True
            except Exception as e:
                module.fail_json(msg='%s' % e)

    module.exit_json(changed=changed, keypair=rax_to_dict(keypair))
def main():
    """Module entry point: build the argument spec, validate inputs, and
    delegate to rax_keypair()."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            public_key=dict(),
            state=dict(default='present', choices=['absent', 'present']),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    name = module.params.get('name')
    public_key = module.params.get('public_key')
    state = module.params.get('state')

    # Configure the pyrax client from the module's credentials/region.
    setup_rax_module(module, pyrax)

    rax_keypair(module, name, public_key, state)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python3
import numpy as np
from astropy.wcs import WCS
from astropy.utils.exceptions import AstropyWarning
import os,time,vos,warnings
from astropy.io import fits
warnings.filterwarnings('ignore')
def CFIS_tile_radec(ra, dec):
    """Return the CFIS tile filename covering (ra, dec), both in degrees.

    CFIS tile centers lie on a cartesian grid spaced exactly 0.5 degrees
    apart (see "Stacking" in the CADC CFIS data documentation:
    https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/en/community/cfis/datadoc.html).
    """
    # Declination index on the half-degree grid, offset so -90 deg maps to 0.
    dec_index = int(np.rint((dec + 90) * 2))
    # RA spacing shrinks with cos(dec) of the tile row's declination.
    cos_dec = np.cos((dec_index / 2 - 90) * np.pi / 180.)
    ra_index = int(np.rint(ra * 2 * cos_dec))
    return f'CFIS.{ra_index:03d}.{dec_index:03d}.r.fits'
def _vcp_download(vos_path, local_name, max_attempts=10):
    """Copy `vos_path` to `local_name` with `vcp`, retrying up to `max_attempts` times.

    Success is detected by the output file appearing: `os.system` does not
    raise on a failed command, so the exit status plus a short sleep drive
    the retry instead of an exception handler.
    """
    if os.access(local_name, 0):
        os.remove(local_name)
    attempts = 0
    while not os.access(local_name, 0) and attempts < max_attempts:
        attempts += 1
        status = os.system(f'vcp {vos_path} {local_name}')
        if status != 0:
            time.sleep(0.1)
def CFIS_cutout_radec(ra,dec,cutout_name='CFIS_cutout_radec.fits',fov_arcsec=100,dr='DR3'):
    '''
    Obtain CFIS cutout at ra,dec coordinates (degrees) with a square FOV set by `fov_arcsec` (arcsec).
    Parts of a FOV outside a tile edge are converted to NaNs.

    Parameters
    ----------
    ra, dec : float
        Target coordinates in degrees.
    cutout_name : str
        Output FITS file name (overwritten if it already exists).
    fov_arcsec : float
        Side length of the square field of view, in arcseconds.
    dr : str
        CFIS data release directory on VOSpace (e.g. 'DR3').

    Returns
    -------
    str
        Name of the CFIS tile the cutout was extracted from.
    '''
    tile = CFIS_tile_radec(ra,dec)
    # Output FOV dimensions, forced even so the target sits between the two
    # central pixels with `hw` pixels on either side.
    arcsec_per_pixel = 0.1857
    fov_pixels = int(fov_arcsec / arcsec_per_pixel)
    if fov_pixels % 2:
        fov_pixels += 1
    hw = fov_pixels // 2
    # Download a 2x2-pixel stamp of the tile just to obtain its WCS header.
    wcs_name = 'WCS-{}'.format(tile)
    _vcp_download(f'vos:cfis/tiles_{dr}/{tile}[1:2,1:2]', wcs_name)
    # Map (ra, dec) to 1-based tile pixel coordinates.
    wcs = WCS(wcs_name)
    colc, rowc = wcs.all_world2pix(ra, dec, 1, ra_dec_order=True)
    colc, rowc = int(np.around(colc)), int(np.around(rowc))
    if os.access(wcs_name, 0):
        os.remove(wcs_name)
    # Requested field of view in tile pixel coordinates (1-based, inclusive).
    row_min = rowc - hw + 1
    row_max = rowc + hw
    col_min = colc - hw + 1
    col_max = colc + hw
    # Destination window inside the output array (0-based, half-open).
    crop_row_min, crop_row_max = 0, fov_pixels
    crop_col_min, crop_col_max = 0, fov_pixels
    # Clip the request at the tile edges (pixels 1..10000) and shrink the
    # destination window to match; the uncovered border stays NaN.
    if row_min < 1:
        crop_row_min = 1 - row_min
        row_min = 1
    if row_max > 10000:
        crop_row_max -= (row_max - 10000)
        row_max = 10000
    if col_min < 1:
        crop_col_min = 1 - col_min
        col_min = 1
    if col_max > 10000:
        crop_col_max -= (col_max - 10000)
        col_max = 10000
    # Download the (clipped) cutout itself.
    # BUG FIX: the original used `if` instead of `while` here, so a failed
    # download was never retried; _vcp_download restores the retry loop.
    tmp_name = 'TMP-{}'.format(tile)
    _vcp_download(f'vos:cfis/tiles_{dr}/{tile}[{col_min}:{col_max},{row_min}:{row_max}]', tmp_name)
    # Extract data and header, then delete the temporary file.
    with fits.open(tmp_name, mode='readonly') as hdu:
        header = hdu[0].header
        img_data = hdu[0].data
    if os.access(tmp_name, 0):
        os.remove(tmp_name)
    # Paste the tile data into a NaN-filled FOV-sized array and save it with
    # the tile's original header.
    cutout_data = np.full((fov_pixels, fov_pixels), np.nan)
    cutout_data[crop_row_min:crop_row_max, crop_col_min:crop_col_max] = img_data
    if os.access(cutout_name, 0):
        os.remove(cutout_name)
    hdu_pri = fits.PrimaryHDU(cutout_data)
    hdu_pri.header = header
    hdu_pri.writeto(cutout_name)
    return tile
def main():
    """Fetch a 100-arcsec CFIS cutout around a fixed demo target."""
    # ra,dec of target (degrees)
    target_ra, target_dec = 236.4707075027352, 36.93748162164066
    CFIS_cutout_radec(target_ra, target_dec,
                      cutout_name='CFIS_cutout.fits', fov_arcsec=100)
if __name__ == '__main__':
    main()
|
# Snapshot of the known PyPI trove classifiers, grouped by top-level prefix
# (Development Status, Environment, Framework, Intended Audience, License,
# Natural Language, Operating System, Programming Language, Topic, Typing).
# Stored as a set for O(1) membership checks when validating package metadata.
CLASSIFIERS = {
    "Development Status :: 1 - Planning",
    "Development Status :: 2 - Pre-Alpha",
    "Development Status :: 3 - Alpha",
    "Development Status :: 4 - Beta",
    "Development Status :: 5 - Production/Stable",
    "Development Status :: 6 - Mature",
    "Development Status :: 7 - Inactive",
    "Environment :: Console",
    "Environment :: Console :: Curses",
    "Environment :: Console :: Framebuffer",
    "Environment :: Console :: Newt",
    "Environment :: Console :: svgalib",
    "Environment :: Handhelds/PDA's",
    "Environment :: MacOS X",
    "Environment :: MacOS X :: Aqua",
    "Environment :: MacOS X :: Carbon",
    "Environment :: MacOS X :: Cocoa",
    "Environment :: No Input/Output (Daemon)",
    "Environment :: OpenStack",
    "Environment :: Other Environment",
    "Environment :: Plugins",
    "Environment :: Web Environment",
    "Environment :: Web Environment :: Buffet",
    "Environment :: Web Environment :: Mozilla",
    "Environment :: Web Environment :: ToscaWidgets",
    "Environment :: Win32 (MS Windows)",
    "Environment :: X11 Applications",
    "Environment :: X11 Applications :: Gnome",
    "Environment :: X11 Applications :: GTK",
    "Environment :: X11 Applications :: KDE",
    "Environment :: X11 Applications :: Qt",
    "Framework :: AiiDA",
    "Framework :: AsyncIO",
    "Framework :: BEAT",
    "Framework :: BFG",
    "Framework :: Bob",
    "Framework :: Bottle",
    "Framework :: Buildout",
    "Framework :: Buildout :: Extension",
    "Framework :: Buildout :: Recipe",
    "Framework :: CastleCMS",
    "Framework :: CastleCMS :: Theme",
    "Framework :: Chandler",
    "Framework :: CherryPy",
    "Framework :: CubicWeb",
    "Framework :: Django",
    "Framework :: Django :: 1.10",
    "Framework :: Django :: 1.11",
    "Framework :: Django :: 1.4",
    "Framework :: Django :: 1.5",
    "Framework :: Django :: 1.6",
    "Framework :: Django :: 1.7",
    "Framework :: Django :: 1.8",
    "Framework :: Django :: 1.9",
    "Framework :: Django :: 2.0",
    "Framework :: Django :: 2.1",
    "Framework :: Django :: 2.2",
    "Framework :: Django :: 3.0",
    "Framework :: Django CMS",
    "Framework :: Django CMS :: 3.4",
    "Framework :: Django CMS :: 3.5",
    "Framework :: Django CMS :: 3.6",
    "Framework :: Django CMS :: 3.7",
    "Framework :: Flake8",
    "Framework :: Flask",
    "Framework :: Hypothesis",
    "Framework :: IDLE",
    "Framework :: IPython",
    "Framework :: Jupyter",
    "Framework :: Lektor",
    "Framework :: Masonite",
    "Framework :: Nengo",
    "Framework :: Odoo",
    "Framework :: Opps",
    "Framework :: Paste",
    "Framework :: Pelican",
    "Framework :: Pelican :: Plugins",
    "Framework :: Pelican :: Themes",
    "Framework :: Plone",
    "Framework :: Plone :: 3.2",
    "Framework :: Plone :: 3.3",
    "Framework :: Plone :: 4.0",
    "Framework :: Plone :: 4.1",
    "Framework :: Plone :: 4.2",
    "Framework :: Plone :: 4.3",
    "Framework :: Plone :: 5.0",
    "Framework :: Plone :: 5.1",
    "Framework :: Plone :: 5.2",
    "Framework :: Plone :: 5.3",
    "Framework :: Plone :: Addon",
    "Framework :: Plone :: Core",
    "Framework :: Plone :: Theme",
    "Framework :: Pylons",
    "Framework :: Pyramid",
    "Framework :: Pytest",
    "Framework :: Review Board",
    "Framework :: Robot Framework",
    "Framework :: Robot Framework :: Library",
    "Framework :: Robot Framework :: Tool",
    "Framework :: Scrapy",
    "Framework :: Setuptools Plugin",
    "Framework :: Sphinx",
    "Framework :: Sphinx :: Extension",
    "Framework :: Sphinx :: Theme",
    "Framework :: tox",
    "Framework :: Trac",
    "Framework :: Trio",
    "Framework :: Tryton",
    "Framework :: TurboGears",
    "Framework :: TurboGears :: Applications",
    "Framework :: TurboGears :: Widgets",
    "Framework :: Twisted",
    "Framework :: Wagtail",
    "Framework :: Wagtail :: 1",
    "Framework :: Wagtail :: 2",
    "Framework :: ZODB",
    "Framework :: Zope",
    "Framework :: Zope2",
    "Framework :: Zope :: 2",
    "Framework :: Zope3",
    "Framework :: Zope :: 3",
    "Framework :: Zope :: 4",
    "Framework :: Zope :: 5",
    "Intended Audience :: Customer Service",
    "Intended Audience :: Developers",
    "Intended Audience :: Education",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: Financial and Insurance Industry",
    "Intended Audience :: Healthcare Industry",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Legal Industry",
    "Intended Audience :: Manufacturing",
    "Intended Audience :: Other Audience",
    "Intended Audience :: Religion",
    "Intended Audience :: Science/Research",
    "Intended Audience :: System Administrators",
    "Intended Audience :: Telecommunications Industry",
    "License :: Aladdin Free Public License (AFPL)",
    "License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication",
    "License :: CeCILL-B Free Software License Agreement (CECILL-B)",
    "License :: CeCILL-C Free Software License Agreement (CECILL-C)",
    "License :: DFSG approved",
    "License :: Eiffel Forum License (EFL)",
    "License :: Free For Educational Use",
    "License :: Free For Home Use",
    "License :: Free for non-commercial use",
    "License :: Freely Distributable",
    "License :: Free To Use But Restricted",
    "License :: Freeware",
    "License :: GUST Font License 1.0",
    "License :: GUST Font License 2006-09-30",
    "License :: Netscape Public License (NPL)",
    "License :: Nokia Open Source License (NOKOS)",
    "License :: OSI Approved",
    "License :: OSI Approved :: Academic Free License (AFL)",
    "License :: OSI Approved :: Apache Software License",
    "License :: OSI Approved :: Apple Public Source License",
    "License :: OSI Approved :: Artistic License",
    "License :: OSI Approved :: Attribution Assurance License",
    "License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)",
    "License :: OSI Approved :: BSD License",
    "License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)",
    "License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)",
    "License :: OSI Approved :: Common Public License",
    "License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)",
    "License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)",
    "License :: OSI Approved :: Eiffel Forum License",
    "License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)",
    "License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)",
    "License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)",
    "License :: OSI Approved :: GNU Affero General Public License v3",
    "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
    "License :: OSI Approved :: GNU Free Documentation License (FDL)",
    "License :: OSI Approved :: GNU General Public License (GPL)",
    "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
    "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)",
    "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
    "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
    "License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)",
    "License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)",
    "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
    "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
    "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
    "License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)",
    "License :: OSI Approved :: IBM Public License",
    "License :: OSI Approved :: Intel Open Source License",
    "License :: OSI Approved :: ISC License (ISCL)",
    "License :: OSI Approved :: Jabber Open Source License",
    "License :: OSI Approved :: MirOS License (MirOS)",
    "License :: OSI Approved :: MIT License",
    "License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)",
    "License :: OSI Approved :: Motosoto License",
    "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
    "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
    "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
    "License :: OSI Approved :: Nethack General Public License",
    "License :: OSI Approved :: Nokia Open Source License",
    "License :: OSI Approved :: Open Group Test Suite License",
    "License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)",
    "License :: OSI Approved :: PostgreSQL License",
    "License :: OSI Approved :: Python License (CNRI Python License)",
    "License :: OSI Approved :: Python Software Foundation License",
    "License :: OSI Approved :: Qt Public License (QPL)",
    "License :: OSI Approved :: Ricoh Source Code Public License",
    "License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)",
    "License :: OSI Approved :: Sleepycat License",
    "License :: OSI Approved :: Sun Industry Standards Source License (SISSL)",
    "License :: OSI Approved :: Sun Public License",
    "License :: OSI Approved :: Universal Permissive License (UPL)",
    "License :: OSI Approved :: University of Illinois/NCSA Open Source License",
    "License :: OSI Approved :: Vovida Software License 1.0",
    "License :: OSI Approved :: W3C License",
    "License :: OSI Approved :: X.Net License",
    "License :: OSI Approved :: zlib/libpng License",
    "License :: OSI Approved :: Zope Public License",
    "License :: Other/Proprietary License",
    "License :: Public Domain",
    "License :: Repoze Public License",
    "Natural Language :: Afrikaans",
    "Natural Language :: Arabic",
    "Natural Language :: Bengali",
    "Natural Language :: Bosnian",
    "Natural Language :: Bulgarian",
    "Natural Language :: Cantonese",
    "Natural Language :: Catalan",
    "Natural Language :: Chinese (Simplified)",
    "Natural Language :: Chinese (Traditional)",
    "Natural Language :: Croatian",
    "Natural Language :: Czech",
    "Natural Language :: Danish",
    "Natural Language :: Dutch",
    "Natural Language :: English",
    "Natural Language :: Esperanto",
    "Natural Language :: Finnish",
    "Natural Language :: French",
    "Natural Language :: Galician",
    "Natural Language :: German",
    "Natural Language :: Greek",
    "Natural Language :: Hebrew",
    "Natural Language :: Hindi",
    "Natural Language :: Hungarian",
    "Natural Language :: Icelandic",
    "Natural Language :: Indonesian",
    "Natural Language :: Italian",
    "Natural Language :: Japanese",
    "Natural Language :: Javanese",
    "Natural Language :: Korean",
    "Natural Language :: Latin",
    "Natural Language :: Latvian",
    "Natural Language :: Macedonian",
    "Natural Language :: Malay",
    "Natural Language :: Marathi",
    "Natural Language :: Norwegian",
    "Natural Language :: Panjabi",
    "Natural Language :: Persian",
    "Natural Language :: Polish",
    "Natural Language :: Portuguese",
    "Natural Language :: Portuguese (Brazilian)",
    "Natural Language :: Romanian",
    "Natural Language :: Russian",
    "Natural Language :: Serbian",
    "Natural Language :: Slovak",
    "Natural Language :: Slovenian",
    "Natural Language :: Spanish",
    "Natural Language :: Swedish",
    "Natural Language :: Tamil",
    "Natural Language :: Telugu",
    "Natural Language :: Thai",
    "Natural Language :: Tibetan",
    "Natural Language :: Turkish",
    "Natural Language :: Ukrainian",
    "Natural Language :: Urdu",
    "Natural Language :: Vietnamese",
    "Operating System :: Android",
    "Operating System :: BeOS",
    "Operating System :: iOS",
    "Operating System :: MacOS",
    "Operating System :: MacOS :: MacOS 9",
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: Microsoft",
    "Operating System :: Microsoft :: MS-DOS",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: Microsoft :: Windows :: Windows 10",
    "Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier",
    "Operating System :: Microsoft :: Windows :: Windows 7",
    "Operating System :: Microsoft :: Windows :: Windows 8",
    "Operating System :: Microsoft :: Windows :: Windows 8.1",
    "Operating System :: Microsoft :: Windows :: Windows 95/98/2000",
    "Operating System :: Microsoft :: Windows :: Windows CE",
    "Operating System :: Microsoft :: Windows :: Windows NT/2000",
    "Operating System :: Microsoft :: Windows :: Windows Server 2003",
    "Operating System :: Microsoft :: Windows :: Windows Server 2008",
    "Operating System :: Microsoft :: Windows :: Windows Vista",
    "Operating System :: Microsoft :: Windows :: Windows XP",
    "Operating System :: OS/2",
    "Operating System :: OS Independent",
    "Operating System :: Other OS",
    "Operating System :: PalmOS",
    "Operating System :: PDA Systems",
    "Operating System :: POSIX",
    "Operating System :: POSIX :: AIX",
    "Operating System :: POSIX :: BSD",
    "Operating System :: POSIX :: BSD :: BSD/OS",
    "Operating System :: POSIX :: BSD :: FreeBSD",
    "Operating System :: POSIX :: BSD :: NetBSD",
    "Operating System :: POSIX :: BSD :: OpenBSD",
    "Operating System :: POSIX :: GNU Hurd",
    "Operating System :: POSIX :: HP-UX",
    "Operating System :: POSIX :: IRIX",
    "Operating System :: POSIX :: Linux",
    "Operating System :: POSIX :: Other",
    "Operating System :: POSIX :: SCO",
    "Operating System :: POSIX :: SunOS/Solaris",
    "Operating System :: Unix",
    "Programming Language :: Ada",
    "Programming Language :: APL",
    "Programming Language :: ASP",
    "Programming Language :: Assembly",
    "Programming Language :: Awk",
    "Programming Language :: Basic",
    "Programming Language :: C",
    "Programming Language :: C#",
    "Programming Language :: C++",
    "Programming Language :: Cold Fusion",
    "Programming Language :: Cython",
    "Programming Language :: Delphi/Kylix",
    "Programming Language :: Dylan",
    "Programming Language :: Eiffel",
    "Programming Language :: Emacs-Lisp",
    "Programming Language :: Erlang",
    "Programming Language :: Euler",
    "Programming Language :: Euphoria",
    "Programming Language :: F#",
    "Programming Language :: Forth",
    "Programming Language :: Fortran",
    "Programming Language :: Haskell",
    "Programming Language :: Java",
    "Programming Language :: JavaScript",
    "Programming Language :: Lisp",
    "Programming Language :: Logo",
    "Programming Language :: ML",
    "Programming Language :: Modula",
    "Programming Language :: Objective C",
    "Programming Language :: Object Pascal",
    "Programming Language :: OCaml",
    "Programming Language :: Other",
    "Programming Language :: Other Scripting Engines",
    "Programming Language :: Pascal",
    "Programming Language :: Perl",
    "Programming Language :: PHP",
    "Programming Language :: Pike",
    "Programming Language :: Pliant",
    "Programming Language :: PL/SQL",
    "Programming Language :: PROGRESS",
    "Programming Language :: Prolog",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.3",
    "Programming Language :: Python :: 2.4",
    "Programming Language :: Python :: 2.5",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 2 :: Only",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.0",
    "Programming Language :: Python :: 3.1",
    "Programming Language :: Python :: 3.2",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: Implementation",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: IronPython",
    "Programming Language :: Python :: Implementation :: Jython",
    "Programming Language :: Python :: Implementation :: MicroPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "Programming Language :: Python :: Implementation :: Stackless",
    "Programming Language :: R",
    "Programming Language :: REBOL",
    "Programming Language :: Rexx",
    "Programming Language :: Ruby",
    "Programming Language :: Rust",
    "Programming Language :: Scheme",
    "Programming Language :: Simula",
    "Programming Language :: Smalltalk",
    "Programming Language :: SQL",
    "Programming Language :: Tcl",
    "Programming Language :: Unix Shell",
    "Programming Language :: Visual Basic",
    "Programming Language :: XBasic",
    "Programming Language :: YACC",
    "Programming Language :: Zope",
    "Topic :: Adaptive Technologies",
    "Topic :: Artistic Software",
    "Topic :: Communications",
    "Topic :: Communications :: BBS",
    "Topic :: Communications :: Chat",
    "Topic :: Communications :: Chat :: ICQ",
    "Topic :: Communications :: Chat :: Internet Relay Chat",
    "Topic :: Communications :: Chat :: Unix Talk",
    "Topic :: Communications :: Conferencing",
    "Topic :: Communications :: Email",
    "Topic :: Communications :: Email :: Address Book",
    "Topic :: Communications :: Email :: Email Clients (MUA)",
    "Topic :: Communications :: Email :: Filters",
    "Topic :: Communications :: Email :: Mailing List Servers",
    "Topic :: Communications :: Email :: Mail Transport Agents",
    "Topic :: Communications :: Email :: Post-Office",
    "Topic :: Communications :: Email :: Post-Office :: IMAP",
    "Topic :: Communications :: Email :: Post-Office :: POP3",
    "Topic :: Communications :: Fax",
    "Topic :: Communications :: FIDO",
    "Topic :: Communications :: File Sharing",
    "Topic :: Communications :: File Sharing :: Gnutella",
    "Topic :: Communications :: File Sharing :: Napster",
    "Topic :: Communications :: Ham Radio",
    "Topic :: Communications :: Internet Phone",
    "Topic :: Communications :: Telephony",
    "Topic :: Communications :: Usenet News",
    "Topic :: Database",
    "Topic :: Database :: Database Engines/Servers",
    "Topic :: Database :: Front-Ends",
    "Topic :: Desktop Environment",
    "Topic :: Desktop Environment :: File Managers",
    "Topic :: Desktop Environment :: Gnome",
    "Topic :: Desktop Environment :: GNUstep",
    "Topic :: Desktop Environment :: K Desktop Environment (KDE)",
    "Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes",
    "Topic :: Desktop Environment :: PicoGUI",
    "Topic :: Desktop Environment :: PicoGUI :: Applications",
    "Topic :: Desktop Environment :: PicoGUI :: Themes",
    "Topic :: Desktop Environment :: Screen Savers",
    "Topic :: Desktop Environment :: Window Managers",
    "Topic :: Desktop Environment :: Window Managers :: Afterstep",
    "Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: Applets",
    "Topic :: Desktop Environment :: Window Managers :: Blackbox",
    "Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: CTWM",
    "Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: Enlightenment",
    "Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets",
    "Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15",
    "Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16",
    "Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17",
    "Topic :: Desktop Environment :: Window Managers :: Fluxbox",
    "Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: FVWM",
    "Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: IceWM",
    "Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: MetaCity",
    "Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: Oroborus",
    "Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: Sawfish",
    "Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30",
    "Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30",
    "Topic :: Desktop Environment :: Window Managers :: Waimea",
    "Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: Window Maker",
    "Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets",
    "Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes",
    "Topic :: Desktop Environment :: Window Managers :: XFCE",
    "Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes",
    "Topic :: Documentation",
    "Topic :: Documentation :: Sphinx",
    "Topic :: Education",
    "Topic :: Education :: Computer Aided Instruction (CAI)",
    "Topic :: Education :: Testing",
    "Topic :: Games/Entertainment",
    "Topic :: Games/Entertainment :: Arcade",
    "Topic :: Games/Entertainment :: Board Games",
    "Topic :: Games/Entertainment :: First Person Shooters",
    "Topic :: Games/Entertainment :: Fortune Cookies",
    "Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)",
    "Topic :: Games/Entertainment :: Puzzle Games",
    "Topic :: Games/Entertainment :: Real Time Strategy",
    "Topic :: Games/Entertainment :: Role-Playing",
    "Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games",
    "Topic :: Games/Entertainment :: Simulation",
    "Topic :: Games/Entertainment :: Turn Based Strategy",
    "Topic :: Home Automation",
    "Topic :: Internet",
    "Topic :: Internet :: File Transfer Protocol (FTP)",
    "Topic :: Internet :: Finger",
    "Topic :: Internet :: Log Analysis",
    "Topic :: Internet :: Name Service (DNS)",
    "Topic :: Internet :: Proxy Servers",
    "Topic :: Internet :: WAP",
    "Topic :: Internet :: WWW/HTTP",
    "Topic :: Internet :: WWW/HTTP :: Browsers",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Content Management System",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Wiki",
    "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
    "Topic :: Internet :: WWW/HTTP :: Indexing/Search",
    "Topic :: Internet :: WWW/HTTP :: Session",
    "Topic :: Internet :: WWW/HTTP :: Site Management",
    "Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking",
    "Topic :: Internet :: WWW/HTTP :: WSGI",
    "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
    "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware",
    "Topic :: Internet :: WWW/HTTP :: WSGI :: Server",
    "Topic :: Internet :: XMPP",
    "Topic :: Internet :: Z39.50",
    "Topic :: Multimedia",
    "Topic :: Multimedia :: Graphics",
    "Topic :: Multimedia :: Graphics :: 3D Modeling",
    "Topic :: Multimedia :: Graphics :: 3D Rendering",
    "Topic :: Multimedia :: Graphics :: Capture",
    "Topic :: Multimedia :: Graphics :: Capture :: Digital Camera",
    "Topic :: Multimedia :: Graphics :: Capture :: Scanners",
    "Topic :: Multimedia :: Graphics :: Capture :: Screen Capture",
    "Topic :: Multimedia :: Graphics :: Editors",
    "Topic :: Multimedia :: Graphics :: Editors :: Raster-Based",
    "Topic :: Multimedia :: Graphics :: Editors :: Vector-Based",
    "Topic :: Multimedia :: Graphics :: Graphics Conversion",
    "Topic :: Multimedia :: Graphics :: Presentation",
    "Topic :: Multimedia :: Graphics :: Viewers",
    "Topic :: Multimedia :: Sound/Audio",
    "Topic :: Multimedia :: Sound/Audio :: Analysis",
    "Topic :: Multimedia :: Sound/Audio :: Capture/Recording",
    "Topic :: Multimedia :: Sound/Audio :: CD Audio",
    "Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing",
    "Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping",
    "Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing",
    "Topic :: Multimedia :: Sound/Audio :: Conversion",
    "Topic :: Multimedia :: Sound/Audio :: Editors",
    "Topic :: Multimedia :: Sound/Audio :: MIDI",
    "Topic :: Multimedia :: Sound/Audio :: Mixers",
    "Topic :: Multimedia :: Sound/Audio :: Players",
    "Topic :: Multimedia :: Sound/Audio :: Players :: MP3",
    "Topic :: Multimedia :: Sound/Audio :: Sound Synthesis",
    "Topic :: Multimedia :: Sound/Audio :: Speech",
    "Topic :: Multimedia :: Video",
    "Topic :: Multimedia :: Video :: Capture",
    "Topic :: Multimedia :: Video :: Conversion",
    "Topic :: Multimedia :: Video :: Display",
    "Topic :: Multimedia :: Video :: Non-Linear Editor",
    "Topic :: Office/Business",
    "Topic :: Office/Business :: Financial",
    "Topic :: Office/Business :: Financial :: Accounting",
    "Topic :: Office/Business :: Financial :: Investment",
    "Topic :: Office/Business :: Financial :: Point-Of-Sale",
    "Topic :: Office/Business :: Financial :: Spreadsheet",
    "Topic :: Office/Business :: Groupware",
    "Topic :: Office/Business :: News/Diary",
    "Topic :: Office/Business :: Office Suites",
    "Topic :: Office/Business :: Scheduling",
    "Topic :: Other/Nonlisted Topic",
    "Topic :: Printing",
    "Topic :: Religion",
    "Topic :: Scientific/Engineering",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Scientific/Engineering :: Artificial Life",
    "Topic :: Scientific/Engineering :: Astronomy",
    "Topic :: Scientific/Engineering :: Atmospheric Science",
    "Topic :: Scientific/Engineering :: Bio-Informatics",
    "Topic :: Scientific/Engineering :: Chemistry",
    "Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",
    "Topic :: Scientific/Engineering :: GIS",
    "Topic :: Scientific/Engineering :: Human Machine Interfaces",
    "Topic :: Scientific/Engineering :: Hydrology",
    "Topic :: Scientific/Engineering :: Image Recognition",
    "Topic :: Scientific/Engineering :: Information Analysis",
    "Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator",
    "Topic :: Scientific/Engineering :: Mathematics",
    "Topic :: Scientific/Engineering :: Medical Science Apps.",
    "Topic :: Scientific/Engineering :: Physics",
    "Topic :: Scientific/Engineering :: Visualization",
    "Topic :: Security",
    "Topic :: Security :: Cryptography",
    "Topic :: Sociology",
    "Topic :: Sociology :: Genealogy",
    "Topic :: Sociology :: History",
    "Topic :: Software Development",
    "Topic :: Software Development :: Assemblers",
    "Topic :: Software Development :: Bug Tracking",
    "Topic :: Software Development :: Build Tools",
    "Topic :: Software Development :: Code Generators",
    "Topic :: Software Development :: Compilers",
    "Topic :: Software Development :: Debuggers",
    "Topic :: Software Development :: Disassemblers",
    "Topic :: Software Development :: Documentation",
    "Topic :: Software Development :: Embedded Systems",
    "Topic :: Software Development :: Internationalization",
    "Topic :: Software Development :: Interpreters",
    "Topic :: Software Development :: Libraries",
    "Topic :: Software Development :: Libraries :: Application Frameworks",
    "Topic :: Software Development :: Libraries :: Java Libraries",
    "Topic :: Software Development :: Libraries :: Perl Modules",
    "Topic :: Software Development :: Libraries :: PHP Classes",
    "Topic :: Software Development :: Libraries :: Pike Modules",
    "Topic :: Software Development :: Libraries :: pygame",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Libraries :: Ruby Modules",
    "Topic :: Software Development :: Libraries :: Tcl Extensions",
    "Topic :: Software Development :: Localization",
    "Topic :: Software Development :: Object Brokering",
    "Topic :: Software Development :: Object Brokering :: CORBA",
    "Topic :: Software Development :: Pre-processors",
    "Topic :: Software Development :: Quality Assurance",
    "Topic :: Software Development :: Testing",
    "Topic :: Software Development :: Testing :: Acceptance",
    "Topic :: Software Development :: Testing :: BDD",
    "Topic :: Software Development :: Testing :: Mocking",
    "Topic :: Software Development :: Testing :: Traffic Generation",
    "Topic :: Software Development :: Testing :: Unit",
    "Topic :: Software Development :: User Interfaces",
    "Topic :: Software Development :: Version Control",
    "Topic :: Software Development :: Version Control :: Bazaar",
    "Topic :: Software Development :: Version Control :: CVS",
    "Topic :: Software Development :: Version Control :: Git",
    "Topic :: Software Development :: Version Control :: Mercurial",
    "Topic :: Software Development :: Version Control :: RCS",
    "Topic :: Software Development :: Version Control :: SCCS",
    "Topic :: Software Development :: Widget Sets",
    "Topic :: System",
    "Topic :: System :: Archiving",
    "Topic :: System :: Archiving :: Backup",
    "Topic :: System :: Archiving :: Compression",
    "Topic :: System :: Archiving :: Mirroring",
    "Topic :: System :: Archiving :: Packaging",
    "Topic :: System :: Benchmark",
    "Topic :: System :: Boot",
    "Topic :: System :: Boot :: Init",
    "Topic :: System :: Clustering",
    "Topic :: System :: Console Fonts",
    "Topic :: System :: Distributed Computing",
    "Topic :: System :: Emulators",
    "Topic :: System :: Filesystems",
    "Topic :: System :: Hardware",
    "Topic :: System :: Hardware :: Hardware Drivers",
    "Topic :: System :: Hardware :: Mainframes",
    "Topic :: System :: Hardware :: Symmetric Multi-processing",
    "Topic :: System :: Installation/Setup",
    "Topic :: System :: Logging",
    "Topic :: System :: Monitoring",
    "Topic :: System :: Networking",
    "Topic :: System :: Networking :: Firewalls",
    "Topic :: System :: Networking :: Monitoring",
    "Topic :: System :: Networking :: Monitoring :: Hardware Watchdog",
    "Topic :: System :: Networking :: Time Synchronization",
    "Topic :: System :: Operating System",
    "Topic :: System :: Operating System Kernels",
    "Topic :: System :: Operating System Kernels :: BSD",
    "Topic :: System :: Operating System Kernels :: GNU Hurd",
    "Topic :: System :: Operating System Kernels :: Linux",
    "Topic :: System :: Power (UPS)",
    "Topic :: System :: Recovery Tools",
    "Topic :: System :: Shells",
    "Topic :: System :: Software Distribution",
    "Topic :: System :: Systems Administration",
    "Topic :: System :: Systems Administration :: Authentication/Directory",
    "Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
    "Topic :: System :: Systems Administration :: Authentication/Directory :: NIS",
    "Topic :: System :: System Shells",
    "Topic :: Terminals",
    "Topic :: Terminals :: Serial",
    "Topic :: Terminals :: Telnet",
    "Topic :: Terminals :: Terminal Emulators/X Terminals",
    "Topic :: Text Editors",
    "Topic :: Text Editors :: Documentation",
    "Topic :: Text Editors :: Emacs",
    "Topic :: Text Editors :: Integrated Development Environments (IDE)",
    "Topic :: Text Editors :: Text Processing",
    "Topic :: Text Editors :: Word Processors",
    "Topic :: Text Processing",
    "Topic :: Text Processing :: Filters",
    "Topic :: Text Processing :: Fonts",
    "Topic :: Text Processing :: General",
    "Topic :: Text Processing :: Indexing",
    "Topic :: Text Processing :: Linguistic",
    "Topic :: Text Processing :: Markup",
    "Topic :: Text Processing :: Markup :: HTML",
    "Topic :: Text Processing :: Markup :: LaTeX",
    "Topic :: Text Processing :: Markup :: SGML",
    "Topic :: Text Processing :: Markup :: VRML",
    "Topic :: Text Processing :: Markup :: XML",
    "Topic :: Utilities",
    "Typing :: Typed",
}
# Maps a PyPI trove "License ::" classifier string to the set of short license
# codes it corresponds to.  Umbrella classifiers (e.g. the plain GPL/LGPL ones)
# map to several codes; CODE_LICENSES below is the reverse mapping.
LICENSE_CODES = {
    "License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)": {"EPL-2.0"},
    "License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication": {"CC0 1.0"},
    "License :: OSI Approved :: Sun Industry Standards Source License (SISSL)": {
        "SISSL"
    },
    "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)": {
        "GPLv2+",
        "GPL",
    },
    "License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)": {"EPL-1.0"},
    "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)": {
        "LGPL",
        "LGPLv3",
    },
    "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)": {"MPL 2.0"},
    "License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)": {
        "EUPL 1.0"
    },
    "License :: CeCILL-C Free Software License Agreement (CECILL-C)": {"CECILL-C"},
    "License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)": {
        "LGPL",
        "LGPLv2+",
    },
    "License :: OSI Approved :: GNU General Public License v3 (GPLv3)": {
        "GPL",
        "GPLv3",
    },
    "License :: CeCILL-B Free Software License Agreement (CECILL-B)": {"CECILL-B"},
    "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)": {
        "AGPLv3+"
    },
    "License :: OSI Approved :: MirOS License (MirOS)": {"MirOS"},
    "License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)": {
        "CeCILL-2.1"
    },
    "License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)": {
        "EUPL 1.2"
    },
    "License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)": {
        "CVW"
    },
    "License :: OSI Approved :: GNU Free Documentation License (FDL)": {"FDL"},
    "License :: Aladdin Free Public License (AFPL)": {"AFPL"},
    "License :: OSI Approved :: GNU General Public License (GPL)": {"GPL"},
    "License :: Eiffel Forum License (EFL)": {"EFL"},
    "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)": {"MPL 1.1"},
    "License :: OSI Approved :: Python License (CNRI Python License)": {
        "CNRI Python License"
    },
    "License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)": {"OSL-3.0"},
    "License :: Nokia Open Source License (NOKOS)": {"NOKOS"},
    "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)": {"MPL"},
    "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)": {
        "LGPLv3+",
        "LGPL",
    },
    "License :: OSI Approved :: Qt Public License (QPL)": {"QPL"},
    "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)": {
        "LGPL"
    },
    "License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)": {"OFL-1.1"},
    "License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)": {
        "CDDL-1.0"
    },
    "License :: OSI Approved :: GNU General Public License v2 (GPLv2)": {
        "GPL",
        "GPLv2",
    },
    "License :: OSI Approved :: MIT License": {"MIT"},
    "License :: OSI Approved :: Zope Public License": {"ZPL"},
    "License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)": {
        "HPND"
    },
    "License :: Netscape Public License (NPL)": {"NPL"},
    "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)": {
        "GPL",
        "GPLv3+",
    },
    "License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)": {
        "LGPL",
        "LGPLv2",
    },
    "License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)": {"BSL-1.0"},
    "License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)": {
        "EUPL 1.1"
    },
    "License :: OSI Approved :: ISC License (ISCL)": {"ISCL"},
    "License :: OSI Approved :: Academic Free License (AFL)": {"AFL"},
    "License :: OSI Approved :: Universal Permissive License (UPL)": {"UPL"},
}
# Reverse of LICENSE_CODES: short license code -> set of trove classifiers that
# declare it.  Ambiguous codes ("GPL", "LGPL") map to every classifier in that
# license family.
CODE_LICENSES = {
    "GPLv2+": {
        "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)"
    },
    "MirOS": {"License :: OSI Approved :: MirOS License (MirOS)"},
    "GPLv3+": {
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)"
    },
    "CECILL-C": {"License :: CeCILL-C Free Software License Agreement (CECILL-C)"},
    "CECILL-B": {"License :: CeCILL-B Free Software License Agreement (CECILL-B)"},
    "GPLv2": {"License :: OSI Approved :: GNU General Public License v2 (GPLv2)"},
    "GPLv3": {"License :: OSI Approved :: GNU General Public License v3 (GPLv3)"},
    "BSL-1.0": {"License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)"},
    "FDL": {"License :: OSI Approved :: GNU Free Documentation License (FDL)"},
    "AFPL": {"License :: Aladdin Free Public License (AFPL)"},
    "CDDL-1.0": {
        "License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)"
    },
    "MPL 1.1": {"License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)"},
    "LGPLv2+": {
        "License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)"
    },
    "MPL 2.0": {"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"},
    "CNRI Python License": {
        "License :: OSI Approved :: Python License (CNRI Python License)"
    },
    "QPL": {"License :: OSI Approved :: Qt Public License (QPL)"},
    "OSL-3.0": {"License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)"},
    "MIT": {"License :: OSI Approved :: MIT License"},
    "EPL-2.0": {"License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)"},
    "CVW": {
        "License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)"
    },
    "EUPL 1.1": {
        "License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)"
    },
    "EUPL 1.0": {
        "License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)"
    },
    "EPL-1.0": {"License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)"},
    "EUPL 1.2": {
        "License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)"
    },
    "CC0 1.0": {"License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication"},
    "NOKOS": {"License :: Nokia Open Source License (NOKOS)"},
    "AFL": {"License :: OSI Approved :: Academic Free License (AFL)"},
    "LGPLv3+": {
        "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)"
    },
    "SISSL": {
        "License :: OSI Approved :: Sun Industry Standards Source License (SISSL)"
    },
    "EFL": {"License :: Eiffel Forum License (EFL)"},
    "ZPL": {"License :: OSI Approved :: Zope Public License"},
    "ISCL": {"License :: OSI Approved :: ISC License (ISCL)"},
    "HPND": {
        "License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)"
    },
    "OFL-1.1": {"License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)"},
    "NPL": {"License :: Netscape Public License (NPL)"},
    "LGPL": {
        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
        "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
        "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
        "License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)",
        "License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)",
    },
    "GPL": {
        "License :: OSI Approved :: GNU General Public License (GPL)",
        "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
        "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)",
    },
    "MPL": {"License :: OSI Approved :: Mozilla Public License 1.0 (MPL)"},
    "CeCILL-2.1": {
        "License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)"
    },
    "AGPLv3+": {
        "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)"
    },
    "UPL": {"License :: OSI Approved :: Universal Permissive License (UPL)"},
    "LGPLv3": {
        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)"
    },
    "LGPLv2": {
        "License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)"
    },
}
|
from flask import Flask, render_template, flash, redirect, url_for, session, logging, request, Response
import requests
from urllib.request import urlopen
import os
import static.files.running as running
from pyDes import des
from pyfladesk import init_gui
import subprocess
import hashlib
import time
from random import randint
import subprocess
import sys
# Crear instancia Flask
app = Flask(__name__)
# VARIABLES GLOBALES
# NOTE(review): the original declared `global init_dir` / `global act` /
# `global passhsh` / `global debian` here; `global` at module scope is a no-op,
# so those statements were removed.  `act` was never assigned in this module.
debian = "/static/css/img/debian.png"
# MD5 digest of the UI password.  MD5 is weak for password storage -- consider
# hashlib.pbkdf2_hmac (or passlib) if this guards anything sensitive.
passhsh = "f5a617102abb078c922452642ea57f3b"
# Directory containing this module; used to restore the cwd after crypto ops.
init_dir = os.path.dirname(os.path.abspath(__file__))
# RUTAS DE LA APLICACIÓN WEBVIEW
@app.route('/')
def index():
    """Render the landing (splash) page."""
    template_name = 'init.html'
    return render_template(template_name)
@app.route('/home')
def home():
    """Refresh the online-status globals, then render the main view."""
    checkonline()
    context = dict(activos=activos, on_off=on_off, inet_connection=inet_connection)
    return render_template('mainv1.html', **context)
@app.route('/raspissh')
def raspissh():
    """Render the SSH console page (no fresh status check)."""
    template_name = 'ssh1.html'
    return render_template(template_name)
@app.route('/ssh')
def ssh():
    """Refresh the status globals, then render the SSH landing page."""
    checkonline()
    context = dict(on_off_ssh=on_off_ssh, inet_connection=inet_connection, debian=debian)
    return render_template('ssh.html', **context)
@app.route('/archivoe')
def archivoe():
    """Refresh the status globals, then render the file-encryption page."""
    checkonline()
    context = dict(on_off=on_off, inet_connection=inet_connection)
    return render_template('archivoe.html', **context)
@app.route('/encriptar', methods=['POST', 'GET'])
def encriptar():
    """Encrypt every file in the shared folder, then show the done page."""
    print("ENCRIPTANDO.......")
    key = str(passhsh)
    encriptaFiles(key)
    return render_template('endencrypt.html')
@app.route('/desencriptar', methods=['POST', 'GET'])
def desencriptar():
    """Decrypt every file in the shared folder, then show the done page."""
    print("DESENCRIPTANDO.......")
    key = str(passhsh)
    desencriptaFiles(key)
    return render_template('enddecrypt.html')
@app.route('/main')
def log():
    """Render the login form."""
    template_name = 'login.html'
    return render_template(template_name)
@app.route('/login', methods=['POST', 'GET'])
def login():
    """Validate the submitted password against the stored MD5 digest.

    Renders the main view on success and an error popup on failure.
    Bug fix: the original returned None for GET requests (Flask turned
    that into a 500 error); a GET now re-renders the login form.

    NOTE(review): assumes checkonline() already ran so the globals
    `activos` / `on_off` / `inet_connection` exist -- confirm.
    """
    if request.method == 'POST':
        passwd_tmp = request.form['passwd']
        hsh_tmp = hashlib.md5(passwd_tmp.encode()).hexdigest()
        if passhsh == hsh_tmp:
            return render_template('mainv0.html', activos=activos, on_off=on_off, inet_connection=inet_connection)
        return render_template('errorpopup.html')
    return render_template('login.html')
@app.route('/tgtbot')
def tgtbot():
    """Refresh the status globals, then render the Telegram-bot page."""
    checkonline()
    context = dict(on_off_ssh=on_off_ssh, inet_connection=inet_connection)
    return render_template('tgtbot.html', **context)
@app.route('/tgtbotrun', methods=['POST', 'GET'])
def tgtbotrun():
    """Validate the bot id submitted from the Telegram-bot page.

    Bug fix: the original called int() on the raw form value, so any
    non-numeric input (e.g. "abc") crashed with ValueError; non-digit
    input (including negatives) is now rejected with the invalid-id
    message.  Valid posts and plain GETs fall through to the bot SSH
    console, as before.
    """
    if request.method == 'POST':
        botno = request.form['botno']
        if not botno.isdigit() or int(botno) == 0:
            errortxt = "POR FAVOR INTRODUCE ALGÚN ID VALIDO"
            return render_template('boterror.html', on_off_ssh=on_off_ssh, inet_connection=inet_connection, errortxt=errortxt)
        if int(botno) > activos:
            errortxt = "EL ID INTRODUCIDO NO SE ENCUENTRA ONLINE"
            return render_template('boterror.html', on_off_ssh=on_off_ssh, inet_connection=inet_connection, errortxt=errortxt)
    return render_template('tgtbotssh.html', on_off_ssh=on_off_ssh, inet_connection=inet_connection)
@app.route('/sendssh', methods=['POST', 'GET'])
def sendssh():
    """Forward a shell command to the remote relay and render its output.

    Bug fix: the urlopen() response was never closed (leaked socket);
    it is now wrapped in a context manager.  The relay URL, previously
    duplicated inline, is bound once.
    """
    command = request.form['command']
    relay_url = "http://p3rl4.me:1324/sendssh"
    requests.post(relay_url, data={'command': command})
    time.sleep(3)  # give the relay time to execute the command
    with urlopen(relay_url) as sshobj:
        output = sshobj.read().decode('utf-8')
    return render_template('ssh1.html', ssh_output=output)
@app.route('/progress')
def progress():
    """Stream a fake 0-100% progress feed as server-sent events."""
    def generate():
        # 0, 10, ..., 100 -- one SSE message per step, random 0-1 s pause.
        for pct in range(0, 101, 10):
            yield "data:" + str(pct) + "\n\n"
            time.sleep(randint(0, 1))
    return Response(generate(), mimetype='text/event-stream')
@app.route('/logout')
def logout():
    """Render the logout page."""
    template_name = 'logout.html'
    return render_template(template_name)
# FUNCIONES
def checkonline():
    """Refresh the global connectivity flags consumed by the templates.

    Sets ``activos`` (clients online), ``on_off`` / ``on_off_ssh`` (CSS
    state classes), ``inet_connection`` (status label) and ``debian``
    (status icon path).

    Bug fixes: the bare ``except:`` (which also swallowed SystemExit /
    KeyboardInterrupt) is narrowed to ``Exception``, and ``on_off_ssh``
    is now set on the failure path too -- previously a failure on the
    very first call left it undefined, so templates referencing it
    raised NameError.
    """
    global activos
    global on_off
    global on_off_ssh
    global inet_connection
    global debian
    try:
        activos = running.check()
        if activos > 0:
            debian = "/static/css/img/debian.png"
            on_off_ssh = ''
        else:
            debian = "/static/css/img/debian_off.png"
            on_off_ssh = 'disconnected'
        inet_connection = 'Modo Online'
        on_off = ''
    except Exception:
        activos = 0
        on_off = 'disconnected'
        on_off_ssh = 'disconnected'
        debian = "/static/css/img/debian.png"
        inet_connection = 'Modo Offline'
def encriptaFiles(user_pass):
    """DES-encrypt every file in ``filesdir`` in place.

    :param user_pass: DES key string passed to pyDes.
    :return: True when all files were processed.

    Bug fixes: files are opened via context managers (the old open/close
    pairs leaked handles on error) and the process-global os.chdir()
    dance was replaced by explicit paths, so the caller's cwd is no
    longer mutated.

    NOTE(review): ``filesdir`` is a global not defined in this module
    view -- confirm it is assigned before the first call.
    """
    for filen in os.listdir(filesdir):
        path = os.path.join(filesdir, filen)
        with open(path, 'rb') as f:
            data = f.read()
        key = des(user_pass)
        data = key.encrypt(data, ' ')
        with open(path, 'wb') as f:
            f.write(data)
    return True
def desencriptaFiles(user_pass):
    """DES-decrypt every file in ``filesdir`` in place.

    :param user_pass: DES key string passed to pyDes.
    :return: True when all files were processed.

    Mirrors encriptaFiles: context-managed file handles instead of the
    leaky open/close pairs, and no process-global os.chdir().

    NOTE(review): ``filesdir`` is a global not defined in this module
    view -- confirm it is assigned before the first call.
    """
    for filen in os.listdir(filesdir):
        path = os.path.join(filesdir, filen)
        with open(path, 'rb') as f:
            data = f.read()
        key = des(user_pass)
        data = key.decrypt(data, ' ')
        with open(path, 'wb') as f:
            f.write(data)
    return True
|
import requests
import zipfile
import shutil
import csv
import pandas as pd
from datetime import date
from datetime import datetime
from pathlib import Path
from urllib.parse import urlparse
class BhavCopy(object):
    """Download and parse one trading day's NSE equity / F&O bhavcopy files.

    Files are cached under a local path mirroring the URL path; weekends
    (and 404 responses, i.e. trading holidays) set ``market_close``.
    """
    def __init__(self, date: date):
        self.date = date
        self._url_eq = urlparse(f'https://www.nseindia.com/content/historical/EQUITIES/{date.strftime("%Y")}/{date.strftime("%b").upper()}/cm{date.strftime("%d%b%Y").upper()}bhav.csv.zip')
        self._file_eq_zip = Path(self._url_eq.path[1:])
        self._file_eq = Path(self._url_eq.path[1:-4])  # strip ".zip"
        self._url_fo = urlparse(f'https://www.nseindia.com/content/historical/DERIVATIVES/{date.strftime("%Y")}/{date.strftime("%b").upper()}/fo{date.strftime("%d%b%Y").upper()}bhav.csv.zip')
        self._file_fo_zip = Path(self._url_fo.path[1:])
        self._file_fo = Path(self._url_fo.path[1:-4])
        self.market_close = False
        self._initialize()
    def _initialize(self):
        # Saturday (5) / Sunday (6): market closed, nothing to download.
        if self.date.weekday() in (5, 6):
            self.market_close = True
            return
        self._try_download(self._url_eq, self._file_eq_zip)
        self._try_download(self._url_fo, self._file_fo_zip)
    def _try_download(self, url, path: Path):
        """Download *url* to *path* unless cached, then unzip alongside it.

        Note: the original annotated *url* as ``urlparse`` (a function);
        it is actually a ``urllib.parse.ParseResult``.
        """
        if path.is_file():
            return
        path.parent.mkdir(parents=True, exist_ok=True)
        with requests.get(url.geturl(), stream=True) as r:
            if r.status_code == 200:
                with path.open('wb') as f:
                    r.raw.decode_content = True
                    shutil.copyfileobj(r.raw, f)
                with zipfile.ZipFile(path, 'r') as zf:
                    zf.extractall(path.parent)
            elif r.status_code == 404:
                # 404 on a weekday: trading holiday.
                self.market_close = True
    def read_fo(self):
        """Yield F&O rows as tuples; the header row is stored on ``headers_fo``.

        Columns 2 and 14 (dates, 'DD-MON-YYYY') are normalized to ISO
        'YYYY-MM-DD' -- presumably TIMESTAMP and EXPIRY_DT; confirm
        against the bhavcopy header.
        """
        if self._file_fo.is_file():
            with self._file_fo.open('rt') as f:
                csv_reader = csv.reader(f, delimiter=',')
                self.headers_fo = next(csv_reader, None)
                for row in csv_reader:
                    yield (row[0], row[1],
                           datetime.strptime(row[2], '%d-%b-%Y').strftime('%Y-%m-%d'),
                           row[3], row[4], row[5], row[6], row[7], row[8], row[9],
                           row[10], row[11], row[12], row[13],
                           datetime.strptime(row[14], '%d-%b-%Y').strftime('%Y-%m-%d'))
    def read_eq(self):
        """Yield equity rows; the header row is stored on ``headers_eq``.

        Bug fix: the original constructed a *second* csv.reader on the
        same file handle after reading the header, which skipped the
        header only by accident of file position; it now iterates the
        same reader used for the header.
        """
        if self._file_eq.is_file():
            with self._file_eq.open('rt') as f:
                csv_reader = csv.reader(f, delimiter=',')
                self.headers_eq = next(csv_reader, None)
                for row in csv_reader:
                    yield row
    def read_eq_as_pd(self):
        """Return the equity bhavcopy as a DataFrame, or None on a closed day."""
        if self.market_close:
            return
        return pd.read_csv(self._file_eq)
    def read_fo_as_pd(self):
        """Return the F&O bhavcopy as a DataFrame, or None on a closed day."""
        if self.market_close:
            return
        return pd.read_csv(self._file_fo)
|
from setuptools import setup, find_packages
from setuptools.extension import Extension
import os
import glob
version = '2.0'
# os.uname() exists only on POSIX; this build script targets Linux and macOS.
platform = os.uname()[0]

# The MPI sources are built separately; exclude them from the extension.
# (The original evaluated this glob twice, once per branch.)
_sources = [f for f in glob.glob('facs/*.c') if 'mpi' not in f]

if platform != 'Darwin':
    # Linux: OpenMP via GCC's libgomp.
    c_ext = Extension("facs/_facs",
                      define_macros=[('NODEBUG', '1'), ('FILE_OFFSET_BITS', '64'), ('LARGE_FILE', '1')],
                      sources=_sources,
                      extra_compile_args=['-fopenmp'],
                      extra_link_args=['-lgomp', '-lz'])
else:
    # macOS: stock clang lacks -fopenmp; just silence the pragma warnings.
    c_ext = Extension("facs/_facs",
                      define_macros=[('NODEBUG', '1')],
                      sources=_sources,
                      extra_compile_args=['-Wno-unknown-pragmas', '-Wno-unused-value'],
                      extra_link_args=['-lz'])

setup(name='facs',
      version=version,
      description="FACS bloom filter implementation",
      long_description="""FACS you""",
      ext_modules=[c_ext],
      classifiers=[
          "Development Status :: 4 - Beta",
          "Environment :: Console",
          "Intended Audience :: Science/Research",
          "Intended Audience :: Healthcare Industry",
          "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
          "Natural Language :: English",
          "Operating System :: POSIX :: Linux",
          "Programming Language :: C",
          "Programming Language :: Python",
          "Topic :: Scientific/Engineering :: Bio-Informatics"
      ],
      keywords='bloom filter probabilistic bioinformatics',
      author='Enze Liu, Lars Arvestad, Henrik Stranneheim, Roman Valls Guimera',
      author_email='roman@scilifelab.se',
      url='http://facs.scilifelab.se/',
      license='GPLv3',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          "nose",
          "nose-timer",
          "jinja2",
          "requests",
          "couchdb"
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
|
#!/usr/bin/python
# Generate arbitrary onset and offset timing gratings.
#
# Copyright (C) 2010-2011 Huang Xin
#
# See LICENSE.TXT that came with this file.
from __future__ import division
import sys
import random
import numpy as np
from StimControl.LightStim.SweepSeque import TimingSeque
from StimControl.LightStim.LightData import dictattr
from StimControl.LightStim.FrameControl import FrameSweep
from StimControl.LightStim.Grating import TimingController,RandPhaseController
from StimControl.LightStim.SweepController import SweepSequeStimulusController
from StimControl.LightStim.SEDGrating import SEDGrating
from StimControl.LightStim.Target import Fixation
from StimControl.LightStim.Core import DefaultScreen
class RandOriController(SweepSequeStimulusController):
    """Sweep controller that assigns a reproducible pseudo-random orientation.

    The RNG is re-seeded with the sweep counter before every choice, so
    the orientation sequence is identical across runs.
    """
    def __init__(self, *args, **kwargs):
        super(RandOriController, self).__init__(*args, **kwargs)
        self.gp = self.stimulus.gp
        # 16 candidate orientations: 0, 22.5, ..., 337.5 degrees.
        self.ori = np.linspace(0.0, 360.0, 16, endpoint=False)
        self.index = 0
    def during_go_eval(self):
        self.index += 1
        random.seed(self.index)  # deterministic per-sweep choice
        self.gp.orientation = random.choice(self.ori)
class OrthOriController(RandOriController):
    """RandOriController variant rotated 90 degrees from the random choice."""
    def __init__(self, *args, **kwargs):
        super(OrthOriController, self).__init__(*args, **kwargs)
    def during_go_eval(self):
        super(OrthOriController, self).during_go_eval()
        self.gp.orientation += 90
class TimingSetGrating(SEDGrating):
    """Grating driven by a timing controller plus orthogonal orientation."""
    def register_controllers(self):
        super(TimingSetGrating, self).register_controllers()
        self.logger.info('Register TimingController.')
        self.controllers.extend([TimingController(self), OrthOriController(self)])
class RandPhaseTimingSetGrating(SEDGrating):
    """Timing grating with randomized phase and orthogonal orientation."""
    def register_controllers(self):
        super(RandPhaseTimingSetGrating, self).register_controllers()
        self.logger.info('Register TimingController.')
        self.controllers.extend([TimingController(self),
                                 RandPhaseController(self),
                                 OrthOriController(self)])
class RandOriTimingSetGrating(SEDGrating):
    """Timing grating with randomized phase and randomized orientation."""
    def register_controllers(self):
        super(RandOriTimingSetGrating, self).register_controllers()
        self.logger.info('Register TimingController.')
        self.controllers.extend([TimingController(self),
                                 RandPhaseController(self),
                                 RandOriController(self)])
class OrthOriTimingSetGrating(SEDGrating):
    """Timing grating with randomized phase and orthogonal orientation."""
    def register_controllers(self):
        super(OrthOriTimingSetGrating, self).register_controllers()
        self.logger.info('Register TimingController.')
        self.controllers.extend([TimingController(self),
                                 RandPhaseController(self),
                                 OrthOriController(self)])
# --- Experiment script (Python 2: uses raw_input) ---------------------------
# Drives two eyes ('left'/'right' viewports) with timing-set gratings whose
# onset asymmetry is controlled by a millisecond interval given on the
# command line or interactively.
DefaultScreen(['left','right'], bgcolor=(0.5,0.5,0.5))
# Per-eye grating parameters (identical for both eyes here).
p_left = dictattr()
p_left.ml = 0.5
p_left.tfreqCycSec = 0.0
p_left.bgbrightness = 0.5
p_left.contrast = 1
p_left.phase0 = 0
p_right = dictattr()
p_right.ml = 0.5
p_right.tfreqCycSec = 0.0
p_right.bgbrightness = 0.5
p_right.contrast = 1
p_right.phase0 = 0
# argv[1] = subject initials, argv[2] = stimulus interval in ms; prompt for
# whichever is missing.
argv = list(sys.argv)
subject = None
if len(argv) >= 2:
    subject = argv[1]
while subject is None:
    sys.stdout.write('Please input lowercase initials of subject name: ')
    subject = raw_input()
interval = None
if len(argv) >= 3:
    interval = int(argv[2]) / 1000
while interval is None:
    sys.stdout.write('Please input stimulus interval in miliseconds: ')
    interval = int(raw_input()) / 1000
stim_interval = interval
# Sign of the interval decides which eye's stimulus is delayed -- positive
# presumably delays the right eye; confirm against TimingSeque semantics.
pre_left = 0.0 if stim_interval > 0 else abs(stim_interval)
pre_right = 0.0 if stim_interval <= 0 else stim_interval
repeats = 1000
# NOTE(review): rand_phase is never read below; phase randomization is wired
# via RandPhaseController in the grating classes instead.
rand_phase = True
# 132 ms cycle with a 16 ms stimulus pulse per cycle, per eye.
cycle_left = dictattr(duration=0.132, pre=pre_left, stimulus=0.016)
cycle_right = dictattr(duration=0.132, pre=pre_right, stimulus=0.016)
block_left = dictattr(repeat=repeats, cycle=cycle_left, interval=0.0)
block_right = dictattr(repeat=repeats, cycle=cycle_right, interval=0.0)
sequence_left = TimingSeque(repeat=1, block=block_left, shuffle=True)
sequence_right = TimingSeque(repeat=1, block=block_right, shuffle=True)
# Red fixation point shown in both viewports.
fp = dictattr()
fp.color = (1.0, 0.0, 0.0, 1.0)
fp.width = 0.25
fixation_left = Fixation(viewport='left', subject=subject, params=fp)
fixation_right = Fixation(viewport='right', subject=subject, params=fp)
# Six 150-second sweeps; fresh stimulus objects per sweep.
for i in range(6):
    sweep = FrameSweep()
    grating_left = RandOriTimingSetGrating(viewport='left', params=p_left, subject=subject, sweepseq=sequence_left)
    grating_right = OrthOriTimingSetGrating(viewport='right', params=p_right, subject=subject, sweepseq=sequence_right)
    sweep.add_stimulus(grating_left)
    sweep.add_stimulus(grating_right)
    sweep.add_stimulus(fixation_left)
    sweep.add_stimulus(fixation_right)
    sweep.go(prestim=5.0,poststim=5.0,duration=(150.0,'seconds'))
|
from functools import partial
from ignite.metrics import EpochMetric
def average_precision_compute_fn(y_preds, y_targets, activation=None):
    """Compute sklearn's average precision on accumulated tensors.

    Args:
        y_preds: tensor of scores/logits (anything with ``.numpy()``).
        y_targets: tensor of ground-truth labels.
        activation (callable, optional): applied to ``y_preds`` first,
            e.g. ``torch.sigmoid`` to turn logits into probabilities.

    Raises:
        RuntimeError: if scikit-learn is not installed.

    Bug fix: the RuntimeError is now chained to the original ImportError
    (``raise ... from e``) so the real cause is not hidden.
    """
    try:
        from sklearn.metrics import average_precision_score
    except ImportError as e:
        raise RuntimeError("This contrib module requires sklearn to be installed.") from e
    y_true = y_targets.numpy()
    if activation is not None:
        y_preds = activation(y_preds)
    y_pred = y_preds.numpy()
    return average_precision_score(y_true, y_pred)
class AveragePrecision(EpochMetric):
    """Computes Average Precision by accumulating predictions and ground truth
    over an epoch and applying `sklearn.metrics.average_precision_score
    <http://scikit-learn.org/stable/modules/generated/
    sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .

    Args:
        activation (callable, optional): optional function applied to the
            prediction tensors, e.g. `activation=torch.sigmoid` for logits.
        output_transform (callable, optional): callable that transforms the
            :class:`~ignite.engine.Engine`'s `process_function` output into
            the form expected by the metric, e.g. to select one output of a
            multi-output model.
    """
    def __init__(self, activation=None, output_transform=lambda x: x):
        compute_fn = partial(average_precision_compute_fn, activation=activation)
        super(AveragePrecision, self).__init__(compute_fn,
                                               output_transform=output_transform)
|
class Solution:
    def isAlienSorted(self, words: List[str], order: str) -> bool:
        """Return True if *words* is sorted under the alien alphabet *order*.

        Each word is mapped to its list of letter ranks; Python's list
        comparison then implements exactly the required lexicographic
        order, including the prefix rule ("app" < "apple").
        Runs in O(total characters).
        """
        # Edge case (and a cheap fast path): 0 or 1 words are always sorted.
        if len(words) <= 1:
            return True
        rank = {letter: i for i, letter in enumerate(order)}

        def key(word):
            # Rank vector; list <= list is lexicographic with prefix rule.
            return [rank[c] for c in word]

        return all(key(a) <= key(b) for a, b in zip(words, words[1:]))
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ExtensionsV1beta1DeploymentCondition(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> Swagger type; drives the generic to_dict() traversal.
    swagger_types = {
        'last_transition_time': 'datetime',
        'last_update_time': 'datetime',
        'message': 'str',
        'reason': 'str',
        'status': 'str',
        'type': 'str'
    }
    # Attribute name -> JSON key in the Kubernetes API definition.
    attribute_map = {
        'last_transition_time': 'lastTransitionTime',
        'last_update_time': 'lastUpdateTime',
        'message': 'message',
        'reason': 'reason',
        'status': 'status',
        'type': 'type'
    }
    def __init__(self, last_transition_time=None, last_update_time=None, message=None, reason=None, status=None, type=None):
        """
        ExtensionsV1beta1DeploymentCondition - a model defined in Swagger
        """
        self._last_transition_time = None
        self._last_update_time = None
        self._message = None
        self._reason = None
        self._status = None
        self._type = None
        self.discriminator = None
        if last_transition_time is not None:
            self.last_transition_time = last_transition_time
        if last_update_time is not None:
            self.last_update_time = last_update_time
        if message is not None:
            self.message = message
        if reason is not None:
            self.reason = reason
        # status and type are required fields: their setters reject None, so
        # constructing this model without them raises ValueError.
        self.status = status
        self.type = type
    @property
    def last_transition_time(self):
        """
        Gets the last_transition_time of this ExtensionsV1beta1DeploymentCondition.
        Last time the condition transitioned from one status to another.
        :return: The last_transition_time of this ExtensionsV1beta1DeploymentCondition.
        :rtype: datetime
        """
        return self._last_transition_time
    @last_transition_time.setter
    def last_transition_time(self, last_transition_time):
        """
        Sets the last_transition_time of this ExtensionsV1beta1DeploymentCondition.
        Last time the condition transitioned from one status to another.
        :param last_transition_time: The last_transition_time of this ExtensionsV1beta1DeploymentCondition.
        :type: datetime
        """
        self._last_transition_time = last_transition_time
    @property
    def last_update_time(self):
        """
        Gets the last_update_time of this ExtensionsV1beta1DeploymentCondition.
        The last time this condition was updated.
        :return: The last_update_time of this ExtensionsV1beta1DeploymentCondition.
        :rtype: datetime
        """
        return self._last_update_time
    @last_update_time.setter
    def last_update_time(self, last_update_time):
        """
        Sets the last_update_time of this ExtensionsV1beta1DeploymentCondition.
        The last time this condition was updated.
        :param last_update_time: The last_update_time of this ExtensionsV1beta1DeploymentCondition.
        :type: datetime
        """
        self._last_update_time = last_update_time
    @property
    def message(self):
        """
        Gets the message of this ExtensionsV1beta1DeploymentCondition.
        A human readable message indicating details about the transition.
        :return: The message of this ExtensionsV1beta1DeploymentCondition.
        :rtype: str
        """
        return self._message
    @message.setter
    def message(self, message):
        """
        Sets the message of this ExtensionsV1beta1DeploymentCondition.
        A human readable message indicating details about the transition.
        :param message: The message of this ExtensionsV1beta1DeploymentCondition.
        :type: str
        """
        self._message = message
    @property
    def reason(self):
        """
        Gets the reason of this ExtensionsV1beta1DeploymentCondition.
        The reason for the condition's last transition.
        :return: The reason of this ExtensionsV1beta1DeploymentCondition.
        :rtype: str
        """
        return self._reason
    @reason.setter
    def reason(self, reason):
        """
        Sets the reason of this ExtensionsV1beta1DeploymentCondition.
        The reason for the condition's last transition.
        :param reason: The reason of this ExtensionsV1beta1DeploymentCondition.
        :type: str
        """
        self._reason = reason
    @property
    def status(self):
        """
        Gets the status of this ExtensionsV1beta1DeploymentCondition.
        Status of the condition, one of True, False, Unknown.
        :return: The status of this ExtensionsV1beta1DeploymentCondition.
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """
        Sets the status of this ExtensionsV1beta1DeploymentCondition.
        Status of the condition, one of True, False, Unknown.
        :param status: The status of this ExtensionsV1beta1DeploymentCondition.
        :type: str
        """
        # Required field: reject None.
        if status is None:
            raise ValueError("Invalid value for `status`, must not be `None`")
        self._status = status
    @property
    def type(self):
        """
        Gets the type of this ExtensionsV1beta1DeploymentCondition.
        Type of deployment condition.
        :return: The type of this ExtensionsV1beta1DeploymentCondition.
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """
        Sets the type of this ExtensionsV1beta1DeploymentCondition.
        Type of deployment condition.
        :param type: The type of this ExtensionsV1beta1DeploymentCondition.
        :type: str
        """
        # Required field: reject None.
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")
        self._type = type
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, ExtensionsV1beta1DeploymentCondition):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
import logging
from django.db.models import Q
from django.http import HttpResponse
from django.utils import timezone
from zentral.contrib.mdm.models import (ArtifactType, ArtifactVersion,
Channel, CommandStatus,
DeviceCommand, UserCommand)
from .account_configuration import AccountConfiguration
from .declarative_management import DeclarativeManagement
from .device_configured import DeviceConfigured
from .install_profile import InstallProfile
from .install_enterprise_application import InstallEnterpriseApplication
from .remove_profile import RemoveProfile
from .base import registered_commands
logger = logging.getLogger("zentral.contrib.mdm.commands.utils")
def get_command(channel, uuid):
    """Load a queued command by channel and UUID, wrapped in its model class.

    Returns None (after logging) when the DB row or its registered
    command class cannot be found.
    """
    db_model_class = DeviceCommand if channel == Channel.Device else UserCommand
    try:
        db_command = (db_model_class.objects.select_related("artifact_version__artifact",
                                                            "artifact_version__enterprise_app",
                                                            "artifact_version__profile")
                                            .get(uuid=uuid))
    except db_model_class.DoesNotExist:
        logger.error("Unknown command: %s %s", channel.name, uuid)
        return
    try:
        model_class = registered_commands[db_command.name]
    except KeyError:
        logger.error("Unknown command model class: %s", db_command.name)
        return
    return model_class(channel, db_command)
def load_command(db_command):
    """Wrap an existing DeviceCommand/UserCommand row in its command class.

    Raises ValueError for a command name with no registered class.
    """
    try:
        model_class = registered_commands[db_command.name]
    except KeyError:
        raise ValueError(f"Unknown command model class: {db_command.name}")
    channel = Channel.Device if isinstance(db_command, DeviceCommand) else Channel.User
    return model_class(channel, db_command)
# Next command
def _get_next_queued_command(channel, enrollment_session, enrolled_device, enrolled_user):
    """Pop the oldest runnable queued command for the device or user channel.

    A command is runnable when it has never been sent (time is NULL) and
    its not_before constraint is absent or already passed.
    """
    if channel == Channel.Device:
        command_model = DeviceCommand
        extra_filters = {"enrolled_device": enrolled_device}
    else:
        command_model = UserCommand
        extra_filters = {"enrolled_user": enrolled_user}
    # TODO reschedule the NotNow commands
    queryset = (command_model.objects.select_for_update()
                .filter(time__isnull=True)
                .filter(Q(not_before__isnull=True) | Q(not_before__lte=timezone.now()))
                .filter(**extra_filters)
                .order_by("created_at"))
    db_command = queryset.first()
    if db_command is None:
        return None
    command = load_command(db_command)
    command.set_time()
    return command
def _configure_dep_enrollment_accounts(channel, enrollment_session, enrolled_device, enrolled_user):
    """Return an AccountConfiguration command for a DEP device awaiting configuration.

    Returns None when not applicable: wrong channel, device not awaiting
    configuration, no DEP enrollment, account configuration not required,
    missing realm user, or account configuration already acknowledged.
    """
    if channel != Channel.Device:
        return
    if not enrolled_device.awaiting_configuration:
        return
    dep_enrollment = getattr(enrollment_session, "dep_enrollment", None)
    if not dep_enrollment:
        # should never happen
        logger.error("Enrolled device %s AwaitingConfiguration but no DEP enrollment", enrolled_device.udid)
        return
    if not dep_enrollment.requires_account_configuration():
        return
    realm_user = enrollment_session.realm_user
    if not realm_user:
        # should never happen
        logger.error("Enrolled device %s AwaitingConfiguration with missing realm user", enrolled_device.udid)
        return
    # use exists() instead of count(): we only need to know if at least one
    # acknowledged AccountConfiguration command is already present
    if DeviceCommand.objects.filter(name=AccountConfiguration.request_type,
                                    enrolled_device=enrolled_device,
                                    status=CommandStatus.Acknowledged.value).exists():
        # account configuration already done
        return
    return AccountConfiguration.create_for_device(enrolled_device)
def _renew_mdm_payload(channel, enrollment_session, enrolled_device, enrolled_user):
    """Placeholder for MDM payload renewal (device channel only).

    Currently never returns a command.
    """
    if channel == Channel.Device:
        # TODO implement MDM payload renewal
        pass
def _install_artifacts(channel, enrollment_session, enrolled_device, enrolled_user):
    """Return the next artifact installation command for the target, if any.

    Skipped entirely when the device is under declarative management.
    Raises ValueError for artifact types that cannot be installed.
    """
    if enrolled_device.declarative_management:
        return
    target = enrolled_device if channel == Channel.Device else enrolled_user
    artifact_version = ArtifactVersion.objects.next_to_install(target)
    if not artifact_version:
        return
    artifact_type = artifact_version.artifact.type
    if artifact_type == ArtifactType.Profile.name:
        command_class = InstallProfile
    elif artifact_type == ArtifactType.EnterpriseApp.name:
        command_class = InstallEnterpriseApplication
    else:
        # should never happen
        raise ValueError(f"Cannot install artifact type {artifact_version.artifact.type}")
    if channel == Channel.Device:
        return command_class.create_for_device(enrolled_device, artifact_version)
    return command_class.create_for_user(enrolled_user, artifact_version)
def _remove_artifacts(channel, enrollment_session, enrolled_device, enrolled_user):
    """Return the next artifact removal command for the target, if any.

    Skipped entirely when the device is under declarative management.
    Only profiles can be removed; other types raise ValueError.
    """
    if enrolled_device.declarative_management:
        return
    target = enrolled_device if channel == Channel.Device else enrolled_user
    artifact_version = ArtifactVersion.objects.next_to_remove(target)
    if not artifact_version:
        return
    if artifact_version.artifact.type == ArtifactType.Profile.name:
        command_class = RemoveProfile
    else:
        # should never happen
        raise ValueError(f"Cannot remove artifact type {artifact_version.artifact.type}")
    if channel == Channel.Device:
        return command_class.create_for_device(enrolled_device, artifact_version)
    return command_class.create_for_user(enrolled_user, artifact_version)
def _trigger_declarative_management(channel, enrollment_session, enrolled_device, enrolled_user):
    """Return a DeclarativeManagement command when blueprint declarations changed.

    Device channel only, and only for devices under declarative management
    whose declarations token no longer matches their blueprint's token.
    """
    if not enrolled_device.declarative_management:
        return
    if channel != Channel.Device:
        return
    blueprint = enrolled_device.blueprint
    if blueprint and enrolled_device.declarations_token != blueprint.declarations_token:
        return DeclarativeManagement.create_for_device(enrolled_device)
def _finish_dep_enrollment_configuration(channel, enrollment_session, enrolled_device, enrolled_user):
    """Return a DeviceConfigured command to end the awaiting-configuration phase.

    Device channel only; None when the device is not awaiting configuration.
    """
    if channel == Channel.Device and enrolled_device.awaiting_configuration:
        return DeviceConfigured.create_for_device(enrolled_device)
def get_next_command_response(channel, enrollment_session, enrolled_device, enrolled_user):
    """Build the HTTP response for the next command to send to the target.

    Tries each command producer in priority order and returns the response
    of the first command produced; an empty HttpResponse when none apply.
    """
    producers = (
        _get_next_queued_command,
        _configure_dep_enrollment_accounts,
        _renew_mdm_payload,
        _install_artifacts,
        _remove_artifacts,
        _trigger_declarative_management,
        _finish_dep_enrollment_configuration,
    )
    for producer in producers:
        command = producer(channel, enrollment_session, enrolled_device, enrolled_user)
        if command:
            return command.build_http_response(enrollment_session)
    return HttpResponse()
|
import unittest
from .exceptions import *
class ExceptionsTests(unittest.TestCase):
    """Check the human-readable ``str()`` of the websockets exception types."""

    def test_str(self):
        # Table of (exception instance, expected str(exception)) pairs,
        # one per exception class exercised.
        for exception, exception_str in [
            (
                InvalidHandshake("Invalid request"),
                "Invalid request",
            ),
            (
                AbortHandshake(200, [], b'OK\n'),
                "HTTP 200, 0 headers, 3 bytes",
            ),
            (
                InvalidMessage("Malformed HTTP message"),
                "Malformed HTTP message",
            ),
            (
                InvalidHeader("Expected token", "a=|", 3),
                "Expected token at 3 in a=|",
            ),
            (
                InvalidOrigin("Origin not allowed: ''"),
                "Origin not allowed: ''",
            ),
            (
                InvalidStatusCode(403),
                "Status code not 101: 403",
            ),
            (
                NegotiationError("Unsupported subprotocol: spam"),
                "Unsupported subprotocol: spam",
            ),
            (
                InvalidParameterName('|'),
                "Invalid parameter name: |",
            ),
            (
                InvalidParameterValue('a', '|'),
                "Invalid value for parameter a: |",
            ),
            (
                DuplicateParameter('a'),
                "Duplicate parameter: a",
            ),
            (
                InvalidState("WebSocket connection isn't established yet"),
                "WebSocket connection isn't established yet",
            ),
            # ConnectionClosed messages include the registry name of the
            # close code range (standard, unknown, registered, private use).
            (
                ConnectionClosed(1000, ''),
                "WebSocket connection is closed: code = 1000 "
                "(OK), no reason",
            ),
            (
                ConnectionClosed(1001, 'bye'),
                "WebSocket connection is closed: code = 1001 "
                "(going away), reason = bye",
            ),
            (
                ConnectionClosed(1006, None),
                "WebSocket connection is closed: code = 1006 "
                "(connection closed abnormally [internal]), no reason"
            ),
            (
                ConnectionClosed(1016, None),
                "WebSocket connection is closed: code = 1016 "
                "(unknown), no reason"
            ),
            (
                ConnectionClosed(3000, None),
                "WebSocket connection is closed: code = 3000 "
                "(registered), no reason"
            ),
            (
                ConnectionClosed(4000, None),
                "WebSocket connection is closed: code = 4000 "
                "(private use), no reason"
            ),
            (
                InvalidURI("| isn't a valid URI"),
                "| isn't a valid URI",
            ),
            (
                PayloadTooBig("Payload length exceeds limit: 2 > 1 bytes"),
                "Payload length exceeds limit: 2 > 1 bytes",
            ),
            (
                WebSocketProtocolError("Invalid opcode: 7"),
                "Invalid opcode: 7",
            ),
        ]:
            with self.subTest(exception=exception):
                self.assertEqual(str(exception), exception_str)
|
class Node:
    """A binary tree node holding a value and links to its children."""

    def __init__(self, info):
        self.info = info    # node value
        self.left = None    # left child
        self.right = None   # right child
        self.level = None   # optional level annotation, unused here

    def __str__(self):
        """Render the node as its value."""
        return str(self.info)
class BinarySearchTree:
    """A binary search tree built by repeated insertion; duplicates ignored."""

    def __init__(self):
        self.root = None

    def create(self, val):
        """Insert *val* into the tree; duplicate values are silently dropped."""
        if self.root is None:
            self.root = Node(val)
            return
        node = self.root
        while True:
            if val < node.info:
                if node.left is None:
                    node.left = Node(val)
                    return
                node = node.left
            elif val > node.info:
                if node.right is None:
                    node.right = Node(val)
                    return
                node = node.right
            else:
                # duplicate value: nothing to insert
                return
# Enter your code here. Read input from STDIN. Print output to STDOUT
'''
class Node:
def __init__(self,info):
self.info = info
self.left = None
self.right = None
// this is a node of the tree , which contains info as data, left , right
'''
def height(root):
    """Return the height of the tree rooted at *root* (-1 for an empty tree)."""
    if root is None:
        return -1
    return 1 + max(height(root.left), height(root.right))
# Read the values from stdin, build the BST, and print its height.
tree = BinarySearchTree()
t = int(input())  # number of values to insert
arr = list(map(int, input().split()))  # the values, whitespace-separated
for i in range(t):
    tree.create(arr[i])
print(height(tree.root))
|
import json
import numpy as np
from scipy import sparse
from nilearn._utils import rename_parameters
from .. import datasets
from . import cm
from .js_plotting_utils import (add_js_lib, mesh_to_plotly,
encode, colorscale, get_html_template,
to_color_strings)
from nilearn.reporting import HTMLDocument
class ConnectomeView(HTMLDocument):
    """HTML document holding an interactive connectome plot."""
    pass
def _prepare_line(edges, nodes):
path_edges = np.zeros(len(edges) * 3, dtype=int)
path_edges[::3] = edges
path_edges[1::3] = edges
path_nodes = np.zeros(len(nodes) * 3, dtype=int)
path_nodes[::3] = nodes[:, 0]
path_nodes[1::3] = nodes[:, 1]
return path_edges, path_nodes
def _get_connectome(adjacency_matrix, coords, threshold=None,
                    marker_size=None, cmap=cm.cold_hot, symmetric_cmap=True):
    """Serialize a connectome (edge weights, colors, coordinates) for the JS plot."""
    coords = np.asarray(coords, dtype='<f4')
    adjacency_matrix = np.nan_to_num(adjacency_matrix, copy=True)
    colors = colorscale(
        cmap, adjacency_matrix.ravel(), threshold=threshold,
        symmetric_cmap=symmetric_cmap)
    connectome = {
        'colorscale': colors['colors'],
        'cmin': float(colors['vmin']),
        'cmax': float(colors['vmax']),
    }
    if threshold is not None:
        # zero out the connections below the absolute threshold
        below = np.abs(adjacency_matrix) <= colors['abs_threshold']
        adjacency_matrix[below] = 0
    # keep only the non-zero connections, as a sparse triplet representation
    s = sparse.coo_matrix(adjacency_matrix)
    nodes = np.asarray([s.row, s.col], dtype=int).T
    edges = np.arange(len(nodes))
    path_edges, path_nodes = _prepare_line(edges, nodes)
    connectome["_con_w"] = encode(np.asarray(s.data, dtype='<f4')[path_edges])
    segment_coords = coords[path_nodes]
    if np.ndim(marker_size) > 0:
        # per-node sizes follow the same expansion as the coordinates
        marker_size = np.asarray(marker_size)[path_nodes]
    for coord, cname in zip(segment_coords.T, ("x", "y", "z")):
        connectome["_con_{}".format(cname)] = encode(
            np.asarray(coord, dtype='<f4'))
    connectome["markers_only"] = False
    if hasattr(marker_size, 'tolist'):
        # make json-serializable
        marker_size = marker_size.tolist()
    connectome['marker_size'] = marker_size
    return connectome
def _get_markers(coords, colors):
    """Serialize marker coordinates and colors for the JS plot."""
    connectome = {}
    coords = np.asarray(coords, dtype='<f4')
    for coord, cname in zip(coords.T, ("x", "y", "z")):
        connectome["_con_{}".format(cname)] = encode(
            np.asarray(coord, dtype='<f4'))
    connectome["marker_color"] = to_color_strings(colors)
    connectome["markers_only"] = True
    return connectome
def _make_connectome_html(connectome_info, embed_js=True):
    """Embed the serialized connectome and fsaverage meshes in the HTML template."""
    plot_info = {"connectome": connectome_info}
    mesh = datasets.fetch_surf_fsaverage()
    plot_info['pial_left'] = mesh_to_plotly(mesh['pial_left'])
    plot_info['pial_right'] = mesh_to_plotly(mesh['pial_right'])
    substitutions = {
        'INSERT_CONNECTOME_JSON_HERE': json.dumps(plot_info),
        'INSERT_PAGE_TITLE_HERE': (
            connectome_info["title"] or "Connectome plot"),
    }
    template = get_html_template('connectome_plot_template.html')
    as_html = add_js_lib(template.safe_substitute(substitutions), embed_js=embed_js)
    return ConnectomeView(as_html)
def view_connectome(adjacency_matrix, node_coords, edge_threshold=None,
                    edge_cmap=cm.bwr, symmetric_cmap=True,
                    linewidth=6., node_size=3., colorbar=True,
                    colorbar_height=.5, colorbar_fontsize=25,
                    title=None, title_fontsize=25):
    """
    Insert a 3d plot of a connectome into an HTML page.

    Parameters
    ----------
    adjacency_matrix : ndarray, shape=(n_nodes, n_nodes)
        the weights of the edges.

    node_coords : ndarray, shape=(n_nodes, 3)
        the coordinates of the nodes in MNI space.

    edge_threshold : str, number or None, optional (default=None)
        If None, no thresholding. If a number, only connections with an
        amplitude greater than the threshold are shown. If a string, it
        must end with a percent sign, e.g. "25.3%", and only connections
        with an amplitude above the given percentile are shown.

    edge_cmap : str or matplotlib colormap, optional

    symmetric_cmap : bool, optional (default=True)
        Make colormap symmetric (ranging from -vmax to vmax).

    linewidth : float, optional (default=6.)
        Width of the lines that show connections.

    node_size : float, optional (default=3.)
        Size of the markers showing the seeds in pixels.

    colorbar : bool, optional (default=True)
        add a colorbar

    colorbar_height : float, optional (default=.5)
        height of the colorbar, relative to the figure height

    colorbar_fontsize : int, optional (default=25)
        fontsize of the colorbar tick labels

    title : str, optional (default=None)
        title for the plot

    title_fontsize : int, optional (default=25)
        fontsize of the title

    Returns
    -------
    ConnectomeView : plot of the connectome.
        It can be saved as an html page or rendered (transparently) by the
        Jupyter notebook. Useful methods are :

        - 'resize' to resize the plot displayed in a Jupyter notebook
        - 'save_as_html' to save the plot to a file
        - 'open_in_browser' to save the plot and open it in a web browser.

    See Also
    --------
    nilearn.plotting.plot_connectome:
        projected views of a connectome in a glass brain.

    nilearn.plotting.view_markers:
        interactive plot of colored markers

    nilearn.plotting.view_surf, nilearn.plotting.view_img_on_surf:
        interactive view of statistical maps or surface atlases on the cortical
        surface.
    """
    connectome_info = _get_connectome(
        adjacency_matrix, node_coords,
        threshold=edge_threshold, cmap=edge_cmap,
        symmetric_cmap=symmetric_cmap, marker_size=node_size)
    # display options forwarded to the JS template
    display_options = {
        'line_width': linewidth,
        'colorbar': colorbar,
        'cbar_height': colorbar_height,
        'cbar_fontsize': colorbar_fontsize,
        'title': title,
        'title_fontsize': title_fontsize,
    }
    connectome_info.update(display_options)
    return _make_connectome_html(connectome_info)
def view_markers(marker_coords, marker_color=None, marker_size=5.,
                 title=None, title_fontsize=25):
    """
    Insert a 3d plot of markers in a brain into an HTML page.

    Parameters
    ----------
    marker_coords : ndarray, shape=(n_nodes, 3)
        the coordinates of the nodes in MNI space.

    marker_color : ndarray, shape=(n_nodes,)
        colors of the markers: list of strings, hex rgb or rgba strings, rgb
        triplets, or rgba triplets (i.e. formats accepted by matplotlib, see
        https://matplotlib.org/users/colors.html#specifying-colors)

    marker_size : float or array-like, optional (default=5.)
        Size of the markers showing the seeds in pixels.

    title : str, optional (default=None)
        title for the plot

    title_fontsize : int, optional (default=25)
        fontsize of the title

    Returns
    -------
    ConnectomeView : plot of the markers.
        It can be saved as an html page or rendered (transparently) by the
        Jupyter notebook. Useful methods are :

        - 'resize' to resize the plot displayed in a Jupyter notebook
        - 'save_as_html' to save the plot to a file
        - 'open_in_browser' to save the plot and open it in a web browser.

    See Also
    --------
    nilearn.plotting.plot_connectome:
        projected views of a connectome in a glass brain.

    nilearn.plotting.view_connectome:
        interactive plot of a connectome.

    nilearn.plotting.view_surf, nilearn.plotting.view_img_on_surf:
        interactive view of statistical maps or surface atlases on the cortical
        surface.
    """
    if marker_color is None:
        # default to red markers, one per coordinate
        marker_color = ['red'] * len(marker_coords)
    connectome_info = _get_markers(marker_coords, marker_color)
    if hasattr(marker_size, 'tolist'):
        # make json-serializable
        marker_size = marker_size.tolist()
    connectome_info["marker_size"] = marker_size
    connectome_info['title'] = title
    connectome_info['title_fontsize'] = title_fontsize
    return _make_connectome_html(connectome_info)
|
"""
numpy and scipy based backend.
Transparently handles scipy.sparse matrices as input.
"""
from __future__ import division, absolute_import
import numpy as np
import scipy.sparse
import scipy.sparse.linalg
import scipy.linalg
def inv(matrix):
    """
    Calculate the inverse of a matrix.

    Uses ``scipy.linalg.inv`` if *matrix* is dense. If it is sparse
    (from ``scipy.sparse``) then will use ``scipy.sparse.linalg.inv``.
    """
    if scipy.sparse.issparse(matrix):
        return scipy.sparse.linalg.inv(matrix)
    else:
        return scipy.linalg.inv(matrix)
def solve(matrix, vector):
    """
    Solve the linear system ``matrix @ x = vector``.

    Uses ``scipy.linalg.solve`` for dense inputs. If either input is sparse
    (from ``scipy.sparse``), the system is solved iteratively with the
    conjugate gradient method (``scipy.sparse.linalg.cg``).

    Raises
    ------
    ValueError
        If the conjugate gradient solver reports an illegal input or
        breakdown (negative status).
    """
    if scipy.sparse.issparse(matrix) or scipy.sparse.issparse(vector):
        estimate, status = scipy.sparse.linalg.cg(matrix, vector)
        if status < 0:
            # negative info means illegal input or internal breakdown
            raise ValueError('CG solver exited with input error')
        if status > 0:
            # positive info means the tolerance was not reached within the
            # allowed iterations; keep returning the estimate (backward
            # compatible) but make the degraded accuracy visible.
            import warnings
            warnings.warn(
                'CG solver did not converge (stopped after {} iterations)'
                .format(status))
        return estimate
    else:
        return scipy.linalg.solve(matrix, vector)
def dot(a, b):
    """
    Return the product ``a @ b``.

    Delegates to the left operand's ``dot`` method, which both numpy
    arrays and scipy sparse matrices provide.
    """
    return a.dot(b)
def diagonal(matrix):
    """
    Get the diagonal of a matrix using the appropriate method.

    Sparse matrices use their own ``diagonal()`` (wrapped in an ndarray
    for a consistent return type); dense inputs use ``numpy.diagonal``.
    """
    if scipy.sparse.issparse(matrix):
        return np.array(matrix.diagonal())
    return np.diagonal(matrix)
|
# Demo: run a minimal ELF binary under gdbserver, attach avatar2 to it,
# inject a hello-world shellcode at the current pc, and resume execution.
import os
import subprocess
from avatar2 import *
filename = 'a.out'
GDB_PORT = 1234
# This is a bare minimum elf-file, gracefully compiled from
# https://github.com/abraithwaite/teensy
tiny_elf = (b'\x7f\x45\x4c\x46\x02\x01\x01\x00\xb3\x2a\x31\xc0\xff\xc0\xcd\x80'
            b'\x02\x00\x3e\x00\x01\x00\x00\x00\x08\x00\x40\x00\x00\x00\x00\x00'
            b'\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
            b'\x00\x00\x00\x00\x40\x00\x38\x00\x01\x00\x00\x00\x00\x00\x00\x00'
            b'\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
            b'\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00'
            b'\x78\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00'
            b'\x00\x00\x20\x00\x00\x00\x00\x00')
# Hello world shellcode
shellcode = (b'\x68\x72\x6c\x64\x21\x48\xb8\x48\x65\x6c\x6c\x6f\x20\x57\x6f\x50'
             b'\x48\x89\xef\x48\x89\xe6\x6a\x0c\x5a\x6a\x01\x58\x0f\x05')
# Save our executable to disk
with open(filename, 'wb') as f:
    f.write(tiny_elf)
os.chmod(filename, 0o744)  # make it executable
# Create the avatar instance and specify the architecture for this analysis
avatar = Avatar(arch=archs.x86.X86_64)
# Create the endpoint: a gdbserver connected to our tiny ELF file
gdbserver = subprocess.Popen('gdbserver --once 127.0.0.1:%d a.out' % GDB_PORT, shell=True)
# Create the corresponding target, using the GDBTarget backend
target = avatar.add_target(GDBTarget, gdb_port=GDB_PORT)
# Initialize the target.
# This usually connects the target to the endpoint
target.init()
# Now it is possible to interact with the target.
# For example, we can insert our shellcode at the current point of execution
target.write_memory(target.read_register('pc'), len(shellcode),
                    shellcode, raw=True)
# We can now resume the execution in our target
# You should see hello world printed on your screen! :)
target.cont()
# Clean up!
os.remove(filename)
avatar.shutdown()
|
import collections.abc
import io
import os
import sys
import errno
import pathlib
import pickle
import socket
import stat
import tempfile
import unittest
from unittest import mock
from test import support
from test.support import TESTFN, FakePath
try:
import grp, pwd
except ImportError:
grp = pwd = None
class _BaseFlavourTest(object):
    """Shared checks for a pathlib flavour's ``parse_parts()``.

    Subclasses must provide a ``flavour`` class attribute.
    """

    def _check_parse_parts(self, arg, expected):
        # Run parse_parts on *arg* with '/' mapped to the flavour's sep,
        # and again with altsep when the flavour defines one.
        f = self.flavour.parse_parts
        sep = self.flavour.sep
        altsep = self.flavour.altsep
        actual = f([x.replace('/', sep) for x in arg])
        self.assertEqual(actual, expected)
        if altsep:
            actual = f([x.replace('/', altsep) for x in arg])
            self.assertEqual(actual, expected)

    def test_parse_parts_common(self):
        """parse_parts() behaviour shared by the POSIX and Windows flavours."""
        check = self._check_parse_parts
        sep = self.flavour.sep
        # Unanchored parts.
        check([], ('', '', []))
        check(['a'], ('', '', ['a']))
        check(['a/'], ('', '', ['a']))
        check(['a', 'b'], ('', '', ['a', 'b']))
        # Expansion.
        check(['a/b'], ('', '', ['a', 'b']))
        check(['a/b/'], ('', '', ['a', 'b']))
        check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Collapsing and stripping excess slashes.
        check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Eliminating standalone dots.
        check(['.'], ('', '', []))
        check(['.', '.', 'b'], ('', '', ['b']))
        check(['a', '.', 'b'], ('', '', ['a', 'b']))
        check(['a', '.', '.'], ('', '', ['a']))
        # The first part is anchored.
        check(['/a/b'], ('', sep, [sep, 'a', 'b']))
        check(['/a', 'b'], ('', sep, [sep, 'a', 'b']))
        check(['/a/', 'b'], ('', sep, [sep, 'a', 'b']))
        # Ignoring parts before an anchored part.
        check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c']))
        check(['a', '/b', '/c'], ('', sep, [sep, 'c']))
class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase):
    """POSIX-specific parse_parts() and splitroot() behaviour."""

    flavour = pathlib._posix_flavour

    def test_parse_parts(self):
        check = self._check_parse_parts
        # Collapsing of excess leading slashes, except for the double-slash
        # special case.
        check(['//a', 'b'], ('', '//', ['//', 'a', 'b']))
        check(['///a', 'b'], ('', '/', ['/', 'a', 'b']))
        check(['////a', 'b'], ('', '/', ['/', 'a', 'b']))
        # Paths which look like NT paths aren't treated specially.
        check(['c:a'], ('', '', ['c:a']))
        check(['c:\\a'], ('', '', ['c:\\a']))
        check(['\\a'], ('', '', ['\\a']))

    def test_splitroot(self):
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a/b'), ('', '', 'a/b'))
        self.assertEqual(f('a/b/'), ('', '', 'a/b/'))
        self.assertEqual(f('/a'), ('', '/', 'a'))
        self.assertEqual(f('/a/b'), ('', '/', 'a/b'))
        self.assertEqual(f('/a/b/'), ('', '/', 'a/b/'))
        # The root is collapsed when there are redundant slashes
        # except when there are exactly two leading slashes, which
        # is a special case in POSIX.
        self.assertEqual(f('//a'), ('', '//', 'a'))
        self.assertEqual(f('///a'), ('', '/', 'a'))
        self.assertEqual(f('///a/b'), ('', '/', 'a/b'))
        # Paths which look like NT paths aren't treated specially.
        self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b'))
        self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b'))
        self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b'))
class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
    """Windows-specific parse_parts() and splitroot() behaviour
    (drives, UNC shares, extended paths)."""

    flavour = pathlib._windows_flavour

    def test_parse_parts(self):
        check = self._check_parse_parts
        # First part is anchored.
        check(['c:'], ('c:', '', ['c:']))
        check(['c:/'], ('c:', '\\', ['c:\\']))
        check(['/'], ('', '\\', ['\\']))
        check(['c:a'], ('c:', '', ['c:', 'a']))
        check(['c:/a'], ('c:', '\\', ['c:\\', 'a']))
        check(['/a'], ('', '\\', ['\\', 'a']))
        # UNC paths.
        check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
        # Second part is anchored, so that the first part is ignored.
        check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
        check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
        # UNC paths.
        check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Collapsing and stripping excess slashes.
        check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
        # UNC paths.
        check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Extended paths.
        check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
        check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
        check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
        # Extended UNC paths (format is "\\?\UNC\server\share").
        check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
        check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
        # Second part has a root but not drive.
        check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c']))
        check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
        check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))

    def test_splitroot(self):
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a\\b'), ('', '', 'a\\b'))
        self.assertEqual(f('\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b'))
        self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b'))
        # Redundant slashes in the root are collapsed.
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b'))
        self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a'))
        self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b'))
        # Valid UNC paths.
        self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d'))
        # These are non-UNC paths (according to ntpath.py and test_ntpath).
        # However, command.com says such paths are invalid, so it's
        # difficult to know what the right semantics are.
        self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
#
# Tests for the pure classes.
#
class _BasePurePathTest(object):
# Keys are canonical paths, values are list of tuples of arguments
# supposed to produce equal paths.
equivalences = {
'a/b': [
('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'),
('a/b/',), ('a//b',), ('a//b//',),
# Empty components get removed.
('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''),
],
'/b/c/d': [
('a', '/b/c', 'd'), ('a', '///b//c', 'd/'),
('/a', '/b/c', 'd'),
# Empty components get removed.
('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'),
],
}
def setUp(self):
p = self.cls('a')
self.flavour = p._flavour
self.sep = self.flavour.sep
self.altsep = self.flavour.altsep
def test_constructor_common(self):
P = self.cls
p = P('a')
self.assertIsInstance(p, P)
P('a', 'b', 'c')
P('/a', 'b', 'c')
P('a/b/c')
P('/a/b/c')
P(FakePath("a/b/c"))
self.assertEqual(P(P('a')), P('a'))
self.assertEqual(P(P('a'), 'b'), P('a/b'))
self.assertEqual(P(P('a'), P('b')), P('a/b'))
self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c")))
def _check_str_subclass(self, *args):
# Issue #21127: it should be possible to construct a PurePath object
# from a str subclass instance, and it then gets converted to
# a pure str object.
class StrSubclass(str):
pass
P = self.cls
p = P(*(StrSubclass(x) for x in args))
self.assertEqual(p, P(*args))
for part in p.parts:
self.assertIs(type(part), str)
def test_str_subclass_common(self):
self._check_str_subclass('')
self._check_str_subclass('.')
self._check_str_subclass('a')
self._check_str_subclass('a/b.txt')
self._check_str_subclass('/a/b.txt')
def test_join_common(self):
P = self.cls
p = P('a/b')
pp = p.joinpath('c')
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p.joinpath('c', 'd')
self.assertEqual(pp, P('a/b/c/d'))
pp = p.joinpath(P('c'))
self.assertEqual(pp, P('a/b/c'))
pp = p.joinpath('/c')
self.assertEqual(pp, P('/c'))
def test_div_common(self):
# Basically the same as joinpath().
P = self.cls
p = P('a/b')
pp = p / 'c'
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p / 'c/d'
self.assertEqual(pp, P('a/b/c/d'))
pp = p / 'c' / 'd'
self.assertEqual(pp, P('a/b/c/d'))
pp = 'c' / p / 'd'
self.assertEqual(pp, P('c/a/b/d'))
pp = p / P('c')
self.assertEqual(pp, P('a/b/c'))
pp = p/ '/c'
self.assertEqual(pp, P('/c'))
def _check_str(self, expected, args):
p = self.cls(*args)
self.assertEqual(str(p), expected.replace('/', self.sep))
def test_str_common(self):
# Canonicalized paths roundtrip.
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self._check_str(pathstr, (pathstr,))
# Special case for the empty path.
self._check_str('.', ('',))
# Other tests for str() are in test_equivalences().
def test_as_posix_common(self):
P = self.cls
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self.assertEqual(P(pathstr).as_posix(), pathstr)
# Other tests for as_posix() are in test_equivalences().
def test_as_bytes_common(self):
sep = os.fsencode(self.sep)
P = self.cls
self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b')
def test_as_uri_common(self):
P = self.cls
with self.assertRaises(ValueError):
P('a').as_uri()
with self.assertRaises(ValueError):
P().as_uri()
def test_repr_common(self):
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
p = self.cls(pathstr)
clsname = p.__class__.__name__
r = repr(p)
# The repr() is in the form ClassName("forward-slashes path").
self.assertTrue(r.startswith(clsname + '('), r)
self.assertTrue(r.endswith(')'), r)
inner = r[len(clsname) + 1 : -1]
self.assertEqual(eval(inner), p.as_posix())
# The repr() roundtrips.
q = eval(r, pathlib.__dict__)
self.assertIs(q.__class__, p.__class__)
self.assertEqual(q, p)
self.assertEqual(repr(q), r)
def test_eq_common(self):
P = self.cls
self.assertEqual(P('a/b'), P('a/b'))
self.assertEqual(P('a/b'), P('a', 'b'))
self.assertNotEqual(P('a/b'), P('a'))
self.assertNotEqual(P('a/b'), P('/a/b'))
self.assertNotEqual(P('a/b'), P())
self.assertNotEqual(P('/a/b'), P('/'))
self.assertNotEqual(P(), P('/'))
self.assertNotEqual(P(), "")
self.assertNotEqual(P(), {})
self.assertNotEqual(P(), int)
def test_match_common(self):
P = self.cls
self.assertRaises(ValueError, P('a').match, '')
self.assertRaises(ValueError, P('a').match, '.')
# Simple relative pattern.
self.assertTrue(P('b.py').match('b.py'))
self.assertTrue(P('a/b.py').match('b.py'))
self.assertTrue(P('/a/b.py').match('b.py'))
self.assertFalse(P('a.py').match('b.py'))
self.assertFalse(P('b/py').match('b.py'))
self.assertFalse(P('/a.py').match('b.py'))
self.assertFalse(P('b.py/c').match('b.py'))
# Wilcard relative pattern.
self.assertTrue(P('b.py').match('*.py'))
self.assertTrue(P('a/b.py').match('*.py'))
self.assertTrue(P('/a/b.py').match('*.py'))
self.assertFalse(P('b.pyc').match('*.py'))
self.assertFalse(P('b./py').match('*.py'))
self.assertFalse(P('b.py/c').match('*.py'))
# Multi-part relative pattern.
self.assertTrue(P('ab/c.py').match('a*/*.py'))
self.assertTrue(P('/d/ab/c.py').match('a*/*.py'))
self.assertFalse(P('a.py').match('a*/*.py'))
self.assertFalse(P('/dab/c.py').match('a*/*.py'))
self.assertFalse(P('ab/c.py/d').match('a*/*.py'))
# Absolute pattern.
self.assertTrue(P('/b.py').match('/*.py'))
self.assertFalse(P('b.py').match('/*.py'))
self.assertFalse(P('a/b.py').match('/*.py'))
self.assertFalse(P('/a/b.py').match('/*.py'))
# Multi-part absolute pattern.
self.assertTrue(P('/a/b.py').match('/a/*.py'))
self.assertFalse(P('/ab.py').match('/a/*.py'))
self.assertFalse(P('/a/b/c.py').match('/a/*.py'))
# Multi-part glob-style pattern.
self.assertFalse(P('/a/b/c.py').match('/**/*.py'))
self.assertTrue(P('/a/b/c.py').match('/a/**/*.py'))
def test_ordering_common(self):
# Ordering is tuple-alike.
def assertLess(a, b):
self.assertLess(a, b)
self.assertGreater(b, a)
P = self.cls
a = P('a')
b = P('a/b')
c = P('abc')
d = P('b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
P = self.cls
a = P('/a')
b = P('/a/b')
c = P('/abc')
d = P('/b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
with self.assertRaises(TypeError):
P() < {}
def test_parts_common(self):
# `parts` returns a tuple.
sep = self.sep
P = self.cls
p = P('a/b')
parts = p.parts
self.assertEqual(parts, ('a', 'b'))
# The object gets reused.
self.assertIs(parts, p.parts)
# When the path is absolute, the anchor is a separate part.
p = P('/a/b')
parts = p.parts
self.assertEqual(parts, (sep, 'a', 'b'))
def test_fspath_common(self):
P = self.cls
p = P('a/b')
self._check_str(p.__fspath__(), ('a/b',))
self._check_str(os.fspath(p), ('a/b',))
def test_equivalences(self):
for k, tuples in self.equivalences.items():
canon = k.replace('/', self.sep)
posix = k.replace(self.sep, '/')
if canon != posix:
tuples = tuples + [
tuple(part.replace('/', self.sep) for part in t)
for t in tuples
]
tuples.append((posix, ))
pcanon = self.cls(canon)
for t in tuples:
p = self.cls(*t)
self.assertEqual(p, pcanon, "failed with args {}".format(t))
self.assertEqual(hash(p), hash(pcanon))
self.assertEqual(str(p), canon)
self.assertEqual(p.as_posix(), posix)
def test_parent_common(self):
# Relative
P = self.cls
p = P('a/b/c')
self.assertEqual(p.parent, P('a/b'))
self.assertEqual(p.parent.parent, P('a'))
self.assertEqual(p.parent.parent.parent, P())
self.assertEqual(p.parent.parent.parent.parent, P())
# Anchored
p = P('/a/b/c')
self.assertEqual(p.parent, P('/a/b'))
self.assertEqual(p.parent.parent, P('/a'))
self.assertEqual(p.parent.parent.parent, P('/'))
self.assertEqual(p.parent.parent.parent.parent, P('/'))
def test_parents_common(self):
# Relative
P = self.cls
p = P('a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('a/b'))
self.assertEqual(par[1], P('a'))
self.assertEqual(par[2], P('.'))
self.assertEqual(list(par), [P('a/b'), P('a'), P('.')])
with self.assertRaises(IndexError):
par[-1]
with self.assertRaises(IndexError):
par[3]
with self.assertRaises(TypeError):
par[0] = p
# Anchored
p = P('/a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('/a/b'))
self.assertEqual(par[1], P('/a'))
self.assertEqual(par[2], P('/'))
self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')])
with self.assertRaises(IndexError):
par[3]
def test_drive_common(self):
P = self.cls
self.assertEqual(P('a/b').drive, '')
self.assertEqual(P('/a/b').drive, '')
self.assertEqual(P('').drive, '')
def test_root_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').root, '')
self.assertEqual(P('a/b').root, '')
self.assertEqual(P('/').root, sep)
self.assertEqual(P('/a/b').root, sep)
def test_anchor_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').anchor, '')
self.assertEqual(P('a/b').anchor, '')
self.assertEqual(P('/').anchor, sep)
self.assertEqual(P('/a/b').anchor, sep)
def test_name_common(self):
P = self.cls
self.assertEqual(P('').name, '')
self.assertEqual(P('.').name, '')
self.assertEqual(P('/').name, '')
self.assertEqual(P('a/b').name, 'b')
self.assertEqual(P('/a/b').name, 'b')
self.assertEqual(P('/a/b/.').name, 'b')
self.assertEqual(P('a/b.py').name, 'b.py')
self.assertEqual(P('/a/b.py').name, 'b.py')
def test_suffix_common(self):
    """The suffix is the last dotted extension of the final component."""
    make = self.cls
    cases = (
        ('', ''), ('.', ''), ('..', ''), ('/', ''),
        ('a/b', ''), ('/a/b', ''), ('/a/b/.', ''),
        ('a/b.py', '.py'), ('/a/b.py', '.py'),
        # A leading dot marks a hidden file, not a suffix.
        ('a/.hgrc', ''), ('/a/.hgrc', ''),
        ('a/.hg.rc', '.rc'), ('/a/.hg.rc', '.rc'),
        # Only the last extension counts.
        ('a/b.tar.gz', '.gz'), ('/a/b.tar.gz', '.gz'),
        # A bare trailing dot does not start a suffix.
        ('a/Some name. Ending with a dot.', ''),
        ('/a/Some name. Ending with a dot.', ''),
    )
    for raw, expected in cases:
        self.assertEqual(make(raw).suffix, expected)
def test_suffixes_common(self):
    """suffixes lists every dotted extension of the final component, in order."""
    make = self.cls
    cases = (
        ('', []), ('.', []), ('/', []),
        ('a/b', []), ('/a/b', []), ('/a/b/.', []),
        ('a/b.py', ['.py']), ('/a/b.py', ['.py']),
        # A leading dot marks a hidden file, not a suffix.
        ('a/.hgrc', []), ('/a/.hgrc', []),
        ('a/.hg.rc', ['.rc']), ('/a/.hg.rc', ['.rc']),
        ('a/b.tar.gz', ['.tar', '.gz']), ('/a/b.tar.gz', ['.tar', '.gz']),
        # A bare trailing dot does not start a suffix.
        ('a/Some name. Ending with a dot.', []),
        ('/a/Some name. Ending with a dot.', []),
    )
    for raw, expected in cases:
        self.assertEqual(make(raw).suffixes, expected)
def test_stem_common(self):
    """The stem is the final component with its (single) suffix removed."""
    make = self.cls
    cases = (
        ('', ''), ('.', ''), ('..', '..'), ('/', ''),
        ('a/b', 'b'), ('a/b.py', 'b'),
        # Hidden-file dots belong to the stem.
        ('a/.hgrc', '.hgrc'), ('a/.hg.rc', '.hg'),
        ('a/b.tar.gz', 'b.tar'),
        ('a/Some name. Ending with a dot.',
         'Some name. Ending with a dot.'),
    )
    for raw, expected in cases:
        self.assertEqual(make(raw).stem, expected)
def test_with_name_common(self):
    """with_name() swaps the final component and validates the replacement."""
    P = self.cls
    good = (
        ('a/b', 'a/d.xml'), ('/a/b', '/a/d.xml'),
        ('a/b.py', 'a/d.xml'), ('/a/b.py', '/a/d.xml'),
        ('a/Dot ending.', 'a/d.xml'), ('/a/Dot ending.', '/a/d.xml'),
    )
    for raw, expected in good:
        self.assertEqual(P(raw).with_name('d.xml'), P(expected))
    # Paths without a name component cannot take a replacement.
    for nameless in ('', '.', '/'):
        self.assertRaises(ValueError, P(nameless).with_name, 'd.xml')
    # The replacement must be a single, non-empty component.
    for bad in ('', '/c', 'c/', 'c/d'):
        self.assertRaises(ValueError, P('a/b').with_name, bad)
def test_with_stem_common(self):
    """with_stem() replaces the stem while keeping any suffix."""
    P = self.cls
    good = (
        ('a/b', 'a/d'), ('/a/b', '/a/d'),
        ('a/b.py', 'a/d.py'), ('/a/b.py', '/a/d.py'),
        # Only the last suffix survives.
        ('/a/b.tar.gz', '/a/d.gz'),
        ('a/Dot ending.', 'a/d'), ('/a/Dot ending.', '/a/d'),
    )
    for raw, expected in good:
        self.assertEqual(P(raw).with_stem('d'), P(expected))
    # Paths without a name component cannot take a replacement.
    for nameless in ('', '.', '/'):
        self.assertRaises(ValueError, P(nameless).with_stem, 'd')
    # The replacement must be a single, non-empty component.
    for bad in ('', '/c', 'c/', 'c/d'):
        self.assertRaises(ValueError, P('a/b').with_stem, bad)
def test_with_suffix_common(self):
    """with_suffix() replaces/strips the suffix and validates its argument."""
    P = self.cls
    for raw, expected in (('a/b', 'a/b.gz'), ('/a/b', '/a/b.gz'),
                          ('a/b.py', 'a/b.gz'), ('/a/b.py', '/a/b.gz')):
        self.assertEqual(P(raw).with_suffix('.gz'), P(expected))
    # Stripping suffix.
    self.assertEqual(P('a/b.py').with_suffix(''), P('a/b'))
    self.assertEqual(P('/a/b').with_suffix(''), P('/a/b'))
    # Path doesn't have a "filename" component.
    for nameless in ('', '.', '/'):
        self.assertRaises(ValueError, P(nameless).with_suffix, '.gz')
    # Invalid suffix: must start with a dot, contain no separators, and
    # not be a bare dot; non-strings are rejected too.
    invalid = ('gz', '/', '.', '/.gz', 'c/d', '.c/.d', './.d', '.d/.',
               (self.flavour.sep, 'd'))
    for bad in invalid:
        self.assertRaises(ValueError, P('a/b').with_suffix, bad)
def test_relative_to_common(self):
    """relative_to() strips a matching prefix and rejects unrelated paths."""
    P = self.cls
    p = P('a/b')
    # No argument / wrong argument type.
    self.assertRaises(TypeError, p.relative_to)
    self.assertRaises(TypeError, p.relative_to, b'a')
    self.assertEqual(p.relative_to(P()), P('a/b'))
    self.assertEqual(p.relative_to(''), P('a/b'))
    self.assertEqual(p.relative_to(P('a')), P('b'))
    self.assertEqual(p.relative_to('a'), P('b'))
    self.assertEqual(p.relative_to('a/'), P('b'))
    self.assertEqual(p.relative_to(P('a/b')), P())
    self.assertEqual(p.relative_to('a/b'), P())
    # With several args.
    # NOTE(review): multi-argument relative_to() is deprecated/removed in
    # newer Python versions -- this expectation is version-sensitive.
    self.assertEqual(p.relative_to('a', 'b'), P())
    # Unrelated paths.
    self.assertRaises(ValueError, p.relative_to, P('c'))
    self.assertRaises(ValueError, p.relative_to, P('a/b/c'))
    self.assertRaises(ValueError, p.relative_to, P('a/c'))
    self.assertRaises(ValueError, p.relative_to, P('/a'))
    p = P('/a/b')
    self.assertEqual(p.relative_to(P('/')), P('a/b'))
    self.assertEqual(p.relative_to('/'), P('a/b'))
    self.assertEqual(p.relative_to(P('/a')), P('b'))
    self.assertEqual(p.relative_to('/a'), P('b'))
    self.assertEqual(p.relative_to('/a/'), P('b'))
    self.assertEqual(p.relative_to(P('/a/b')), P())
    self.assertEqual(p.relative_to('/a/b'), P())
    # Unrelated paths: an anchored path is never relative to a relative one.
    self.assertRaises(ValueError, p.relative_to, P('/c'))
    self.assertRaises(ValueError, p.relative_to, P('/a/b/c'))
    self.assertRaises(ValueError, p.relative_to, P('/a/c'))
    self.assertRaises(ValueError, p.relative_to, P())
    self.assertRaises(ValueError, p.relative_to, '')
    self.assertRaises(ValueError, p.relative_to, P('a'))
def test_is_relative_to_common(self):
    """is_relative_to() mirrors relative_to(), returning a bool instead of raising."""
    P = self.cls
    p = P('a/b')
    # No argument / wrong argument type.
    self.assertRaises(TypeError, p.is_relative_to)
    self.assertRaises(TypeError, p.is_relative_to, b'a')
    self.assertTrue(p.is_relative_to(P()))
    self.assertTrue(p.is_relative_to(''))
    self.assertTrue(p.is_relative_to(P('a')))
    self.assertTrue(p.is_relative_to('a/'))
    self.assertTrue(p.is_relative_to(P('a/b')))
    self.assertTrue(p.is_relative_to('a/b'))
    # With several args.
    # NOTE(review): multi-argument form is deprecated/removed in newer
    # Python versions -- this expectation is version-sensitive.
    self.assertTrue(p.is_relative_to('a', 'b'))
    # Unrelated paths.
    self.assertFalse(p.is_relative_to(P('c')))
    self.assertFalse(p.is_relative_to(P('a/b/c')))
    self.assertFalse(p.is_relative_to(P('a/c')))
    self.assertFalse(p.is_relative_to(P('/a')))
    p = P('/a/b')
    self.assertTrue(p.is_relative_to(P('/')))
    self.assertTrue(p.is_relative_to('/'))
    self.assertTrue(p.is_relative_to(P('/a')))
    self.assertTrue(p.is_relative_to('/a'))
    self.assertTrue(p.is_relative_to('/a/'))
    self.assertTrue(p.is_relative_to(P('/a/b')))
    self.assertTrue(p.is_relative_to('/a/b'))
    # Unrelated paths: an anchored path is never relative to a relative one.
    self.assertFalse(p.is_relative_to(P('/c')))
    self.assertFalse(p.is_relative_to(P('/a/b/c')))
    self.assertFalse(p.is_relative_to(P('/a/c')))
    self.assertFalse(p.is_relative_to(P()))
    self.assertFalse(p.is_relative_to(''))
    self.assertFalse(p.is_relative_to(P('a')))
def test_pickling_common(self):
    """Pickling round-trips a pure path at every supported protocol."""
    original = self.cls('/a/b')
    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
        clone = pickle.loads(pickle.dumps(original, proto))
        # Class identity, equality, hash and textual form all survive.
        self.assertIs(clone.__class__, original.__class__)
        self.assertEqual(clone, original)
        self.assertEqual(hash(clone), hash(original))
        self.assertEqual(str(clone), str(original))
class PurePosixPathTest(_BasePurePathTest, unittest.TestCase):
    """POSIX-flavour specialisations of the common pure-path tests."""
    cls = pathlib.PurePosixPath

    def test_root(self):
        """Multiple leading slashes collapse to '/', except exactly two."""
        P = self.cls
        self.assertEqual(P('/a/b').root, '/')
        self.assertEqual(P('///a/b').root, '/')
        # POSIX special case for two leading slashes.
        self.assertEqual(P('//a/b').root, '//')

    def test_eq(self):
        """Equality is case-sensitive; '///' equals '/', but '//' does not."""
        P = self.cls
        self.assertNotEqual(P('a/b'), P('A/b'))
        self.assertEqual(P('/a'), P('///a'))
        self.assertNotEqual(P('/a'), P('//a'))

    def test_as_uri(self):
        """Absolute paths convert to percent-encoded file:// URIs."""
        P = self.cls
        self.assertEqual(P('/').as_uri(), 'file:///')
        self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c')
        self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c')

    def test_as_uri_non_ascii(self):
        """Non-ASCII components are encoded with the filesystem encoding."""
        from urllib.parse import quote_from_bytes
        P = self.cls
        try:
            os.fsencode('\xe9')
        except UnicodeEncodeError:
            self.skipTest("\\xe9 cannot be encoded to the filesystem encoding")
        self.assertEqual(P('/a/b\xe9').as_uri(),
                         'file:///a/b' + quote_from_bytes(os.fsencode('\xe9')))

    def test_match(self):
        """Pattern matching is case-sensitive on POSIX."""
        P = self.cls
        self.assertFalse(P('A.py').match('a.PY'))

    def test_is_absolute(self):
        """A path is absolute iff it has a root (including the '//' form)."""
        P = self.cls
        self.assertFalse(P().is_absolute())
        self.assertFalse(P('a').is_absolute())
        self.assertFalse(P('a/b/').is_absolute())
        self.assertTrue(P('/').is_absolute())
        self.assertTrue(P('/a').is_absolute())
        self.assertTrue(P('/a/b/').is_absolute())
        self.assertTrue(P('//a').is_absolute())
        self.assertTrue(P('//a/b').is_absolute())

    def test_is_reserved(self):
        """No path is reserved on POSIX, even DOS device names."""
        P = self.cls
        self.assertIs(False, P('').is_reserved())
        self.assertIs(False, P('/').is_reserved())
        self.assertIs(False, P('/foo/bar').is_reserved())
        self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved())

    def test_join(self):
        """joinpath() keeps a '//' root and restarts on an absolute argument."""
        P = self.cls
        p = P('//a')
        pp = p.joinpath('b')
        self.assertEqual(pp, P('//a/b'))
        pp = P('/a').joinpath('//c')
        self.assertEqual(pp, P('//c'))
        pp = P('//a').joinpath('/c')
        self.assertEqual(pp, P('/c'))

    def test_div(self):
        # Basically the same as joinpath().
        P = self.cls
        p = P('//a')
        pp = p / 'b'
        self.assertEqual(pp, P('//a/b'))
        pp = P('/a') / '//c'
        self.assertEqual(pp, P('//c'))
        pp = P('//a') / '/c'
        self.assertEqual(pp, P('/c'))
class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase):
    """Windows-flavour tests: drive letters, UNC shares, case-insensitivity."""
    cls = pathlib.PureWindowsPath

    # Extra constructor-argument equivalences on top of the common ones:
    # each key must compare equal to the paths built from each tuple.
    equivalences = _BasePurePathTest.equivalences.copy()
    equivalences.update({
        'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a') ],
        'c:/a': [
            ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'),
            ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'),
            ],
        '//a/b/': [ ('//a/b',) ],
        '//a/b/c': [
            ('//a/b', 'c'), ('//a/b/', 'c'),
            ],
        })

    def test_str(self):
        """str() uses backslashes; a UNC root keeps its trailing backslash."""
        p = self.cls('a/b/c')
        self.assertEqual(str(p), 'a\\b\\c')
        p = self.cls('c:/a/b/c')
        self.assertEqual(str(p), 'c:\\a\\b\\c')
        p = self.cls('//a/b')
        self.assertEqual(str(p), '\\\\a\\b\\')
        p = self.cls('//a/b/c')
        self.assertEqual(str(p), '\\\\a\\b\\c')
        p = self.cls('//a/b/c/d')
        self.assertEqual(str(p), '\\\\a\\b\\c\\d')

    def test_str_subclass(self):
        """str subclasses are accepted for drive-only, rooted and UNC forms."""
        self._check_str_subclass('c:')
        self._check_str_subclass('c:a')
        self._check_str_subclass('c:a\\b.txt')
        self._check_str_subclass('c:\\')
        self._check_str_subclass('c:\\a')
        self._check_str_subclass('c:\\a\\b.txt')
        self._check_str_subclass('\\\\some\\share')
        self._check_str_subclass('\\\\some\\share\\a')
        self._check_str_subclass('\\\\some\\share\\a\\b.txt')

    def test_eq(self):
        """Equality ignores case but distinguishes drive and rootedness."""
        P = self.cls
        self.assertEqual(P('c:a/b'), P('c:a/b'))
        self.assertEqual(P('c:a/b'), P('c:', 'a', 'b'))
        self.assertNotEqual(P('c:a/b'), P('d:a/b'))
        self.assertNotEqual(P('c:a/b'), P('c:/a/b'))
        self.assertNotEqual(P('/a/b'), P('c:/a/b'))
        # Case-insensitivity.
        self.assertEqual(P('a/B'), P('A/b'))
        self.assertEqual(P('C:a/B'), P('c:A/b'))
        self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b'))

    def test_as_uri(self):
        """Only drive-rooted or UNC paths convert; the UNC host becomes the URI authority."""
        P = self.cls
        with self.assertRaises(ValueError):
            P('/a/b').as_uri()
        with self.assertRaises(ValueError):
            P('c:a/b').as_uri()
        self.assertEqual(P('c:/').as_uri(), 'file:///c:/')
        self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c')
        self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c')
        self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9')
        self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/')
        self.assertEqual(P('//some/share/a/b.c').as_uri(),
                         'file://some/share/a/b.c')
        self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(),
                         'file://some/share/a/b%25%23c%C3%A9')

    def test_match_common(self):
        """match() compares drives exactly (case-insensitively) and never mixes anchors."""
        P = self.cls
        # Absolute patterns.
        self.assertTrue(P('c:/b.py').match('/*.py'))
        self.assertTrue(P('c:/b.py').match('c:*.py'))
        self.assertTrue(P('c:/b.py').match('c:/*.py'))
        self.assertFalse(P('d:/b.py').match('c:/*.py'))  # wrong drive
        self.assertFalse(P('b.py').match('/*.py'))
        self.assertFalse(P('b.py').match('c:*.py'))
        self.assertFalse(P('b.py').match('c:/*.py'))
        self.assertFalse(P('c:b.py').match('/*.py'))
        self.assertFalse(P('c:b.py').match('c:/*.py'))
        self.assertFalse(P('/b.py').match('c:*.py'))
        self.assertFalse(P('/b.py').match('c:/*.py'))
        # UNC patterns.
        self.assertTrue(P('//some/share/a.py').match('/*.py'))
        self.assertTrue(P('//some/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//other/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py'))
        # Case-insensitivity.
        self.assertTrue(P('B.py').match('b.PY'))
        self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY'))
        self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY'))

    def test_ordering_common(self):
        """Ordering ignores case: paths equal ignoring case are neither < nor >."""
        # Case-insensitivity.
        def assertOrderedEqual(a, b):
            self.assertLessEqual(a, b)
            self.assertGreaterEqual(b, a)
        P = self.cls
        p = P('c:A/b')
        q = P('C:a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)
        p = P('//some/Share/A/b')
        q = P('//Some/SHARE/a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)

    def test_parts(self):
        """The anchor (drive[+root] or UNC root) is the first element of parts."""
        P = self.cls
        p = P('c:a/b')
        parts = p.parts
        self.assertEqual(parts, ('c:', 'a', 'b'))
        p = P('c:/a/b')
        parts = p.parts
        self.assertEqual(parts, ('c:\\', 'a', 'b'))
        p = P('//a/b/c/d')
        parts = p.parts
        self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd'))

    def test_parent(self):
        """Walking .parent stops at the drive or UNC anchor, never beyond it."""
        # Anchored
        P = self.cls
        p = P('z:a/b/c')
        self.assertEqual(p.parent, P('z:a/b'))
        self.assertEqual(p.parent.parent, P('z:a'))
        self.assertEqual(p.parent.parent.parent, P('z:'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:'))
        p = P('z:/a/b/c')
        self.assertEqual(p.parent, P('z:/a/b'))
        self.assertEqual(p.parent.parent, P('z:/a'))
        self.assertEqual(p.parent.parent.parent, P('z:/'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:/'))
        p = P('//a/b/c/d')
        self.assertEqual(p.parent, P('//a/b/c'))
        self.assertEqual(p.parent.parent, P('//a/b'))
        self.assertEqual(p.parent.parent.parent, P('//a/b'))

    def test_parents(self):
        """The parents sequence ends at the drive or UNC anchor."""
        # Anchored
        P = self.cls
        p = P('z:a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:a'))
        self.assertEqual(par[1], P('z:'))
        self.assertEqual(list(par), [P('z:a'), P('z:')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('z:/a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:/a'))
        self.assertEqual(par[1], P('z:/'))
        self.assertEqual(list(par), [P('z:/a'), P('z:/')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('//a/b/c/d')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('//a/b/c'))
        self.assertEqual(par[1], P('//a/b'))
        self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')])
        with self.assertRaises(IndexError):
            par[2]

    def test_drive(self):
        """The drive is the letter+colon, or the \\\\host\\share pair for UNC."""
        P = self.cls
        self.assertEqual(P('c:').drive, 'c:')
        self.assertEqual(P('c:a/b').drive, 'c:')
        self.assertEqual(P('c:/').drive, 'c:')
        self.assertEqual(P('c:/a/b/').drive, 'c:')
        self.assertEqual(P('//a/b').drive, '\\\\a\\b')
        self.assertEqual(P('//a/b/').drive, '\\\\a\\b')
        self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b')

    def test_root(self):
        """The root is '\\' for rooted and UNC paths, empty for drive-relative ones."""
        P = self.cls
        self.assertEqual(P('c:').root, '')
        self.assertEqual(P('c:a/b').root, '')
        self.assertEqual(P('c:/').root, '\\')
        self.assertEqual(P('c:/a/b/').root, '\\')
        self.assertEqual(P('//a/b').root, '\\')
        self.assertEqual(P('//a/b/').root, '\\')
        self.assertEqual(P('//a/b/c/d').root, '\\')

    def test_anchor(self):
        """The anchor is drive + root combined."""
        P = self.cls
        self.assertEqual(P('c:').anchor, 'c:')
        self.assertEqual(P('c:a/b').anchor, 'c:')
        self.assertEqual(P('c:/').anchor, 'c:\\')
        self.assertEqual(P('c:/a/b/').anchor, 'c:\\')
        self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\')
        self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\')
        self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\')

    def test_name(self):
        """The drive and the UNC host/share are not part of the name."""
        P = self.cls
        self.assertEqual(P('c:').name, '')
        self.assertEqual(P('c:/').name, '')
        self.assertEqual(P('c:a/b').name, 'b')
        self.assertEqual(P('c:/a/b').name, 'b')
        self.assertEqual(P('c:a/b.py').name, 'b.py')
        self.assertEqual(P('c:/a/b.py').name, 'b.py')
        self.assertEqual(P('//My.py/Share.php').name, '')
        self.assertEqual(P('//My.py/Share.php/a/b').name, 'b')

    def test_suffix(self):
        """Dots in the drive or UNC host/share never produce a suffix."""
        P = self.cls
        self.assertEqual(P('c:').suffix, '')
        self.assertEqual(P('c:/').suffix, '')
        self.assertEqual(P('c:a/b').suffix, '')
        self.assertEqual(P('c:/a/b').suffix, '')
        self.assertEqual(P('c:a/b.py').suffix, '.py')
        self.assertEqual(P('c:/a/b.py').suffix, '.py')
        self.assertEqual(P('c:a/.hgrc').suffix, '')
        self.assertEqual(P('c:/a/.hgrc').suffix, '')
        self.assertEqual(P('c:a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '')
        self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '')
        self.assertEqual(P('//My.py/Share.php').suffix, '')
        self.assertEqual(P('//My.py/Share.php/a/b').suffix, '')

    def test_suffixes(self):
        """Dots in the drive or UNC host/share never produce suffixes."""
        P = self.cls
        self.assertEqual(P('c:').suffixes, [])
        self.assertEqual(P('c:/').suffixes, [])
        self.assertEqual(P('c:a/b').suffixes, [])
        self.assertEqual(P('c:/a/b').suffixes, [])
        self.assertEqual(P('c:a/b.py').suffixes, ['.py'])
        self.assertEqual(P('c:/a/b.py').suffixes, ['.py'])
        self.assertEqual(P('c:a/.hgrc').suffixes, [])
        self.assertEqual(P('c:/a/.hgrc').suffixes, [])
        self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('//My.py/Share.php').suffixes, [])
        self.assertEqual(P('//My.py/Share.php/a/b').suffixes, [])
        self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, [])
        self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, [])

    def test_stem(self):
        """The stem is computed on the name only, never on the drive."""
        P = self.cls
        self.assertEqual(P('c:').stem, '')
        self.assertEqual(P('c:.').stem, '')
        self.assertEqual(P('c:..').stem, '..')
        self.assertEqual(P('c:/').stem, '')
        self.assertEqual(P('c:a/b').stem, 'b')
        self.assertEqual(P('c:a/b.py').stem, 'b')
        self.assertEqual(P('c:a/.hgrc').stem, '.hgrc')
        self.assertEqual(P('c:a/.hg.rc').stem, '.hg')
        self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar')
        self.assertEqual(P('c:a/Some name. Ending with a dot.').stem,
                         'Some name. Ending with a dot.')

    def test_with_name(self):
        """with_name() rejects replacements carrying a drive or UNC share."""
        P = self.cls
        self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml'))
        self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml'))
        self.assertRaises(ValueError, P('c:').with_name, 'd.xml')
        self.assertRaises(ValueError, P('c:/').with_name, 'd.xml')
        self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml')
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:')
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:e')
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e')
        self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share')

    def test_with_stem(self):
        """with_stem() rejects replacements carrying a drive or UNC share."""
        P = self.cls
        self.assertEqual(P('c:a/b').with_stem('d'), P('c:a/d'))
        self.assertEqual(P('c:/a/b').with_stem('d'), P('c:/a/d'))
        self.assertEqual(P('c:a/Dot ending.').with_stem('d'), P('c:a/d'))
        self.assertEqual(P('c:/a/Dot ending.').with_stem('d'), P('c:/a/d'))
        self.assertRaises(ValueError, P('c:').with_stem, 'd')
        self.assertRaises(ValueError, P('c:/').with_stem, 'd')
        self.assertRaises(ValueError, P('//My/Share').with_stem, 'd')
        self.assertRaises(ValueError, P('c:a/b').with_stem, 'd:')
        self.assertRaises(ValueError, P('c:a/b').with_stem, 'd:e')
        self.assertRaises(ValueError, P('c:a/b').with_stem, 'd:/e')
        self.assertRaises(ValueError, P('c:a/b').with_stem, '//My/Share')

    def test_with_suffix(self):
        """with_suffix() rejects suffixes containing separators, drives or colons."""
        P = self.cls
        self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz'))
        self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz'))
        # Path doesn't have a "filename" component.
        self.assertRaises(ValueError, P('').with_suffix, '.gz')
        self.assertRaises(ValueError, P('.').with_suffix, '.gz')
        self.assertRaises(ValueError, P('/').with_suffix, '.gz')
        self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz')
        # Invalid suffix.
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d')

    def test_relative_to(self):
        """relative_to() matches drives/shares case-insensitively and never crosses them."""
        P = self.cls
        p = P('C:Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:foO/baR')), P())
        self.assertEqual(p.relative_to('c:foO/baR'), P())
        # Unrelated paths.
        self.assertRaises(ValueError, p.relative_to, P())
        self.assertRaises(ValueError, p.relative_to, '')
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('Foo'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz'))
        p = P('C:/Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('/Foo/Bar'))
        self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar')
        self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar')
        self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:/foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:/foO/baR')), P())
        self.assertEqual(p.relative_to('c:/foO/baR'), P())
        # Unrelated paths.
        self.assertRaises(ValueError, p.relative_to, P('C:/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo'))
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('d:/'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//C/Foo'))
        # UNC paths.
        p = P('//Server/Share/Foo/Bar')
        self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P())
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P())
        # Unrelated paths.
        self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'))

    def test_is_relative_to(self):
        """is_relative_to() mirrors relative_to(), returning a bool instead of raising."""
        P = self.cls
        p = P('C:Foo/Bar')
        self.assertTrue(p.is_relative_to(P('c:')))
        self.assertTrue(p.is_relative_to('c:'))
        self.assertTrue(p.is_relative_to(P('c:foO')))
        self.assertTrue(p.is_relative_to('c:foO'))
        self.assertTrue(p.is_relative_to('c:foO/'))
        self.assertTrue(p.is_relative_to(P('c:foO/baR')))
        self.assertTrue(p.is_relative_to('c:foO/baR'))
        # Unrelated paths.
        self.assertFalse(p.is_relative_to(P()))
        self.assertFalse(p.is_relative_to(''))
        self.assertFalse(p.is_relative_to(P('d:')))
        self.assertFalse(p.is_relative_to(P('/')))
        self.assertFalse(p.is_relative_to(P('Foo')))
        self.assertFalse(p.is_relative_to(P('/Foo')))
        self.assertFalse(p.is_relative_to(P('C:/Foo')))
        self.assertFalse(p.is_relative_to(P('C:Foo/Bar/Baz')))
        self.assertFalse(p.is_relative_to(P('C:Foo/Baz')))
        p = P('C:/Foo/Bar')
        self.assertTrue(p.is_relative_to('c:'))
        self.assertTrue(p.is_relative_to(P('c:/')))
        self.assertTrue(p.is_relative_to(P('c:/foO')))
        self.assertTrue(p.is_relative_to('c:/foO/'))
        self.assertTrue(p.is_relative_to(P('c:/foO/baR')))
        self.assertTrue(p.is_relative_to('c:/foO/baR'))
        # Unrelated paths.
        self.assertFalse(p.is_relative_to(P('C:/Baz')))
        self.assertFalse(p.is_relative_to(P('C:/Foo/Bar/Baz')))
        self.assertFalse(p.is_relative_to(P('C:/Foo/Baz')))
        self.assertFalse(p.is_relative_to(P('C:Foo')))
        self.assertFalse(p.is_relative_to(P('d:')))
        self.assertFalse(p.is_relative_to(P('d:/')))
        self.assertFalse(p.is_relative_to(P('/')))
        self.assertFalse(p.is_relative_to(P('/Foo')))
        self.assertFalse(p.is_relative_to(P('//C/Foo')))
        # UNC paths.
        p = P('//Server/Share/Foo/Bar')
        self.assertTrue(p.is_relative_to(P('//sErver/sHare')))
        self.assertTrue(p.is_relative_to('//sErver/sHare'))
        self.assertTrue(p.is_relative_to('//sErver/sHare/'))
        self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo')))
        self.assertTrue(p.is_relative_to('//sErver/sHare/Foo'))
        self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/'))
        self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo/Bar')))
        self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/Bar'))
        # Unrelated paths.
        self.assertFalse(p.is_relative_to(P('/Server/Share/Foo')))
        self.assertFalse(p.is_relative_to(P('c:/Server/Share/Foo')))
        self.assertFalse(p.is_relative_to(P('//z/Share/Foo')))
        self.assertFalse(p.is_relative_to(P('//Server/z/Foo')))

    def test_is_absolute(self):
        P = self.cls
        # Under NT, only paths with both a drive and a root are absolute.
        self.assertFalse(P().is_absolute())
        self.assertFalse(P('a').is_absolute())
        self.assertFalse(P('a/b/').is_absolute())
        self.assertFalse(P('/').is_absolute())
        self.assertFalse(P('/a').is_absolute())
        self.assertFalse(P('/a/b/').is_absolute())
        self.assertFalse(P('c:').is_absolute())
        self.assertFalse(P('c:a').is_absolute())
        self.assertFalse(P('c:a/b/').is_absolute())
        self.assertTrue(P('c:/').is_absolute())
        self.assertTrue(P('c:/a').is_absolute())
        self.assertTrue(P('c:/a/b/').is_absolute())
        # UNC paths are absolute by definition.
        self.assertTrue(P('//a/b').is_absolute())
        self.assertTrue(P('//a/b/').is_absolute())
        self.assertTrue(P('//a/b/c').is_absolute())
        self.assertTrue(P('//a/b/c/d').is_absolute())

    def test_join(self):
        """joinpath() restarts on a rooted argument or a different drive."""
        P = self.cls
        p = P('C:/a/b')
        pp = p.joinpath('x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        pp = p.joinpath('/x/y')
        self.assertEqual(pp, P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        pp = p.joinpath('D:x/y')
        self.assertEqual(pp, P('D:x/y'))
        pp = p.joinpath('D:/x/y')
        self.assertEqual(pp, P('D:/x/y'))
        pp = p.joinpath('//host/share/x/y')
        self.assertEqual(pp, P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        pp = p.joinpath('c:x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        pp = p.joinpath('c:/x/y')
        self.assertEqual(pp, P('C:/x/y'))

    def test_div(self):
        # Basically the same as joinpath().
        P = self.cls
        p = P('C:/a/b')
        self.assertEqual(p / 'x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y'))
        self.assertEqual(p / '/x/y', P('C:/x/y'))
        self.assertEqual(p / '/x' / 'y', P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        self.assertEqual(p / 'D:x/y', P('D:x/y'))
        self.assertEqual(p / 'D:' / 'x/y', P('D:x/y'))
        self.assertEqual(p / 'D:/x/y', P('D:/x/y'))
        self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y'))
        self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'c:/x/y', P('C:/x/y'))

    def test_is_reserved(self):
        """DOS device names (NUL, CON, COM1, ...) are reserved in the last component."""
        P = self.cls
        self.assertIs(False, P('').is_reserved())
        self.assertIs(False, P('/').is_reserved())
        self.assertIs(False, P('/foo/bar').is_reserved())
        # UNC paths are never reserved.
        self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
        # Case-insensitive DOS-device names are reserved.
        self.assertIs(True, P('nul').is_reserved())
        self.assertIs(True, P('aux').is_reserved())
        self.assertIs(True, P('prn').is_reserved())
        self.assertIs(True, P('con').is_reserved())
        self.assertIs(True, P('conin$').is_reserved())
        self.assertIs(True, P('conout$').is_reserved())
        # COM/LPT + 1-9 or + superscript 1-3 are reserved.
        self.assertIs(True, P('COM1').is_reserved())
        self.assertIs(True, P('LPT9').is_reserved())
        self.assertIs(True, P('com\xb9').is_reserved())
        self.assertIs(True, P('com\xb2').is_reserved())
        self.assertIs(True, P('lpt\xb3').is_reserved())
        # DOS-device name matching ignores characters after a dot or
        # a colon and also ignores trailing spaces.
        self.assertIs(True, P('NUL.txt').is_reserved())
        self.assertIs(True, P('PRN  ').is_reserved())
        self.assertIs(True, P('AUX  .txt').is_reserved())
        self.assertIs(True, P('COM1:bar').is_reserved())
        self.assertIs(True, P('LPT9   :bar').is_reserved())
        # DOS-device names are only matched at the beginning
        # of a path component.
        self.assertIs(False, P('bar.com9').is_reserved())
        self.assertIs(False, P('bar.lpt9').is_reserved())
        # Only the last path component matters.
        self.assertIs(True, P('c:/baz/con/NUL').is_reserved())
        self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
class PurePathTest(_BasePurePathTest, unittest.TestCase):
    """PurePath should dispatch to the flavour matching the host OS."""
    cls = pathlib.PurePath

    def test_concrete_class(self):
        """Instantiating the generic class yields the platform-specific one."""
        expected = (pathlib.PureWindowsPath if os.name == 'nt'
                    else pathlib.PurePosixPath)
        self.assertIs(type(self.cls('a')), expected)

    def test_different_flavours_unequal(self):
        """Same text, different flavours: never equal."""
        self.assertNotEqual(pathlib.PurePosixPath('a'),
                            pathlib.PureWindowsPath('a'))

    def test_different_flavours_unordered(self):
        """Cross-flavour ordering comparisons are rejected outright."""
        posix = pathlib.PurePosixPath('a')
        windows = pathlib.PureWindowsPath('a')
        for compare in (lambda: posix < windows,
                        lambda: posix <= windows,
                        lambda: posix > windows,
                        lambda: posix >= windows):
            with self.assertRaises(TypeError):
                compare()
#
# Tests for the concrete classes.
#
# Make sure any symbolic links in the base test path are resolved.
BASE = os.path.realpath(TESTFN)

# Fixture-path helpers: plain functions instead of name-bound lambdas
# (PEP 8 E731) so tracebacks show a useful name.
def join(*x):
    """Return an absolute path under the fixture root BASE."""
    return os.path.join(BASE, *x)

def rel_join(*x):
    """Return a path under the unresolved, relative test directory TESTFN."""
    return os.path.join(TESTFN, *x)

# Skip decorators for platform-specific tests.
only_nt = unittest.skipIf(os.name != 'nt',
                          'test requires a Windows-compatible system')
only_posix = unittest.skipIf(os.name == 'nt',
                             'test requires a POSIX-compatible system')
@only_posix
class PosixPathAsPureTest(PurePosixPathTest):
    """The concrete PosixPath must satisfy every PurePosixPath behaviour."""
    cls = pathlib.PosixPath
@only_nt
class WindowsPathAsPureTest(PureWindowsPathTest):
    """The concrete WindowsPath must satisfy every PureWindowsPath behaviour."""
    cls = pathlib.WindowsPath

    def test_owner(self):
        """owner() is unsupported on Windows."""
        P = self.cls
        with self.assertRaises(NotImplementedError):
            P('c:/').owner()

    def test_group(self):
        """group() is unsupported on Windows."""
        P = self.cls
        with self.assertRaises(NotImplementedError):
            P('c:/').group()
class _BasePathTest(object):
    """Tests for the FS-accessing functionalities of the Path classes."""

    # Directory tree created by setUp():
    # (BASE)
    #  |
    #  |-- brokenLink -> non-existing
    #  |-- dirA
    #  |   `-- linkC -> ../dirB
    #  |-- dirB
    #  |   |-- fileB
    #  |   `-- linkD -> ../dirB
    #  |-- dirC
    #  |   |-- dirD
    #  |   |   `-- fileD
    #  |   `-- fileC
    #  |-- dirE  # No permissions
    #  |-- fileA
    #  |-- linkA -> fileA
    #  |-- linkB -> dirB
    #  `-- brokenLinkLoop -> brokenLinkLoop
    #
    def setUp(self):
        # Build the fixture tree above; symlinks are only created when the
        # platform supports them.
        def cleanup():
            # dirE is made unreadable below; restore permissions so that
            # rmtree can remove it.
            os.chmod(join('dirE'), 0o777)
            support.rmtree(BASE)
        self.addCleanup(cleanup)
        os.mkdir(BASE)
        os.mkdir(join('dirA'))
        os.mkdir(join('dirB'))
        os.mkdir(join('dirC'))
        os.mkdir(join('dirC', 'dirD'))
        os.mkdir(join('dirE'))
        with open(join('fileA'), 'wb') as f:
            f.write(b"this is file A\n")
        with open(join('dirB', 'fileB'), 'wb') as f:
            f.write(b"this is file B\n")
        with open(join('dirC', 'fileC'), 'wb') as f:
            f.write(b"this is file C\n")
        with open(join('dirC', 'dirD', 'fileD'), 'wb') as f:
            f.write(b"this is file D\n")
        # Strip all permissions from dirE for the permission-error tests.
        os.chmod(join('dirE'), 0)
        if support.can_symlink():
            # Relative symlinks.
            os.symlink('fileA', join('linkA'))
            os.symlink('non-existing', join('brokenLink'))
            self.dirlink('dirB', join('linkB'))
            self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'))
            # This one goes upwards, creating a loop.
            self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'))
            # Broken symlink (pointing to itself).
            os.symlink('brokenLinkLoop', join('brokenLinkLoop'))

    if os.name == 'nt':
        # Workaround for http://bugs.python.org/issue13772.
        def dirlink(self, src, dest):
            os.symlink(src, dest, target_is_directory=True)
    else:
        def dirlink(self, src, dest):
            # Create a symlink to a directory.
            os.symlink(src, dest)
    def assertSame(self, path_a, path_b):
        # Assert that both paths refer to the same file on disk.
        self.assertTrue(os.path.samefile(str(path_a), str(path_b)),
                        "%r and %r don't point to the same file" %
                        (path_a, path_b))

    def assertFileNotFound(self, func, *args, **kwargs):
        # Assert that func(*args, **kwargs) fails with ENOENT.
        with self.assertRaises(FileNotFoundError) as cm:
            func(*args, **kwargs)
        self.assertEqual(cm.exception.errno, errno.ENOENT)

    def assertEqualNormCase(self, path_a, path_b):
        # Compare path strings, ignoring case on case-insensitive platforms.
        self.assertEqual(os.path.normcase(path_a), os.path.normcase(path_b))

    def _test_cwd(self, p):
        # Shared checks for a Path.cwd() result.
        q = self.cls(os.getcwd())
        self.assertEqual(p, q)
        self.assertEqualNormCase(str(p), str(q))
        self.assertIs(type(p), type(q))
        self.assertTrue(p.is_absolute())
    def test_cwd(self):
        p = self.cls.cwd()
        self._test_cwd(p)

    def _test_home(self, p):
        # Shared checks for a Path.home() result.
        q = self.cls(os.path.expanduser('~'))
        self.assertEqual(p, q)
        self.assertEqualNormCase(str(p), str(q))
        self.assertIs(type(p), type(q))
        self.assertTrue(p.is_absolute())

    def test_home(self):
        with support.EnvironmentVarGuard() as env:
            self._test_home(self.cls.home())
            env.clear()
            env['USERPROFILE'] = os.path.join(BASE, 'userprofile')
            self._test_home(self.cls.home())
            # bpo-38883: ignore `HOME` when set on windows
            env['HOME'] = os.path.join(BASE, 'home')
            self._test_home(self.cls.home())
    def test_samefile(self):
        fileA_path = os.path.join(BASE, 'fileA')
        fileB_path = os.path.join(BASE, 'dirB', 'fileB')
        p = self.cls(fileA_path)
        pp = self.cls(fileA_path)
        q = self.cls(fileB_path)
        self.assertTrue(p.samefile(fileA_path))
        self.assertTrue(p.samefile(pp))
        self.assertFalse(p.samefile(fileB_path))
        self.assertFalse(p.samefile(q))
        # Test the non-existent file case: samefile() must raise whichever
        # side of the comparison does not exist.
        non_existent = os.path.join(BASE, 'foo')
        r = self.cls(non_existent)
        self.assertRaises(FileNotFoundError, p.samefile, r)
        self.assertRaises(FileNotFoundError, p.samefile, non_existent)
        self.assertRaises(FileNotFoundError, r.samefile, p)
        self.assertRaises(FileNotFoundError, r.samefile, non_existent)
        self.assertRaises(FileNotFoundError, r.samefile, r)
        # NOTE(review): duplicate of the r.samefile(non_existent) check above.
        self.assertRaises(FileNotFoundError, r.samefile, non_existent)

    def test_empty_path(self):
        # The empty path points to '.'
        p = self.cls('')
        self.assertEqual(p.stat(), os.stat('.'))
    def test_expanduser_common(self):
        P = self.cls
        # Only a leading '~' component is expanded.
        p = P('~')
        self.assertEqual(p.expanduser(), P(os.path.expanduser('~')))
        p = P('foo')
        self.assertEqual(p.expanduser(), p)
        p = P('/~')
        self.assertEqual(p.expanduser(), p)
        p = P('../~')
        self.assertEqual(p.expanduser(), p)
        p = P(P('').absolute().anchor) / '~'
        self.assertEqual(p.expanduser(), p)

    def test_exists(self):
        P = self.cls
        p = P(BASE)
        self.assertIs(True, p.exists())
        self.assertIs(True, (p / 'dirA').exists())
        self.assertIs(True, (p / 'fileA').exists())
        self.assertIs(False, (p / 'fileA' / 'bah').exists())
        if support.can_symlink():
            self.assertIs(True, (p / 'linkA').exists())
            self.assertIs(True, (p / 'linkB').exists())
            self.assertIs(True, (p / 'linkB' / 'fileB').exists())
            self.assertIs(False, (p / 'linkA' / 'bah').exists())
        self.assertIs(False, (p / 'foo').exists())
        self.assertIs(False, P('/xyzzy').exists())
        # Undecodable / embedded-NUL names must report non-existence,
        # not raise.
        self.assertIs(False, P(BASE + '\udfff').exists())
        self.assertIs(False, P(BASE + '\x00').exists())
    def test_open_common(self):
        # open() must return the stream type matching the requested mode.
        p = self.cls(BASE)
        with (p / 'fileA').open('r') as f:
            self.assertIsInstance(f, io.TextIOBase)
            self.assertEqual(f.read(), "this is file A\n")
        with (p / 'fileA').open('rb') as f:
            self.assertIsInstance(f, io.BufferedIOBase)
            self.assertEqual(f.read().strip(), b"this is file A")
        with (p / 'fileA').open('rb', buffering=0) as f:
            self.assertIsInstance(f, io.RawIOBase)
            self.assertEqual(f.read().strip(), b"this is file A")

    def test_read_write_bytes(self):
        p = self.cls(BASE)
        (p / 'fileA').write_bytes(b'abcdefg')
        self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')
        # Check that trying to write str does not truncate the file.
        self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr')
        self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')

    def test_read_write_text(self):
        p = self.cls(BASE)
        # Written as latin-1; read back as utf-8 with errors ignored, the
        # undecodable 'ä' byte is dropped.
        (p / 'fileA').write_text('äbcdefg', encoding='latin-1')
        self.assertEqual((p / 'fileA').read_text(
            encoding='utf-8', errors='ignore'), 'bcdefg')
        # Check that trying to write bytes does not truncate the file.
        self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes')
        self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg')
    def test_iterdir(self):
        # iterdir() yields every direct child of the fixture tree.
        P = self.cls
        p = P(BASE)
        it = p.iterdir()
        paths = set(it)
        expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA']
        if support.can_symlink():
            expected += ['linkA', 'linkB', 'brokenLink', 'brokenLinkLoop']
        self.assertEqual(paths, { P(BASE, q) for q in expected })

    @support.skip_unless_symlink
    def test_iterdir_symlink(self):
        # __iter__ on a symlink to a directory.
        P = self.cls
        p = P(BASE, 'linkB')
        paths = set(p.iterdir())
        expected = { P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] }
        self.assertEqual(paths, expected)

    def test_iterdir_nodir(self):
        # __iter__ on something that is not a directory.
        p = self.cls(BASE, 'fileA')
        with self.assertRaises(OSError) as cm:
            next(p.iterdir())
        # ENOENT or EINVAL under Windows, ENOTDIR otherwise
        # (see issue #12802).
        self.assertIn(cm.exception.errno, (errno.ENOTDIR,
                                           errno.ENOENT, errno.EINVAL))
    def test_glob_common(self):
        # Non-recursive globbing; symlinked entries appear when the
        # platform supports symlinks.
        def _check(glob, expected):
            self.assertEqual(set(glob), { P(BASE, q) for q in expected })
        P = self.cls
        p = P(BASE)
        it = p.glob("fileA")
        self.assertIsInstance(it, collections.abc.Iterator)
        _check(it, ["fileA"])
        _check(p.glob("fileB"), [])
        _check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"])
        if not support.can_symlink():
            _check(p.glob("*A"), ['dirA', 'fileA'])
        else:
            _check(p.glob("*A"), ['dirA', 'fileA', 'linkA'])
        if not support.can_symlink():
            _check(p.glob("*B/*"), ['dirB/fileB'])
        else:
            _check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD',
                                    'linkB/fileB', 'linkB/linkD'])
        if not support.can_symlink():
            _check(p.glob("*/fileB"), ['dirB/fileB'])
        else:
            _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB'])

    def test_rglob_common(self):
        # Recursive ("**"-style) globbing.
        def _check(glob, expected):
            self.assertEqual(set(glob), { P(BASE, q) for q in expected })
        P = self.cls
        p = P(BASE)
        it = p.rglob("fileA")
        self.assertIsInstance(it, collections.abc.Iterator)
        _check(it, ["fileA"])
        _check(p.rglob("fileB"), ["dirB/fileB"])
        _check(p.rglob("*/fileA"), [])
        if not support.can_symlink():
            _check(p.rglob("*/fileB"), ["dirB/fileB"])
        else:
            _check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB",
                                        "linkB/fileB", "dirA/linkC/fileB"])
        _check(p.rglob("file*"), ["fileA", "dirB/fileB",
                                  "dirC/fileC", "dirC/dirD/fileD"])
        p = P(BASE, "dirC")
        _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"])
        _check(p.rglob("*/*"), ["dirC/dirD/fileD"])
    @support.skip_unless_symlink
    def test_rglob_symlink_loop(self):
        # Don't get fooled by symlink loops (Issue #26012).
        P = self.cls
        p = P(BASE)
        given = set(p.rglob('*'))
        expect = {'brokenLink',
                  'dirA', 'dirA/linkC',
                  'dirB', 'dirB/fileB', 'dirB/linkD',
                  'dirC', 'dirC/dirD', 'dirC/dirD/fileD', 'dirC/fileC',
                  'dirE',
                  'fileA',
                  'linkA',
                  'linkB',
                  'brokenLinkLoop',
                  }
        self.assertEqual(given, {p / x for x in expect})

    def test_glob_many_open_files(self):
        # Many simultaneously-open glob iterators over a deep tree must not
        # exhaust file descriptors.
        depth = 30
        P = self.cls
        base = P(BASE) / 'deep'
        p = P(base, *(['d']*depth))
        p.mkdir(parents=True)
        pattern = '/'.join(['*'] * depth)
        iters = [base.glob(pattern) for j in range(100)]
        for it in iters:
            self.assertEqual(next(it), p)
        iters = [base.rglob('d') for j in range(100)]
        p = base
        for i in range(depth):
            p = p / 'd'
            for it in iters:
                self.assertEqual(next(it), p)

    def test_glob_dotdot(self):
        # ".." is not special in globs.
        P = self.cls
        p = P(BASE)
        self.assertEqual(set(p.glob("..")), { P(BASE, "..") })
        self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") })
        self.assertEqual(set(p.glob("../xyzzy")), set())

    @support.skip_unless_symlink
    def test_glob_permissions(self):
        # See bpo-38894
        P = self.cls
        base = P(BASE) / 'permissions'
        base.mkdir()
        file1 = base / "file1"
        file1.touch()
        file2 = base / "file2"
        file2.touch()
        subdir = base / "subdir"
        file3 = base / "file3"
        file3.symlink_to(subdir / "other")
        # Patching is needed to avoid relying on the filesystem
        # to return the order of the files as the error will not
        # happen if the symlink is the last item.
        with mock.patch("os.scandir") as scandir:
            scandir.return_value = sorted(os.scandir(base))
            self.assertEqual(len(set(base.glob("*"))), 3)
        subdir.mkdir()
        with mock.patch("os.scandir") as scandir:
            scandir.return_value = sorted(os.scandir(base))
            self.assertEqual(len(set(base.glob("*"))), 4)
        subdir.chmod(000)
        with mock.patch("os.scandir") as scandir:
            scandir.return_value = sorted(os.scandir(base))
            self.assertEqual(len(set(base.glob("*"))), 4)
    def _check_resolve(self, p, expected, strict=True):
        # Resolve p and compare against the expected path.
        q = p.resolve(strict)
        self.assertEqual(q, expected)

    # This can be used to check both relative and absolute resolutions.
    _check_resolve_relative = _check_resolve_absolute = _check_resolve

    @support.skip_unless_symlink
    def test_resolve_common(self):
        P = self.cls
        # Strict resolution of a non-existing path raises ENOENT.
        p = P(BASE, 'foo')
        with self.assertRaises(OSError) as cm:
            p.resolve(strict=True)
        self.assertEqual(cm.exception.errno, errno.ENOENT)
        # Non-strict
        self.assertEqualNormCase(str(p.resolve(strict=False)),
                                 os.path.join(BASE, 'foo'))
        p = P(BASE, 'foo', 'in', 'spam')
        self.assertEqualNormCase(str(p.resolve(strict=False)),
                                 os.path.join(BASE, 'foo', 'in', 'spam'))
        p = P(BASE, '..', 'foo', 'in', 'spam')
        self.assertEqualNormCase(str(p.resolve(strict=False)),
                                 os.path.abspath(os.path.join('foo', 'in', 'spam')))
        # These are all relative symlinks.
        p = P(BASE, 'dirB', 'fileB')
        self._check_resolve_relative(p, p)
        p = P(BASE, 'linkA')
        self._check_resolve_relative(p, P(BASE, 'fileA'))
        p = P(BASE, 'dirA', 'linkC', 'fileB')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
        p = P(BASE, 'dirB', 'linkD', 'fileB')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
        # Non-strict
        p = P(BASE, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in',
                                          'spam'), False)
        p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam')
        if os.name == 'nt':
            # In Windows, if linkY points to dirB, 'dirA\linkY\..'
            # resolves to 'dirA' without resolving linkY first.
            self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in',
                                              'spam'), False)
        else:
            # In Posix, if linkY points to dirB, 'dirA/linkY/..'
            # resolves to 'dirB/..' first before resolving to parent of dirB.
            self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False)
        # Now create absolute symlinks.
        d = support._longpath(tempfile.mkdtemp(suffix='-dirD', dir=os.getcwd()))
        self.addCleanup(support.rmtree, d)
        os.symlink(os.path.join(d), join('dirA', 'linkX'))
        os.symlink(join('dirB'), os.path.join(d, 'linkY'))
        p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB')
        self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB'))
        # Non-strict
        p = P(BASE, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'),
                                     False)
        p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam')
        if os.name == 'nt':
            # In Windows, if linkY points to dirB, 'dirA\linkY\..'
            # resolves to 'dirA' without resolving linkY first.
            self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False)
        else:
            # In Posix, if linkY points to dirB, 'dirA/linkY/..'
            # resolves to 'dirB/..' first before resolving to parent of dirB.
            self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False)

    @support.skip_unless_symlink
    def test_resolve_dot(self):
        # See https://bitbucket.org/pitrou/pathlib/issue/9/pathresolve-fails-on-complex-symlinks
        p = self.cls(BASE)
        self.dirlink('.', join('0'))
        self.dirlink(os.path.join('0', '0'), join('1'))
        self.dirlink(os.path.join('1', '1'), join('2'))
        q = p / '2'
        self.assertEqual(q.resolve(strict=True), p)
        r = q / '3' / '4'
        self.assertRaises(FileNotFoundError, r.resolve, strict=True)
        # Non-strict
        self.assertEqual(r.resolve(strict=False), p / '3' / '4')
    def test_with(self):
        p = self.cls(BASE)
        it = p.iterdir()
        it2 = p.iterdir()
        next(it2)
        with p:
            pass
        # Using a path as a context manager is a no-op, thus the following
        # operations should still succeed after the context manager exits.
        next(it)
        next(it2)
        p.exists()
        p.resolve()
        p.absolute()
        with p:
            pass

    def test_chmod(self):
        p = self.cls(BASE) / 'fileA'
        mode = p.stat().st_mode
        # Clear writable bit.
        new_mode = mode & ~0o222
        p.chmod(new_mode)
        self.assertEqual(p.stat().st_mode, new_mode)
        # Set writable bit.
        new_mode = mode | 0o222
        p.chmod(new_mode)
        self.assertEqual(p.stat().st_mode, new_mode)

    # XXX also need a test for lchmod.

    def test_stat(self):
        p = self.cls(BASE) / 'fileA'
        st = p.stat()
        self.assertEqual(p.stat(), st)
        # Change file mode by flipping write bit.
        p.chmod(st.st_mode ^ 0o222)
        self.addCleanup(p.chmod, st.st_mode)
        self.assertNotEqual(p.stat(), st)
    @support.skip_unless_symlink
    def test_lstat(self):
        # lstat() describes the symlink itself, stat() its target.
        p = self.cls(BASE) / 'linkA'
        st = p.stat()
        self.assertNotEqual(st, p.lstat())

    def test_lstat_nosymlink(self):
        # On a regular file lstat() and stat() agree.
        p = self.cls(BASE) / 'fileA'
        st = p.stat()
        self.assertEqual(st, p.lstat())

    @unittest.skipUnless(pwd, "the pwd module is needed for this test")
    def test_owner(self):
        p = self.cls(BASE) / 'fileA'
        uid = p.stat().st_uid
        try:
            name = pwd.getpwuid(uid).pw_name
        except KeyError:
            self.skipTest(
                "user %d doesn't have an entry in the system database" % uid)
        self.assertEqual(name, p.owner())

    @unittest.skipUnless(grp, "the grp module is needed for this test")
    def test_group(self):
        p = self.cls(BASE) / 'fileA'
        gid = p.stat().st_gid
        try:
            name = grp.getgrgid(gid).gr_name
        except KeyError:
            self.skipTest(
                "group %d doesn't have an entry in the system database" % gid)
        self.assertEqual(name, p.group())

    def test_unlink(self):
        p = self.cls(BASE) / 'fileA'
        p.unlink()
        self.assertFileNotFound(p.stat)
        self.assertFileNotFound(p.unlink)

    def test_unlink_missing_ok(self):
        # unlink(missing_ok=True) swallows FileNotFoundError.
        p = self.cls(BASE) / 'fileAAA'
        self.assertFileNotFound(p.unlink)
        p.unlink(missing_ok=True)

    def test_rmdir(self):
        p = self.cls(BASE) / 'dirA'
        for q in p.iterdir():
            q.unlink()
        p.rmdir()
        self.assertFileNotFound(p.stat)
        self.assertFileNotFound(p.unlink)
    @unittest.skipUnless(hasattr(os, "link"), "os.link() is not present")
    def test_link_to(self):
        P = self.cls(BASE)
        p = P / 'fileA'
        size = p.stat().st_size
        # linking to another path.
        q = P / 'dirA' / 'fileAA'
        try:
            p.link_to(q)
        except PermissionError as e:
            self.skipTest('os.link(): %s' % e)
        self.assertEqual(q.stat().st_size, size)
        self.assertEqual(os.path.samefile(p, q), True)
        self.assertTrue(p.stat)
        # Linking to a str of a relative path.
        r = rel_join('fileAAA')
        q.link_to(r)
        self.assertEqual(os.stat(r).st_size, size)
        self.assertTrue(q.stat)

    @unittest.skipIf(hasattr(os, "link"), "os.link() is present")
    def test_link_to_not_implemented(self):
        P = self.cls(BASE)
        p = P / 'fileA'
        # linking to another path.
        q = P / 'dirA' / 'fileAA'
        with self.assertRaises(NotImplementedError):
            p.link_to(q)

    def test_rename(self):
        P = self.cls(BASE)
        p = P / 'fileA'
        size = p.stat().st_size
        # Renaming to another path.
        q = P / 'dirA' / 'fileAA'
        renamed_p = p.rename(q)
        self.assertEqual(renamed_p, q)
        self.assertEqual(q.stat().st_size, size)
        self.assertFileNotFound(p.stat)
        # Renaming to a str of a relative path.
        r = rel_join('fileAAA')
        renamed_q = q.rename(r)
        self.assertEqual(renamed_q, self.cls(r))
        self.assertEqual(os.stat(r).st_size, size)
        self.assertFileNotFound(q.stat)

    def test_replace(self):
        P = self.cls(BASE)
        p = P / 'fileA'
        size = p.stat().st_size
        # Replacing a non-existing path.
        q = P / 'dirA' / 'fileAA'
        replaced_p = p.replace(q)
        self.assertEqual(replaced_p, q)
        self.assertEqual(q.stat().st_size, size)
        self.assertFileNotFound(p.stat)
        # Replacing another (existing) path.
        r = rel_join('dirB', 'fileB')
        replaced_q = q.replace(r)
        self.assertEqual(replaced_q, self.cls(r))
        self.assertEqual(os.stat(r).st_size, size)
        self.assertFileNotFound(q.stat)
    @support.skip_unless_symlink
    def test_readlink(self):
        # readlink() returns the raw link target; non-links raise OSError.
        P = self.cls(BASE)
        self.assertEqual((P / 'linkA').readlink(), self.cls('fileA'))
        self.assertEqual((P / 'brokenLink').readlink(),
                         self.cls('non-existing'))
        self.assertEqual((P / 'linkB').readlink(), self.cls('dirB'))
        with self.assertRaises(OSError):
            (P / 'fileA').readlink()

    def test_touch_common(self):
        P = self.cls(BASE)
        p = P / 'newfileA'
        self.assertFalse(p.exists())
        p.touch()
        self.assertTrue(p.exists())
        st = p.stat()
        old_mtime = st.st_mtime
        old_mtime_ns = st.st_mtime_ns
        # Rewind the mtime sufficiently far in the past to work around
        # filesystem-specific timestamp granularity.
        os.utime(str(p), (old_mtime - 10, old_mtime - 10))
        # The file mtime should be refreshed by calling touch() again.
        p.touch()
        st = p.stat()
        self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns)
        self.assertGreaterEqual(st.st_mtime, old_mtime)
        # Now with exist_ok=False.
        p = P / 'newfileB'
        self.assertFalse(p.exists())
        p.touch(mode=0o700, exist_ok=False)
        self.assertTrue(p.exists())
        self.assertRaises(OSError, p.touch, exist_ok=False)

    def test_touch_nochange(self):
        # touch() on an existing file must not alter its contents.
        P = self.cls(BASE)
        p = P / 'fileA'
        p.touch()
        with p.open('rb') as f:
            self.assertEqual(f.read().strip(), b"this is file A")
    def test_mkdir(self):
        P = self.cls(BASE)
        p = P / 'newdirA'
        self.assertFalse(p.exists())
        p.mkdir()
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        with self.assertRaises(OSError) as cm:
            p.mkdir()
        self.assertEqual(cm.exception.errno, errno.EEXIST)

    def test_mkdir_parents(self):
        # Creating a chain of directories.
        p = self.cls(BASE, 'newdirB', 'newdirC')
        self.assertFalse(p.exists())
        with self.assertRaises(OSError) as cm:
            p.mkdir()
        self.assertEqual(cm.exception.errno, errno.ENOENT)
        p.mkdir(parents=True)
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        with self.assertRaises(OSError) as cm:
            p.mkdir(parents=True)
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        # Test `mode` arg.
        mode = stat.S_IMODE(p.stat().st_mode)  # Default mode.
        p = self.cls(BASE, 'newdirD', 'newdirE')
        p.mkdir(0o555, parents=True)
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        if os.name != 'nt':
            # The directory's permissions follow the mode argument.
            self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode)
        # The parent's permissions follow the default process settings.
        self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode)

    def test_mkdir_exist_ok(self):
        p = self.cls(BASE, 'dirB')
        st_ctime_first = p.stat().st_ctime
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir()
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        # exist_ok=True silences the error and leaves the directory alone.
        p.mkdir(exist_ok=True)
        self.assertTrue(p.exists())
        self.assertEqual(p.stat().st_ctime, st_ctime_first)

    def test_mkdir_exist_ok_with_parent(self):
        p = self.cls(BASE, 'dirC')
        self.assertTrue(p.exists())
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir()
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        p = p / 'newdirC'
        p.mkdir(parents=True)
        st_ctime_first = p.stat().st_ctime
        self.assertTrue(p.exists())
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir(parents=True)
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        p.mkdir(parents=True, exist_ok=True)
        self.assertTrue(p.exists())
        self.assertEqual(p.stat().st_ctime, st_ctime_first)

    def test_mkdir_exist_ok_root(self):
        # Issue #25803: A drive root could raise PermissionError on Windows.
        self.cls('/').resolve().mkdir(exist_ok=True)
        self.cls('/').resolve().mkdir(parents=True, exist_ok=True)

    @only_nt  # XXX: not sure how to test this on POSIX.
    def test_mkdir_with_unknown_drive(self):
        for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA':
            p = self.cls(d + ':\\')
            if not p.is_dir():
                break
        else:
            self.skipTest("cannot find a drive that doesn't exist")
        with self.assertRaises(OSError):
            (p / 'child' / 'path').mkdir(parents=True)

    def test_mkdir_with_child_file(self):
        p = self.cls(BASE, 'dirB', 'fileB')
        self.assertTrue(p.exists())
        # An exception is raised when the last path component is an existing
        # regular file, regardless of whether exist_ok is true or not.
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir(parents=True)
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir(parents=True, exist_ok=True)
        self.assertEqual(cm.exception.errno, errno.EEXIST)

    def test_mkdir_no_parents_file(self):
        p = self.cls(BASE, 'fileA')
        self.assertTrue(p.exists())
        # An exception is raised when the last path component is an existing
        # regular file, regardless of whether exist_ok is true or not.
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir()
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        with self.assertRaises(FileExistsError) as cm:
            p.mkdir(exist_ok=True)
        self.assertEqual(cm.exception.errno, errno.EEXIST)

    def test_mkdir_concurrent_parent_creation(self):
        for pattern_num in range(32):
            p = self.cls(BASE, 'dirCPC%d' % pattern_num)
            self.assertFalse(p.exists())

            def my_mkdir(path, mode=0o777):
                path = str(path)
                # Emulate another process that would create the directory
                # just before we try to create it ourselves. We do it
                # in all possible pattern combinations, assuming that this
                # function is called at most 5 times (dirCPC/dir1/dir2,
                # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2).
                if pattern.pop():
                    os.mkdir(path, mode)  # From another process.
                    concurrently_created.add(path)
                os.mkdir(path, mode)  # Our real call.

            pattern = [bool(pattern_num & (1 << n)) for n in range(5)]
            concurrently_created = set()
            p12 = p / 'dir1' / 'dir2'
            try:
                with mock.patch("pathlib._normal_accessor.mkdir", my_mkdir):
                    p12.mkdir(parents=True, exist_ok=False)
            except FileExistsError:
                self.assertIn(str(p12), concurrently_created)
            else:
                self.assertNotIn(str(p12), concurrently_created)
            self.assertTrue(p.exists())
    @support.skip_unless_symlink
    def test_symlink_to(self):
        P = self.cls(BASE)
        target = P / 'fileA'
        # Symlinking a path target.
        link = P / 'dirA' / 'linkAA'
        link.symlink_to(target)
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        # Symlinking a str target.
        link = P / 'dirA' / 'linkAAA'
        link.symlink_to(str(target))
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        self.assertFalse(link.is_dir())
        # Symlinking to a directory.
        target = P / 'dirB'
        link = P / 'dirA' / 'linkAAAA'
        link.symlink_to(target, target_is_directory=True)
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        self.assertTrue(link.is_dir())
        self.assertTrue(list(link.iterdir()))
def test_is_dir(self):
P = self.cls(BASE)
self.assertTrue((P / 'dirA').is_dir())
self.assertFalse((P / 'fileA').is_dir())
self.assertFalse((P / 'non-existing').is_dir())
self.assertFalse((P / 'fileA' / 'bah').is_dir())
if support.can_symlink():
self.assertFalse((P / 'linkA').is_dir())
self.assertTrue((P / 'linkB').is_dir())
self.assertFalse((P/ 'brokenLink').is_dir(), False)
self.assertIs((P / 'dirA\udfff').is_dir(), False)
self.assertIs((P / 'dirA\x00').is_dir(), False)
    def test_is_file(self):
        # is_file() is true for regular files and links to regular files.
        P = self.cls(BASE)
        self.assertTrue((P / 'fileA').is_file())
        self.assertFalse((P / 'dirA').is_file())
        self.assertFalse((P / 'non-existing').is_file())
        self.assertFalse((P / 'fileA' / 'bah').is_file())
        if support.can_symlink():
            self.assertTrue((P / 'linkA').is_file())
            self.assertFalse((P / 'linkB').is_file())
            self.assertFalse((P / 'brokenLink').is_file())
        # Undecodable / embedded-NUL names must report False, not raise.
        self.assertIs((P / 'fileA\udfff').is_file(), False)
        self.assertIs((P / 'fileA\x00').is_file(), False)
    @only_posix
    def test_is_mount(self):
        # Only the filesystem root is a mount point in this fixture.
        P = self.cls(BASE)
        R = self.cls('/')  # TODO: Work out Windows.
        self.assertFalse((P / 'fileA').is_mount())
        self.assertFalse((P / 'dirA').is_mount())
        self.assertFalse((P / 'non-existing').is_mount())
        self.assertFalse((P / 'fileA' / 'bah').is_mount())
        self.assertTrue(R.is_mount())
        if support.can_symlink():
            self.assertFalse((P / 'linkA').is_mount())
        # Undecodable / embedded-NUL names must report False, not raise.
        self.assertIs(self.cls('/\udfff').is_mount(), False)
        self.assertIs(self.cls('/\x00').is_mount(), False)
def test_is_symlink(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_symlink())
self.assertFalse((P / 'dirA').is_symlink())
self.assertFalse((P / 'non-existing').is_symlink())
self.assertFalse((P / 'fileA' / 'bah').is_symlink())
if support.can_symlink():
self.assertTrue((P / 'linkA').is_symlink())
self.assertTrue((P / 'linkB').is_symlink())
self.assertTrue((P/ 'brokenLink').is_symlink())
self.assertIs((P / 'fileA\udfff').is_file(), False)
self.assertIs((P / 'fileA\x00').is_file(), False)
if support.can_symlink():
self.assertIs((P / 'linkA\udfff').is_file(), False)
self.assertIs((P / 'linkA\x00').is_file(), False)
    def test_is_fifo_false(self):
        # Nothing in the fixture tree is a FIFO.
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_fifo())
        self.assertFalse((P / 'dirA').is_fifo())
        self.assertFalse((P / 'non-existing').is_fifo())
        self.assertFalse((P / 'fileA' / 'bah').is_fifo())
        self.assertIs((P / 'fileA\udfff').is_fifo(), False)
        self.assertIs((P / 'fileA\x00').is_fifo(), False)

    @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required")
    def test_is_fifo_true(self):
        P = self.cls(BASE, 'myfifo')
        try:
            os.mkfifo(str(P))
        except PermissionError as e:
            self.skipTest('os.mkfifo(): %s' % e)
        self.assertTrue(P.is_fifo())
        self.assertFalse(P.is_socket())
        self.assertFalse(P.is_file())
        self.assertIs(self.cls(BASE, 'myfifo\udfff').is_fifo(), False)
        self.assertIs(self.cls(BASE, 'myfifo\x00').is_fifo(), False)

    def test_is_socket_false(self):
        # Nothing in the fixture tree is a socket.
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_socket())
        self.assertFalse((P / 'dirA').is_socket())
        self.assertFalse((P / 'non-existing').is_socket())
        self.assertFalse((P / 'fileA' / 'bah').is_socket())
        self.assertIs((P / 'fileA\udfff').is_socket(), False)
        self.assertIs((P / 'fileA\x00').is_socket(), False)

    @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
    def test_is_socket_true(self):
        P = self.cls(BASE, 'mysock')
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.addCleanup(sock.close)
        try:
            sock.bind(str(P))
        except OSError as e:
            if (isinstance(e, PermissionError) or
                    "AF_UNIX path too long" in str(e)):
                self.skipTest("cannot bind Unix socket: " + str(e))
        self.assertTrue(P.is_socket())
        self.assertFalse(P.is_fifo())
        self.assertFalse(P.is_file())
        self.assertIs(self.cls(BASE, 'mysock\udfff').is_socket(), False)
        self.assertIs(self.cls(BASE, 'mysock\x00').is_socket(), False)

    def test_is_block_device_false(self):
        # Nothing in the fixture tree is a block device.
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_block_device())
        self.assertFalse((P / 'dirA').is_block_device())
        self.assertFalse((P / 'non-existing').is_block_device())
        self.assertFalse((P / 'fileA' / 'bah').is_block_device())
        self.assertIs((P / 'fileA\udfff').is_block_device(), False)
        self.assertIs((P / 'fileA\x00').is_block_device(), False)

    def test_is_char_device_false(self):
        # Nothing in the fixture tree is a character device.
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_char_device())
        self.assertFalse((P / 'dirA').is_char_device())
        self.assertFalse((P / 'non-existing').is_char_device())
        self.assertFalse((P / 'fileA' / 'bah').is_char_device())
        self.assertIs((P / 'fileA\udfff').is_char_device(), False)
        self.assertIs((P / 'fileA\x00').is_char_device(), False)

    def test_is_char_device_true(self):
        # Under Unix, /dev/null should generally be a char device.
        P = self.cls('/dev/null')
        if not P.exists():
            self.skipTest("/dev/null required")
        self.assertTrue(P.is_char_device())
        self.assertFalse(P.is_block_device())
        self.assertFalse(P.is_file())
        self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False)
        self.assertIs(self.cls('/dev/null\x00').is_char_device(), False)
    def test_pickling_common(self):
        # A concrete path round-trips through every pickle protocol.
        p = self.cls(BASE, 'fileA')
        for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
            dumped = pickle.dumps(p, proto)
            pp = pickle.loads(dumped)
            self.assertEqual(pp.stat(), p.stat())

    def test_parts_interning(self):
        # Identical path components share the same str object.
        P = self.cls
        p = P('/usr/bin/foo')
        q = P('/usr/local/bin')
        # 'usr'
        self.assertIs(p.parts[1], q.parts[1])
        # 'bin'
        self.assertIs(p.parts[2], q.parts[3])
    def _check_complex_symlinks(self, link0_target):
        # Test solving a non-looping chain of symlinks (issue #19887).
        P = self.cls(BASE)
        self.dirlink(os.path.join('link0', 'link0'), join('link1'))
        self.dirlink(os.path.join('link1', 'link1'), join('link2'))
        self.dirlink(os.path.join('link2', 'link2'), join('link3'))
        self.dirlink(link0_target, join('link0'))
        # Resolve absolute paths.
        p = (P / 'link0').resolve()
        self.assertEqual(p, P)
        self.assertEqualNormCase(str(p), BASE)
        p = (P / 'link1').resolve()
        self.assertEqual(p, P)
        self.assertEqualNormCase(str(p), BASE)
        p = (P / 'link2').resolve()
        self.assertEqual(p, P)
        self.assertEqualNormCase(str(p), BASE)
        p = (P / 'link3').resolve()
        self.assertEqual(p, P)
        self.assertEqualNormCase(str(p), BASE)
        # Resolve relative paths.
        old_path = os.getcwd()
        os.chdir(BASE)
        try:
            p = self.cls('link0').resolve()
            self.assertEqual(p, P)
            self.assertEqualNormCase(str(p), BASE)
            p = self.cls('link1').resolve()
            self.assertEqual(p, P)
            self.assertEqualNormCase(str(p), BASE)
            p = self.cls('link2').resolve()
            self.assertEqual(p, P)
            self.assertEqualNormCase(str(p), BASE)
            p = self.cls('link3').resolve()
            self.assertEqual(p, P)
            self.assertEqualNormCase(str(p), BASE)
        finally:
            os.chdir(old_path)

    @support.skip_unless_symlink
    def test_complex_symlinks_absolute(self):
        self._check_complex_symlinks(BASE)

    @support.skip_unless_symlink
    def test_complex_symlinks_relative(self):
        self._check_complex_symlinks('.')

    @support.skip_unless_symlink
    def test_complex_symlinks_relative_dot_dot(self):
        self._check_complex_symlinks(os.path.join('dirA', '..'))
class PathTest(_BasePathTest, unittest.TestCase):
    """Tests for the flavour-agnostic concrete Path class."""
    cls = pathlib.Path

    def test_class_getitem(self):
        # Path supports PEP 585-style subscription as a no-op.
        self.assertIs(self.cls[str], self.cls)

    def test_concrete_class(self):
        # Path() instantiates the flavour matching the host OS.
        expected = (pathlib.WindowsPath if os.name == 'nt'
                    else pathlib.PosixPath)
        self.assertIs(type(self.cls('a')), expected)

    def test_unsupported_flavour(self):
        # Instantiating the wrong flavour for the host OS fails.
        wrong = pathlib.PosixPath if os.name == 'nt' else pathlib.WindowsPath
        self.assertRaises(NotImplementedError, wrong)

    def test_glob_empty_pattern(self):
        # Globbing with an empty pattern is rejected.
        with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'):
            list(self.cls().glob(''))
@only_posix
class PosixPathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.PosixPath
def _check_symlink_loop(self, *args, strict=True):
path = self.cls(*args)
with self.assertRaises(RuntimeError):
print(path.resolve(strict))
    def test_open_mode(self):
        # Files created by open() honour the process umask.
        old_mask = os.umask(0)
        self.addCleanup(os.umask, old_mask)
        p = self.cls(BASE)
        with (p / 'new_file').open('wb'):
            pass
        st = os.stat(join('new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
        os.umask(0o022)
        with (p / 'other_new_file').open('wb'):
            pass
        st = os.stat(join('other_new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
def test_resolve_root(self):
current_directory = os.getcwd()
try:
os.chdir('/')
p = self.cls('spam')
self.assertEqual(str(p.resolve()), '/spam')
finally:
os.chdir(current_directory)
def test_touch_mode(self):
old_mask = os.umask(0)
self.addCleanup(os.umask, old_mask)
p = self.cls(BASE)
(p / 'new_file').touch()
st = os.stat(join('new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
os.umask(0o022)
(p / 'other_new_file').touch()
st = os.stat(join('other_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
(p / 'masked_new_file').touch(mode=0o750)
st = os.stat(join('masked_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o750)
@support.skip_unless_symlink
def test_resolve_loop(self):
# Loops with relative symlinks.
os.symlink('linkX/inside', join('linkX'))
self._check_symlink_loop(BASE, 'linkX')
os.symlink('linkY', join('linkY'))
self._check_symlink_loop(BASE, 'linkY')
os.symlink('linkZ/../linkZ', join('linkZ'))
self._check_symlink_loop(BASE, 'linkZ')
# Non-strict
self._check_symlink_loop(BASE, 'linkZ', 'foo', strict=False)
# Loops with absolute symlinks.
os.symlink(join('linkU/inside'), join('linkU'))
self._check_symlink_loop(BASE, 'linkU')
os.symlink(join('linkV'), join('linkV'))
self._check_symlink_loop(BASE, 'linkV')
os.symlink(join('linkW/../linkW'), join('linkW'))
self._check_symlink_loop(BASE, 'linkW')
# Non-strict
self._check_symlink_loop(BASE, 'linkW', 'foo', strict=False)
def test_glob(self):
P = self.cls
p = P(BASE)
given = set(p.glob("FILEa"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.glob("FILEa*")), set())
def test_rglob(self):
P = self.cls
p = P(BASE, "dirC")
given = set(p.rglob("FILEd"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.rglob("FILEd*")), set())
@unittest.skipUnless(hasattr(pwd, 'getpwall'),
'pwd module does not expose getpwall()')
def test_expanduser(self):
P = self.cls
support.import_module('pwd')
import pwd
pwdent = pwd.getpwuid(os.getuid())
username = pwdent.pw_name
userhome = pwdent.pw_dir.rstrip('/') or '/'
# Find arbitrary different user (if exists).
for pwdent in pwd.getpwall():
othername = pwdent.pw_name
otherhome = pwdent.pw_dir.rstrip('/')
if othername != username and otherhome:
break
else:
othername = username
otherhome = userhome
p1 = P('~/Documents')
p2 = P('~' + username + '/Documents')
p3 = P('~' + othername + '/Documents')
p4 = P('../~' + username + '/Documents')
p5 = P('/~' + username + '/Documents')
p6 = P('')
p7 = P('~fakeuser/Documents')
with support.EnvironmentVarGuard() as env:
env.pop('HOME', None)
self.assertEqual(p1.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
self.assertRaises(RuntimeError, p7.expanduser)
env['HOME'] = '/tmp'
self.assertEqual(p1.expanduser(), P('/tmp/Documents'))
self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
self.assertRaises(RuntimeError, p7.expanduser)
@unittest.skipIf(sys.platform != "darwin",
"Bad file descriptor in /dev/fd affects only macOS")
def test_handling_bad_descriptor(self):
try:
file_descriptors = list(pathlib.Path('/dev/fd').rglob("*"))[3:]
if not file_descriptors:
self.skipTest("no file descriptors - issue was not reproduced")
# Checking all file descriptors because there is no guarantee
# which one will fail.
for f in file_descriptors:
f.exists()
f.is_dir()
f.is_file()
f.is_symlink()
f.is_block_device()
f.is_char_device()
f.is_fifo()
f.is_socket()
except OSError as e:
if e.errno == errno.EBADF:
self.fail("Bad file descriptor not handled.")
raise
@only_nt
class WindowsPathTest(_BasePathTest, unittest.TestCase):
    """Windows-only behavior of the concrete :class:`pathlib.WindowsPath`."""

    cls = pathlib.WindowsPath

    def test_glob(self):
        # Matching is case-insensitive, but results keep the on-disk casing
        # for wildcard patterns and the *pattern's* casing for literal ones.
        P = self.cls
        p = P(BASE)
        self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") })
        self.assertEqual(set(p.glob("F*a")), { P(BASE, "fileA") })
        self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\FILEa"})
        self.assertEqual(set(map(str, p.glob("F*a"))), {f"{p}\\fileA"})

    def test_rglob(self):
        P = self.cls
        p = P(BASE, "dirC")
        self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") })
        self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\FILEd"})

    def test_expanduser(self):
        P = self.cls
        with support.EnvironmentVarGuard() as env:
            # Start from a clean slate: no home-related variables set.
            env.pop('HOME', None)
            env.pop('USERPROFILE', None)
            env.pop('HOMEPATH', None)
            env.pop('HOMEDRIVE', None)
            env['USERNAME'] = 'alice'

            # test that the path returns unchanged
            p1 = P('~/My Documents')
            p2 = P('~alice/My Documents')
            p3 = P('~bob/My Documents')
            p4 = P('/~/My Documents')
            p5 = P('d:~/My Documents')
            p6 = P('')

            # With no home variables available, expansion must fail
            # (non-tilde-anchored paths pass through untouched).
            self.assertRaises(RuntimeError, p1.expanduser)
            self.assertRaises(RuntimeError, p2.expanduser)
            self.assertRaises(RuntimeError, p3.expanduser)
            self.assertEqual(p4.expanduser(), p4)
            self.assertEqual(p5.expanduser(), p5)
            self.assertEqual(p6.expanduser(), p6)

            def check():
                # Expansion of other users' homes ('~bob') derives from the
                # current user's home directory; '~alice' needs USERNAME.
                env.pop('USERNAME', None)
                self.assertEqual(p1.expanduser(),
                                 P('C:/Users/alice/My Documents'))
                self.assertRaises(KeyError, p2.expanduser)
                env['USERNAME'] = 'alice'
                self.assertEqual(p2.expanduser(),
                                 P('C:/Users/alice/My Documents'))
                self.assertEqual(p3.expanduser(),
                                 P('C:/Users/bob/My Documents'))
                self.assertEqual(p4.expanduser(), p4)
                self.assertEqual(p5.expanduser(), p5)
                self.assertEqual(p6.expanduser(), p6)

            # Each supported source of the home directory is tried in turn.
            env['HOMEPATH'] = 'C:\\Users\\alice'
            check()

            env['HOMEDRIVE'] = 'C:\\'
            env['HOMEPATH'] = 'Users\\alice'
            check()

            env.pop('HOMEDRIVE', None)
            env.pop('HOMEPATH', None)
            env['USERPROFILE'] = 'C:\\Users\\alice'
            check()

            # bpo-38883: ignore `HOME` when set on windows
            env['HOME'] = 'C:\\Users\\eve'
            check()
class CompatiblePathTest(unittest.TestCase):
    """
    Test that a type can be made compatible with PurePath
    derivatives by implementing division operator overloads.
    """

    class CompatPath:
        """
        Minimum viable class to test PurePath compatibility.
        Simply uses the division operator to join a given
        string and the string value of another object with
        a forward slash.
        """

        def __init__(self, string):
            self.string = string

        def __truediv__(self, other):
            return type(self)(f"{self.string}/{other}")

        def __rtruediv__(self, other):
            return type(self)(f"{other}/{self.string}")

    def test_truediv(self):
        # PurePath / CompatPath defers to CompatPath.__rtruediv__.
        joined = pathlib.PurePath("test") / self.CompatPath("right")
        self.assertIsInstance(joined, self.CompatPath)
        self.assertEqual(joined.string, "test/right")
        # Improper operand types must still raise a TypeError.
        with self.assertRaises(TypeError):
            pathlib.PurePath("test") / 10

    def test_rtruediv(self):
        # CompatPath / PurePath uses CompatPath.__truediv__ directly.
        joined = self.CompatPath("left") / pathlib.PurePath("test")
        self.assertIsInstance(joined, self.CompatPath)
        self.assertEqual(joined.string, "left/test")
        # Improper operand types must still raise a TypeError.
        with self.assertRaises(TypeError):
            10 / pathlib.PurePath("test")
# Run the whole test module when executed directly.
if __name__ == "__main__":
    unittest.main()
|
from torch import nn
#from torch.autograd import Variable
import torch
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import numpy as np
#from models.vgg_tro_channel1 import vgg16_bn
from recognizer.models.vgg_tro_channel3 import vgg16_bn, vgg19_bn
#torch.cuda.set_device(1)
# Device used for all tensors allocated in this module.
cuda = torch.device('cuda')

# Build-time configuration flags for the Encoder below.
DROP_OUT = False       # apply Dropout2d to the CNN feature maps during training
LSTM = False           # use nn.LSTM instead of nn.GRU as the recurrent layer
SUM_UP = True          # merge bidirectional RNN outputs by summation
PRE_TRAIN_VGG = False  # forwarded to vgg*_bn(); presumably toggles pretrained
                       # weight loading — confirm in recognizer.models
class Encoder(nn.Module):
    """VGG19-BN + recurrent encoder over image feature columns.

    Runs the input image through a VGG backbone, treats each resulting
    feature-map column as one time step, optionally groups ``step``
    consecutive columns through a linear projection, then encodes the
    sequence with a 2-layer (bi)directional GRU/LSTM.
    """

    def __init__(self, hidden_size, height, width, bgru, step, flip):
        # hidden_size: RNN hidden size.  height/width: expected input
        # image size.  bgru: use a bidirectional RNN.  step: number of
        # feature columns fused per time step (None disables fusing).
        # flip: mask feature maps from the right instead of the left
        # (see conv_mask).
        super(Encoder, self).__init__()
        self.hidden_size = hidden_size
        self.height = height
        self.width = width
        self.bi = bgru
        self.step = step
        self.flip = flip
        self.n_layers = 2
        self.dropout = 0.5
        #self.layer = vgg16_bn(PRE_TRAIN_VGG)
        self.layer = vgg19_bn(PRE_TRAIN_VGG)
        if DROP_OUT:
            self.layer_dropout = nn.Dropout2d(p=0.5)
        if self.step is not None:
            # Projects `step` concatenated columns back to one column's width.
            #self.output_proj = nn.Linear((((((self.height-2)//2)-2)//2-2-2-2)//2)*128*self.step, self.hidden_size)
            self.output_proj = nn.Linear(self.height//16*512*self.step, self.height//16*512)
        if LSTM:
            RNN = nn.LSTM
        else:
            RNN = nn.GRU
        if self.bi: #8: 3 MaxPool->2**3 128: last hidden_size of layer4
            # height//16*512: flattened (height x channels) feature size per
            # column; assumes the VGG backbone downsamples height by 16 —
            # TODO confirm against recognizer.models.vgg_tro_channel3.
            self.rnn = RNN(self.height//16*512, self.hidden_size, self.n_layers, dropout=self.dropout, bidirectional=True)
            if SUM_UP:
                # Merge the forward/backward halves of the output by summation.
                self.enc_out_merge = lambda x: x[:,:,:x.shape[-1]//2] + x[:,:,x.shape[-1]//2:]
                self.enc_hidden_merge = lambda x: (x[0] + x[1]).unsqueeze(0)
        else:
            self.rnn = RNN(self.height//16*512, self.hidden_size, self.n_layers, dropout=self.dropout, bidirectional=False)

    # (32, 1, 80, 1400)
    def forward(self, in_data, in_data_len, hidden=None):
        """Encode a padded image batch.

        in_data: image batch (batch first); in_data_len: per-sample
        valid widths used to pack the sequence.  Returns (output,
        final_hidden) where output is (t, b, features).
        """
        batch_size = in_data.shape[0]
        out = self.layer(in_data) # torch.Size([32, 512, 4, 63])
        if DROP_OUT and self.training:
            out = self.layer_dropout(out)
        #out.register_hook(print)
        out = out.permute(3, 0, 2, 1) # (width, batch, height, channels)
        #out = out.view(-1, batch_size, (((((self.height-2)//2)-2)//2-2-2-2)//2)*128) # (t, b, f) (173, 32, 1024)
        out = out.reshape(-1, batch_size, self.height//16*512)
        if self.step is not None:
            time_step, batch_size, n_feature = out.shape[0], out.shape[1], out.shape[2]
            #out_short = Variable(torch.zeros(time_step//self.step, batch_size, n_feature*self.step)).cuda() # t//STEP, b, f*STEP
            # NOTE(review): in-place writes into a requires_grad tensor;
            # this only works because .to() makes it a non-leaf — confirm
            # gradients flow as intended.
            out_short = torch.zeros(time_step//self.step, batch_size, n_feature*self.step, requires_grad=True).to(cuda) # t//STEP, b, f*STEP
            for i in range(0, time_step//self.step):
                part_out = [out[j] for j in range(i*self.step, (i+1)*self.step)]
                # reverse the image feature map
                out_short[i] = torch.cat(part_out, 1) # b, f*STEP
            out = self.output_proj(out_short) # t//STEP, b, hidden_size
        width = out.shape[0]
        # Scale the pixel widths down to feature-map widths.
        src_len = in_data_len.numpy()*(width/self.width)
        src_len = src_len + 0.999 # in case of 0 length value from float to int
        src_len = src_len.astype('int')
        out = pack_padded_sequence(out, src_len.tolist(), batch_first=False)
        output, hidden = self.rnn(out, hidden)
        # output: t, b, f*2    hidden: 2, b, f
        output, output_len = pad_packed_sequence(output, batch_first=False)
        if self.bi and SUM_UP:
            output = self.enc_out_merge(output)
            #hidden = self.enc_hidden_merge(hidden)
        # # output: t, b, f     hidden: b, f
        # Odd indices into the (n_layers*num_directions, b, f) hidden state;
        # presumably these select the reverse-direction states — TODO confirm.
        odd_idx = [1, 3, 5, 7, 9, 11]
        hidden_idx = odd_idx[:self.n_layers]
        final_hidden = hidden[hidden_idx]
        #if self.flip:
        #    hidden = output[-1]
        #    #hidden = hidden.permute(1, 0, 2) # b, 2, f
        #    #hidden = hidden.contiguous().view(batch_size, -1) # b, f*2
        #else:
        #    hidden = output[0] # b, f*2
        return output, final_hidden # t, b, f*2    b, f*2

    # matrix: b, c, h, w  lens: list size of batch_size
    def conv_mask(self, matrix, lens):
        """Zero out feature-map columns beyond each sample's valid width.

        When self.flip is set, the valid region is anchored to the right
        edge instead of the left.
        """
        lens = np.array(lens)
        width = matrix.shape[-1]
        lens2 = lens * (width / self.width)
        lens2 = lens2 + 0.999 # in case le == 0
        lens2 = lens2.astype('int')
        matrix_new = matrix.permute(0, 3, 1, 2) # b, w, c, h
        #matrix_out = Variable(torch.zeros(matrix_new.shape)).cuda()
        # NOTE(review): same requires_grad + in-place pattern as in forward().
        matrix_out = torch.zeros(matrix_new.shape, requires_grad=True).to(cuda)
        for i, le in enumerate(lens2):
            if self.flip:
                matrix_out[i, -le:] = matrix_new[i, -le:]
            else:
                matrix_out[i, :le] = matrix_new[i, :le]
        matrix_out = matrix_out.permute(0, 2, 3, 1) # b, c, h, w
        return matrix_out
# Quick sanity check: print the VGG16-BN backbone when run directly.
if __name__ == '__main__':
    print(vgg16_bn())
|
from __future__ import unicode_literals
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
from authtools import forms as authtoolsforms
from django.contrib.auth import forms as authforms
from django.core.urlresolvers import reverse
class LoginForm(AuthenticationForm):
    """Email/password login form rendered with crispy-forms."""

    remember_me = forms.BooleanField(required=False, initial=False)

    def __init__(self, *args, **kwargs):
        super(LoginForm, self).__init__(*args, **kwargs)
        # Render the username field as an HTML5 email input.  # ugly hack
        self.fields["username"].widget.input_type = "email"
        helper = FormHelper()
        helper.layout = Layout(
            Field('username', placeholder="Enter Email", autofocus=""),
            Field('password', placeholder="Enter Password"),
            HTML('<a href="{}">Forgot Password?</a>'.format(
                reverse("accounts:password-reset"))),
            Field('remember_me'),
            Submit('sign_in', 'Log in',
                   css_class="btn btn-lg btn-primary btn-block"),
        )
        self.helper = helper
class SignupForm(authtoolsforms.UserCreationForm):
    """Account registration form rendered with crispy-forms."""

    def __init__(self, *args, **kwargs):
        super(SignupForm, self).__init__(*args, **kwargs)
        # Render the email field as an HTML5 email input.  # ugly hack
        self.fields["email"].widget.input_type = "email"
        helper = FormHelper()
        helper.layout = Layout(
            Field('email', placeholder="Enter Email", autofocus=""),
            Field('name', placeholder="Enter Full Name"),
            Field('password1', placeholder="Enter Password"),
            Field('password2', placeholder="Re-enter Password"),
            Submit('sign_up', 'Sign up', css_class="btn-warning"),
        )
        self.helper = helper
class PasswordChangeForm(authforms.PasswordChangeForm):
    """Password change form rendered with crispy-forms."""

    def __init__(self, *args, **kwargs):
        super(PasswordChangeForm, self).__init__(*args, **kwargs)
        helper = FormHelper()
        helper.layout = Layout(
            Field('old_password', placeholder="Enter old password",
                  autofocus=""),
            Field('new_password1', placeholder="Enter new password"),
            Field('new_password2', placeholder="Enter new password (again)"),
            Submit('pass_change', 'Change Password', css_class="btn-warning"),
        )
        self.helper = helper
class PasswordResetForm(authtoolsforms.FriendlyPasswordResetForm):
    """Password reset request form rendered with crispy-forms."""

    def __init__(self, *args, **kwargs):
        super(PasswordResetForm, self).__init__(*args, **kwargs)
        helper = FormHelper()
        helper.layout = Layout(
            Field('email', placeholder="Enter email",
                  autofocus=""),
            Submit('pass_reset', 'Reset Password', css_class="btn-warning"),
        )
        self.helper = helper
class SetPasswordForm(authforms.SetPasswordForm):
    """New-password entry form (post-reset) rendered with crispy-forms."""

    def __init__(self, *args, **kwargs):
        super(SetPasswordForm, self).__init__(*args, **kwargs)
        helper = FormHelper()
        helper.layout = Layout(
            Field('new_password1', placeholder="Enter new password",
                  autofocus=""),
            Field('new_password2', placeholder="Enter new password (again)"),
            Submit('pass_change', 'Change Password', css_class="btn-warning"),
        )
        self.helper = helper
|
"""Contains the main display widget used for representing an entire device."""
import enum
import inspect
import logging
import os
import pathlib
import webbrowser
from typing import List, Optional, Union
import ophyd
import pcdsutils
import pydm.display
import pydm.exception
import pydm.utilities
from pcdsutils.qt import forward_property
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import Q_ENUMS, Property, Qt, Slot
from . import cache
from . import panel as typhos_panel
from . import utils, web, widgets
from .jira import TyphosJiraIssueWidget
logger = logging.getLogger(__name__)
class DisplayTypes(enum.IntEnum):
    """Enumeration of template types that can be used in displays."""

    # Integer values give a stable ordering and index into the default
    # template mapping built below.
    embedded_screen = 0
    detailed_screen = 1
    engineering_screen = 2
# Qt-compatible mirror of the enum (for Q_ENUMS/property usage) and a
# name list in declaration order.
_DisplayTypes = utils.pyqt_class_from_enum(DisplayTypes)
DisplayTypes.names = [view.name for view in DisplayTypes]
class ScrollOptions(enum.IntEnum):
    """Enumeration of scrollable options for displays."""

    auto = 0        # let the display decide whether to use a scrollbar
    scrollbar = 1   # always wrap the display in a scroll area
    no_scroll = 2   # never use a scrollbar
# Qt-compatible mirror of ScrollOptions and its names in declaration order.
_ScrollOptions = utils.pyqt_class_from_enum(ScrollOptions)
ScrollOptions.names = [view.name for view in ScrollOptions]
# Built-in .ui template paths, keyed by display type name.  The detailed
# view gets an extra tree-based variant appended below.
DEFAULT_TEMPLATES = {
    name: [(utils.ui_dir / 'core' / f'{name}.ui').resolve()]
    for name in DisplayTypes.names
}

DETAILED_TREE_TEMPLATE = (utils.ui_dir / 'core' / 'detailed_tree.ui').resolve()
DEFAULT_TEMPLATES['detailed_screen'].append(DETAILED_TREE_TEMPLATE)

# Flat list of every built-in template file.
DEFAULT_TEMPLATES_FLATTEN = [f for _, files in DEFAULT_TEMPLATES.items()
                             for f in files]
def normalize_display_type(
    display_type: Union[DisplayTypes, str, int]
) -> DisplayTypes:
    """
    Normalize a given display type.

    Parameters
    ----------
    display_type : DisplayTypes, str, or int
        The display type.

    Returns
    -------
    display_type : DisplayTypes
        The normalized :class:`DisplayTypes`.

    Raises
    ------
    ValueError
        If the input cannot be made a :class:`DisplayTypes`.
    """
    try:
        # Accept DisplayTypes instances and their integer values directly.
        return DisplayTypes(display_type)
    except ValueError:
        try:
            # Fall back to lookup by member name (e.g. 'detailed_screen').
            return DisplayTypes[display_type]
        except KeyError:
            # Suppress the internal KeyError context so callers see a
            # single, clean ValueError instead of a chained traceback.
            raise ValueError(
                f'Unrecognized display type: {display_type}'
            ) from None
def normalize_scroll_option(
    scroll_option: Union[ScrollOptions, str, int]
) -> ScrollOptions:
    """
    Normalize a given scroll option.

    Parameters
    ----------
    scroll_option : ScrollOptions, str, or int
        The scroll option.

    Returns
    -------
    scroll_option : ScrollOptions
        The normalized :class:`ScrollOptions`.

    Raises
    ------
    ValueError
        If the input cannot be made a :class:`ScrollOptions`.
    """
    try:
        # Accept ScrollOptions instances and their integer values directly.
        return ScrollOptions(scroll_option)
    except ValueError:
        try:
            # Fall back to lookup by member name (e.g. 'scrollbar').
            return ScrollOptions[scroll_option]
        except KeyError:
            # Suppress the internal KeyError context so callers see a
            # single, clean ValueError instead of a chained traceback.
            raise ValueError(
                f'Unrecognized scroll option: {scroll_option}'
            ) from None
class TyphosToolButton(QtWidgets.QToolButton):
    """
    Base class for tool buttons used in the TyphosDisplaySwitcher.

    Parameters
    ----------
    icon : QIcon or str, optional
        See :meth:`.get_icon` for options.

    parent : QtWidgets.QWidget, optional
        The parent widget.

    Attributes
    ----------
    DEFAULT_ICON : str
        The default icon from fontawesome to use.
    """

    DEFAULT_ICON = 'circle'

    def __init__(self, icon=None, *, parent=None):
        super().__init__(parent=parent)
        self.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.contextMenuEvent = self.open_context_menu
        self.clicked.connect(self._clicked)
        self.setIcon(self.get_icon(icon))
        self.setMinimumSize(24, 24)

    def _clicked(self):
        """Clicked callback: override in a subclass."""
        menu = self.generate_context_menu()
        if menu:
            menu.exec_(QtGui.QCursor.pos())

    def generate_context_menu(self):
        """Context menu request: override in subclasses."""
        return None

    @classmethod
    def get_icon(cls, icon=None):
        """
        Get a QIcon, if specified, or fall back to the default.

        Parameters
        ----------
        icon : str or QtGui.QIcon
            If a string, assume it is from fontawesome.
            Otherwise, use the icon instance as-is.
        """
        chosen = icon or cls.DEFAULT_ICON
        if not isinstance(chosen, str):
            # Already a QIcon (or similar): pass through untouched.
            return chosen
        # Strings are looked up in the fontawesome icon font.
        return pydm.utilities.IconFont().icon(chosen)

    def open_context_menu(self, ev):
        """
        Open the instance-specific context menu at the event position.

        Parameters
        ----------
        ev : QEvent
        """
        menu = self.generate_context_menu()
        if menu:
            menu.exec_(self.mapToGlobal(ev.pos()))
class TyphosDisplayConfigButton(TyphosToolButton):
    """
    The configuration button used in the :class:`TyphosDisplaySwitcher`.

    This uses the common "vertical ellipse" icon by default.
    """

    DEFAULT_ICON = 'ellipsis-v'

    # Mapping of ophyd Kind name -> panel visibility property name.
    _kind_to_property = typhos_panel.TyphosSignalPanel._kind_to_property

    def __init__(self, icon=None, *, parent=None):
        super().__init__(icon=icon, parent=parent)
        self.setPopupMode(self.InstantPopup)
        self.setArrowType(Qt.NoArrow)
        self.templates = None
        self.device_display = None

    def set_device_display(self, device_display):
        """Typhos callback: set the :class:`TyphosDeviceDisplay`."""
        self.device_display = device_display

    def create_kind_filter_menu(self, panels, base_menu, *, only):
        """
        Create the "Kind" filter menu.

        Parameters
        ----------
        panels : list of TyphosSignalPanel
            The panels to filter upon triggering of menu actions.

        base_menu : QMenu
            The menu to add actions to.

        only : bool
            False - create "Show Kind" actions.
            True - create "Show only Kind" actions.
        """
        for kind, prop in self._kind_to_property.items():
            # `prop=prop` binds the loop variable at definition time,
            # avoiding the late-binding closure pitfall.
            def selected(new_value, *, prop=prop):
                if only:
                    # Show *only* the specific kind for all panels
                    for kind, current_prop in self._kind_to_property.items():
                        visible = (current_prop == prop)
                        for panel in panels:
                            setattr(panel, current_prop, visible)
                else:
                    # Toggle visibility of the specific kind for all panels
                    for panel in panels:
                        setattr(panel, prop, new_value)
                self.hide_empty()

            title = f'Show only &{kind}' if only else f'Show &{kind}'
            action = base_menu.addAction(title)
            if not only:
                # Checkbox reflects whether *all* panels show this kind.
                action.setCheckable(True)
                action.setChecked(all(getattr(panel, prop)
                                      for panel in panels))
            action.triggered.connect(selected)

    def create_name_filter_menu(self, panels, base_menu):
        """
        Create the name-based filtering menu.

        Parameters
        ----------
        panels : list of TyphosSignalPanel
            The panels to filter upon triggering of menu actions.

        base_menu : QMenu
            The menu to add actions to.
        """
        def text_filter_updated():
            # Push the entered filter text to every panel.
            text = line_edit.text().strip()
            for panel in panels:
                panel.nameFilter = text
            self.hide_empty()

        line_edit = QtWidgets.QLineEdit()

        filters = list(set(panel.nameFilter for panel in panels
                           if panel.nameFilter))
        if len(filters) == 1:
            # All panels agree on one filter: show it as the current text.
            line_edit.setText(filters[0])
        else:
            # Mixed filters: show them as a placeholder instead.
            line_edit.setPlaceholderText('/ '.join(filters))
        line_edit.editingFinished.connect(text_filter_updated)
        line_edit.setObjectName('menu_action')

        # Disabled action serves purely as a label for the line edit below.
        action = base_menu.addAction('Filter by name:')
        action.setEnabled(False)

        action = QtWidgets.QWidgetAction(self)
        action.setDefaultWidget(line_edit)
        base_menu.addAction(action)

    def hide_empty(self, search=True):
        """
        Wrap hide_empty calls for use with search functions and action clicks.

        Parameters
        ----------
        search : bool
            Whether or not this method is being called from a search/filter
            method.
        """
        if self.device_display.hideEmpty:
            if search:
                # Reset visibility first so the subsequent hide pass starts
                # from a clean slate.
                show_empty(self.device_display)
            hide_empty(self.device_display, process_widget=False)

    def create_hide_empty_menu(self, panels, base_menu):
        """
        Create the hide empty filtering menu.

        Parameters
        ----------
        panels : list of TyphosSignalPanel
            The panels to filter upon triggering of menu actions.

        base_menu : QMenu
            The menu to add actions to.
        """
        def handle_menu(checked):
            self.device_display.hideEmpty = checked

            if not checked:
                # Force a reboot of the filters
                # since we no longer can figure what was supposed to be
                # visible or not
                for p in panels:
                    p._update_panel()
                show_empty(self.device_display)
            else:
                self.hide_empty(search=False)

        action = base_menu.addAction('Hide Empty Panels')
        action.setCheckable(True)
        action.setChecked(self.device_display.hideEmpty)
        action.triggered.connect(handle_menu)

    def generate_context_menu(self):
        """
        Generate the custom context menu.

        .. code::

            Embedded
            Detailed
            Engineering
            -------------
            Refresh templates
            -------------
            Kind filter > Show hinted
                          ...
                          Show only hinted
            Filter by name
            Hide Empty Panels
        """
        base_menu = QtWidgets.QMenu(parent=self)

        display = self.device_display
        if not display:
            return base_menu

        panels = display.findChildren(typhos_panel.TyphosSignalPanel) or []
        if not panels:
            return base_menu

        base_menu.addSection('Templates')
        display._generate_template_menu(base_menu)

        base_menu.addSection('Filters')
        filter_menu = base_menu.addMenu("&Kind filter")
        self.create_kind_filter_menu(panels, filter_menu, only=False)
        filter_menu.addSeparator()
        self.create_kind_filter_menu(panels, filter_menu, only=True)

        self.create_name_filter_menu(panels, base_menu)

        base_menu.addSeparator()
        self.create_hide_empty_menu(panels, base_menu)

        if utils.DEBUG_MODE:
            base_menu.addSection('Debug')
            action = base_menu.addAction('&Copy to clipboard')
            action.triggered.connect(display.copy_to_clipboard)

        return base_menu
class TyphosDisplaySwitcherButton(TyphosToolButton):
    """A button which switches the TyphosDeviceDisplay template on click."""

    template_selected = QtCore.Signal(pathlib.Path)

    icons = {'embedded_screen': 'compress',
             'detailed_screen': 'braille',
             'engineering_screen': 'cogs'
             }

    def __init__(self, display_type, *, parent=None):
        super().__init__(icon=self.icons[display_type], parent=parent)
        self.templates = None

    def _clicked(self):
        """Clicked callback - set the template."""
        if self.templates is None:
            logger.warning('set_device_display not called on %s', self)
            return

        try:
            first_template = self.templates[0]
        except IndexError:
            # No templates available for this display type; nothing to do.
            return

        self.template_selected.emit(first_template)

    def generate_context_menu(self):
        """Context menu request: list all templates for this display type."""
        if not self.templates:
            return

        menu = QtWidgets.QMenu(parent=self)
        for template in self.templates:
            action = menu.addAction(template.name)

            # `template=template` binds the loop variable at definition time.
            def emit_template(*, template=template):
                self.template_selected.emit(template)

            action.triggered.connect(emit_template)
        return menu
class TyphosDisplaySwitcher(QtWidgets.QFrame, widgets.TyphosDesignerMixin):
    """Display switcher set of buttons for use with a TyphosDeviceDisplay."""

    template_selected = QtCore.Signal(pathlib.Path)

    def __init__(self, parent=None, **kwargs):
        # Initialize with no parent first; the widget is re-parented below
        # only after the layout and context-menu policy are configured.
        super().__init__(parent=None)
        self.device_display = None
        self.buttons = {}

        layout = QtWidgets.QHBoxLayout()
        self.setLayout(layout)
        layout.setSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)

        self.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.contextMenuEvent = self.open_context_menu

        if parent:
            self.setParent(parent)

        self._create_ui()

    def _create_ui(self):
        """Build the help toggle, one button per display type, and config."""
        layout = self.layout()
        self.buttons.clear()
        self.help_button = None
        self.config_button = None

        self.help_toggle_button = TyphosHelpToggleButton()
        layout.addWidget(self.help_toggle_button, 0, Qt.AlignRight)

        for template_type in DisplayTypes.names:
            button = TyphosDisplaySwitcherButton(template_type)
            self.buttons[template_type] = button
            button.template_selected.connect(self._template_selected)
            layout.addWidget(button, 0, Qt.AlignRight)

            friendly_name = template_type.replace('_', ' ')
            button.setToolTip(f'Switch to {friendly_name}')

        self.config_button = TyphosDisplayConfigButton()
        layout.addWidget(self.config_button, 0, Qt.AlignRight)
        self.config_button.setToolTip('Display settings...')

    def _template_selected(self, template):
        """Template selected hook."""
        self.template_selected.emit(template)
        if self.device_display is not None:
            self.device_display.force_template = template

    def set_device_display(self, display):
        """Typhos hook for setting the associated device display."""
        self.device_display = display

        # Propagate the display's template lists to the per-type buttons.
        for template_type in self.buttons:
            templates = display.templates.get(template_type, [])
            self.buttons[template_type].templates = templates
        self.config_button.set_device_display(display)

    def add_device(self, device):
        """Typhos hook for setting the associated device."""
        ...
class TyphosTitleLabel(QtWidgets.QLabel):
    """
    A label class intended for use as a standardized title.

    Attributes
    ----------
    toggle_requested : QtCore.Signal
        A Qt signal indicating that the user clicked on the title. By default,
        this hides any nested panels underneath the title.
    """

    toggle_requested = QtCore.Signal()

    def mousePressEvent(self, event):
        """Emit ``toggle_requested`` on a left-click, then defer to Qt."""
        left_click = (event.button() == Qt.LeftButton)
        if left_click:
            self.toggle_requested.emit()

        super().mousePressEvent(event)
class TyphosHelpToggleButton(TyphosToolButton):
    """
    A standard button used to toggle help information display.

    Attributes
    ----------
    pop_out : QtCore.Signal
        A Qt signal indicating a request to pop out the help widget.

    open_in_browser : QtCore.Signal
        A Qt signal indicating a request to open the help in a browser.

    open_python_docs : QtCore.Signal
        A Qt signal indicating a request to open the Python docstring
        information.

    report_jira_issue : QtCore.Signal
        A Qt signal indicating a request to open the Jira issue reporting
        widget.

    toggle_help : QtCore.Signal
        A Qt signal indicating a request to toggle the related help display
        frame.
    """

    pop_out = QtCore.Signal()
    open_in_browser = QtCore.Signal()
    open_python_docs = QtCore.Signal()
    report_jira_issue = QtCore.Signal()
    toggle_help = QtCore.Signal(bool)

    def __init__(self, icon="question", parent=None):
        super().__init__(icon, parent=parent)
        self.setCheckable(True)

    def _clicked(self):
        """Hook for QToolButton.clicked."""
        self.toggle_help.emit(self.isChecked())

    def generate_context_menu(self):
        """Build the context menu forwarding each action to its signal."""
        menu = QtWidgets.QMenu(parent=self)

        pop_out_action = menu.addAction("Pop &out documentation...")
        pop_out_action.triggered.connect(self.pop_out.emit)

        browser_action = menu.addAction("Open in &browser...")
        browser_action.triggered.connect(self.open_in_browser.emit)

        python_docs_action = menu.addAction("Open &Python docs...")
        python_docs_action.triggered.connect(self.open_python_docs.emit)

        def toggle():
            # Flip the checked state manually, then fire the usual handler.
            self.setChecked(not self.isChecked())
            self._clicked()

        toggle_action = menu.addAction("Toggle &help")
        toggle_action.triggered.connect(toggle)

        if utils.JIRA_URL:
            menu.addSeparator()
            jira_action = menu.addAction("&Report Jira issue...")
            jira_action.triggered.connect(self.report_jira_issue.emit)

        return menu
class TyphosHelpFrame(QtWidgets.QFrame, widgets.TyphosDesignerMixin):
    """
    A frame for help information display.

    Attributes
    ----------
    tooltip_updated : QtCore.Signal
        A signal indicating the help tooltip has changed.
    """

    tooltip_updated = QtCore.Signal(str)

    def __init__(self, parent=None):
        super().__init__(parent=parent)

        self.help = None
        self.help_web_view = None
        self._delete_timer = None
        self.python_docs_browser = None
        self.setContentsMargins(0, 0, 0, 0)
        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)
        self.devices = []
        self._jira_widget = None
        # NOTE(review): self._tooltip is first assigned in add_device();
        # calling open_python_docs() before any add_device() would raise
        # AttributeError — confirm call order or initialize it here.

    def new_jira_widget(self):
        """Open a new Jira issue reporting widget."""
        device = self.devices[0] if self.devices else None
        self._jira_widget = TyphosJiraIssueWidget(device=device)
        self._jira_widget.show()

    def open_in_browser(self, new=0, autoraise=True):
        """
        Open the associated help documentation in the browser.

        Parameters
        ----------
        new : int, optional
            0: the same browser window (the default).
            1: a new browser window.
            2: a new browser page ("tab").

        autoraise : bool, optional
            If possible, autoraise raises the window (the default) or not.
        """
        return webbrowser.open(
            self.help_url.toString(), new=new, autoraise=autoraise
        )

    def open_python_docs(self):
        """Open the Python docstring information in a new window."""
        if self.python_docs_browser is not None:
            # Already open: just bring it to the front.
            self.python_docs_browser.raise_()
            return

        self.python_docs_browser = QtWidgets.QTextBrowser()
        help_document = QtGui.QTextDocument()
        contents = self._tooltip or "Unset"
        first_line = contents.splitlines()[0]
        # TODO: later versions of qt will support setMarkdown
        help_document.setPlainText(contents)

        self.python_docs_browser.setWindowTitle(first_line)
        font = QtGui.QFont("Monospace")
        font.setStyleHint(QtGui.QFont.TypeWriter)
        # font.setStyleHint(QtGui.QFont.Monospace)
        self.python_docs_browser.setFont(font)
        self.python_docs_browser.setDocument(help_document)
        self.python_docs_browser.show()
        return self.python_docs_browser

    def _get_tooltip(self):
        """Update the tooltip based on device information."""
        tooltip = []
        # BUG: I'm seeing two devices in `self.devices` for
        # $ typhos --fake-device 'ophyd.EpicsMotor[{"prefix":"b"}]'
        # De-duplicate while preserving first-seen order.
        for device in sorted(
                set(self.devices),
                key=lambda dev: self.devices.index(dev)
                ):
            heading = device.name or type(device).__name__
            tooltip.extend([
                heading,
                "-" * len(heading),
                ""
            ])
            tooltip.append(
                inspect.getdoc(device) or
                inspect.getdoc(type(device)) or
                "No docstring"
            )
            tooltip.append("")
        return "\n".join(tooltip)

    def add_device(self, device):
        """Typhos hook: register a device and refresh the tooltip."""
        self.devices.append(device)
        self._tooltip = self._get_tooltip()
        self.tooltip_updated.emit(self._tooltip)
        self.setWindowTitle(f"Help: {device.name}")

    @property
    def help_url(self):
        """The full help URL, generated from ``TYPHOS_HELP_URL``."""
        if not self.devices:
            return QtCore.QUrl("about:blank")

        device, *_ = self.devices
        try:
            device_url = utils.HELP_URL.format(device=device)
        except Exception:
            logger.exception("Failed to format confluence URL for device %s",
                             device)
            return QtCore.QUrl("about:blank")

        return QtCore.QUrl(device_url)

    def show_help(self):
        """Show the help information in a QWebEngineView."""
        if web.TyphosWebEngineView is None:
            logger.error(
                "Failed to import QWebEngineView; "
                "help view is unavailable."
            )
            return

        if self.help_web_view:
            # A (possibly hidden) view already exists: just show it.
            self.help_web_view.show()
            return

        self.help_web_view = web.TyphosWebEngineView()
        self.help_web_view.page().setUrl(self.help_url)

        self.help_web_view.setEnabled(True)
        self.help_web_view.setMinimumSize(QtCore.QSize(100, 400))

        self.layout().addWidget(self.help_web_view)

    def hide_help(self):
        """Hide the help information QWebEngineView."""
        if not self.help_web_view:
            return

        self.help_web_view.hide()
        if self._delete_timer is None:
            # Defer teardown; if the user re-shows help within 20 s, the
            # existing view is reused instead of being rebuilt.
            self._delete_timer = QtCore.QTimer()
            self._delete_timer.setInterval(20000)
            self._delete_timer.setSingleShot(True)
            self._delete_timer.timeout.connect(self._delete_help_if_hidden)
            self._delete_timer.start()

    def _delete_help_if_hidden(self):
        """
        Slowly react to the help display removal, as setting it back up can be
        slow and painful.
        """
        self._delete_timer = None
        if self.help_web_view and not self.help_web_view.isVisible():
            self.layout().removeWidget(self.help_web_view)
            self.help_web_view.deleteLater()
            self.help_web_view = None

    def toggle_help(self, show):
        """
        Toggle the visibility of the help information QWebEngineView.

        Parameters
        ----------
        show : bool
            Show the help (True) or hide it (False).
        """
        if not self.devices:
            logger.warning("No devices added -> no help")
            return

        if show:
            self.show_help()
        else:
            self.hide_help()
class TyphosDisplayTitle(QtWidgets.QFrame, widgets.TyphosDesignerMixin):
    """
    Standardized Typhos Device Display title.
    Parameters
    ----------
    title : str, optional
        The initial title text, which may contain macros.
    show_switcher : bool, optional
        Show the :class:`TyphosDisplaySwitcher`.
    show_underline : bool, optional
        Show the underline separator.
    parent : QtWidgets.QWidget, optional
        The parent widget.
    """
    def __init__(self, title='${name}', *, show_switcher=True,
                 show_underline=True, parent=None):
        self._show_underline = show_underline
        self._show_switcher = show_switcher
        super().__init__(parent=parent)
        self.label = TyphosTitleLabel(title)
        self.switcher = TyphosDisplaySwitcher()
        # Thin horizontal rule drawn beneath the title text.
        self.underline = QtWidgets.QFrame()
        self.underline.setFrameShape(self.underline.HLine)
        self.underline.setFrameShadow(self.underline.Plain)
        self.underline.setLineWidth(10)
        # Row 0: title + switcher; row 1: underline spanning both columns.
        self.grid_layout = QtWidgets.QGridLayout()
        self.grid_layout.addWidget(self.label, 0, 0)
        self.grid_layout.addWidget(self.switcher, 0, 1, Qt.AlignRight)
        self.grid_layout.addWidget(self.underline, 1, 0, 1, 2)
        if not utils.HELP_URL:
            # The help widget is entirely optional, based on environment
            # settings.
            self.help = None
        else:
            # Wire the switcher's help button signals through to the help
            # frame, and reflect its tooltip back onto the button.
            self.help = TyphosHelpFrame()
            self.switcher.help_toggle_button.toggle_help.connect(
                self.toggle_help
            )
            self.switcher.help_toggle_button.pop_out.connect(
                self.pop_out_help
            )
            self.switcher.help_toggle_button.open_in_browser.connect(
                self.help.open_in_browser
            )
            self.switcher.help_toggle_button.open_python_docs.connect(
                self.help.open_python_docs
            )
            self.switcher.help_toggle_button.report_jira_issue.connect(
                self.help.new_jira_widget
            )
            self.help.tooltip_updated.connect(
                self.switcher.help_toggle_button.setToolTip
            )
            self.grid_layout.addWidget(self.help, 2, 0, 1, 2)
        self.grid_layout.setSizeConstraint(self.grid_layout.SetMinimumSize)
        self.setLayout(self.grid_layout)
        # Set the property:
        self.show_switcher = show_switcher
        self.show_underline = show_underline
    def toggle_help(self, show):
        """Toggle the help visibility."""
        if self.help is None:
            return
        self.help.toggle_help(show)
        if self.help.parent() is None:
            # The help frame was popped out earlier; re-dock it in the grid.
            self.grid_layout.addWidget(self.help, 2, 0, 1, 2)
    def pop_out_help(self):
        """Pop out the help widget."""
        if self.help is None:
            return
        # Detaching from the parent turns the frame into a top-level window.
        self.help.setParent(None)
        self.switcher.help_toggle_button.setChecked(True)
        self.help.show_help()
        self.help.show()
        self.help.raise_()
    @Property(bool)
    def show_switcher(self):
        """Get or set whether to show the display switcher."""
        return self._show_switcher
    @show_switcher.setter
    def show_switcher(self, value):
        self._show_switcher = bool(value)
        self.switcher.setVisible(self._show_switcher)
    def add_device(self, device):
        """Typhos hook for setting the associated device."""
        # Only adopt the device name when no explicit title was provided.
        if not self.label.text():
            self.label.setText(device.name)
        if self.help is not None:
            self.help.add_device(device)
    @QtCore.Property(bool)
    def show_underline(self):
        """Get or set whether to show the underline."""
        return self._show_underline
    @show_underline.setter
    def show_underline(self, value):
        self._show_underline = bool(value)
        self.underline.setVisible(self._show_underline)
    def set_device_display(self, display):
        """Typhos callback: set the :class:`TyphosDeviceDisplay`."""
        self.device_display = display
        # Clicking the title label toggles all signal panels in the display.
        def toggle():
            toggle_display(display.display_widget)
        self.label.toggle_requested.connect(toggle)
    # Make designable properties from the title label available here as well
    label_alignment = forward_property('label', QtWidgets.QLabel, 'alignment')
    label_font = forward_property('label', QtWidgets.QLabel, 'font')
    label_indent = forward_property('label', QtWidgets.QLabel, 'indent')
    label_margin = forward_property('label', QtWidgets.QLabel, 'margin')
    label_openExternalLinks = forward_property('label', QtWidgets.QLabel,
                                               'openExternalLinks')
    label_pixmap = forward_property('label', QtWidgets.QLabel, 'pixmap')
    label_text = forward_property('label', QtWidgets.QLabel, 'text')
    label_textFormat = forward_property('label', QtWidgets.QLabel,
                                        'textFormat')
    label_textInteractionFlags = forward_property('label', QtWidgets.QLabel,
                                                  'textInteractionFlags')
    label_wordWrap = forward_property('label', QtWidgets.QLabel, 'wordWrap')
    # Make designable properties from the grid_layout
    layout_margin = forward_property('grid_layout', QtWidgets.QHBoxLayout,
                                     'margin')
    layout_spacing = forward_property('grid_layout', QtWidgets.QHBoxLayout,
                                      'spacing')
    # Make designable properties from the underline
    underline_palette = forward_property('underline', QtWidgets.QFrame,
                                         'palette')
    underline_styleSheet = forward_property('underline', QtWidgets.QFrame,
                                            'styleSheet')
    underline_lineWidth = forward_property('underline', QtWidgets.QFrame,
                                           'lineWidth')
    underline_midLineWidth = forward_property('underline', QtWidgets.QFrame,
                                              'midLineWidth')
class TyphosDeviceDisplay(utils.TyphosBase, widgets.TyphosDesignerMixin,
                          _DisplayTypes):
    """
    Main display for a single ophyd Device.
    This contains the widgets for all of the root devices signals, and any
    methods you would like to display. By typhos convention, the base
    initialization sets up the widgets and the :meth:`.from_device` class
    method will automatically populate the resulting display.
    Parameters
    ----------
    parent : QWidget, optional
        The parent widget.
    scrollable : bool, optional
        Semi-deprecated parameter. Use scroll_option instead.
        If ``True``, put the loaded template into a :class:`QScrollArea`.
        If ``False``, the display widget will go directly in this widget's
        layout.
        If omitted, scroll_option is used instead.
    composite_heuristics : bool, optional
        Enable composite heuristics, which may change the suggested detailed
        screen based on the contents of the added device. See also
        :meth:`.suggest_composite_screen`.
    embedded_templates : list, optional
        List of embedded templates to use in addition to those found on disk.
    detailed_templates : list, optional
        List of detailed templates to use in addition to those found on disk.
    engineering_templates : list, optional
        List of engineering templates to use in addition to those found on
        disk.
    display_type : DisplayTypes, str, or int, optional
        The default display type.
    scroll_option : ScrollOptions, str, or int, optional
        The scroll behavior.
    nested : bool, optional
        An optional annotation for a display that may be nested inside another.
    """
    # Template types and defaults
    Q_ENUMS(_DisplayTypes)
    TemplateEnum = DisplayTypes # For convenience
    # Thresholds used by suggest_composite_screen to pick a template.
    device_count_threshold = 0
    signal_count_threshold = 30
    def __init__(
        self,
        parent: Optional[QtWidgets.QWidget] = None,
        *,
        scrollable: Optional[bool] = None,
        composite_heuristics: bool = True,
        embedded_templates: Optional[List[str]] = None,
        detailed_templates: Optional[List[str]] = None,
        engineering_templates: Optional[List[str]] = None,
        display_type: Union[DisplayTypes, str, int] = 'detailed_screen',
        scroll_option: Union[ScrollOptions, str, int] = 'auto',
        nested: bool = False,
    ):
        self._composite_heuristics = composite_heuristics
        self._current_template = None
        self._forced_template = ''
        self._macros = {}
        self._display_widget = None
        self._scroll_option = ScrollOptions.no_scroll
        # True once search_for_templates has run for the current device.
        self._searched = False
        self._hide_empty = False
        self._nested = nested
        # One template list per display type; filled by search_for_templates.
        self.templates = {name: [] for name in DisplayTypes.names}
        self._display_type = normalize_display_type(display_type)
        # User-supplied template paths take effect alongside on-disk ones.
        instance_templates = {
            'embedded_screen': embedded_templates or [],
            'detailed_screen': detailed_templates or [],
            'engineering_screen': engineering_templates or [],
        }
        for view, path_list in instance_templates.items():
            paths = [pathlib.Path(p).expanduser().resolve() for p in path_list]
            self.templates[view].extend(paths)
        self._scroll_area = QtWidgets.QScrollArea()
        self._scroll_area.setAlignment(Qt.AlignTop)
        self._scroll_area.setObjectName('scroll_area')
        self._scroll_area.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self._scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self._scroll_area.setWidgetResizable(True)
        self._scroll_area.setFrameStyle(QtWidgets.QFrame.NoFrame)
        super().__init__(parent=parent)
        layout = QtWidgets.QHBoxLayout()
        self.setLayout(layout)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(self._scroll_area)
        # `scrollable` is semi-deprecated; it overrides `scroll_option`.
        if scrollable is None:
            self.scroll_option = scroll_option
        else:
            if scrollable:
                self.scroll_option = ScrollOptions.scrollbar
            else:
                self.scroll_option = ScrollOptions.no_scroll
    @Property(bool)
    def composite_heuristics(self):
        """Allow composite screen to be suggested first by heuristics."""
        return self._composite_heuristics
    @composite_heuristics.setter
    def composite_heuristics(self, composite_heuristics):
        self._composite_heuristics = bool(composite_heuristics)
    @Property(_ScrollOptions)
    def scroll_option(self) -> ScrollOptions:
        """Place the display in a scrollable area."""
        return self._scroll_option
    @scroll_option.setter
    def scroll_option(self, scrollable: ScrollOptions):
        # Switch the scroll area behavior
        opt = normalize_scroll_option(scrollable)
        if opt == self._scroll_option:
            return
        self._scroll_option = opt
        # Re-parent the current display widget per the new scroll setting.
        self._move_display_to_layout(self._display_widget)
    @Property(bool)
    def hideEmpty(self):
        """Toggle hiding or showing empty panels."""
        return self._hide_empty
    @hideEmpty.setter
    def hideEmpty(self, checked):
        if checked != self._hide_empty:
            self._hide_empty = checked
    def _move_display_to_layout(self, widget):
        """Place ``widget`` either in the scroll area or the plain layout."""
        if not widget:
            return
        widget.setParent(None)
        # Resolve the effective scroll choice; 'auto' scrolls for everything
        # except embedded screens.
        if self.scroll_option == ScrollOptions.auto:
            if self.display_type == DisplayTypes.embedded_screen:
                scrollable = False
            else:
                scrollable = True
        elif self.scroll_option == ScrollOptions.scrollbar:
            scrollable = True
        elif self.scroll_option == ScrollOptions.no_scroll:
            scrollable = False
        else:
            scrollable = True
        if scrollable:
            self._scroll_area.setWidget(widget)
        else:
            self.layout().addWidget(widget)
        self._scroll_area.setVisible(scrollable)
    def _generate_template_menu(self, base_menu):
        """Generate the template switcher menu, adding it to ``base_menu``."""
        for view, filenames in self.templates.items():
            # 'detailed_screen' -> 'Detailed' submenu, etc.
            if view.endswith('_screen'):
                view = view.split('_screen')[0]
            menu = base_menu.addMenu(view.capitalize())
            for filename in filenames:
                # Bind `filename` as a default arg so each action keeps its
                # own template (avoids the late-binding closure pitfall).
                def switch_template(*, filename=filename):
                    self.force_template = filename
                action = menu.addAction(os.path.split(filename)[-1])
                action.triggered.connect(switch_template)
        refresh_action = base_menu.addAction("Refresh Templates")
        refresh_action.triggered.connect(self._refresh_templates)
    def _refresh_templates(self):
        """Context menu 'Refresh Templates' clicked."""
        # Force an update of the display cache.
        cache.get_global_display_path_cache().update()
        self.search_for_templates()
        self.load_best_template()
    @property
    def current_template(self):
        """Get the current template being displayed."""
        return self._current_template
    @Property(_DisplayTypes)
    def display_type(self):
        """Get or set the current display type."""
        return self._display_type
    @display_type.setter
    def display_type(self, value):
        value = normalize_display_type(value)
        if self._display_type != value:
            self._display_type = value
            self.load_best_template()
    @property
    def macros(self):
        """Get or set the macros for the display."""
        return dict(self._macros)
    @macros.setter
    def macros(self, macros):
        self._macros.clear()
        self._macros.update(**(macros or {}))
        # If any display macros are specified, re-search for templates:
        if any(view in self._macros for view in DisplayTypes.names):
            self.search_for_templates()
    @Property(str, designable=False)
    def device_class(self):
        """Get the full class with module name of loaded device."""
        device = self.device
        cls = self.device.__class__
        return f'{cls.__module__}.{cls.__name__}' if device else ''
    @Property(str, designable=False)
    def device_name(self):
        """Get the name of the loaded device."""
        device = self.device
        return device.name if device else ''
    @property
    def device(self):
        """Get the device associated with this Device Display."""
        try:
            device, = self.devices
            return device
        except ValueError:
            # Zero or multiple devices -> implicitly return None.
            ...
    def get_best_template(self, display_type, macros):
        """
        Get the best template for the given display type.
        Parameters
        ----------
        display_type : DisplayTypes, str, or int
            The display type.
        macros : dict
            Macros to use when loading the template.
            NOTE(review): currently unused in this method.
        """
        display_type = normalize_display_type(display_type).name
        # Template lists are priority-ordered; the first entry wins.
        templates = self.templates[display_type]
        if templates:
            return templates[0]
        logger.warning("No templates available for display type: %s",
                       self._display_type)
    def _remove_display(self):
        """Remove the display widget, readying for a new template."""
        display_widget = self._display_widget
        if display_widget:
            if self._scroll_area.widget():
                self._scroll_area.takeWidget()
            self.layout().removeWidget(display_widget)
            display_widget.deleteLater()
        self._display_widget = None
    def load_best_template(self):
        """Load the best available template for the current display type."""
        if self.layout() is None:
            # If we are not fully initialized yet do not try and add anything
            # to the layout. This will happen if the QApplication has a
            # stylesheet that forces a template prior to the creation of this
            # display
            return
        if not self._searched:
            self.search_for_templates()
        self._remove_display()
        # A forced template always wins over the search results.
        template = (self._forced_template or
                    self.get_best_template(self._display_type, self.macros))
        if not template:
            widget = QtWidgets.QWidget()
            template = None
        else:
            template = pathlib.Path(template)
            try:
                widget = self._load_template(template)
            except Exception as ex:
                logger.exception("Unable to load file %r", template)
                # If we have a previously defined template
                if self._current_template is not None:
                    # Fallback to it so users have a choice
                    try:
                        widget = self._load_template(self._current_template)
                    except Exception:
                        logger.exception(
                            "Failed to fall back to previous template: %s",
                            self._current_template
                        )
                        template = None
                        widget = None
                    pydm.exception.raise_to_operator(ex)
                else:
                    widget = QtWidgets.QWidget()
                    template = None
        if widget:
            widget.setObjectName('display_widget')
            if widget.layout() is None and widget.minimumSize().width() == 0:
                # If the widget has no layout, use a fixed size for it.
                # Without this, the widget may not display at all.
                widget.setMinimumSize(widget.size())
        self._display_widget = widget
        self._current_template = template
        def size_hint(*args, **kwargs):
            return widget.size()
        # sizeHint is not defined so we suggest the widget size
        widget.sizeHint = size_hint
        # We should _move_display_to_layout as soon as it is created. This
        # allow us to speed up since if the widget is too complex it takes
        # seconds to set it to the QScrollArea
        self._move_display_to_layout(self._display_widget)
        self._update_children()
        utils.reload_widget_stylesheet(self)
    @property
    def display_widget(self):
        """Get the widget generated from the template."""
        return self._display_widget
    @staticmethod
    def _get_templates_from_macros(macros):
        """Resolve template paths named in ``macros``, per display type."""
        ret = {}
        paths = cache.get_global_display_path_cache().paths
        for display_type in DisplayTypes.names:
            ret[display_type] = None
            try:
                value = macros[display_type]
            except KeyError:
                # Macro not provided for this display type; leave None.
                ...
            else:
                if not value:
                    continue
                try:
                    value = pathlib.Path(value)
                except ValueError as ex:
                    logger.debug('Invalid path specified in macro: %s=%s',
                                 display_type, value, exc_info=ex)
                else:
                    ret[display_type] = list(utils.find_file_in_paths(
                        value, paths=paths))
        return ret
    def _load_template(self, filename):
        """Load template from file and return the widget."""
        filename = pathlib.Path(filename)
        # .py templates are loaded as code; everything else as a Qt .ui file.
        loader = (pydm.display.load_py_file if filename.suffix == '.py'
                  else pydm.display.load_ui_file)
        logger.debug('Load template using %s: %r', loader.__name__, filename)
        return loader(str(filename), macros=self._macros)
    def _update_children(self):
        """Notify child widgets of this device display + the device."""
        device = self.device
        display = self._display_widget
        designer = display.findChildren(widgets.TyphosDesignerMixin) or []
        bases = display.findChildren(utils.TyphosBase) or []
        # Use a set to avoid notifying widgets found by both queries twice.
        for widget in set(bases + designer):
            if device and hasattr(widget, 'add_device'):
                widget.add_device(device)
            if hasattr(widget, 'set_device_display'):
                widget.set_device_display(self)
    @Property(str)
    def force_template(self):
        """Force a specific template."""
        return self._forced_template
    @force_template.setter
    def force_template(self, value):
        if value != self._forced_template:
            self._forced_template = value
            self.load_best_template()
    @staticmethod
    def _build_macros_from_device(device, macros=None):
        """Build the macro dict: device metadata, then name/prefix, then ``macros``."""
        result = {}
        if hasattr(device, 'md'):
            if isinstance(device.md, dict):
                result = dict(device.md)
            else:
                result = dict(device.md.post())
        if 'name' not in result:
            result['name'] = device.name
        if 'prefix' not in result and hasattr(device, 'prefix'):
            result['prefix'] = device.prefix
        # Caller-supplied macros have the final say.
        result.update(**(macros or {}))
        return result
    def add_device(self, device, macros=None):
        """
        Add a Device and signals to the TyphosDeviceDisplay.
        The full dictionary of macros is built with the following order of
        precedence::
           1. Macros from the device metadata itself.
           2. If available, `name`, and `prefix` will be added from the device.
           3. The argument ``macros`` is then used to fill/update the final
              macro dictionary.
        Parameters
        ----------
        device : ophyd.Device
            The device to add.
        macros : dict, optional
            Additional macros to use/replace the defaults.
        """
        # We only allow one device at a time
        if self.devices:
            logger.debug("Removing devices %r", self.devices)
            self.devices.clear()
        # Add the device to the cache
        super().add_device(device)
        # Invalidate the previous template search for the new device.
        self._searched = False
        self.macros = self._build_macros_from_device(device, macros=macros)
        self.load_best_template()
    def search_for_templates(self):
        """Search the filesystem for device-specific templates."""
        device = self.device
        if not device:
            logger.debug('Cannot search for templates without device')
            return
        self._searched = True
        cls = device.__class__
        logger.debug('Searching for templates for %s', cls.__name__)
        macro_templates = self._get_templates_from_macros(self._macros)
        paths = cache.get_global_display_path_cache().paths
        for display_type in DisplayTypes.names:
            view = display_type
            if view.endswith('_screen'):
                view = view.split('_screen')[0]
            template_list = self.templates[display_type]
            template_list.clear()
            # 1. Highest priority: macros
            for template in set(macro_templates[display_type] or []):
                template_list.append(template)
                logger.debug('Adding macro template %s: %s (total=%d)',
                             display_type, template, len(template_list))
            # 2. Composite heuristics, if enabled
            if self._composite_heuristics and view == 'detailed':
                if self.suggest_composite_screen(cls):
                    template_list.append(DETAILED_TREE_TEMPLATE)
            # 3. Templates based on class hierarchy names
            filenames = utils.find_templates_for_class(cls, view, paths)
            for filename in filenames:
                if filename not in template_list:
                    template_list.append(filename)
                    logger.debug('Found new template %s: %s (total=%d)',
                                 display_type, filename, len(template_list))
            # 4. Default templates
            template_list.extend(
                [templ for templ in DEFAULT_TEMPLATES[display_type]
                 if templ not in template_list]
            )
    @classmethod
    def suggest_composite_screen(cls, device_cls):
        """
        Suggest to use the composite screen for the given class.
        Returns
        -------
        composite : bool
            If True, favor the composite screen.
        """
        num_devices = 0
        num_signals = 0
        # Tally sub-devices vs. plain signals among top-level components.
        for attr, component in utils._get_top_level_components(device_cls):
            num_devices += issubclass(component.cls, ophyd.Device)
            num_signals += issubclass(component.cls, ophyd.Signal)
        specific_screens = cls._get_specific_screens(device_cls)
        if (len(specific_screens) or
                (num_devices <= cls.device_count_threshold and
                 num_signals >= cls.signal_count_threshold)):
            # 1. There's a custom screen - we probably should use them
            # 2. There aren't many devices, so the composite display isn't
            #    useful
            # 3. There are many signals, which should be broken up somehow
            composite = False
        else:
            # 1. No custom screen, or
            # 2. Many devices or a relatively small number of signals
            composite = True
        logger.debug(
            '%s screens=%s num_signals=%d num_devices=%d -> composite=%s',
            device_cls, specific_screens, num_signals, num_devices, composite
        )
        return composite
    @classmethod
    def from_device(cls, device, template=None, macros=None, **kwargs):
        """
        Create a new TyphosDeviceDisplay from a Device.
        Loads the signals in to the appropriate positions and sets the title to
        a cleaned version of the device name
        Parameters
        ----------
        device : ophyd.Device
        template : str, optional
            Set the ``display_template``.
        macros : dict, optional
            Macro substitutions to be placed in template.
        **kwargs
            Passed to the class init.
        """
        display = cls(**kwargs)
        # Reset the template if provided
        if template:
            display.force_template = template
        # Add the device
        display.add_device(device, macros=macros)
        return display
    @classmethod
    def from_class(cls, klass, *, template=None, macros=None, **kwargs):
        """
        Create a new TyphosDeviceDisplay from a Device class.
        Loads the signals in to the appropriate positions and sets the title to
        a cleaned version of the device name.
        Parameters
        ----------
        klass : str or class
        template : str, optional
            Set the ``display_template``.
        macros : dict, optional
            Macro substitutions to be placed in template.
        **kwargs
            Extra arguments are used at device instantiation.
        Returns
        -------
        TyphosDeviceDisplay
        """
        try:
            obj = pcdsutils.utils.get_instance_by_name(klass, **kwargs)
        except Exception:
            logger.exception('Failed to generate TyphosDeviceDisplay from '
                             'class %s', klass)
            return None
        return cls.from_device(obj, template=template, macros=macros)
    @classmethod
    def _get_specific_screens(cls, device_cls):
        """
        Get the list of specific screens for a given device class.
        That is, screens that are not default Typhos-provided screens.
        """
        return [
            template for template in utils.find_templates_for_class(
                device_cls, 'detailed', utils.DISPLAY_PATHS)
            if not utils.is_standard_template(template)
        ]
    def to_image(self):
        """
        Return the entire display as a QtGui.QImage.
        Returns
        -------
        QtGui.QImage
            The display, as an image.
        """
        if self._display_widget is not None:
            return utils.widget_to_image(self._display_widget)
    @Slot()
    def copy_to_clipboard(self):
        """Copy the display image to the clipboard."""
        image = self.to_image()
        if image is not None:
            clipboard = QtGui.QGuiApplication.clipboard()
            clipboard.setImage(image)
    @Slot(object)
    def _tx(self, value):
        """Receive information from happi channel."""
        self.add_device(value['obj'], macros=value['md'])
    def __repr__(self):
        """Get a custom representation for TyphosDeviceDisplay."""
        return (
            f'<{self.__class__.__name__} at {hex(id(self))} '
            f'device={self.device_class}[{self.device_name!r}] '
            f'nested={self._nested}'
            f'>'
        )
def toggle_display(widget, force_state=None):
    """
    Toggle the visibility of all :class:`TyphosSignalPanel` in a display.
    Parameters
    ----------
    widget : QWidget
        The widget in which to look for Panels.
    force_state : bool
        If set to True or False, it will change visibility to the value of
        force_state.
        If not set or set to None, it will flip the current panels state.
    """
    found_panels = widget.findChildren(typhos_panel.TyphosSignalPanel) or []
    if force_state is None:
        # Flip: show everything unless all panels are already visible.
        new_state = not all(p.isVisible() for p in found_panels)
    else:
        new_state = force_state
    for p in found_panels:
        p.setVisible(new_state)
def show_empty(widget):
    """
    Recursively shows all panels and widgets, empty or not.
    Parameters
    ----------
    widget : QWidget
    """
    # Recurse into every nested device display first, then show this level.
    for child_display in widget.findChildren(TyphosDeviceDisplay) or []:
        show_empty(child_display)
    widget.setVisible(True)
    toggle_display(widget, force_state=True)
def hide_empty(widget, process_widget=True):
    """
    Recursively hide empty panels and widgets.
    Parameters
    ----------
    widget : QWidget
        The widget in which to start the recursive search.
    process_widget : bool
        Whether or not to process the visibility for the widget.
        This is useful since we don't want to hide the top-most
        widget otherwise users can't change the visibility back on.
    """
    def process(item, recursive=True):
        # Recurse into nested displays/panels before deciding visibility.
        if isinstance(item, TyphosDeviceDisplay) and recursive:
            hide_empty(item)
        elif isinstance(item, typhos_panel.TyphosSignalPanel):
            if recursive:
                hide_empty(item)
            visible = bool(item._panel_layout.visible_elements)
            item.setVisible(visible)
    if isinstance(widget, TyphosDeviceDisplay):
        # Check if the template at this display is one of the defaults
        # otherwise we are not sure if we can safely change it.
        if widget.current_template not in DEFAULT_TEMPLATES_FLATTEN:
            logger.info("Can't hide empty entries in non built-in templates")
            return
    children = widget.findChildren(utils.TyphosBase) or []
    for w in children:
        process(w)
    if process_widget:
        if isinstance(widget, TyphosDeviceDisplay):
            overall_status = any(w.isVisible() for w in children)
        elif isinstance(widget, typhos_panel.TyphosSignalPanel):
            overall_status = bool(widget._panel_layout.visible_elements)
        else:
            # Bug fix: `overall_status` was previously unbound for any other
            # widget type, raising UnboundLocalError at setVisible() below.
            # Preserve the widget's current visibility in that case.
            overall_status = widget.isVisible()
        widget.setVisible(overall_status)
|
"""
`feature_utils.py`
-------------------
Extract different types of features based on the properties of nodes within the AST or within the graph.
@author: Thao Nguyen (@thaonguyen19)
License: CC-BY 4.0
"""
import numpy as np
import ast_utils
import gensim
import re
class FeatureExtractor():
    """Extracts node-level features (currently the AST token id)."""

    def __init__(self):
        # Stateless; nothing to initialise.
        pass

    def get_node_type(self, node):
        """Return the token id of ``node`` as provided by :mod:`ast_utils`."""
        return ast_utils.get_token_id(node)
def load_model(model_path):
    """Load and return a trained gensim Word2Vec model from ``model_path``."""
    return gensim.models.Word2Vec.load(model_path)
def camel_case_split(identifier):
    """Split a camelCase/PascalCase identifier into its component words.

    Acronym runs are kept together, e.g. ``"HTTPResponse"`` ->
    ``["HTTP", "Response"]``. Returns ``[]`` for an empty string.
    """
    word_pattern = r'.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)'
    return re.findall(word_pattern, identifier)
def token2vec(token, slot=None):
    """Embed an AST token via the pre-trained token2vec Word2Vec model.

    Variables are embedded by averaging the sub-token vectors of their
    snake_case/camelCase parts; the token named ``slot`` is replaced by the
    ``[MASK]`` embedding, and unknown sub-tokens fall back to ``[UNK]``.
    """
    MODEL_PATH = "../token2vec.model"
    # Bug fix: the model was previously re-loaded from disk on *every* call.
    # Memoize it on the function object so it is only read once.
    model = getattr(token2vec, "_model", None)
    if model is None:
        model = load_model(MODEL_PATH)
        token2vec._model = model
    flatten = lambda l: [y for x in l for y in x]
    # Non-variable nodes are embedded by their AST node-type name.
    strtok = type(token).__name__
    if ast_utils.is_variable(token):
        strtok = ast_utils.get_varname(token)
    if strtok == slot:
        print("[MASK] replaced {}".format(strtok))
        embd = model["[MASK]"]
    else:
        # Split snake_case first, then camelCase, and average the sub-vectors.
        subtoks = flatten([camel_case_split(v) for v in strtok.split('_')])
        try:
            embd = np.mean([model[sk] for sk in subtoks], axis=0)
        except Exception:
            # Any sub-token missing from the vocabulary -> use [UNK].
            print("[UNK] replaced token {}".format(strtok))
            embd = model["[UNK]"]
    return embd
|
import numpy as np
import matplotlib.pyplot as plt
# Q: number of internal memory (controller) states; initial memory state is
# sampled from a softmax over `phi` below.
Q= 8
# N: number of environment states (next state j is drawn from range(N)).
N=5
# A: number of actions per memory state.
A=2
# Step size for the in-trajectory value refinement loop.
step_size=0.5
# Number of outer training iterations.
iterations =400
# Discount factor used in the (I - gamma*A) linear solves.
gamma =.92
# NOTE(review): `al` is only referenced in commented-out update code below.
al = 0.05
# Softmax logits: initial-memory (phi), action (chi), and memory-transition
# (shi) parameters, all initialised to ones.
phi = np.ones(Q)
chi= np.ones((Q,A))
shi= np.ones((N,Q,N,Q,A)) # Dimension(i,q0,j,q_next,a)
def softmax_intialmemory(phi):
    """Softmax the logits ``phi`` and sample an initial memory state.

    Returns the probability vector ``alpha`` and the sampled index ``q_0``
    drawn from ``range(Q)`` with probabilities ``alpha``.
    """
    exp_phi = np.exp(phi)
    alpha = exp_phi / np.sum(exp_phi)
    q_0 = np.random.choice(Q, p=alpha)
    return alpha, q_0
def softmax_action(chi, q0):
    """Softmax the action logits row-wise and sample an action for state ``q0``.

    Returns the full policy matrix ``xi`` (rows sum to 1), the sampled
    action ``a``, and the transposed policy ``w``.
    """
    exp_chi = np.exp(chi)
    xi = exp_chi / np.sum(exp_chi, keepdims=True, axis=1)
    w = xi.T
    a = np.random.choice(A, p=xi[q0, :].T)
    return xi, a, w
def dirichlet_sample(alphas):
    """Draw one Dirichlet sample per row of the 2-D concentration array.

    Uses the standard gamma construction: normalise independent gamma draws
    so each row of the result sums to one.
    """
    gammas = np.random.standard_gamma(alphas)
    row_totals = gammas.sum(-1).reshape(-1, 1)
    return gammas / row_totals
if __name__ == "__main__":
    # Build a fixed random MDP: one Dirichlet-sampled transition matrix per
    # action, stacked so that P has axis order (i, j, a).
    alphas1 = np.array([[200,1,800,2,3],[200,1,800,2,3],[200,1,2,800,3],[200,1,2,3,800],[200,1,2,4,800]])
    alphas2= np.array([[800,200,1,2,3],[800,2,200,1,3],[800,1,12,200,2],[800,1,4,1,200],[800,4,2,1,200]])
    transition_probablity1 = dirichlet_sample(alphas1)
    transition_probablity2 = dirichlet_sample(alphas2)
    transitionMatrix= np.dstack((transition_probablity2,transition_probablity1)) #order is i,j,a
    P= transitionMatrix
    def softmax_transition(shi,q0,i,j_next,a):
        # Softmax the memory-transition logits over the q_next axis (axis 3)
        # and sample the next memory state for the given (i, q0, j_next, a).
        et = np.exp(shi)
        eta =(et)/np.sum(et, keepdims=True,axis=3)
        q_next = np.random.choice(Q,p=eta[i,q0,j_next,:,a].T)
        return eta, q_next
    for _ in range (iterations):
        P= transitionMatrix
        #print(P.shape)
        i0=1
        alpha ,q_0 = softmax_intialmemory(phi)
        #print("initial memory state",q_0)
        trajectory =600
        t=1
        gradient_phi=[]
        gradient_chi=[]
        gradient_shi=[]
        time = []
        value_reward =[]
        while t<trajectory:
            # One step of the finite-state controller: sample action, next
            # environment state, and next memory state.
            i =i0
            q0 = q_0
            #print("without function",q0)
            xi,a,w = softmax_action(chi,q0)
            #print("action",a)
            j_next= np.random.choice(N,p=P[i,:,a].T)
            #print("next_state",j_next)
            eta, q_next = softmax_transition(shi,q0,i,j_next,a)
            #print("next memory state",q_next)
            # Reward tensor R with axis order (i, j, a); identical per action.
            c = np.array([2, 0, 0, 0, 10])
            r_new = np.tile(c, (5,1))
            R=np.dstack((r_new,r_new))
            # Solve the linear Bellman system (I - gamma*A) u = b for the
            # joint (state, memory) value function u.
            A_a = np.einsum("qa,ija,iqjpa -> iqjp", xi,P,eta)
            A_new = A_a.reshape((N*Q,N*Q))
            #print(A_new)
            b_new = np.einsum("qa,ija,ija -> iq", xi,P,R).reshape((N*Q))
            u = np.linalg.solve(np.eye(N*Q) - gamma * A_new, b_new).reshape((N, Q))
            V=u.T
            # Look at this for-loop here: TD-style refinement of u.
            for i1 in range(N-1):
                for q1 in range(Q):
                    u_one = u[i1,q1] + step_size * (R[i,j_next,a] + gamma * u[i1+1,q1] - u[i1,q1])
                    u[i1,q1] = u_one
            v=u.T
            #print("value",v)
            value_function = u[i,q0]
            # Gradient of the value w.r.t. the action logits chi.
            #b_xi_final =np.zeros((N*Q))
            b_xi_final =np.zeros((N*Q))
            for i2 in range (N):
                b_xi_one =np.sum(P[i2,j_next,a]* (R[i2,j_next,a] + (gamma * np.sum(eta[i2,q0,j_next,q_next,a]*u[j_next,q_next]))))
                b_xi_two =np.sum(xi[q0,a]*(b_xi_one))
                b_xi_three= (b_xi_one-b_xi_two)
                b_xi_four = xi[q0,a]*(b_xi_three)
                b_xi_final[(i2-1)*Q+q0] = b_xi_four
            #print(b_xi_final)
            dell_xi_init = np.linalg.solve(np.eye(N*Q) - gamma * A_new, b_xi_final).reshape((N, Q))
            #print(dell_xi_init)
            # Gradient of the value w.r.t. the memory-transition logits shi.
            b_shi_final = np.zeros((N*Q))
            b_shi_one = u[j_next,q0]-np.sum(eta[i,q0,j_next,q_next,a]*u[j_next,q0])
            b_shi_two =gamma*P[i,j_next,a]*eta[i,q0,j_next,q_next,a]*(b_shi_one)
            b_shi_final[(i-1)*Q+q0] = b_shi_two
            dell_shi_init = np.linalg.solve(np.eye(N*Q) - gamma * A_new, b_shi_final).reshape((N, Q))
            #print(dell_shi_init)
            # Gradient of the value w.r.t. the initial-memory logits phi.
            dell_phi=np.zeros(Q)
            for q2 in range(Q):
                dell_phi_init= alpha*(u[i,q2]- np.sum(alpha*u[i,q0]))
                dell_phi =dell_phi_init[i]
            #print('gradient1',dell_phi)
            # Gradient-ascent updates with a 1/t decaying step size.
            phi = phi+ (1/t)*dell_phi
            dell_xi_one = np.sum(alpha* dell_xi_init,axis=1)
            dell_xi = dell_xi_one[i]
            #print ("gradient",dell_xi_final)
            chi[q0,a] = chi[q0,a]+ (1/t)*dell_xi
            #print("action",a,"memory state",q0,"Add:",(1/t)*dell_xi)
            #print(chi)
            dell_shi_one = np.sum(alpha* dell_shi_init,axis=1)
            dell_shi = dell_shi_one[i]
            shi[i,q0,j_next,q_next,a] = shi[i,q0,j_next,q_next,a]+ (1/t)*dell_shi
            t = t+1
            # Record gradients and value for the diagnostic plots below.
            gradient_phi.append(dell_phi)
            gradient_chi.append(dell_xi)
            gradient_shi.append(dell_shi)
            time.append(t)
            value_reward.append(value_function)
            #plt.show()
    #print("value update",v)
    #print(b_xi_final)
    #phi = phi+ al*beta_phi
    #print(phi)
    #chi = chi+ al*beta_chi
    #print(chi)
    #shi = shi+ al*beta_shi
    print(shi)
    #print("value", v)
    print(chi)
    #print(P)
    print(b_xi_final)
    # Plot the gradient traces from the last outer iteration.
    plt.plot(time,gradient_phi, label="gradient phi")
    plt.plot(time,gradient_chi, label="gradient chi")
    plt.plot(time,gradient_shi, label="gradient shi")
    plt.legend(bbox_to_anchor=(1, 1), loc=2, borderaxespad=0.)
    #plt.show()
|
# terrascript/data/digitalocean.py
# Generated-style module: one empty ``terrascript.Data`` subclass per
# DigitalOcean Terraform data source. The class *name* is the data-source
# identifier consumed by terrascript; no behavior is added here.
import terrascript
class digitalocean_account(terrascript.Data):
    pass
class digitalocean_certificate(terrascript.Data):
    pass
class digitalocean_container_registry(terrascript.Data):
    pass
class digitalocean_database_cluster(terrascript.Data):
    pass
class digitalocean_domain(terrascript.Data):
    pass
class digitalocean_droplet(terrascript.Data):
    pass
class digitalocean_droplets(terrascript.Data):
    pass
class digitalocean_droplet_snapshot(terrascript.Data):
    pass
class digitalocean_floating_ip(terrascript.Data):
    pass
class digitalocean_image(terrascript.Data):
    pass
class digitalocean_images(terrascript.Data):
    pass
class digitalocean_kubernetes_cluster(terrascript.Data):
    pass
class digitalocean_kubernetes_versions(terrascript.Data):
    pass
class digitalocean_loadbalancer(terrascript.Data):
    pass
class digitalocean_project(terrascript.Data):
    pass
class digitalocean_projects(terrascript.Data):
    pass
class digitalocean_record(terrascript.Data):
    pass
class digitalocean_region(terrascript.Data):
    pass
class digitalocean_regions(terrascript.Data):
    pass
class digitalocean_sizes(terrascript.Data):
    pass
class digitalocean_spaces_bucket(terrascript.Data):
    pass
class digitalocean_spaces_buckets(terrascript.Data):
    pass
class digitalocean_spaces_bucket_object(terrascript.Data):
    pass
class digitalocean_spaces_bucket_objects(terrascript.Data):
    pass
class digitalocean_ssh_key(terrascript.Data):
    pass
class digitalocean_tag(terrascript.Data):
    pass
class digitalocean_tags(terrascript.Data):
    pass
class digitalocean_volume_snapshot(terrascript.Data):
    pass
class digitalocean_volume(terrascript.Data):
    pass
class digitalocean_vpc(terrascript.Data):
    pass
# Public API: every data-source class defined above, in definition order.
__all__ = [
    "digitalocean_account",
    "digitalocean_certificate",
    "digitalocean_container_registry",
    "digitalocean_database_cluster",
    "digitalocean_domain",
    "digitalocean_droplet",
    "digitalocean_droplets",
    "digitalocean_droplet_snapshot",
    "digitalocean_floating_ip",
    "digitalocean_image",
    "digitalocean_images",
    "digitalocean_kubernetes_cluster",
    "digitalocean_kubernetes_versions",
    "digitalocean_loadbalancer",
    "digitalocean_project",
    "digitalocean_projects",
    "digitalocean_record",
    "digitalocean_region",
    "digitalocean_regions",
    "digitalocean_sizes",
    "digitalocean_spaces_bucket",
    "digitalocean_spaces_buckets",
    "digitalocean_spaces_bucket_object",
    "digitalocean_spaces_bucket_objects",
    "digitalocean_ssh_key",
    "digitalocean_tag",
    "digitalocean_tags",
    "digitalocean_volume_snapshot",
    "digitalocean_volume",
    "digitalocean_vpc",
]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetScheduleResult',
'AwaitableGetScheduleResult',
'get_schedule',
]
@pulumi.output_type
class GetScheduleResult:
    """
    A schedule.

    Output type for `get_schedule`.  The constructor validates each
    argument against its expected wire type (str or dict) and stores it
    via `pulumi.set`; the `@pulumi.getter` properties below read the
    stored values back via `pulumi.get`.
    """
    def __init__(__self__, created_date=None, daily_recurrence=None, hourly_recurrence=None, location=None, name=None, notification_settings=None, provisioning_state=None, status=None, tags=None, target_resource_id=None, task_type=None, time_zone_id=None, type=None, unique_identifier=None, weekly_recurrence=None):
        # Each guard runs only for truthy values: None (and '' / {}) skip
        # validation and are stored as-is.
        if created_date and not isinstance(created_date, str):
            raise TypeError("Expected argument 'created_date' to be a str")
        pulumi.set(__self__, "created_date", created_date)
        if daily_recurrence and not isinstance(daily_recurrence, dict):
            raise TypeError("Expected argument 'daily_recurrence' to be a dict")
        pulumi.set(__self__, "daily_recurrence", daily_recurrence)
        if hourly_recurrence and not isinstance(hourly_recurrence, dict):
            raise TypeError("Expected argument 'hourly_recurrence' to be a dict")
        pulumi.set(__self__, "hourly_recurrence", hourly_recurrence)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if notification_settings and not isinstance(notification_settings, dict):
            raise TypeError("Expected argument 'notification_settings' to be a dict")
        pulumi.set(__self__, "notification_settings", notification_settings)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if target_resource_id and not isinstance(target_resource_id, str):
            raise TypeError("Expected argument 'target_resource_id' to be a str")
        pulumi.set(__self__, "target_resource_id", target_resource_id)
        if task_type and not isinstance(task_type, str):
            raise TypeError("Expected argument 'task_type' to be a str")
        pulumi.set(__self__, "task_type", task_type)
        if time_zone_id and not isinstance(time_zone_id, str):
            raise TypeError("Expected argument 'time_zone_id' to be a str")
        pulumi.set(__self__, "time_zone_id", time_zone_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if unique_identifier and not isinstance(unique_identifier, str):
            raise TypeError("Expected argument 'unique_identifier' to be a str")
        pulumi.set(__self__, "unique_identifier", unique_identifier)
        if weekly_recurrence and not isinstance(weekly_recurrence, dict):
            raise TypeError("Expected argument 'weekly_recurrence' to be a dict")
        pulumi.set(__self__, "weekly_recurrence", weekly_recurrence)
    @property
    @pulumi.getter(name="createdDate")
    def created_date(self) -> str:
        """
        The creation date of the schedule.
        """
        return pulumi.get(self, "created_date")
    @property
    @pulumi.getter(name="dailyRecurrence")
    def daily_recurrence(self) -> Optional['outputs.DayDetailsResponse']:
        """
        If the schedule will occur once each day of the week, specify the daily recurrence.
        """
        return pulumi.get(self, "daily_recurrence")
    @property
    @pulumi.getter(name="hourlyRecurrence")
    def hourly_recurrence(self) -> Optional['outputs.HourDetailsResponse']:
        """
        If the schedule will occur multiple times a day, specify the hourly recurrence.
        """
        return pulumi.get(self, "hourly_recurrence")
    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        The location of the resource.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="notificationSettings")
    def notification_settings(self) -> Optional['outputs.NotificationSettingsResponse']:
        """
        Notification settings.
        """
        return pulumi.get(self, "notification_settings")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> Optional[str]:
        """
        The provisioning status of the resource.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """
        The status of the schedule (i.e. Enabled, Disabled)
        """
        return pulumi.get(self, "status")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        The tags of the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="targetResourceId")
    def target_resource_id(self) -> Optional[str]:
        """
        The resource ID to which the schedule belongs
        """
        return pulumi.get(self, "target_resource_id")
    @property
    @pulumi.getter(name="taskType")
    def task_type(self) -> Optional[str]:
        """
        The task type of the schedule (e.g. LabVmsShutdownTask, LabVmAutoStart).
        """
        return pulumi.get(self, "task_type")
    @property
    @pulumi.getter(name="timeZoneId")
    def time_zone_id(self) -> Optional[str]:
        """
        The time zone ID (e.g. Pacific Standard time).
        """
        return pulumi.get(self, "time_zone_id")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="uniqueIdentifier")
    def unique_identifier(self) -> Optional[str]:
        """
        The unique immutable identifier of a resource (Guid).
        """
        return pulumi.get(self, "unique_identifier")
    @property
    @pulumi.getter(name="weeklyRecurrence")
    def weekly_recurrence(self) -> Optional['outputs.WeekDetailsResponse']:
        """
        If the schedule will occur only some days of the week, specify the weekly recurrence.
        """
        return pulumi.get(self, "weekly_recurrence")
class AwaitableGetScheduleResult(GetScheduleResult):
    """GetScheduleResult variant that satisfies the awaitable protocol.

    `__await__` never suspends: the unreachable `if False: yield` makes
    the method a generator function (so `await` works on instances), and
    the method immediately returns a plain GetScheduleResult copy of all
    fields as the await result.
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetScheduleResult(
            created_date=self.created_date,
            daily_recurrence=self.daily_recurrence,
            hourly_recurrence=self.hourly_recurrence,
            location=self.location,
            name=self.name,
            notification_settings=self.notification_settings,
            provisioning_state=self.provisioning_state,
            status=self.status,
            tags=self.tags,
            target_resource_id=self.target_resource_id,
            task_type=self.task_type,
            time_zone_id=self.time_zone_id,
            type=self.type,
            unique_identifier=self.unique_identifier,
            weekly_recurrence=self.weekly_recurrence)
def get_schedule(expand: Optional[str] = None,
                 lab_name: Optional[str] = None,
                 name: Optional[str] = None,
                 resource_group_name: Optional[str] = None,
                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetScheduleResult:
    """
    Use this data source to access information about an existing resource.
    :param str expand: Specify the $expand query. Example: 'properties($select=status)'
    :param str lab_name: The name of the lab.
    :param str name: The name of the schedule.
    :param str resource_group_name: The name of the schedule.
    """
    # Wire arguments for the engine invoke, keyed by their camelCase names.
    __args__ = {
        'expand': expand,
        'labName': lab_name,
        'name': name,
        'resourceGroupName': resource_group_name,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:devtestlab/v20160515:getSchedule', __args__, opts=opts, typ=GetScheduleResult).value
    # Re-wrap the raw invoke result in the awaitable variant, copying every
    # output property across by name.
    result_fields = (
        'created_date', 'daily_recurrence', 'hourly_recurrence', 'location',
        'name', 'notification_settings', 'provisioning_state', 'status',
        'tags', 'target_resource_id', 'task_type', 'time_zone_id', 'type',
        'unique_identifier', 'weekly_recurrence')
    return AwaitableGetScheduleResult(
        **{field: getattr(__ret__, field) for field in result_fields})
|
from __future__ import division
import logbook
import numpy as np
import pandas as pd
from pandas.lib import checknull
try:
# optional cython based OrderedDict
from cyordereddict import OrderedDict
except ImportError:
from collections import OrderedDict
from six import iteritems, itervalues
from zipline.protocol import Event, DATASOURCE_TYPE
from zipline.finance.slippage import Transaction
from zipline.utils.serialization_utils import (
VERSION_LABEL
)
import zipline.protocol as zp
from zipline.assets import (
Equity, Future
)
from zipline.errors import PositionTrackerMissingAssetFinder
from . position import positiondict
log = logbook.Logger('Performance')
class PositionTracker(object):
    """Tracks all open positions for a trading algorithm.

    Maintains one position object per sid plus parallel OrderedDicts
    (amount, last sale price, and per-asset value/exposure/payout
    multipliers) so aggregate position values and exposures can be
    computed quickly.  Also accrues/pays dividends and schedules
    auto-close events for Futures.

    Fix applied in review: ``pay_dividends`` previously discarded the
    result of ``DataFrame.drop`` (which is not in-place), so dividends
    were never removed from the unpaid table and would have been paid on
    every subsequent pay date.
    """
    def __init__(self, asset_finder):
        self.asset_finder = asset_finder
        # sid => position object
        self.positions = positiondict()
        # Arrays for quick calculations of positions value; kept in sync
        # with self.positions by the update_*/execute_transaction methods.
        self._position_amounts = OrderedDict()
        self._position_last_sale_prices = OrderedDict()
        self._position_value_multipliers = OrderedDict()
        self._position_exposure_multipliers = OrderedDict()
        self._position_payout_multipliers = OrderedDict()
        # Dividends earned but not yet paid, one row per dividend id.
        self._unpaid_dividends = pd.DataFrame(
            columns=zp.DIVIDEND_PAYMENT_FIELDS,
        )
        self._positions_store = zp.Positions()
        # Dict, keyed on dates, that contains lists of close position events
        # for any Assets in this tracker's positions
        self._auto_close_position_sids = {}
    def _update_asset(self, sid):
        """Lazily populate the multiplier dicts for `sid`.

        Equities count fully toward value/exposure and have no payout;
        Futures count toward exposure and payout at their contract
        multiplier but contribute no direct value, and are scheduled for
        auto-close on their notice date (or expiration date if there is
        no notice date).

        Raises PositionTrackerMissingAssetFinder if the sid is unknown
        and no asset finder is available to classify it.
        """
        try:
            # EAFP: a hit on all three dicts means the sid is already
            # classified and there is nothing to do.
            self._position_value_multipliers[sid]
            self._position_exposure_multipliers[sid]
            self._position_payout_multipliers[sid]
        except KeyError:
            # Check if there is an AssetFinder
            if self.asset_finder is None:
                raise PositionTrackerMissingAssetFinder()
            # Collect the value multipliers from applicable sids
            asset = self.asset_finder.retrieve_asset(sid)
            if isinstance(asset, Equity):
                self._position_value_multipliers[sid] = 1
                self._position_exposure_multipliers[sid] = 1
                self._position_payout_multipliers[sid] = 0
            if isinstance(asset, Future):
                self._position_value_multipliers[sid] = 0
                self._position_exposure_multipliers[sid] = \
                    asset.contract_multiplier
                self._position_payout_multipliers[sid] = \
                    asset.contract_multiplier
                # Futures are closed on their notice_date
                if asset.notice_date:
                    self._insert_auto_close_position_date(
                        dt=asset.notice_date,
                        sid=sid
                    )
                # If the Future does not have a notice_date, it will be closed
                # on its expiration_date
                elif asset.expiration_date:
                    self._insert_auto_close_position_date(
                        dt=asset.expiration_date,
                        sid=sid
                    )
    def _insert_auto_close_position_date(self, dt, sid):
        """
        Inserts the given SID in to the list of positions to be auto-closed by
        the given dt.
        Parameters
        ----------
        dt : pandas.Timestamp
            The date before-which the given SID will be auto-closed
        sid : int
            The SID of the Asset to be auto-closed
        """
        self._auto_close_position_sids.setdefault(dt, set()).add(sid)
    def auto_close_position_events(self, next_trading_day):
        """
        Generates CLOSE_POSITION events for any SIDs whose auto-close date is
        before or equal to the given date.
        Parameters
        ----------
        next_trading_day : pandas.Timestamp
            The time before-which certain Assets need to be closed
        Yields
        ------
        Event
            A close position event for any sids that should be closed before
            the next_trading_day parameter
        """
        past_asset_end_dates = set()
        # Check the auto_close_position_dates dict for SIDs to close
        for date, sids in self._auto_close_position_sids.items():
            if date > next_trading_day:
                continue
            past_asset_end_dates.add(date)
            for sid in sids:
                # Yield a CLOSE_POSITION event
                event = Event({
                    'dt': date,
                    'type': DATASOURCE_TYPE.CLOSE_POSITION,
                    'sid': sid,
                })
                yield event
        # Clear out past dates
        while past_asset_end_dates:
            self._auto_close_position_sids.pop(past_asset_end_dates.pop())
    def update_last_sale(self, event):
        """Record a trade event's price as the position's last sale.

        Returns the cash adjustment implied by the price move for assets
        with a payout multiplier (Futures); 0 for untracked sids or null
        prices.
        """
        # NOTE, PerformanceTracker already vetted as TRADE type
        sid = event.sid
        if sid not in self.positions:
            return 0
        price = event.price
        if checknull(price):
            return 0
        pos = self.positions[sid]
        old_price = pos.last_sale_price
        pos.last_sale_date = event.dt
        pos.last_sale_price = price
        self._position_last_sale_prices[sid] = price
        # Calculate cash adjustment on assets with multipliers
        return ((price - old_price) * self._position_payout_multipliers[sid]
                * pos.amount)
    def update_positions(self, positions):
        """Merge a batch of position objects and sync the quick-lookup dicts."""
        # update positions in batch
        self.positions.update(positions)
        for sid, pos in iteritems(positions):
            self._position_amounts[sid] = pos.amount
            self._position_last_sale_prices[sid] = pos.last_sale_price
            self._update_asset(sid)
    def update_position(self, sid, amount=None, last_sale_price=None,
                        last_sale_date=None, cost_basis=None):
        """Update individual fields of the position for `sid`.

        Only the fields passed as non-None are touched.  (Vestigial
        ``self._position_values = None`` cache-invalidation writes were
        removed: no such cache exists — ``position_values`` is a property
        that recomputes on every access.)
        """
        pos = self.positions[sid]
        if amount is not None:
            pos.amount = amount
            self._position_amounts[sid] = amount
            self._update_asset(sid=sid)
        if last_sale_price is not None:
            pos.last_sale_price = last_sale_price
            self._position_last_sale_prices[sid] = last_sale_price
        if last_sale_date is not None:
            pos.last_sale_date = last_sale_date
        if cost_basis is not None:
            pos.cost_basis = cost_basis
    def execute_transaction(self, txn):
        """Apply a fill to its position and sync the quick-lookup dicts."""
        # Update Position
        # ----------------
        sid = txn.sid
        position = self.positions[sid]
        position.update(txn)
        self._position_amounts[sid] = position.amount
        self._position_last_sale_prices[sid] = position.last_sale_price
        self._update_asset(sid)
    def handle_commission(self, commission):
        """Fold a commission into the cost basis of the affected position."""
        # Adjust the cost basis of the stock if we own it
        if commission.sid in self.positions:
            self.positions[commission.sid].\
                adjust_commission_cost_basis(commission)
    @property
    def position_values(self):
        """List of amount * price * value-multiplier, one per tracked sid."""
        iter_amount_price_multiplier = zip(
            itervalues(self._position_amounts),
            itervalues(self._position_last_sale_prices),
            itervalues(self._position_value_multipliers),
        )
        return [
            price * amount * multiplier for
            price, amount, multiplier in iter_amount_price_multiplier
        ]
    @property
    def position_exposures(self):
        """List of amount * price * exposure-multiplier, one per tracked sid."""
        iter_amount_price_multiplier = zip(
            itervalues(self._position_amounts),
            itervalues(self._position_last_sale_prices),
            itervalues(self._position_exposure_multipliers),
        )
        return [
            price * amount * multiplier for
            price, amount, multiplier in iter_amount_price_multiplier
        ]
    def calculate_positions_value(self):
        """Total value across all positions (np.float64(0) when empty)."""
        if len(self.position_values) == 0:
            return np.float64(0)
        return sum(self.position_values)
    def calculate_positions_exposure(self):
        """Total exposure across all positions (np.float64(0) when empty)."""
        if len(self.position_exposures) == 0:
            return np.float64(0)
        return sum(self.position_exposures)
    def _longs_count(self):
        return sum(1 for i in self.position_exposures if i > 0)
    def _long_exposure(self):
        return sum(i for i in self.position_exposures if i > 0)
    def _long_value(self):
        return sum(i for i in self.position_values if i > 0)
    def _shorts_count(self):
        return sum(1 for i in self.position_exposures if i < 0)
    def _short_exposure(self):
        return sum(i for i in self.position_exposures if i < 0)
    def _short_value(self):
        return sum(i for i in self.position_values if i < 0)
    def _gross_exposure(self):
        return self._long_exposure() + abs(self._short_exposure())
    def _gross_value(self):
        return self._long_value() + abs(self._short_value())
    def _net_exposure(self):
        return self.calculate_positions_exposure()
    def _net_value(self):
        return self.calculate_positions_value()
    def handle_split(self, split):
        """Apply a split to the affected position; return leftover cash."""
        if split.sid in self.positions:
            # Make the position object handle the split. It returns the
            # leftover cash from a fractional share, if there is any.
            position = self.positions[split.sid]
            leftover_cash = position.handle_split(split)
            self._position_amounts[split.sid] = position.amount
            self._position_last_sale_prices[split.sid] = \
                position.last_sale_price
            self._update_asset(split.sid)
            return leftover_cash
    def _maybe_earn_dividend(self, dividend):
        """
        Take a historical dividend record and return a Series with fields in
        zipline.protocol.DIVIDEND_FIELDS (plus an 'id' field) representing
        the cash/stock amount we are owed when the dividend is paid.
        """
        if dividend['sid'] in self.positions:
            return self.positions[dividend['sid']].earn_dividend(dividend)
        else:
            return zp.dividend_payment()
    def earn_dividends(self, dividend_frame):
        """
        Given a frame of dividends whose ex_dates are all the next trading day,
        calculate and store the cash and/or stock payments to be paid on each
        dividend's pay date.
        """
        earned = dividend_frame.apply(self._maybe_earn_dividend, axis=1)\
                               .dropna(how='all')
        if len(earned) > 0:
            # Store the earned dividends so that they can be paid on the
            # dividends' pay_dates.
            self._unpaid_dividends = pd.concat(
                [self._unpaid_dividends, earned],
            )
    def _maybe_pay_dividend(self, dividend):
        """
        Take a historical dividend record, look up any stored record of
        cash/stock we are owed for that dividend, and return a Series
        with fields drawn from zipline.protocol.DIVIDEND_PAYMENT_FIELDS.
        """
        try:
            unpaid_dividend = self._unpaid_dividends.loc[dividend['id']]
            return unpaid_dividend
        except KeyError:
            return zp.dividend_payment()
    def pay_dividends(self, dividend_frame):
        """
        Given a frame of dividends whose pay_dates are all the next trading
        day, grant the cash and/or stock payments that were calculated on the
        given dividends' ex dates.
        """
        payments = dividend_frame.apply(self._maybe_pay_dividend, axis=1)\
                                 .dropna(how='all')
        # Mark these dividends as paid by dropping them from our unpaid
        # table.  BUGFIX: DataFrame.drop is not in-place by default; the
        # original code discarded the result, so dividends were never
        # marked paid.  Assign the result back.
        self._unpaid_dividends = self._unpaid_dividends.drop(payments.index)
        # Add stock for any stock dividends paid. Again, the values here may
        # be negative in the case of short positions.
        stock_payments = payments[payments['payment_sid'].notnull()]
        for _, row in stock_payments.iterrows():
            stock = row['payment_sid']
            share_count = row['share_count']
            # note we create a Position for stock dividend if we don't
            # already own the asset
            position = self.positions[stock]
            position.amount += share_count
            self._position_amounts[stock] = position.amount
            self._position_last_sale_prices[stock] = position.last_sale_price
            self._update_asset(stock)
        # Add cash equal to the net cash payed from all dividends. Note that
        # "negative cash" is effectively paid if we're short an asset,
        # representing the fact that we're required to reimburse the owner of
        # the stock for any dividends paid while borrowing.
        net_cash_payment = payments['cash_amount'].fillna(0).sum()
        return net_cash_payment
    def maybe_create_close_position_transaction(self, event):
        """Build a Transaction that flattens the position for event.sid.

        Returns None when there is no (non-zero) tracked amount for the
        sid.  Uses the event's price when present, otherwise the last
        recorded sale price.
        """
        if not self._position_amounts.get(event.sid):
            return None
        if 'price' in event:
            price = event.price
        else:
            price = self._position_last_sale_prices[event.sid]
        txn = Transaction(
            sid=event.sid,
            amount=(-1 * self._position_amounts[event.sid]),
            dt=event.dt,
            price=price,
            commission=0,
            order_id=0
        )
        return txn
    def get_positions(self):
        """Sync and return the protocol Positions store (non-empty only)."""
        positions = self._positions_store
        for sid, pos in iteritems(self.positions):
            if pos.amount == 0:
                # Clear out the position if it has become empty since the last
                # time get_positions was called. Catching the KeyError is
                # faster than checking `if sid in positions`, and this can be
                # potentially called in a tight inner loop.
                try:
                    del positions[sid]
                except KeyError:
                    pass
                continue
            # Note that this will create a position if we don't currently have
            # an entry
            position = positions[sid]
            position.amount = pos.amount
            position.cost_basis = pos.cost_basis
            position.last_sale_price = pos.last_sale_price
        return positions
    def get_positions_list(self):
        """Return non-empty positions as a list of plain dicts."""
        positions = []
        for sid, pos in iteritems(self.positions):
            if pos.amount != 0:
                positions.append(pos.to_dict())
        return positions
    def __getstate__(self):
        # Serialize only the primary state; the quick-lookup dicts and
        # the positions store are regenerated in __setstate__.
        state_dict = {}
        state_dict['asset_finder'] = self.asset_finder
        state_dict['positions'] = dict(self.positions)
        state_dict['unpaid_dividends'] = self._unpaid_dividends
        state_dict['auto_close_position_sids'] = self._auto_close_position_sids
        STATE_VERSION = 3
        state_dict[VERSION_LABEL] = STATE_VERSION
        return state_dict
    def __setstate__(self, state):
        OLDEST_SUPPORTED_STATE = 3
        version = state.pop(VERSION_LABEL)
        if version < OLDEST_SUPPORTED_STATE:
            # NOTE(review): BaseException is too broad a type to raise;
            # kept as-is because callers may catch it specifically.
            raise BaseException("PositionTracker saved state is too old.")
        self.asset_finder = state['asset_finder']
        self.positions = positiondict()
        # note that positions_store is temporary and gets regened from
        # .positions
        self._positions_store = zp.Positions()
        self._unpaid_dividends = state['unpaid_dividends']
        self._auto_close_position_sids = state['auto_close_position_sids']
        # Arrays for quick calculations of positions value
        self._position_amounts = OrderedDict()
        self._position_last_sale_prices = OrderedDict()
        self._position_value_multipliers = OrderedDict()
        self._position_exposure_multipliers = OrderedDict()
        self._position_payout_multipliers = OrderedDict()
        # Update positions is called without a finder
        self.update_positions(state['positions'])
|
""" Calls functions with RFont object's children.
Calls functions with RFont object's children. RFont object's child is one of the
RGlyph, RContour and RPoint. This module helps you to iterate RFont object easily.
Last modified date: 2019/09/26
Created by Seongju Woo.
"""
from functools import wraps
def iter_with_func(iter_func):
    """ Decorator for iterating over font objects with functions.

    Wraps a generator-returning function so that every yielded object is
    passed to each function in *args.  A function may be paired with a
    predicate by passing it as a keyword argument whose name equals the
    function's __name__; the function then runs only on objects for which
    the predicate returns True.
    Examples:
        from fontParts.world import CurrentFont
        # For all glyph objects in current font.
        # If glyph's name starts with 'AB', print glyph's name.
        def print_glyph(glyph):
            print(glyph)
        def print_condition(glyph):
            return glyph.name.startswith('AB')
        @iter_with_func
        def generate_glyph(font, *functions, **conditions):
            return (font.getGlyph(key) for key in font.keys())
        generate_glyph(CurrentFont(), print_glyph, print_glyph=print_condition)
    """
    @wraps(iter_func)
    def _apply_functions(data, *functions, **conditions):
        for item in iter_func(data, *functions, **conditions):
            for func in functions:
                predicate = conditions.get(func.__name__)
                # No predicate registered means "always apply".
                if predicate is None or predicate(item):
                    func(item)
    return _apply_functions
@iter_with_func
def point_generator(font, *functions, **conditions):
    """ Calls functions with RPoint objects in RFont object.

    Walks every RPoint of every contour of every glyph in the font and,
    via the `iter_with_func` decorator, applies each given function to it.
    Args:
        font:: RFont
        *functions:: (function object, ...)
            Each function must take a single RPoint argument.
        **conditions:: {str: function object, ...}
            Maps a function's name to a predicate over an RPoint; the
            function runs only on points for which the predicate is True.
    Examples:
        from fontParts.world import CurrentFont
        def print_func(point):
            print(point)
        def print_condition(point):
            return point.index == 3
        point_generator(CurrentFont(), print_func, print_func=print_condition)
    """
    return (
        point
        for glyph_name in font.keys()
        for outline in font.getGlyph(glyph_name)
        for point in outline.points
    )
@iter_with_func
def contour_generator(font, *functions, **conditions):
    """ Calls functions with RContour objects in RFont object.

    Walks every contour of every glyph in the font and, via the
    `iter_with_func` decorator, applies each given function to it.
    Args:
        font:: RFont
        *functions:: (function object, ...)
            Each function must take a single RContour argument.
        **conditions:: {str: function object, ...}
            Maps a function's name to a predicate over an RContour; the
            function runs only on contours for which the predicate is True.
    Examples:
        from fontParts.world import CurrentFont
        def print_func(contour):
            print(contour)
        def print_condition(contour):
            return len(contour.points) == 3
        contour_generator(CurrentFont(), print_func, print_func=print_condition)
    """
    return (
        outline
        for glyph_name in font.keys()
        for outline in font.getGlyph(glyph_name)
    )
@iter_with_func
def glyph_generator(font, *functions, **conditions):
    """ Calls functions with RGlyph objects in RFont object.

    Walks every glyph in the font and, via the `iter_with_func`
    decorator, applies each given function to it.
    Args:
        font:: RFont
        *functions:: (function object, ...)
            Each function must take a single RGlyph argument.
        **conditions:: {str: function object, ...}
            Maps a function's name to a predicate over an RGlyph; the
            function runs only on glyphs for which the predicate is True.
    Examples:
        from fontParts.world import CurrentFont
        def print_func(glyph):
            print(glyph.name)
        def print_condition(glyph):
            return glyph.name.startswith('uni')
        glyph_generator(CurrentFont(), print_func, print_func=print_condition)
    """
    # map() is lazy, matching the original generator expression.
    return map(font.getGlyph, font.keys())
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Limited-Memory BFGS minimization algorithm.
Limited-memory quasi-Newton methods are useful for solving large problems
whose Hessian matrices cannot be computed at a reasonable cost or are not
sparse. Instead of storing fully dense n x n approximations of Hessian
matrices, they only save a few vectors of length n that represent the
approximations implicitly.
This module implements the algorithm known as L-BFGS, which, as its name
suggests, is a limited-memory version of the BFGS algorithm.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import prefer_static
from tensorflow_probability.python.optimizer import bfgs_utils
# Immutable record of the complete L-BFGS optimizer state.  Returned by
# `minimize` and accepted back via its `previous_optimizer_results`
# argument to resume an optimization.
LBfgsOptimizerResults = collections.namedtuple(
    'LBfgsOptimizerResults', [
        'converged',  # Scalar boolean tensor indicating whether the minimum
                      # was found within tolerance.
        'failed',  # Scalar boolean tensor indicating whether a line search
                   # step failed to find a suitable step size satisfying Wolfe
                   # conditions. In the absence of any constraints on the
                   # number of objective evaluations permitted, this value will
                   # be the complement of `converged`. However, if there is
                   # a constraint and the search stopped due to available
                   # evaluations being exhausted, both `failed` and `converged`
                   # will be simultaneously False.
        'num_iterations',  # The number of iterations of the BFGS update.
        'num_objective_evaluations',  # The total number of objective
                                      # evaluations performed.
        'position',  # A tensor containing the last argument value found
                     # during the search. If the search converged, then
                     # this value is the argmin of the objective function.
        'objective_value',  # A tensor containing the value of the objective
                            # function at the `position`. If the search
                            # converged, then this is the (local) minimum of
                            # the objective function.
        'objective_gradient',  # A tensor containing the gradient of the
                               # objective function at the
                               # `final_position`. If the search converged
                               # the max-norm of this tensor should be
                               # below the tolerance.
        'position_deltas',  # A tensor encoding information about the latest
                            # changes in `position` during the algorithm
                            # execution. Its shape is of the form
                            # `(num_correction_pairs,) + position.shape` where
                            # `num_correction_pairs` is given as an argument to
                            # the minimize function.
        'gradient_deltas',  # A tensor encoding information about the latest
                            # changes in `objective_gradient` during the
                            # algorithm execution. Has the same shape as
                            # position_deltas.
    ])
def minimize(value_and_gradients_function,
             initial_position,
             previous_optimizer_results=None,
             num_correction_pairs=10,
             tolerance=1e-8,
             x_tolerance=0,
             f_relative_tolerance=0,
             initial_inverse_hessian_estimate=None,
             max_iterations=50,
             parallel_iterations=1,
             stopping_condition=None,
             max_line_search_iterations=50,
             name=None):
  """Applies the L-BFGS algorithm to minimize a differentiable function.

  Performs unconstrained minimization of a differentiable function using the
  L-BFGS scheme. See [Nocedal and Wright(2006)][1] for details of the algorithm.

  ### Usage:

  The following example demonstrates the L-BFGS optimizer attempting to find the
  minimum for a simple high-dimensional quadratic objective function.

  ```python
  # A high-dimensional quadratic bowl.
  ndims = 60
  minimum = np.ones([ndims], dtype='float64')
  scales = np.arange(ndims, dtype='float64') + 1.0

  # The objective function and the gradient.
  def quadratic_loss_and_gradient(x):
    return tfp.math.value_and_gradient(
        lambda x: tf.reduce_sum(
            scales * tf.math.squared_difference(x, minimum), axis=-1),
        x)
  start = np.arange(ndims, 0, -1, dtype='float64')
  optim_results = tfp.optimizer.lbfgs_minimize(
      quadratic_loss_and_gradient,
      initial_position=start,
      num_correction_pairs=10,
      tolerance=1e-8)

  # Check that the search converged
  assert(optim_results.converged)
  # Check that the argmin is close to the actual value.
  np.testing.assert_allclose(optim_results.position, minimum)
  ```

  ### References:

  [1] Jorge Nocedal, Stephen Wright. Numerical Optimization. Springer Series
      in Operations Research. pp 176-180. 2006
      http://pages.mtu.edu/~struther/Courses/OLD/Sp2013/5630/Jorge_Nocedal_Numerical_optimization_267490.pdf

  Args:
    value_and_gradients_function: A Python callable that accepts a point as a
      real `Tensor` and returns a tuple of `Tensor`s of real dtype containing
      the value of the function and its gradient at that point. The function
      to be minimized. The input is of shape `[..., n]`, where `n` is the size
      of the domain of input points, and all others are batching dimensions.
      The first component of the return value is a real `Tensor` of matching
      shape `[...]`. The second component (the gradient) is also of shape
      `[..., n]` like the input value to the function.
    initial_position: Real `Tensor` of shape `[..., n]`. The starting point, or
      points when using batching dimensions, of the search procedure. At these
      points the function value and the gradient norm should be finite.
      Exactly one of `initial_position` and `previous_optimizer_results` can be
      non-None.
    previous_optimizer_results: An `LBfgsOptimizerResults` namedtuple to
      initialize the optimizer state from, instead of an `initial_position`.
      This can be passed in from a previous return value to resume optimization
      with a different `stopping_condition`. Exactly one of `initial_position`
      and `previous_optimizer_results` can be non-None.
    num_correction_pairs: Positive integer. Specifies the maximum number of
      (position_delta, gradient_delta) correction pairs to keep as implicit
      approximation of the Hessian matrix.
    tolerance: Scalar `Tensor` of real dtype. Specifies the gradient tolerance
      for the procedure. If the supremum norm of the gradient vector is below
      this number, the algorithm is stopped.
    x_tolerance: Scalar `Tensor` of real dtype. If the absolute change in the
      position between one iteration and the next is smaller than this number,
      the algorithm is stopped.
    f_relative_tolerance: Scalar `Tensor` of real dtype. If the relative change
      in the objective value between one iteration and the next is smaller
      than this value, the algorithm is stopped.
    initial_inverse_hessian_estimate: None. Option currently not supported.
    max_iterations: Scalar positive int32 `Tensor`. The maximum number of
      iterations for L-BFGS updates.
    parallel_iterations: Positive integer. The number of iterations allowed to
      run in parallel.
    stopping_condition: (Optional) A Python function that takes as input two
      Boolean tensors of shape `[...]`, and returns a Boolean scalar tensor.
      The input tensors are `converged` and `failed`, indicating the current
      status of each respective batch member; the return value states whether
      the algorithm should stop. The default is tfp.optimizer.converged_all
      which only stops when all batch members have either converged or failed.
      An alternative is tfp.optimizer.converged_any which stops as soon as one
      batch member has converged, or when all have failed.
    max_line_search_iterations: Python int. The maximum number of iterations
      for the `hager_zhang` line search algorithm.
    name: (Optional) Python str. The name prefixed to the ops created by this
      function. If not supplied, the default name 'minimize' is used.

  Returns:
    optimizer_results: A namedtuple containing the following items:
      converged: Scalar boolean tensor indicating whether the minimum was
        found within tolerance.
      failed:  Scalar boolean tensor indicating whether a line search
        step failed to find a suitable step size satisfying Wolfe
        conditions. In the absence of any constraints on the
        number of objective evaluations permitted, this value will
        be the complement of `converged`. However, if there is
        a constraint and the search stopped due to available
        evaluations being exhausted, both `failed` and `converged`
        will be simultaneously False.
      num_objective_evaluations: The total number of objective
        evaluations performed.
      position: A tensor containing the last argument value found
        during the search. If the search converged, then
        this value is the argmin of the objective function.
      objective_value: A tensor containing the value of the objective
        function at the `position`. If the search converged, then this is
        the (local) minimum of the objective function.
      objective_gradient: A tensor containing the gradient of the objective
        function at the `position`. If the search converged the
        max-norm of this tensor should be below the tolerance.
      position_deltas: A tensor encoding information about the latest
        changes in `position` during the algorithm execution.
      gradient_deltas: A tensor encoding information about the latest
        changes in `objective_gradient` during the algorithm execution.
  """
  if initial_inverse_hessian_estimate is not None:
    raise NotImplementedError(
        'Support of initial_inverse_hessian_estimate arg not yet implemented')

  if stopping_condition is None:
    stopping_condition = bfgs_utils.converged_all

  with tf.name_scope(name or 'minimize'):
    if (initial_position is None) == (previous_optimizer_results is None):
      raise ValueError(
          'Exactly one of `initial_position` or '
          '`previous_optimizer_results` may be specified.')

    # Infer the common dtype from whichever starting point was supplied, so
    # the tolerance tensors below match it.
    if initial_position is not None:
      initial_position = tf.convert_to_tensor(
          initial_position, name='initial_position')
      dtype = dtype_util.base_dtype(initial_position.dtype)
    if previous_optimizer_results is not None:
      dtype = dtype_util.base_dtype(previous_optimizer_results.position.dtype)

    tolerance = tf.convert_to_tensor(
        tolerance, dtype=dtype, name='grad_tolerance')
    f_relative_tolerance = tf.convert_to_tensor(
        f_relative_tolerance, dtype=dtype, name='f_relative_tolerance')
    x_tolerance = tf.convert_to_tensor(
        x_tolerance, dtype=dtype, name='x_tolerance')
    max_iterations = tf.convert_to_tensor(max_iterations, name='max_iterations')

    # The `state` here is a `LBfgsOptimizerResults` tuple with values for the
    # current state of the algorithm computation.
    def _cond(state):
      """Continue if iterations remain and stopping condition is not met."""
      return ((state.num_iterations < max_iterations) &
              tf.logical_not(stopping_condition(state.converged, state.failed)))

    def _body(current_state):
      """Main optimization loop."""
      search_direction = _get_search_direction(current_state)

      # TODO(b/120134934): Check if the derivative at the start point is not
      # negative, if so then reset position/gradient deltas and recompute
      # search direction.
      next_state = bfgs_utils.line_search_step(
          current_state,
          value_and_gradients_function, search_direction,
          tolerance, f_relative_tolerance, x_tolerance, stopping_condition,
          max_line_search_iterations)

      # If not failed or converged, update the Hessian estimate by pushing the
      # newest (position, gradient) deltas onto the correction-pair queues.
      should_update = ~(next_state.converged | next_state.failed)
      state_after_inv_hessian_update = bfgs_utils.update_fields(
          next_state,
          position_deltas=_queue_push(
              current_state.position_deltas, should_update,
              next_state.position - current_state.position),
          gradient_deltas=_queue_push(
              current_state.gradient_deltas, should_update,
              next_state.objective_gradient - current_state.objective_gradient))
      return [state_after_inv_hessian_update]

    # Resume from a prior run's state when provided; otherwise evaluate the
    # function/gradient at `initial_position` to seed the state.
    if previous_optimizer_results is None:
      assert initial_position is not None
      initial_state = _get_initial_state(value_and_gradients_function,
                                         initial_position,
                                         num_correction_pairs,
                                         tolerance)
    else:
      initial_state = previous_optimizer_results

    return tf.while_loop(
        cond=_cond,
        body=_body,
        loop_vars=[initial_state],
        parallel_iterations=parallel_iterations)[0]
def _get_initial_state(value_and_gradients_function,
                       initial_position,
                       num_correction_pairs,
                       tolerance):
  """Builds the `LBfgsOptimizerResults` describing the search's start state."""
  state_kwargs = bfgs_utils.get_initial_state_args(
      value_and_gradients_function,
      initial_position,
      tolerance)
  # Both correction-pair queues start as all-zero tensors of the same shape;
  # they fill up as iterations produce new (position, gradient) deltas.
  empty_deltas = _make_empty_queue_for(num_correction_pairs, initial_position)
  state_kwargs['position_deltas'] = empty_deltas
  state_kwargs['gradient_deltas'] = empty_deltas
  return LBfgsOptimizerResults(**state_kwargs)
def _get_search_direction(state):
  """Computes the search direction to follow at the current state.

  On the `k`-th iteration of the main L-BFGS algorithm, the state has collected
  the most recent `m` correction pairs in position_deltas and gradient_deltas,
  where `k = state.num_iterations` and `m = min(k, num_correction_pairs)`.

  Assuming these, the code below is an implementation of the L-BFGS two-loop
  recursion algorithm given by [Nocedal and Wright(2006)][1]:

  ```None
  q_direction = objective_gradient
  for i in reversed(range(m)):  # First loop.
    inv_rho[i] = gradient_deltas[i]^T * position_deltas[i]
    alpha[i] = position_deltas[i]^T * q_direction / inv_rho[i]
    q_direction = q_direction - alpha[i] * gradient_deltas[i]

  kth_inv_hessian_factor = (gradient_deltas[-1]^T * position_deltas[-1] /
                            gradient_deltas[-1]^T * gradient_deltas[-1])
  r_direction = kth_inv_hessian_factor * I * q_direction

  for i in range(m):  # Second loop.
    beta = gradient_deltas[i]^T * r_direction / inv_rho[i]
    r_direction = r_direction + position_deltas[i] * (alpha[i] - beta)

  return -r_direction  # Approximates - H_k * objective_gradient.
  ```

  Args:
    state: A `LBfgsOptimizerResults` tuple with the current state of the
      search procedure.

  Returns:
    A real `Tensor` of the same shape as the `state.position`. The direction
    along which to perform line search.
  """
  # The number of correction pairs that have been collected so far.
  num_elements = tf.minimum(
      state.num_iterations,
      distribution_util.prefer_static_shape(state.position_deltas)[0])

  def _two_loop_algorithm():
    """L-BFGS two-loop algorithm."""
    # Correction pairs are always appended to the end, so only the latest
    # `num_elements` vectors have valid position/gradient deltas. Vectors
    # that haven't been computed yet are zero.
    position_deltas = state.position_deltas
    gradient_deltas = state.gradient_deltas

    # Pre-compute all `inv_rho[i]`s.
    inv_rhos = tf.reduce_sum(
        gradient_deltas * position_deltas, axis=-1)

    def first_loop(acc, args):
      _, q_direction = acc
      position_delta, gradient_delta, inv_rho = args
      # Queue slots that hold no correction pair yet have zero deltas, hence
      # inv_rho == 0; divide_no_nan makes those scan steps no-ops.
      alpha = tf.math.divide_no_nan(
          tf.reduce_sum(position_delta * q_direction, axis=-1), inv_rho)
      direction_delta = alpha[..., tf.newaxis] * gradient_delta
      return (alpha, q_direction - direction_delta)

    # Run first loop body computing and collecting `alpha[i]`s, while also
    # computing the updated `q_direction` at each step.
    zero = tf.zeros_like(inv_rhos[-num_elements])
    alphas, q_directions = tf.scan(
        first_loop, [position_deltas, gradient_deltas, inv_rhos],
        initializer=(zero, state.objective_gradient), reverse=True)

    # We use `H^0_k = gamma_k * I` as an estimate for the initial inverse
    # hessian for the k-th iteration; then `r_direction = H^0_k * q_direction`.
    gamma_k = inv_rhos[-1] / tf.reduce_sum(
        gradient_deltas[-1] * gradient_deltas[-1], axis=-1)
    r_direction = gamma_k[..., tf.newaxis] * q_directions[-num_elements]

    def second_loop(r_direction, args):
      alpha, position_delta, gradient_delta, inv_rho = args
      beta = tf.math.divide_no_nan(
          tf.reduce_sum(gradient_delta * r_direction, axis=-1), inv_rho)
      direction_delta = (alpha - beta)[..., tf.newaxis] * position_delta
      return r_direction + direction_delta

    # Finally, run second loop body computing the updated `r_direction` at each
    # step.
    r_directions = tf.scan(
        second_loop, [alphas, position_deltas, gradient_deltas, inv_rhos],
        initializer=r_direction)
    return -r_directions[-1]

  # With no correction pairs collected yet (first iteration), fall back to
  # plain steepest descent.
  return prefer_static.cond(tf.equal(num_elements, 0),
                            (lambda: -state.objective_gradient),
                            _two_loop_algorithm)
def _make_empty_queue_for(k, element):
  """Returns an all-zero tensor able to hold `k` stacked copies of `element`.

  For example, given a `(2, 5)`-shaped `element`, `_make_empty_queue_for(3,
  element)` returns a `(3, 2, 5)` tensor of zeros with `element`'s dtype.

  Args:
    k: A positive scalar integer, number of elements that each queue will hold.
    element: A `tf.Tensor`, only its shape and dtype information are relevant.

  Returns:
    A zero-filled `tf.Tensor` of shape `(k,) + tf.shape(element)` and same
    dtype as `element`.
  """
  element_shape = distribution_util.prefer_static_shape(element)
  full_shape = tf.concat([[k], element_shape], axis=0)
  element_dtype = dtype_util.base_dtype(element.dtype)
  return tf.zeros(full_shape, dtype=element_dtype)
def _queue_push(queue, should_update, new_vecs):
  """Conditionally pushes new vectors into a batch of FIFO queues.

  The `queue` of shape `[k, ..., n]` is a batch of queues, each holding `k`
  n-D vectors; `new_vecs` of shape `[..., n]` is a fresh batch of n-D vectors.
  For every batch member whose `should_update` flag (shape `[...]`) is True,
  its vector from `new_vecs` is appended at the back of the queue, pushing the
  front vector out. Members with a False flag keep their queue unchanged.

  Note: `k` lives at dimension 0 of the queue because the L-BFGS two-loop
  algorithm above iterates the correction pairs with `tf.scan`, which can
  only scan over dimension 0.

  Args:
    queue: A `tf.Tensor` of shape `[k, ..., n]`; a batch of queues each with
      `k` n-D vectors.
    should_update: A Boolean `tf.Tensor` of shape `[...]` indicating batch
      members where new vectors should be added to their queues.
    new_vecs: A `tf.Tensor` of shape `[..., n]`; a batch of n-D vectors to add
      at the end of their respective queues, pushing out the first element
      from each.

  Returns:
    A new `tf.Tensor` of shape `[k, ..., n]`.
  """
  # Drop the oldest entry (index 0) and append the fresh vectors at the back.
  shifted = tf.concat([queue[1:], [new_vecs]], axis=0)
  # Broadcast the per-batch flag over both the queue axis and the vector axis.
  mask = should_update[tf.newaxis, ..., tf.newaxis]
  return tf.where(mask, shifted, queue)
|
from flask import Flask

# Minimal Flask application used to smoke-test a Jenkins deployment.
app = Flask(__name__)


@app.route('/')
def index():
    """Single route: return a static HTML greeting."""
    return "<h1>Hello Jenkins</h1>"


if __name__ == '__main__':
    # Bind on all interfaces so the CI host / container network can reach it.
    app.run(host="0.0.0.0", port=5001)
|
'''
@author: DiedeKemper
Trains a random forest to the data with features per business.
Gives a classification for the test data.
'''
from sklearn import cross_validation
from sklearn.ensemble import RandomForestClassifier
from CreateClassification import create
from CreateClassification import createProbFile
from LoadData import load, load_features
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import numpy as np
import matplotlib.pyplot as plt
'''trains a random forest on the given train data. Returns the forest.'''
def trainForest(Xtrain, Ytrain):
    """Fit a RandomForestClassifier with default hyper-parameters and return it."""
    classifier = RandomForestClassifier()
    classifier.fit(Xtrain, Ytrain)
    return classifier
'''applies 10 fold cross validation on the given forest for the given traindata. Returns the scores.'''
def validateForest(forest, Xtrain, Ytrain):
    """Score `forest` with 10-fold cross validation (weighted F1); return the scores."""
    cv_scores = cross_validation.cross_val_score(
        forest, Xtrain, Ytrain, cv=10, scoring='f1_weighted')
    # Report the mean score with a two-standard-deviation interval.
    print("Accuracy RF: %0.2f (+/- %0.2f)" % (cv_scores.mean(), cv_scores.std() * 2))
    return cv_scores
'''Predicts the classes for the test data and saves it in a csv file. Returns true if csv file is created.'''
def createClassification(forest, Xtest, XtestDF):
    """Predict labels for the test set and write them to CSV via `create`."""
    predictions = forest.predict(Xtest)
    return create(predictions, XtestDF)
'''Predicts the probabilities for the classes for the test data and saves it in a csv file. Returns true if csv file is created.'''
def createProbabilities(forest, Xtest):
    """Predict class probabilities for the test set and write them via `createProbFile`."""
    probabilities = forest.predict_proba(Xtest)
    return createProbFile(probabilities, 'probColorValidationSet')
'''Loads the data and turns it into arrays of the correct shape. Returns the data in a dictionary.'''
def getData():
    """Load train/validation/test feature and label matrices from 'input'.

    Merges features and labels on business_id, splits the full training set
    into train and validation subsets using the business-id lists stored in
    'input/trainSet.npy' and 'input/verifSet.npy', and returns all arrays
    (plus the business-id orderings of each subset) in a single dict.
    """
    '''LOAD TRAIN DATA'''
    # Load train data: X
    featureData = load_features('input')
    XtrainAll = featureData['TRAIN_F']
    Xcols = XtrainAll.columns.tolist() #business_id, r_mean, r_sd, g_mean, g_sd, b_mean, b_sd, imagecount, h_mean, h_sd, w_mean, w_sd
    # Load train data: Y
    data = load('input')
    YtrainAll = data['Y_TRAIN']
    Ycols = YtrainAll.columns.tolist() #business_id, 0, 1, 2, 3, 4, 5, 6, 7, 8
    '''SPLIT TRAINALL, TRAIN AND VALIDATION SET'''
    #merge X and Y. Reasons: order should be the same. Labels could contain businesses that are removed during preprocessing.
    trainAllData = pd.merge(XtrainAll, YtrainAll, on='business_id')
    #load which business ids should be in the train set and which should be in the validation set
    trainSetIds = np.load('input/trainSet.npy')
    validSetIds = np.load('input/verifSet.npy')
    #create dataframes of photo indices for train and validation set
    trainData = trainAllData[trainAllData.business_id.isin(trainSetIds)]
    validationData = trainAllData[trainAllData.business_id.isin(validSetIds)]
    #save business_id order of all sets
    busIdsTrainAll = trainAllData['business_id'].values
    busIdsTrain = trainData['business_id'].values
    busIdsVal = validationData['business_id'].values
    #split X and Y data, remove business_ids
    del Xcols[0] #remove business_id from list
    del Ycols[0] #remove business_id from list
    XtrainAll = trainAllData[Xcols].values
    YtrainAll = trainAllData[Ycols].values
    Xtrain = trainData[Xcols].values
    Ytrain = trainData[Ycols].values
    Xvalidation = validationData[Xcols].values
    Yvalidation = validationData[Ycols].values
    '''LOAD TEST DATA'''
    #create array from test data
    # NOTE(review): the test set presumably shares the train feature columns;
    # Xcols (minus business_id) is reused here — confirm TEST_F schema matches.
    XtestDF = featureData['TEST_F']
    Xtest = XtestDF[Xcols].values
    '''SAVE ALL DATA IN DICTIONARY'''
    data = {
        'Xtrain' : Xtrain,
        'busIdsTrain' : busIdsTrain,
        'XtrainAll' : XtrainAll,
        'busIdsTrainAll' : busIdsTrainAll,
        'Xvalidation' : Xvalidation,
        'busIdsVal' : busIdsVal,
        'Ytrain' : Ytrain,
        'YtrainAll' : YtrainAll,
        'Yvalidation' : Yvalidation,
        'Xtest' : Xtest,
        'XtestDF' : XtestDF,
    }
    return data
''' Source: http://scikit-learn.org/stable/auto_examples/ensemble/plot_forest_importances.html'''
def showFeatureImportance(forest, Xtrain):
    """Print the forest's feature ranking and plot the importances as a bar chart."""
    # Mean decrease-in-impurity per feature, plus its spread across the trees.
    mean_importances = forest.feature_importances_
    spread = np.std([tree.feature_importances_ for tree in forest.estimators_],
                    axis=0)
    # Feature indices sorted from most to least important.
    ranking = np.argsort(mean_importances)[::-1]
    # Print the feature ranking
    print("Feature ranking:")
    for rank in range(Xtrain.shape[1]):
        print("%d. feature %d (%f)" % (rank + 1, ranking[rank],
                                       mean_importances[ranking[rank]]))
    # Plot the feature importances of the forest
    plt.figure()
    plt.title("Feature importances")
    plt.bar(range(Xtrain.shape[1]), mean_importances[ranking],
            color="r", yerr=spread[ranking], align="center")
    plt.xticks(range(Xtrain.shape[1]), ranking)
    plt.xlim([-1, Xtrain.shape[1]])
    plt.show()
|
import pytest
import bluesky.plan_stubs as bps
from bluesky_adaptive.per_start import adaptive_plan
from bluesky_adaptive.on_stop import recommender_factory
def test_scipy_minimize_recommender(RE, hw):
    """End-to-end test driving an adaptive plan with the scipy-minimizer recommender.

    Runs the same adaptive plan twice (once with a scalar detector, once with
    an image detector) and checks that both runs leave a minimizer result.
    """
    pytest.importorskip("scipy")
    from bluesky_adaptive.scipy_reccomendations import MinimizerReccomender

    # Collected `recommender.result` objects, one per RE(...) run below.
    results_list = []

    def do_the_thing(det, det_key):
        # NOTE(review): scale=-1 presumably flips the sign so the minimizer
        # maximizes the detector signal — confirm against MinimizerReccomender.
        recommender = MinimizerReccomender(scale=-1)
        cb, queue = recommender_factory(
            adaptive_obj=recommender,
            independent_keys=["np.mean(motor)"],
            dependent_keys=[det_key],
            target_keys=["motor"],
            max_count=100,
        )
        yield from adaptive_plan(
            [det], {hw.motor: 1}, to_recommender=cb, from_recommender=queue
        )
        # Move the motor to the optimum the recommender found.
        yield from bps.mv(hw.motor, recommender.result.x)
        print(recommender.result)
        results_list.append(recommender.result)

    RE(do_the_thing(hw.det, "np.asarray(det)"))
    RE(do_the_thing(hw.img, "np.median(img)"))
    assert len(results_list) == 2
    assert all(_ is not None for _ in results_list)
|
import json
import requests
from dragoneye.utils.app_logger import logger
from dragoneye.dragoneye_exception import DragoneyeException
class AzureAuthorizer:
    """Acquires Azure AD OAuth2 bearer tokens via the client-credentials flow."""

    @staticmethod
    def get_authorization_token(tenant_id: str, client_id: str, client_secret: str,
                                timeout: int = 60) -> str:
        """Exchange client credentials for a JWT bearer token.

        Args:
            tenant_id: Azure AD tenant (directory) ID.
            client_id: Application (service principal) ID.
            client_secret: Application secret.
            timeout: Seconds to wait for the token endpoint. New, defaulted
                parameter — previously the request had no timeout and could
                hang indefinitely on a stalled connection.

        Returns:
            The value for an HTTP ``Authorization`` header: ``'Bearer <token>'``.

        Raises:
            DragoneyeException: If the token endpoint returns a non-200 status.
        """
        logger.info('Will try to generate JWT bearer token...')
        response = requests.post(
            url=f'https://login.microsoftonline.com/{tenant_id}/oauth2/token',
            data={
                'grant_type': 'client_credentials',
                'client_id': client_id,
                'client_secret': client_secret,
                'resource': 'https://management.azure.com/'
            },
            timeout=timeout
        )
        if response.status_code != 200:
            raise DragoneyeException(f'Failed to authenticate. status code: {response.status_code}\n'
                                     f'Reason: {response.text}')
        # response.json() parses the body directly (equivalent to
        # json.loads(response.text), minus the intermediate string handling).
        access_token = response.json()['access_token']
        logger.info('JWT bearer token generated successfully')
        return f'Bearer {access_token}'
|
import math
def get_bigger_rect(r1, r2):
    """
    Returns the rectangle (x, y, w, h) with the larger area.
    If the two rectangles have the same area, returns the first one.
    """
    _, _, w1, h1 = r1
    _, _, w2, h2 = r2
    # BUG FIX: the original computed r1's area as r1_w * r2_h (mixing the two
    # rectangles' dimensions), so the comparison used a wrong area for r1.
    area1 = w1 * h1
    area2 = w2 * h2
    return r1 if area1 >= area2 else r2
def get_distance_between_points(p1, p2):
    """Return the Euclidean distance between two (x, y) points."""
    dx = p2[0] - p1[0]
    dy = p2[1] - p1[1]
    return math.sqrt((dx ** 2) + (dy ** 2))
def is_similar_rectangle(r1, r2, max_dist):
    """Two (x, y, w, h) rectangles are "similar" when their top-left corners
    are within `max_dist` of each other and the area of r1 exceeds the area
    of r2 by at most (max_dist + 1) squared."""
    area_tolerance = (max_dist + 1) ** 2
    x1, y1, w1, h1 = r1
    x2, y2, w2, h2 = r2
    corner_distance = math.sqrt(((x2 - x1) ** 2) + ((y2 - y1) ** 2))
    return corner_distance <= max_dist and ((w1 * h1) - (w2 * h2)) <= area_tolerance
def eliminate_child_rects(rects):
    """Remove rectangles that are contained in, or very similar to, another.

    For each ordered pair of distinct rectangles: a rectangle fully inside
    another is scheduled for removal, and of two "similar" rectangles (close
    corners, comparable area) the smaller one is scheduled for removal.
    Scheduled rectangles are then deleted and the survivors returned.

    NOTE(review): `rects = list(set(rects))` makes the iteration order (and
    therefore similarity tie-breaking) depend on set ordering — confirm this
    non-determinism is acceptable. The print() calls look like leftover
    debugging output.
    """
    # Maps a keeper's index -> list of rectangles it eliminates.
    rectDict = dict()
    newRects = []
    # NOTE(review): `has_child` is assigned but never used.
    has_child = False
    # De-duplicate exact duplicates up front (order is not preserved).
    rects = list(set(rects))
    for i in range(len(rects)):
        r1 = rects[i]
        for j in range(len(rects)):
            r2 = rects[j]
            if is_same_rectangle(r1, r2):
                print("R1:",r1, "R2:", r2, " same!")
                continue
            if is_contains_rectangle(rects[i], rects[j]):
                print(rects[i], "contains", rects[j])
                # r1 keeps, r2 is eliminated.
                if i not in rectDict:
                    rectDict[i] = [rects[j]]
                else:
                    rectDict[i].append(rects[j])
            elif is_similar_rectangle(r1, r2, 5):
                print(r1, "is similar to", r2)
                # Of two similar rectangles, the bigger one survives.
                eliminatedR = None
                if get_bigger_rect(r1, r2) == r1:
                    index = i
                    eliminatedR = r2
                else:
                    index = j
                    eliminatedR = r1
                print("index:", i, "eliminatedR:", eliminatedR)
                if index not in rectDict:
                    rectDict[index] = [eliminatedR]
                else:
                    rectDict[index].append(eliminatedR)
    print(rects)
    print(rectDict)
    # Delete every scheduled rectangle that is still present.
    for (k, v) in rectDict.items():
        for r in v:
            if r in rects:
                rects.remove(r)
    for r in rects:
        newRects.append(r)
    return newRects
def is_same_rectangle(r1, r2):
    """Return True when both (x, y, w, h) rectangles occupy exactly the same bounds."""
    x1, y1, w1, h1 = r1
    x2, y2, w2, h2 = r2
    return (x1, y1, x1 + w1, y1 + h1) == (x2, y2, x2 + w2, y2 + h2)
def is_contains_rectangle(r1, r2):
    """
    Check the position of r2:
    returns True when r2 lies strictly inside (not equal to) r1,
    False otherwise.
    """
    x1, y1, w1, h1 = r1
    x2, y2, w2, h2 = r2
    # Identical rectangles do not count as containment.
    if (x1, y1, x1 + w1, y1 + h1) == (x2, y2, x2 + w2, y2 + h2):
        return False
    return (x2 >= x1 and x2 + w2 <= x1 + w1
            and y2 >= y1 and y2 + h2 <= y1 + h1
            and w2 * h2 <= w1 * h1)
def __get_rectangle_with_bounds(rect):
    """Expand an (x, y, w, h) rectangle into (x, y, x2, y2, w, h), where
    (x2, y2) is the bottom-right corner."""
    x, y, w, h = rect
    return (x, y, x + w, y + h, w, h)
|
import math
import random as rn
import sys
from greedy_functions import greedy_optimization, calcola_scenario
from graph_functions import minimum_spanning_tree
from utility_functions import get_gateways_classes, set_verbosity
from display_functions import find_sensor_by_id
from feasibility_functions import controlla_ammisibilita
def costo_totale_soluzione(solution):
    """Total solution cost: the sum of every gateway's device cost plus the
    cost of the minimum spanning tree connecting them."""
    device_cost = sum(gateway["costo"] for gateway in solution.values())
    _, mst_cost = minimum_spanning_tree(solution)
    return device_cost + mst_cost
def destroy(solution, method='costo'):
    """LNS destroy step: removes ~30% of the gateways from `solution`.

    Returns a tuple (reduced_solution, uncovered_sensors, removed_classes):
    the solution with the chosen gateways removed, the sensor objects those
    gateways were covering, and the class of each removed gateway.
    """
    tasso_distruzione = 30  # Destruction rate in percent (original value: 30)
    quanti_distruggere = round(len(solution) * tasso_distruzione / 100)
    # If the method is 'costo' ("cost"), sort the solution by decreasing device
    # cost. To add non-determinism to the destroy step, a random value decides
    # which devices of equal (non-maximal) cost get destroyed.
    # Specifically: the solution is sorted on two fields — first by class
    # (i.e. by cost), then devices of the same class are ordered by a random
    # value between 0 and 1.
    if method == 'costo':
        solution = {k: v for k, v in sorted(solution.items(),
                                            key=lambda item: (item[1]["classe"], rn.uniform(0, 1)),
                                            reverse=True)}
    else:  # Otherwise perform a random destroy
        shuffled = list(solution.values())
        rn.shuffle(shuffled)
        solution = dict(zip(solution, shuffled))
    sensori_scoperti = []
    classe_gateway_tolti = []
    i = 0
    # Remove the first `quanti_distruggere` entries of the (re)ordered dict,
    # recording the sensors they covered and their classes.
    while i < quanti_distruggere:
        key, a_gateway = list(solution.items())[0]
        for sens in a_gateway["sensor_covered"]:
            sensori_scoperti.append(find_sensor_by_id(sens))
        classe_gateway_tolti.append(a_gateway["classe"])
        solution.pop(key)
        i += 1
    return solution, sensori_scoperti, classe_gateway_tolti
def repair(destroyed_solution, sensori_scoperti, gateways, order_by, pack_by):
    """LNS repair step: greedily re-covers the uncovered sensors and merges
    the new gateways back into the destroyed solution.

    Returns the repaired solution, or None when the merged solution fails the
    feasibility check.
    """
    sens_dict = calcola_scenario(sensori_scoperti, gateways)
    new_solution, new_cost = greedy_optimization(sensori_scoperti, gateways, sens_dict, order_by, pack_by)
    # Merge the destroyed solution with the freshly repaired piece,
    # assigning new keys past the current maximum.
    repaired_solution = destroyed_solution.copy()
    for a_gateway in new_solution.keys():
        index = max(repaired_solution.keys()) + 1
        repaired_solution[index] = new_solution[a_gateway]
    ammissibile, reason = controlla_ammisibilita(repaired_solution)
    if not ammissibile:
        print("Questa soluzione NON è ammissibile!!! " + reason)
        return None
    return repaired_solution
def accept(delta, temperatura):
    """Simulated-annealing acceptance test: accept a worsening move of size
    `delta` with probability exp(-delta / temperatura)."""
    acceptance_probability = math.exp(-delta / temperatura)  # value in (0, 1] for delta >= 0
    return rn.uniform(0, 1) < acceptance_probability
# Local search via destroy-and-repair (Large Neighborhood Search)
def large_neighborhood_search(initial_solution, gateways, order_by, pack_by, destroy_method='costo', num_iterazioni=10):
    """Local search via destroy-and-repair (LNS) with a simulated-annealing
    acceptance criterion.

    Returns:
        (best_solution, best_cost): the best solution found over
        `num_iterazioni` iterations and its total cost.
    """
    temperatura = 100  # Initial temperature value
    soluzione_corrente = initial_solution
    costo_soluzione_corrente = costo_totale_soluzione(soluzione_corrente)
    migliore_soluzione = soluzione_corrente  # Best candidate so far (incumbent)
    costo_migliore_soluzione = costo_soluzione_corrente
    k = 0
    while k < num_iterazioni:  # Perform "n" iterations
        print(f"\n--------RICERCA LOCALE '{destroy_method}': ITERAZIONE {k + 1}--------\n")
        destroyed_solution, sensori_scoperti, classe_gateway_tolti = destroy(soluzione_corrente, destroy_method)
        # Put the gateways removed by the destroy step back into the catalogue
        for a_gateway in classe_gateway_tolti:
            gateways.append(get_gateways_classes()[a_gateway])
        gateways = sorted(gateways, key=lambda item: item.costo, reverse=False)
        soluzione_tentativo = repair(destroyed_solution, sensori_scoperti, gateways, order_by, pack_by)
        if soluzione_tentativo is None:
            # Infeasible repair: abort the whole computation.
            set_verbosity(quiet=True)
            print("\n\n\n-----------------LA SOLUZIONE TROVATA !!!!!NON!!!!! E' AMMISSIBILE-----------------\n\n\n")
            print("\n\n\n-----------------COMPUTAZIONE INTERROTTA-----------------\n\n\n")
            sys.exit()
        costo_soluzione_tentativo = costo_totale_soluzione(soluzione_tentativo)
        delta = costo_soluzione_tentativo - costo_soluzione_corrente
        print("Pre-accept:")
        print(f"Migliore: {round(costo_migliore_soluzione)} | Tentativo: {round(costo_soluzione_tentativo)} | "
              f"Corrente: {round(costo_soluzione_corrente)} | "
              f"Delta: {round(delta)} | Temperatura: {round(temperatura)}")
        # ACCEPT: always take an improving move; take a worsening one with
        # probability exp(-delta / T).
        if delta < 0:
            soluzione_corrente = soluzione_tentativo
            costo_soluzione_corrente = costo_soluzione_tentativo
            print("ACCETTO la soluzione tentativo (costo minore della corrente)\n")
        elif accept(delta, temperatura):
            soluzione_corrente = soluzione_tentativo
            costo_soluzione_corrente = costo_soluzione_tentativo
            print("ACCETTO la soluzione tentativo (accetto il peggioramento)\n")
        else:
            print("NON ACCETTO la soluzione tentativo (costo maggiore della corrente)\n")
        print("Post-accept:")
        print(f"Migliore: {round(costo_migliore_soluzione)} | Tentativo: {round(costo_soluzione_tentativo)} | "
              f"Corrente: {round(costo_soluzione_corrente)}")
        if costo_soluzione_corrente < costo_migliore_soluzione:
            migliore_soluzione = soluzione_corrente
            costo_migliore_soluzione = costo_soluzione_corrente
            print(f"Soluzione migliore AGGIORNATA -> Migliore: {round(costo_migliore_soluzione)}\n")
        else:
            print(f"Soluzione migliore NON AGGIORNATA -> Migliore: {round(costo_migliore_soluzione)}\n")
        # After each iteration update the temperature (the formula makes the
        # temperature start at 100 on the first iteration and reach 1 on the
        # last one, regardless of the number of iterations)
        temperatura = temperatura * (1 / 100 ** (1.0 / num_iterazioni))
        k += 1
    return migliore_soluzione, costo_migliore_soluzione
|
import tempfile
import subprocess
import re
# Matches innoextract's per-file output lines of the form: ` - "path/to/file"`.
FILENAME_RE = re.compile(r'^ - "(.+?)"')
# Matches progress lines like `[====>   ] 42.7%  1.5 MiB/s`, capturing the
# percentage and the transfer speed. BUG FIX: the decimal point was an
# unescaped `.` (which matches ANY character, e.g. "12x3"); it is now a
# literal dot.
PROGRESS_RE = re.compile(r'^\[.*?(\d{1,3}\.\d)% +(.+B/s)')
# Terminal-control byte sequences stripped from innoextract output lines.
FILTERED_STRINGS = [b"\x1b[K", b"\r", b"\n"]
def extract_gui():
    """Run innoextract on the installer, streaming per-file progress to stdout.

    Reads innoextract's stdout one byte at a time so that carriage-return
    separated progress updates are seen as individual lines, and prints the
    current filename, percentage and speed for each one. stderr is captured
    in 'error_log.txt'.

    NOTE(review): relies on module-level `INNOEXTRACT_BIN` and
    `installer_path` globals that are not defined in this file — confirm they
    are set before calling.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        innoextract_cmd = [INNOEXTRACT_BIN, "-e", "-q", "--color=0", "--progress=1",
                           "-d", tempdir, installer_path]
        # FIX: open the error log in a `with` block so the handle is closed
        # even if parsing raises; previously an exception leaked it.
        with open("error_log.txt", "w") as error_log:
            process = subprocess.Popen(innoextract_cmd, bufsize=-1,
                                       stdout=subprocess.PIPE, stderr=error_log)
            out_buffer = b""
            cur_filename = ""
            cur_progress = ""
            cur_speed = ""
            while True:
                char = process.stdout.read(1)
                if not char:
                    # EOF on stdout: innoextract closed its output.
                    break
                if char in (b"\n", b"\r"):
                    line = out_buffer
                    out_buffer = b""
                    # Strip terminal-control sequences before parsing.
                    for filterstring in FILTERED_STRINGS:
                        line = line.replace(filterstring, b"")
                    if not line:
                        continue
                    line_str = line.decode("utf-8")
                    filename_match = FILENAME_RE.match(line_str)
                    progress_match = PROGRESS_RE.match(line_str)
                    if filename_match:
                        cur_filename = filename_match.group(1)
                    if progress_match:
                        cur_progress = float(progress_match.group(1))
                        cur_speed = progress_match.group(2)
                    print(cur_filename, cur_progress, cur_speed)
                else:
                    out_buffer += char
            # FIX: poll() could still return None here — stdout EOF does not
            # guarantee the child has exited, so returncode could be unset and
            # a successful run would be reported as a failure. wait() blocks
            # until the real return code is available.
            process.wait()
    if process.returncode != 0:
        print("Unpacking failed, check error log at ...")
    else:
        print("Success!")
|
from client.TemplateManager.TemplateManager import template_manager
from db.model.Bill import Bill
def bill_client_routes(app):
    """Register the bill-related HTML routes on the given Flask `app`."""
    @app.route("/summarization", methods=['GET'])
    def get_bills():
        # Summarization landing page (renders the same template as a bill).
        return template_manager.get_template('bill.html')
    @app.route("/bill", methods=['GET'])
    def get_bill_null_id():
        # /bill with no ID: show the not-found page.
        return template_manager.get_template('bill_not_found.html')
    @app.route("/bill/", methods=['GET'])
    def get_bill_null_id_slash():
        # Same as above for the trailing-slash variant.
        return template_manager.get_template('bill_not_found.html')
    @app.route("/bill/<string:bill_id>", methods=['GET'])
    def get_bill(bill_id):
        # NOTE(review): bill_id is only printed, never looked up — the
        # template presumably fetches the bill client-side; confirm.
        print(bill_id)
        return template_manager.get_template('bill.html')
    @app.route("/bills", methods=['GET'])
    def get_bill_client_search():
        # Bill search/listing page.
        return template_manager.get_template('bills.html')
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The minddata pipeline analyser class."""
import csv
import json
import os
import sys
from mindinsight.profiler.analyser.base_analyser import BaseAnalyser
from mindinsight.profiler.common.exceptions.exceptions import \
ProfilerPipelineOpNotExistException
from mindinsight.profiler.common.log import logger
from mindinsight.profiler.common.validator.validate_path import validate_and_normalize_path
class MinddataPipelineAnalyser(BaseAnalyser):
    """
    The analyser for analyzing the minddata pipeline operator and queue data.
    Args:
        profiling_dir (str): The directory where the parsed profiling files are
            located.
        device_id (str): The device ID.
    Raises:
        ProfilerPathErrorException: If the profiling dir is invalid.
    """
    _col_names = ['op_id', 'op_type', 'num_workers', 'output_queue_size',
                  'output_queue_average_size', 'output_queue_length',
                  'output_queue_usage_rate', 'sample_interval', 'parent_id',
                  'children_id']
    _file_name_pipeline = 'minddata_pipeline_raw_{}.csv'
    # Indices into one parsed row; keep in sync with `_col_names` above.
    _index_op_id = 0
    _index_op_type = 1
    _index_num_workers = 2
    _index_output_queue_size = 3
    _index_output_queue_average_size = 4
    _index_output_queue_length = 5
    _index_output_queue_usage_rate = 6
    _index_sample_interval = 7
    _index_parent_id = 8
    _index_children_id = 9
    def __init__(self, profiling_dir, device_id):
        super().__init__(profiling_dir, device_id)
        # Filter keys handled specially in `_filter` (not by the default
        # filter), and columns that cannot be sorted on (list-valued).
        self._none_filter_condition_key = ['threshold', 'is_display_op_detail']
        self._none_sort_col_names = ['output_queue_size', 'children_id']
        self._op_id_index_map = self._get_op_id_index_map()
    def get_op_and_parent_op_info(self, op_id):
        """
        Get the operator and parent operator information by `op_id`.
        Args:
            op_id (int): The minddata pipeline operator ID.
        Returns:
            dict, the operator and parent operator information.
        Raises:
            ProfilerPipelineOpNotExistException: If the minddata pipeline
                operator does not exist.
        """
        index = self._op_id_index_map.get(op_id)
        if index is None:
            raise ProfilerPipelineOpNotExistException(str(op_id))
        op_info = self._data[index]
        parent_id = op_info[self._index_parent_id]
        parent_index = self._op_id_index_map.get(parent_id)
        if parent_index is None:
            # Root operator: no parent, hence no connecting queue.
            parent_op = None
            queue_info = None
        else:
            parent_op_info = self._data[parent_index]
            parent_op = {
                'op_id': parent_op_info[self._index_op_id],
                'op_type': parent_op_info[self._index_op_type],
                'num_workers': parent_op_info[self._index_num_workers]
            }
            queue_info = {
                'output_queue_size': op_info[self._index_output_queue_size],
                'output_queue_average_size':
                    op_info[self._index_output_queue_average_size],
                'output_queue_length': op_info[self._index_output_queue_length],
                'output_queue_usage_rate':
                    op_info[self._index_output_queue_usage_rate],
                'sample_interval': op_info[self._index_sample_interval]
            }
        current_op = {
            'op_id': op_info[self._index_op_id],
            'op_type': op_info[self._index_op_type],
            'num_workers': op_info[self._index_num_workers]
        }
        return {
            'current_op': current_op,
            'parent_op': parent_op,
            'queue_info': queue_info
        }
    def _load(self):
        """Load data according to the parsed minddata pipeline file."""
        pipeline_file_path = os.path.join(
            self._profiling_dir,
            self._file_name_pipeline.format(self._device_id)
        )
        pipeline_file_path = validate_and_normalize_path(
            pipeline_file_path, raise_key="Invalid pipeline file path.")
        if not os.path.isfile(pipeline_file_path):
            logger.warning('The file <%s> does not exist.', pipeline_file_path)
            return
        with open(pipeline_file_path, 'r') as file:
            # Rows can contain very large JSON-encoded fields (queue samples),
            # so lift the csv module's default field size limit.
            csv.field_size_limit(sys.maxsize)
            csv_reader = csv.reader(file)
            _ = next(csv_reader)  # skip the header row
            for info in csv_reader:
                self._data.append(self._convert_field_type(info))
    def _filter(self, filter_condition):
        """
        Filter the profiling data according to the filter condition.
        Args:
            filter_condition (dict): The filter condition.
        """
        def _inner_filter(item: list):
            return self._default_filter(item, filter_condition)
        def _inner_map(item: list):
            # Drop the two queue-size columns (indices 2 and 3) when operator
            # detail display is off; mirrors `_set_display_col_name`.
            inner_item = item[0:2]
            inner_item.extend(item[4:])
            return inner_item
        threshold = filter_condition.get('threshold')
        is_display_op_detail = filter_condition.get(
            'is_display_op_detail', False
        )
        self._set_display_col_name(is_display_op_detail)
        filter_result = list(filter(_inner_filter, self._data))
        if threshold:
            # `threshold` arrives as [high, low].
            low_threshold = threshold[1]
            high_threshold = threshold[0]
            filter_result = self._filter_outside_threshold(
                filter_result, low_threshold, high_threshold
            )
        if is_display_op_detail:
            self._result = filter_result
        else:
            self._result = list(map(_inner_map, filter_result))
    def _filter_outside_threshold(self, data, low_threshold, high_threshold):
        """
        Get the data outside the threshold range.
        Args:
            data (list[list]): The filtered data.
            low_threshold (float): The low threshold.
            high_threshold (float): The high threshold.
        Returns:
            list[list], the data outside the threshold range.
        """
        root_node = None
        leaf_nodes = []
        all_below_low_threshold = True
        all_higher_high_threshold = True
        result = []
        for item in data:
            parent_id = item[self._index_parent_id]
            if parent_id is None:
                root_node = item
                continue
            # current usage rate compared to the threshold
            cur_usage_rate = item[self._index_output_queue_usage_rate]
            is_low = False
            if cur_usage_rate < low_threshold:
                is_low = True
            else:
                all_below_low_threshold = False
            if cur_usage_rate < high_threshold:
                all_higher_high_threshold = False
            # the child node usage rate compared to the threshold
            child_ids = item[self._index_children_id]
            if not child_ids:
                leaf_nodes.append(item)
                continue
            child_usage_rates = [
                self._get_usage_rate_by_op_id(op_id) for op_id in child_ids
            ]
            is_high = True
            for usage_rate in child_usage_rates:
                if usage_rate < high_threshold:
                    is_high = False
                    break
            # An op whose own queue is starved but whose children are all
            # saturated is a likely bottleneck.
            if is_high and is_low:
                result.append(item)
        if all_below_low_threshold:
            result = leaf_nodes
        elif all_higher_high_threshold:
            result = [root_node]
        return result
    def _get_usage_rate_by_op_id(self, op_id):
        """
        Gets the usage rate of the queue corresponding to the specified operator.
        Args:
            op_id (int): The pipeline operator ID.
        Returns:
            float, the usage rate of the queue corresponding to the specified
            operator.
        """
        # NOTE(review): assumes `op_id` exists in the map; an unknown id would
        # raise TypeError on the None index — confirm parser guarantees this.
        index = self._op_id_index_map.get(op_id)
        op_info = self._data[index]
        return op_info[self._index_output_queue_usage_rate]
    def _set_display_col_name(self, is_display_op_detail):
        """
        Set the display column name according to the filter condition.
        Args:
            is_display_op_detail (bool): Whether to display the detailed operator
                information.
        """
        if not is_display_op_detail:
            self._display_col_names = self._col_names[0:2]
            self._display_col_names.extend(self._col_names[4:])
    def _convert_field_type(self, row):
        """
        Convert the field type of minddata pipeline file to the specific type.
        Args:
            row (list[str]): One row data from parsed data.
        Returns:
            list[Union[str, int, float]], the converted data.
        """
        # Empty CSV cells become None; JSON cells hold lists.
        return [
            int(row[self._index_op_id]),
            row[self._index_op_type],
            int(row[self._index_num_workers]),
            json.loads(row[self._index_output_queue_size])
            if row[self._index_output_queue_size] else None,
            float(row[self._index_output_queue_average_size])
            if row[self._index_output_queue_average_size] else None,
            int(row[self._index_output_queue_length])
            if row[self._index_output_queue_length] else None,
            float(row[self._index_output_queue_usage_rate])
            if row[self._index_output_queue_usage_rate] else None,
            int(row[self._index_sample_interval]),
            int(row[self._index_parent_id])
            if row[self._index_parent_id] else None,
            json.loads(row[self._index_children_id])
            if row[self._index_children_id] else None
        ]
    def _get_op_id_index_map(self):
        """
        Get the map of the operator id and index in data.
        Returns:
            dict, the map of the operator id and index in data.
        """
        the_map = {}
        for index, op_info in enumerate(self._data):
            the_map[op_info[self._index_op_id]] = index
        return the_map
|
"""
Tests tleap tools.
"""
import random as random
import shutil
import pytest
from paprika.align import *
from paprika.dummy import *
from paprika.tleap import *
@pytest.fixture
def clean_files(directory=os.path.join(os.path.dirname(__file__), "tmp")):
    """Provide a fresh, empty ``tmp`` directory and remove it after the test."""
    # This happens before the test function call
    if os.path.isdir(directory):
        shutil.rmtree(directory)
    os.makedirs(directory)
    yield
    # This happens after the test function call
    shutil.rmtree(directory)
@pytest.mark.slow
def test_solvation_simple(clean_files):
    """ Test that we can solvate CB6-BUT using default settings. """
    n_waters = np.random.randint(100, 10000)
    log.debug(f"Trying {n_waters} waters with default settings...")
    system = System()
    system.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    system.output_path = "tmp"
    system.target_waters = n_waters
    system.output_prefix = "solvate"
    system.build()
    # Count WAT residue labels in the generated topology.
    found_waters = sp.check_output(
        ["grep -oh 'WAT' ./tmp/solvate.prmtop | wc -w"], shell=True
    )
    assert int(found_waters) == n_waters
@pytest.mark.parametrize("shape", ["octahedral", "cubic"])
def test_solvation_shapes(shape, clean_files):
    """ Test that we can solvate CB6-BUT in each supported box shape
    (truncated octahedron and cube). """
    waters = np.random.randint(1000, 10000)
    log.debug("Trying {} waters in a truncated octahedron...".format(waters))
    sys = System()
    sys.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    sys.output_path = "tmp"
    sys.loadpdb_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/cb6-but.pdb"
    )
    sys.target_waters = waters
    sys.output_prefix = "solvate"
    # The parametrized box shape under test.
    sys.pbc_type = shape
    sys.build()
    # Count WAT residue labels in the generated topology.
    grepped_waters = sp.check_output(
        ["grep -oh 'WAT' ./tmp/solvate.prmtop | wc -w"], shell=True
    )
    assert int(grepped_waters) == waters
@pytest.mark.slow
def test_solvation_spatial_size(clean_files):
    """ Test that we can solvate CB6-BUT with an buffer size in Angstroms. """
    # Random buffer in roughly [10, 40) Angstroms.
    random_int = np.random.randint(10, 20)
    random_size = random_int * np.random.random_sample(1) + random_int
    log.debug("Trying buffer size of {} A...".format(random_size[0]))
    sys = System()
    sys.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    sys.output_path = "tmp"
    sys.loadpdb_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/cb6-but.pdb"
    )
    # Specify the box by buffer distance instead of a water count;
    # build() is expected to derive `target_waters` itself.
    sys.buffer_value = float(random_size[0])
    sys.output_prefix = "solvate"
    sys.pbc_type = "cubic"
    sys.build()
    # Count WAT residue labels in the generated topology.
    grepped_waters = sp.check_output(
        ["grep -oh 'WAT' ./tmp/solvate.prmtop | wc -w"], shell=True
    )
    assert int(grepped_waters) == sys.target_waters
@pytest.mark.slow
def test_solvation_potassium_control(clean_files):
    """ Test there is no potassium by default. A negative control. """
    n_waters = np.random.randint(1000, 10000)
    log.debug(f"Trying {n_waters} waters with potassium...")
    system = System()
    system.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    system.loadpdb_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/cb6-but.pdb"
    )
    system.output_path = "tmp"
    system.output_prefix = "solvate"
    system.target_waters = n_waters
    system.counter_cation = "K+"
    system.build()
    # NOTE(review): expects zero K+ in the output — presumably the solute
    # requires no counter-cations here; confirm.
    k_count = sp.check_output(
        ["grep -oh 'K+' ./tmp/solvate.prmtop | wc -w"], shell=True
    )
    assert int(k_count) == 0
@pytest.mark.slow
def test_solvation_with_additional_ions(clean_files):
    """ Test that we can solvate CB6-BUT with additional ions. """
    waters = np.random.randint(1000, 10000)
    # Pick a random cation/anion pair and random counts for each.
    cations = ["LI", "Na+", "K+", "RB", "CS"]
    anions = ["F", "Cl-", "BR", "IOD"]
    n_cations = np.random.randint(1, 10)
    n_anions = np.random.randint(1, 10)
    random_cation = random.choice(cations)
    random_anion = random.choice(anions)
    log.debug("Trying {} waters with additional ions...".format(waters))
    sys = System()
    sys.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    sys.output_path = "tmp"
    sys.loadpdb_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/cb6-but.pdb"
    )
    sys.target_waters = waters
    sys.output_prefix = "solvate"
    # Disable neutralization so only the explicitly requested ions appear.
    sys.neutralize = False
    sys.add_ions = [random_cation, n_cations, random_anion, n_anions]
    sys.build()
    # These should come in the RESIDUE_LABEL region of the prmtop and be before all the water.
    cation_number = sp.check_output(
        [
            "grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | "
            + "grep -oh '{} ' | wc -w".format(random_cation)
        ],
        shell=True,
    )
    anion_number = sp.check_output(
        [
            "grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | "
            + "grep -oh '{} ' | wc -w".format(random_anion)
        ],
        shell=True,
    )
    log.debug("Expecting...")
    log.debug("cation = {}\tn_cations={}".format(random_cation, n_cations))
    log.debug("anion = {}\t n_anions={}".format(random_anion, n_anions))
    log.debug("Found...")
    log.debug("             n_cations={}".format(cation_number))
    log.debug("             n_anions={}".format(anion_number))
    assert int(cation_number) == n_cations and int(anion_number) == n_anions
def test_solvation_by_M_and_m(clean_files):
    """ Test that we can solvate CB6-BUT through molarity and molality. """
    log.debug("Trying 10 A buffer with 150 mM NaCl...")
    sys = System()
    sys.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    sys.output_path = "tmp"
    sys.loadpdb_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/cb6-but.pdb"
    )
    sys.buffer_value = 10.0
    sys.output_prefix = "solvate"
    sys.neutralize = False
    sys.pbc_type = "rectangular"
    # NA/CL by molarity (M, per box volume); K/BR by molality (m, per water).
    sys.add_ions = ["NA", "0.150M", "CL", "0.150M", "K", "0.100m", "BR", "0.100m"]
    sys.build()
    # Molarity Check
    obs_num_na = sp.check_output(
        ["grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | " + "grep -oh 'NA ' | wc -w"],
        shell=True,
    )
    obs_num_cl = sp.check_output(
        ["grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | " + "grep -oh 'CL ' | wc -w"],
        shell=True,
    )
    volume = sys.get_volume()
    volume_in_liters = volume * ANGSTROM_CUBED_TO_LITERS
    # Expected counts: Avogadro's number x concentration x box volume.
    calc_num_na = np.ceil((6.022 * 10 ** 23) * (0.150) * volume_in_liters)
    calc_num_cl = np.ceil((6.022 * 10 ** 23) * (0.150) * volume_in_liters)
    assert int(obs_num_na) == int(calc_num_na)
    assert int(obs_num_cl) == int(calc_num_cl)
    # Molality Check
    obs_num_k = sp.check_output(
        ["grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | " + "grep -oh 'K ' | wc -w"],
        shell=True,
    )
    obs_num_br = sp.check_output(
        ["grep -A 99 RESIDUE_LABEL ./tmp/solvate.prmtop | " + "grep -oh 'BR ' | wc -w"],
        shell=True,
    )
    calc_num_waters = sys.count_residues()["WAT"]
    # 0.018 kg/mol is the molar mass of water: mol solute = m x kg of water.
    calc_num_k = np.ceil(0.100 * calc_num_waters * 0.018)
    calc_num_br = np.ceil(0.100 * calc_num_waters * 0.018)
    assert int(obs_num_k) == int(calc_num_k)
    assert int(obs_num_br) == int(calc_num_br)
@pytest.mark.slow
def test_alignment_workflow(clean_files):
    """ Test that we can solvate CB6-BUT after alignment. """
    cb6 = pmd.load_file(
        os.path.join(
            os.path.dirname(__file__), "../data/cb6-but/cb6-but-notcentered.pdb"
        )
    )
    # Align the guest axis (:CB6 -> :BUT) with z and save the result.
    zalign(cb6, ":CB6", ":BUT", save=True, filename="./tmp/tmp.pdb")
    waters = np.random.randint(1000, 10000)
    sys = System()
    sys.template_file = os.path.join(
        os.path.dirname(__file__), "../data/cb6-but/tleap_solvate.in"
    )
    sys.output_path = "tmp"
    # NOTE(review): file was saved as "./tmp/tmp.pdb" but loaded as "tmp.pdb" —
    # presumably tleap runs inside output_path; confirm.
    sys.loadpdb_file = "tmp.pdb"
    sys.target_waters = waters
    sys.output_prefix = "solvate"
    sys.build()
    log.debug("Trying {} waters after alignment...".format(waters))
    grepped_waters = sp.check_output(
        ["grep -oh 'WAT' ./tmp/solvate.prmtop | wc -w"], shell=True
    )
    assert int(grepped_waters) == waters
def test_add_dummy(clean_files):
    """ Test that dummy atoms get added correctly """
    temporary_directory = os.path.join(os.path.dirname(__file__), "tmp")
    host_guest = pmd.load_file(
        os.path.join(
            os.path.dirname(__file__), "../data/cb6-but/cb6-but-notcentered.pdb"
        ),
        structure=True,
    )
    # Align along the BUT C->C3 axis, then place one dummy atom below the host.
    host_guest = zalign(host_guest, ":BUT@C", ":BUT@C3", save=False)
    host_guest = add_dummy(host_guest, residue_name="DM1", z=-11.000, y=2.000, x=-1.500)
    host_guest.write_pdb(
        os.path.join(temporary_directory, "cb6-but-dum.pdb"), renumber=False
    )
    with open(os.path.join(temporary_directory, "cb6-but-dum.pdb"), "r") as f:
        lines = f.readlines()
        # Fixed positions: the dummy HETATM record follows the TER record.
        test_line1 = lines[123].rstrip()
        test_line2 = lines[124].rstrip()
    # Reference PDB records use fixed-column formatting; compare verbatim.
    ref_line1 = "TER     123      BUT     2"
    ref_line2 = (
        "HETATM  123 DUM  DM1     3      -1.500   2.000 -11.000  0.00  0.00          PB"
    )
    assert ref_line1 == test_line1
    assert ref_line2 == test_line2
    # Write dummy-atom parameters and build a full tleap model with them.
    write_dummy_frcmod(path=temporary_directory)
    write_dummy_mol2(path=temporary_directory, filename="dm1.mol2", residue_name="DM1")
    sys = System()
    cb6_frcmod = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/cb6.frcmod")
    )
    cb6_mol2 = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/cb6.mol2")
    )
    but_frcmod = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/but.frcmod")
    )
    but_mol2 = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/but.mol2")
    )
    sys.template_lines = [
        "source leaprc.gaff",
        f"loadamberparams {cb6_frcmod}",
        f"CB6 = loadmol2 {cb6_mol2}",
        f"loadamberparams {but_frcmod}",
        f"BUT = loadmol2 {but_mol2}",
        "loadamberparams dummy.frcmod",
        "DM1 = loadmol2 dm1.mol2",
        "model = loadpdb cb6-but-dum.pdb",
    ]
    sys.output_path = temporary_directory
    sys.output_prefix = "cb6-but-dum"
    sys.pbc_type = None
    sys.neutralize = False
    sys.build()
    # Compare resulting coordinates against the stored reference restart file.
    with open(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/REF_cb6-but-dum.rst7"),
        "r",
    ) as f:
        contents = f.read()
        reference = [float(i) for i in contents.split()[2:]]
    with open(os.path.join(temporary_directory, "cb6-but-dum.rst7"), "r") as f:
        contents = f.read()
        new = [float(i) for i in contents.split()[2:]]
    assert np.allclose(reference, new)
def test_hydrogen_mass_repartitioning(clean_files):
    """ Test that hydrogen mass is repartitioned. """
    temporary_directory = os.path.join(os.path.dirname(__file__), "tmp")
    sys = System()
    but_frcmod = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/but.frcmod")
    )
    but_mol2 = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../data/cb6-but/but.mol2")
    )
    sys.template_lines = [
        "source leaprc.gaff",
        f"loadamberparams {but_frcmod}",
        f"BUT = loadmol2 {but_mol2}",
        f"model = loadmol2 {but_mol2}",
    ]
    sys.output_path = temporary_directory
    sys.output_prefix = "but"
    sys.pbc_type = None
    sys.neutralize = False
    sys.build()
    # Before repartitioning: standard hydrogen mass (1.008 amu).
    but = pmd.load_file(os.path.join(temporary_directory, sys.output_prefix + ".prmtop"))
    assert np.allclose(but["@H="].atoms[0].mass, 1.008)
    sys.repartition_hydrogen_mass()
    # After repartitioning: hydrogen mass tripled to 3.024 amu.
    but = pmd.load_file(os.path.join(temporary_directory, sys.output_prefix + ".prmtop"))
    assert np.allclose(but["@H="].atoms[0].mass, 3.024)
|
import pytest
from random import seed
from data_reader.dataset import EmailDataset
from adlib.learners.simple_learner import SimpleLearner
from sklearn import svm
@pytest.fixture
def data():
    """Load the 100-instance debug email dataset, split 60/40 train/test."""
    dataset = EmailDataset(path='./data_reader/data/test/100_instance_debug.csv', raw=False)
    # set a seed so we get the same output every time
    seed(1)
    training_data, testing_data = dataset.split({'train': 60, 'test': 40})
    return {'training_data': training_data, 'testing_data': testing_data}
@pytest.fixture
def training_data(data):
    """Convenience fixture: just the training split of `data`."""
    return data['training_data']
@pytest.fixture
def testing_data(data):
    """Convenience fixture: just the testing split of `data`."""
    return data['testing_data']
@pytest.fixture
def simple_learner(data):
    """An untrained SimpleLearner wrapping a linear SVC on the training split."""
    learning_model = svm.SVC(probability=True, kernel='linear')
    learner = SimpleLearner(learning_model, data['training_data'])
    return learner
@pytest.fixture
def empty_learner():
    """A SimpleLearner constructed with no model and no data."""
    return SimpleLearner()
def bad_dataset_params1():
    # NOTE(review): not collected by pytest (name lacks the "test_" prefix);
    # rename to "test_..." for it to run.
    with pytest.raises(AttributeError) as error:
        dataset = EmailDataset(raw=False)
def bad_dataset_params2():
    # NOTE(review): not collected by pytest (name lacks the "test_" prefix);
    # rename to "test_..." for it to run.
    with pytest.raises(AttributeError) as error:
        dataset = EmailDataset(raw=True)
def bad_dataset_params3():
    # NOTE(review): not collected by pytest (name lacks the "test_" prefix);
    # rename to "test_..." for it to run.
    with pytest.raises(AttributeError) as error:
        dataset = EmailDataset(path='notarealpath.pkl', features=[1, 2, 3],
                               labels=[1])
# TODO: Also test serializing then loading
def load_serialized():
    # NOTE(review): dead/broken as written — `data` is undefined here (no
    # fixture parameter), the key 'training' does not match the fixture's
    # 'training_data', and the name lacks the "test_" prefix so pytest
    # never collects it.
    feat_val = data['training'][0].toarray()[0][0]
    assert feat_val == 1.0 or feat_val == 0.0
    label_val = data['training'][1][0]
    assert label_val == 1.0 or label_val == -1.0
def test_predict_returns_binary_label(simple_learner, testing_data):
    """The trained learner must emit one of the two class constants."""
    simple_learner.train()
    result = simple_learner.predict(testing_data[0])
    assert result in [SimpleLearner.positive_classification, SimpleLearner.negative_classification]
    ##assert result == sample_.labels
|
# MongoDB connection settings; 'DB' names the database to connect to.
# NOTE(review): the consumer of this setting is not visible in this file.
MONGODB_SETTINGS = {'DB': 'todo_db'}
|
#!/usr/bin/python3
from __future__ import print_function
import datetime
from scrape_bioarxiv import *
if __name__ == "__main__":
    # Scrape bioRxiv articles starting from this fixed date.
    start_date = datetime.date(2016, 1, 11)
    scrape_articles(start_date=start_date)
|
# -*- coding:utf-8 -*-
import os
import sys
path = os.path.dirname(__file__) + os.sep + '..' + os.sep
sys.path.append(path)
from tools.util import *
from tools.mydb import *
def get_daily():
    """Rebuild the daily CN stock table and plot market breadth.

    Reads the stock universe from ``cn_stocks_info``, downloads daily bars in
    batches of 40 via yfinance, enriches each frame with per-stock metadata
    and technical-analysis columns, and upserts the result into
    ``cn_stocks_d`` (which is truncated first — full rebuild each run).
    """
    list_sql = '''
    select * from cn_stocks_info;
    '''
    start = datetime.now()
    stk_info = mydb.read_from_sql(list_sql)
    stk_codes = stk_info.code.copy()
    stk_info = stk_info.set_index(['code'])
    table = 'cn_stocks_d'
    # Full rebuild: clear the daily table before re-inserting everything.
    mydb.truncate_table(table)
    columns = ['code', 'date', 'name', 'sector', 'sp_sector', 'industry', 'total_cap',
               'is_ss', 'is_sz', 'is_hs', 'weight',
               'open', 'high', 'low', 'close', 'pre_close', 'is_gap',
               'vol', 'ma_vol', 'vol_rate',
               's_ma', 'm_ma', 'l_ma', 's_ema', 'm_ema', 'l_ema',
               'ecs', 'esm', 'pesm', 'is_esm_over',
               'eml', 'peml', 'is_eml_over', 'ebais',
               'cs', 'pcs', 'is_cs_over',
               'sm', 'psm', 'is_sm_over',
               'ml', 'pml', 'is_ml_over', 'bais',
               's_close', 's_pre_close', 'is_s_up',
               'm_close', 'm_pre_close', 'is_m_up',
               'l_close', 'l_pre_close', 'is_l_up'
               ]
    # Fetch daily K-line (candlestick) data
    batch = 40
    num = 0
    for n in range(0, len(stk_codes), batch):
        num += 1
        print('Processing 第 {} 批 【{}~{}/{}】...'.format(num, n, n+batch, len(stk_codes)))
        sub_codes = stk_codes[n: n+batch]
        symbol_list = ' '.join(sub_codes)
        # Multi-ticker download: 'group_by="ticker"' keys columns by symbol.
        data = yf.download(symbol_list, start=date.get_9month_ago(), end=date.get_end_day(),
                           group_by="ticker", threads=True, auto_adjust=True,
                           interval='1d')
        for i in sub_codes:
            if i in data.columns:
                stock = stk_info.loc[i]
                df = data[i]
                if df is None:
                    continue
                df = df.reset_index()
                df.rename(columns={'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low',
                                   'Close': 'close', 'Volume': 'vol'},
                          inplace=True)
                # Drop rows with no close price (non-trading days / bad rows).
                df = df[~np.isnan(df['close'])]
                df['code'] = i
                # Compute MA/EMA/crossover columns over 20/60/120-day windows.
                df = analysis.stock_analysis(df, 20, 60, 120)
                if df is None:
                    continue
                df['name'] = stock.get('name')
                df['sector'] = stock.get('sector')
                df['sp_sector'] = stock.get('sp_sector')
                df['industry'] = stock.get('industry')
                df['total_cap'] = stock.get('total_cap')
                df['is_ss'] = stock.get('is_ss')
                df['is_sz'] = stock.get('is_sz')
                df['is_hs'] = stock.get('is_hs')
                df['weight'] = stock.get('weight')
                df = df[columns]
                mydb.upsert_table(table, columns, df)
    end = datetime.now()
    print('Download Data use {}'.format(end - start))
    # Market breadth chart — original comment said "US", but the query and
    # output filename are CN; NOTE(review): confirm intended market.
    df = mydb.read_from_sql('SELECT * FROM cn_stocks_sector_d ORDER BY date desc;')
    mb_name = path + './data/Market-Breadth-CN-' + str(datetime.today().date()) + '.jpg'
    analysis.market_breadth(df, mb_name, 'cn')
if __name__ == '__main__':
    # Run the full daily-refresh pipeline when executed as a script.
    get_daily()
|
#!/usr/bin/env cmsRun
import FWCore.ParameterSet.Config as cms
process = cms.Process("Geometry")
readGeometryFromDB = False
# N.B. for the time being we load the geometry from local
# XML, whle in future we will have to use the DB. This is
# only a temporary hack, since the material description has
# been updated in release via XML and the DB is behind.
if not readGeometryFromDB:
process.load('Configuration.Geometry.GeometryExtended2026D44_cff')
else:
# GlobalTag and geometry via GT
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:phase2_realistic', '')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
## MC Related stuff
process.load('Configuration.StandardSequences.Generator_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
### Loading 10GeV neutrino gun generator
process.load("SimTracker.TrackerMaterialAnalysis.single10GeVNeutrino_cfi")
### Load vertex generator w/o smearing
from Configuration.StandardSequences.VtxSmeared import VtxSmeared
process.load(VtxSmeared['NoSmear'])
# detector simulation (Geant4-based) with tracking material accounting
process.load("SimTracker.TrackerMaterialAnalysis.trackingMaterialProducerHFNose_cff")
#For some reason now neutrino are no longer tracked, so we need to force it.
process.trackingMaterialProducer.StackingAction.TrackNeutrino = True
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(200000)
)
# Input source
process.source = cms.Source("EmptySource")
process.out = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring(
'drop *', # drop all objects
'keep MaterialAccountingTracks_trackingMaterialProducer_*_*'), # but the material accounting informations
fileName = cms.untracked.string('file:material.root')
)
process.path = cms.Path(process.generator
* process.VtxSmeared
* process.generatorSmeared
* process.trackingMaterialProducer)
process.outpath = cms.EndPath(process.out)
def customizeMessageLogger(process):
    """Enable unlimited DEBUG output for TrackingMaterialProducer.

    Adds a dedicated MessageLogger destination and unbounds its limits for
    the monitored categories. Currently unused — the invocation at the
    bottom of this file is commented out.
    """
    ### Easy customisation of MessageLogger ###
    # 1. Extend MessageLogger to monitor all modules: the * means any
    # label for all defined python modules
    process.MessageLogger.debugModules.extend(['*'])
    # 2. Define destination and its default logging properties
    destination = 'debugTrackingMaterialProducer'
    how_to_debug = cms.untracked.PSet(threshold = cms.untracked.string("DEBUG"),
                                      DEBUG = cms.untracked.PSet(limit = cms.untracked.int32(0)),
                                      default = cms.untracked.PSet(limit = cms.untracked.int32(0)),
                                      )
    # 3. Attach destination and its logging properties to the main process
    # NOTE: uses the name-mangled private helper of Parameterizable; this may
    # break with newer FWCore releases.
    process.MessageLogger.destinations.extend([destination])
    process.MessageLogger._Parameterizable__addParameter(destination, how_to_debug)
    # 4. Define and extend the categories we would like to monitor
    log_debug_categories = ['TrackingMaterialProducer']
    process.MessageLogger.categories.extend(log_debug_categories)
    # 5. Extend the configuration of the configured destination so that it
    # will trace all messages coming from the list of specified
    # categories.
    unlimit_debug = cms.untracked.PSet(limit = cms.untracked.int32(-1))
    for val in log_debug_categories:
        process.MessageLogger.debugTrackingMaterialProducer._Parameterizable__addParameter(val, unlimit_debug)
    return process
#process = customizeMessageLogger(process)
|
# coding=utf-8
"""
THE SERVICE, STRUCTURE.
"""
import json
from dicttoxml import dicttoxml
from src import *
from src.custom.handlers import CustomJSONEncoder
from src.estimator import *
class CovidService(object):
    """
    SERVICE CLASS FOR HANDLING COVID REQUESTS
    """
    @classmethod
    def hash_data(cls, data, **kwargs):
        """Build a deterministic Redis cache key for a request payload.

        :param data: request payload (dict); the caller's dict is NOT modified.
        :param kwargs: ``cache_type`` ('json' or 'str') is folded into the key
            so JSON and XML responses are cached separately.
        :return: hex SHA-256 digest (str) of the canonicalised payload.
        """
        import hashlib  # stdlib; local import keeps file-level imports untouched
        # The previous implementation returned str(hash(json.dumps(data))),
        # which is process-dependent (PYTHONHASHSEED) and dict-order
        # dependent, so cached entries could never be found again after a
        # restart. SHA-256 over a sort_keys dump is stable across runs.
        # Copy the dict instead of mutating the caller's argument.
        payload = dict(data, cache_type=kwargs.get("cache_type", "json"))
        serialized = json.dumps(payload, sort_keys=True)
        return hashlib.sha256(serialized.encode("utf-8")).hexdigest()
    @classmethod
    def check_redis(cls, key, **kwargs):
        """Look up a cached estimate.

        :param key: cache key produced by :meth:`hash_data`.
        :return: the cached (deserialized) response, or None on a miss.
        """
        res = redis.hget("covid_estimate", key)
        res = json.loads(res) if res else None
        return res
    @classmethod
    def set_redis(cls, key, value, **kwargs):
        """Serialize *value* with the custom encoder and cache it under *key*.

        :return: the JSON string that was stored.
        """
        data = json.dumps(value, cls=CustomJSONEncoder)
        redis.hset(name="covid_estimate", key=key, value=data)
        return data
    @classmethod
    def do_estimate(cls, **kwargs):
        """Return a (possibly cached) covid estimate for the request kwargs.

        XML requests ('xml' in path_type) are cached as a dicttoxml string
        under a distinct key; JSON requests are cached as-is.
        """
        path_type = kwargs.pop("path_type", None)
        cache_type = 'str' if path_type and 'xml' in path_type else 'json'
        data_hash = cls.hash_data(kwargs, cache_type=cache_type)
        response = cls.check_redis(key=data_hash)
        # Strip any caller-supplied cache_type before estimating.
        kwargs.pop("cache_type", None)
        if not response:
            response = estimator(kwargs)
            response = dict(response=dicttoxml(response, attr_type=False)) if cache_type == 'str' else response
            cls.set_redis(key=data_hash, value=response)
        return response
    @classmethod
    def load_logs(cls, **kwargs):
        """Return the raw request-log file contents, or a fallback message.

        :return: dict with a ``response`` key (bytes on success, str on failure).
        """
        try:
            # Context manager closes the handle (previous version leaked it).
            with open(settings.LOGS_PATH) as f:
                return dict(response=bytes(f.read().encode()))
        except Exception as e:
            print(e)
            return dict(response="No records yet")
|
from .._tier0 import create_matrix_from_pointlists
from .._tier0 import execute
from .._tier0 import plugin_function
from .._tier0 import Image
@plugin_function(output_creator=create_matrix_from_pointlists)
def generate_angle_matrix(coordinate_list1 :Image, coordinate_list2 :Image, angle_matrix_destination :Image = None) -> Image:
    """Computes the angle in radians between all point coordinates given in two point lists.
    Takes two images containing pointlists (dimensionality n * d, n: number of
    points and d: dimensionality) and builds up a matrix containing the
    angles between these points.
    Convention: Values range from -90 to 90 degrees (-0.5 to 0.5 pi radians)
    * -90 degrees (-0.5 pi radians): Top
    * 0 degrees (0 radians): Right
    * 90 degrees (0.5 pi radians): Bottom
    Convention: Given two point lists with dimensionality n * d and m * d, the distance
    matrix will be of size (n + 1) * (m + 1). The first row and column
    contain zeros. They represent the distance of the objects to a
    theoretical background object. In that way, distance matrices are of
    the same size as touch matrices (see generateTouchMatrix). Thus, one
    can threshold a distance matrix to generate a touch matrix out of it
    for drawing meshes.
    Implemented for 2D only at the moment.
    Parameters
    ----------
    coordinate_list1 : Image
    coordinate_list2 : Image
    angle_matrix_destination : Image
    Returns
    -------
    angle_matrix_destination
    Examples
    --------
    >>> import pyclesperanto_prototype as cle
    >>> cle.generate_angle_matrix(coordinate_list1, coordinate_list2, angle_matrix_destination)
    References
    ----------
    """
    from .._tier1 import set
    # Zero the output first: row/column 0 represent the background object.
    set(angle_matrix_destination, 0)
    # Pointlists are laid out d x n here, so more than two rows means >2D,
    # which the 2D OpenCL kernel below cannot handle.
    if coordinate_list1.shape[0] > 2:
        raise ValueError('Only 2D pointlists supported!')
    parameters = {
        "dst_matrix": angle_matrix_destination,
        "src_point_list1": coordinate_list1,
        "src_point_list2": coordinate_list2
    }
    execute(__file__, '../clij-opencl-kernels/kernels/generate_angle_matrix_2d_x.cl', 'generate_angle_matrix', angle_matrix_destination.shape, parameters)
    return angle_matrix_destination
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Setup script for GDAL Python bindings.
# Inspired by psycopg2 setup.py file
# http://www.initd.org/tracker/psycopg/browser/psycopg2/trunk/setup.py
# Howard Butler hobu.inc@gmail.com
gdal_version = '3.1.0'
import sys
import os
from glob import glob
from distutils.sysconfig import get_config_vars
from distutils.command.build_ext import build_ext
from distutils.ccompiler import get_default_compiler
from distutils.errors import CompileError
# Strip -Wstrict-prototypes from compiler options, if present. This is
# not required when compiling a C++ extension.
(opt,) = get_config_vars('OPT')
if opt is not None:
os.environ['OPT'] = " ".join(f for f in opt.split() if f != '-Wstrict-prototypes')
# If CXX is defined in the environment, it will be used to link the .so
# but distutils will be confused if it is made of several words like 'ccache g++'
# and it will try to use only the first word.
# See https://lists.osgeo.org/pipermail/gdal-dev/2016-July/044686.html
# Note: in general when doing "make", CXX will not be defined, unless it is defined as
# an environment variable, but in that case it is the value of GDALmake.opt that
# will be set, not the one from the environment that started "make" !
# If no CXX environment variable is defined, then the value of the CXX variable
# in GDALmake.opt will not be set as an environment variable
if 'CXX' in os.environ and os.environ['CXX'].strip().find(' ') >= 0:
if os.environ['CXX'].strip().startswith('ccache ') and os.environ['CXX'].strip()[len('ccache '):].find(' ') < 0:
os.environ['CXX'] = os.environ['CXX'].strip()[len('ccache '):]
else:
print('WARNING: "CXX=%s" was defined in the environment and contains more than one word. Unsetting it since that is incompatible of distutils' % os.environ['CXX'])
del os.environ['CXX']
if 'CC' in os.environ and os.environ['CC'].strip().find(' ') >= 0:
if os.environ['CC'].strip().startswith('ccache ') and os.environ['CC'].strip()[len('ccache '):].find(' ') < 0:
os.environ['CC'] = os.environ['CC'].strip()[len('ccache '):]
else:
print('WARNING: "CC=%s" was defined in the environment and contains more than one word. Unsetting it since that is incompatible of distutils' % os.environ['CC'])
del os.environ['CC']
# ---------------------------------------------------------------------------
# Switches
# ---------------------------------------------------------------------------
HAVE_NUMPY = False
HAVE_SETUPTOOLS = False
BUILD_FOR_CHEESESHOP = False
GNM_ENABLED = True
# ---------------------------------------------------------------------------
# Default build options
# (may be overridden with setup.cfg or command line switches).
# ---------------------------------------------------------------------------
include_dirs = ['../../port', '../../gcore', '../../alg', '../../ogr/', '../../ogr/ogrsf_frmts', '../../gnm', '../../apps']
library_dirs = ['../../.libs', '../../']
libraries = ['gdal']
# ---------------------------------------------------------------------------
# Helper Functions
# ---------------------------------------------------------------------------
# Function to find numpy's include directory
def get_numpy_include():
    """Return numpy's header directory, or '.' when numpy is unusable."""
    return numpy.get_include() if HAVE_NUMPY else '.'
# ---------------------------------------------------------------------------
# Imports
# ---------------------------------------------------------------------------
# Probe for numpy at build time; array support is only compiled in when a
# usable numpy (with headers) is found, so HAVE_NUMPY may be reset below.
try:
    import numpy
    HAVE_NUMPY = True
    # check version
    numpy_major = numpy.__version__.split('.')[0]
    if int(numpy_major) < 1:
        print("numpy version must be > 1.0.0")
        HAVE_NUMPY = False
    else:
        # print ('numpy include', get_numpy_include())
        if get_numpy_include() == '.':
            # numpy imported but get_include() gave no usable header path.
            print("WARNING: numpy headers were not found! Array support will not be enabled")
            HAVE_NUMPY = False
except ImportError:
    print('WARNING: numpy not available! Array support will not be enabled')
    pass
# lib2to3 fixers applied when installing the Python 2 sources under Python 3.
fixer_names = [
    'lib2to3.fixes.fix_' + fix
    for fix in ('import', 'next', 'renames', 'unicode', 'ws_comma', 'xrange')
]

# Extra keyword arguments forwarded to setup() (populated below).
extra = {}
# Prefer setuptools; fall back to plain distutils, wiring up the 2to3 build
# steps so a Python 2 source tree can still be installed under Python 3.
try:
    from setuptools import setup
    from setuptools import Extension
    HAVE_SETUPTOOLS = True
except ImportError:
    from distutils.core import setup, Extension
    try:
        # distutils' 2to3-aware build commands (not present on all versions).
        from distutils.command.build_py import build_py_2to3 as build_py
        from distutils.command.build_scripts import build_scripts_2to3 as build_scripts
    except ImportError:
        from distutils.command.build_py import build_py
        from distutils.command.build_scripts import build_scripts
    else:
        # Restrict 2to3 to the fixers this project actually needs.
        build_py.fixer_names = fixer_names
        build_scripts.fixer_names = fixer_names
else:
    # setuptools path: enable 2to3 under Python 3 with every fixer except
    # the ones selected above explicitly excluded.
    if sys.version_info >= (3,):
        from lib2to3.refactor import get_fixers_from_package
        all_fixers = set(get_fixers_from_package('lib2to3.fixes'))
        exclude_fixers = sorted(all_fixers.difference(fixer_names))
        extra['use_2to3'] = True
        extra['use_2to3_fixers'] = []
        extra['use_2to3_exclude_fixers'] = exclude_fixers
class gdal_config_error(Exception):
    """Raised when gdal-config cannot be located or executed."""
def fetch_config(option, gdal_config='gdal-config'):
    """Run ``gdal_config --<option>`` and return its first output line.

    Raises gdal_config_error when the binary cannot be executed (Python 3
    path).  Keeps legacy fallbacks for Python 2 and, via popen2, for very
    old interpreters without the subprocess module.
    """
    command = gdal_config + " --%s" % option
    try:
        import subprocess
        # NOTE(review): this split assumes exactly two words, so a
        # gdal-config path containing spaces would break here.
        command, args = command.split()[0], command.split()[1]
        from sys import version_info
        if version_info >= (3, 0, 0):
            try:
                p = subprocess.Popen([command, args], stdout=subprocess.PIPE)
            except OSError:
                e = sys.exc_info()[1]
                raise gdal_config_error(e)
            r = p.stdout.readline().decode('ascii').strip()
        else:
            # Python 2's "except OSError, e" syntax must be hidden inside
            # exec() so this file still parses under Python 3.
            exec("""try:
    p = subprocess.Popen([command, args], stdout=subprocess.PIPE)
except OSError, e:
    raise gdal_config_error, e""")
            r = p.stdout.readline().strip()
        p.stdout.close()
        p.wait()
    except ImportError:
        # Ancient Pythons without subprocess: fall back to popen2.
        import popen2
        p = popen2.popen3(command)
        r = p[0].readline().strip()
        if not r:
            raise Warning(p[2].readline())
    return r
def supports_cxx11(compiler, compiler_flag=None):
    """Return True if *compiler* can build a C++11 translation unit.

    A small probe file is compiled (optionally with *compiler_flag*, e.g.
    '-std=c++11'); the probe #errors out when __cplusplus predates C++11,
    so a successful compile means C++11 is available.
    """
    # Import locally, same style as has_flag(), so the name is guaranteed
    # to be bound in this scope.
    from distutils.errors import CompileError
    ret = False
    # Leaving the "with" block closes (and therefore flushes) the probe
    # before the compiler reads it; f.name remains valid afterwards.
    with open('gdal_python_cxx11_test.cpp', 'wt') as f:
        f.write("""
#if __cplusplus < 201103L
#error "C++11 required"
#endif
int main () { return 0; }""")
    extra_postargs = None
    if compiler_flag:
        extra_postargs = [compiler_flag]
    if os.name == 'posix':
        # Redirect stderr to /dev/null to hide any error messages
        # from the compiler.
        devnull = open(os.devnull, 'w')
        oldstderr = os.dup(sys.stderr.fileno())
        os.dup2(devnull.fileno(), sys.stderr.fileno())
        try:
            compiler.compile([f.name], extra_postargs=extra_postargs)
            ret = True
        except CompileError:
            pass
        finally:
            # Always restore stderr and release /dev/null, even when the
            # compiler raised something other than CompileError (the
            # original left stderr redirected in that case).
            os.dup2(oldstderr, sys.stderr.fileno())
            devnull.close()
    else:
        try:
            compiler.compile([f.name], extra_postargs=extra_postargs)
            ret = True
        except CompileError:
            pass
    # Clean up the probe source and any object file the compiler produced.
    os.unlink('gdal_python_cxx11_test.cpp')
    if os.path.exists('gdal_python_cxx11_test.o'):
        os.unlink('gdal_python_cxx11_test.o')
    return ret
# Based on: https://stackoverflow.com/questions/28641408/how-to-tell-which-compiler-will-be-invoked-for-a-python-c-extension-in-setuptool
def has_flag(compiler, flagname):
    """Return True if *compiler* accepts *flagname* on a trivial C++ file."""
    import tempfile
    from distutils.errors import CompileError
    with tempfile.NamedTemporaryFile('w', suffix='.cpp') as f:
        f.write('int main (int argc, char **argv) { return 0; }')
        # The probe is compiled while the file is still open: flush so the
        # compiler sees the source instead of an empty (buffered) file.
        f.flush()
        try:
            compiler.compile([f.name], extra_postargs=[flagname])
        except CompileError:
            return False
    return True
class gdal_ext(build_ext):
    """build_ext subclass that configures the build from gdal-config.

    Adds a --gdal-config option, injects numpy and GDAL include/library
    directories, and adds C++11 / clang-specific compile flags as needed.
    """

    GDAL_CONFIG = 'gdal-config'
    user_options = build_ext.user_options[:]
    user_options.extend([
        ('gdal-config=', None,
         "The name of the gdal-config binary and/or a full path to it"),
    ])

    def initialize_options(self):
        # Standard distutils hook: establish defaults before option parsing.
        build_ext.initialize_options(self)
        self.numpy_include_dir = get_numpy_include()
        self.gdaldir = None
        self.gdal_config = self.GDAL_CONFIG
        self.extra_cflags = []
        self.parallel = True  # Python 3.5 only

    def get_compiler(self):
        # The user-selected compiler, or the platform default.
        return self.compiler or get_default_compiler()

    def get_gdal_config(self, option):
        """Query gdal-config for *option*, falling back to the one on PATH."""
        try:
            return fetch_config(option, gdal_config=self.gdal_config)
        except gdal_config_error:
            # If an error is thrown, it is possibly because
            # the gdal-config location given in setup.cfg is
            # incorrect, or possibly the default -- ../../apps/gdal-config
            # We'll try to use the gdal-config that might be on the path.
            try:
                return fetch_config(option)
            except gdal_config_error as e:
                msg = 'Could not find gdal-config. Make sure you have installed the GDAL native library and development headers.'
                import sys
                import traceback
                traceback_string = ''.join(traceback.format_exception(*sys.exc_info()))
                raise gdal_config_error(traceback_string + '\n' + msg)

    def build_extensions(self):
        # Add a -std=c++11 or similar flag if needed
        ct = self.compiler.compiler_type
        if ct == 'unix' and not supports_cxx11(self.compiler):
            cxx11_flag = None
            if supports_cxx11(self.compiler, '-std=c++11'):
                cxx11_flag = '-std=c++11'
            if cxx11_flag:
                for ext in self.extensions:
                    # gdalconst builds as a .c file
                    if ext.name != 'osgeo._gdalconst':
                        ext.extra_compile_args += [cxx11_flag]
                    # Adding arch flags here if OS X and compiler is clang
                    if sys.platform == 'darwin' and [int(x) for x in os.uname()[2].split('.')] >= [11, 0, 0]:
                        # since MacOS X 10.9, clang no longer accepts -mno-fused-madd
                        # extra_compile_args.append('-Qunused-arguments')
                        clang_flag = '-Wno-error=unused-command-line-argument-hard-error-in-future'
                        if has_flag(self.compiler, clang_flag):
                            ext.extra_compile_args += [clang_flag]
                        else:
                            clang_flag = '-Wno-error=unused-command-line-argument'
                            if has_flag(self.compiler, clang_flag):
                                ext.extra_compile_args += [clang_flag]
        build_ext.build_extensions(self)

    def finalize_options(self):
        """Resolve include/library dirs, numpy headers and gdal-config cflags."""
        global include_dirs, library_dirs
        if self.include_dirs is None:
            self.include_dirs = include_dirs
        # Needed on recent MacOSX
        elif isinstance(self.include_dirs, str) and sys.platform == 'darwin':
            self.include_dirs += ':' + ':'.join(include_dirs)
        if self.library_dirs is None:
            self.library_dirs = library_dirs
        # Needed on recent MacOSX
        elif isinstance(self.library_dirs, str) and sys.platform == 'darwin':
            self.library_dirs += ':' + ':'.join(library_dirs)
        if self.libraries is None:
            if self.get_compiler() == 'msvc':
                # MSVC links against the import library instead.
                libraries.remove('gdal')
                libraries.append('gdal_i')
            self.libraries = libraries
        build_ext.finalize_options(self)
        self.include_dirs.append(self.numpy_include_dir)
        if self.get_compiler() == 'msvc':
            # gdal-config is not run for MSVC builds.
            return True
        self.gdaldir = self.get_gdal_config('prefix')
        self.library_dirs.append(os.path.join(self.gdaldir, 'lib'))
        self.include_dirs.append(os.path.join(self.gdaldir, 'include'))
        cflags = self.get_gdal_config('cflags')
        if cflags:
            self.extra_cflags = cflags.split()

    def build_extension(self, ext):
        # We override this instead of setting extra_compile_args directly on
        # the Extension() instantiations below because we want to use the same
        # logic to resolve the location of gdal-config throughout.
        ext.extra_compile_args.extend(self.extra_cflags)
        return build_ext.build_extension(self, ext)
# This is only needed with Python 2.
# Monkey-patch build_ext.build_extensions to compile extensions in parallel
# with a thread pool (Python 3.5+ has its own --parallel support).
if sys.version_info < (3,):
    try:
        import multiprocessing
        from concurrent.futures import ThreadPoolExecutor as Pool
        num_jobs = multiprocessing.cpu_count()

        def parallel_build_extensions(self):
            self.check_extensions_list(self.extensions)
            with Pool(num_jobs) as pool:
                # Note: map() returns an iterator that needs to be consumed.
                list(pool.map(self.build_extension, self.extensions))

        build_ext.build_extensions = parallel_build_extensions
    except:
        # Best effort only: a missing futures backport simply disables
        # parallel builds.
        pass
extra_link_args = []
extra_compile_args = []


def _wrap_extension(module_name, wrapper_source):
    """Build an Extension for one SWIG wrapper; all share the same flags."""
    return Extension(module_name,
                     sources=[wrapper_source],
                     extra_compile_args=extra_compile_args,
                     extra_link_args=extra_link_args)


gdal_module = _wrap_extension('osgeo._gdal', 'extensions/gdal_wrap.cpp')
gdalconst_module = _wrap_extension('osgeo._gdalconst', 'extensions/gdalconst_wrap.c')
osr_module = _wrap_extension('osgeo._osr', 'extensions/osr_wrap.cpp')
ogr_module = _wrap_extension('osgeo._ogr', 'extensions/ogr_wrap.cpp')
array_module = _wrap_extension('osgeo._gdal_array', 'extensions/gdal_array_wrap.cpp')
gnm_module = _wrap_extension('osgeo._gnm', 'extensions/gnm_wrap.cpp')

# Modules that are always built; gnm/array are appended conditionally below.
ext_modules = [gdal_module,
               gdalconst_module,
               osr_module,
               ogr_module]
# GNM support can be switched off via setup_vars.ini ("GNM_ENABLED=no").
if os.path.exists('setup_vars.ini'):
    with open('setup_vars.ini') as f:
        lines = f.readlines()
    # Compare stripped lines: the previous exact-match test missed lines
    # with trailing whitespace or CRLF endings ("GNM_ENABLED=no\r\n") and
    # silently left GNM enabled.
    if any(line.strip() == 'GNM_ENABLED=no' for line in lines):
        GNM_ENABLED = False
if GNM_ENABLED:
    ext_modules.append(gnm_module)
if HAVE_NUMPY:
    # Array support only when usable numpy headers were found above.
    ext_modules.append(array_module)
packages = ["osgeo", "osgeo.utils"]

# Long description for PyPI.  The previous
#   readme = str(open('README.rst', 'rb').read())
# produced the *repr* of a bytes object under Python 3 (a string starting
# with "b'...") and leaked the file handle; decode the bytes instead.
with open('README.rst', 'rb') as readme_file:
    readme = readme_file.read().decode('utf-8')

name = 'GDAL'
version = gdal_version
author = "Frank Warmerdam"
author_email = "warmerdam@pobox.com"
maintainer = "Howard Butler"
maintainer_email = "hobu.inc@gmail.com"
description = "GDAL: Geospatial Data Abstraction Library"
license_type = "MIT"
url = "http://www.gdal.org"
classifiers = [
    'Development Status :: 5 - Production/Stable',
    'Intended Audience :: Developers',
    'Intended Audience :: Science/Research',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 3',
    'Programming Language :: C',
    'Programming Language :: C++',
    'Topic :: Scientific/Engineering :: GIS',
    'Topic :: Scientific/Engineering :: Information Analysis',
]

if BUILD_FOR_CHEESESHOP:
    # Ship a copy of the GDAL data files inside the distribution.
    data_files = [("osgeo/data/gdal", glob(os.path.join("../../data", "*")))]
else:
    data_files = None

exclude_package_data = {'': ['GNUmakefile']}
# Keyword arguments shared by both setup() invocations below.
setup_kwargs = dict(
    name=name,
    version=gdal_version,
    author=author,
    author_email=author_email,
    maintainer=maintainer,
    maintainer_email=maintainer_email,
    long_description=readme,
    long_description_content_type='text/x-rst',
    description=description,
    license=license_type,
    classifiers=classifiers,
    packages=packages,
    url=url,
    data_files=data_files,
    ext_modules=ext_modules,
    scripts=glob('scripts/*.py'),
    cmdclass={'build_ext': gdal_ext},
    extras_require={'numpy': ['numpy > 1.0.0']},
)
# This section can be greatly simplified with python >= 3.5 using **
if HAVE_SETUPTOOLS:
    # Merge in the setuptools-only (2to3) options computed above.
    for k, v in extra.items():
        setup_kwargs[k] = v
    setup_kwargs['zip_safe'] = False
    setup_kwargs['exclude_package_data'] = exclude_package_data
    setup(**setup_kwargs)
else:
    # Plain distutils: install the (possibly 2to3-aware) build commands.
    setup_kwargs['cmdclass']['build_py'] = build_py
    setup_kwargs['cmdclass']['build_scripts'] = build_scripts
    setup(**setup_kwargs)
|
import cProfile
import timeit
import pyfiglet
import click
from flask.cli import with_appcontext
from src.extensions import db
from src.routes.fibonacci import subset_sum_from_fibonacci_set
@click.command(name='create_database')
@with_appcontext
def create_db():
    """Create all tables registered on the ``db`` extension.

    Runs inside the Flask application context (required so ``db`` can
    reach the application's database configuration).
    """
    db.create_all()
@click.command(name='fib_benchmark')
def benchmark():
    """CLI command: profile and time subset_sum_from_fibonacci_set(1234)."""
    for banner in ("wow", "such benchmark"):
        print(pyfiglet.figlet_format(banner))
    # The statement is passed as source text so both cProfile and timeit
    # can re-execute it (including the import) in a fresh namespace.
    command = "from src.routes.fibonacci import subset_sum_from_fibonacci_set as f; f(1234)"
    print("\n\n cProfile")
    cProfile.run(command)
    print("\n\n timeit")
    elapsed = timeit.timeit(command, number=100)
    print("100 executions took {0:.2f} seconds".format(elapsed))
|
import psycopg2
import psycopg2.extras
def get_output_params_names(db_config_params):
    """ Gets the names of the output parameters from the database

    Args:
        db_config_params (dict): contains the connection parameters of the database

    Returns:
        list: sorted names of the output parameter columns

    Raises:
        SystemExit: when the connection or the query fails
    """
    conn = None
    column_names = []
    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**db_config_params)
        # create a cursor
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        # select only the columns corresponding to the output parameters
        # NOTE: each fragment must end with a space -- without it the
        # concatenated SQL read "...'simulation_id'and column_name..." and
        # was rejected by PostgreSQL as a syntax error.
        get_columns_name_sql = "SELECT column_name FROM information_schema.columns " \
                               "WHERE table_name = 'simulation_results' " \
                               "and column_name != 'simulation_id' " \
                               "and column_name != 'sea_id' " \
                               "and column_name != 'execution_time' " \
                               "and column_name != 'label'"
        cur.execute(get_columns_name_sql)
        column_names_result = cur.fetchall()
        for column in column_names_result:
            column_names.append(column['column_name'])
        column_names = sorted(column_names)
        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        raise SystemExit("Failure cause: {0}".format(error))
    finally:
        if conn is not None:
            conn.commit()
            conn.close()
    return column_names
def execute_vacuum(db_config_params, query):
    """ Executes a given query in the database

    Connects with isolation level 0 (autocommit), which PostgreSQL requires
    for statements such as VACUUM that cannot run inside a transaction block.

    Args:
        db_config_params (dict): contains the connection parameters of the database
        query (str): SQL statement to be executed
    """
    conn = None
    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**db_config_params)
        # autocommit mode: required for VACUUM and similar statements
        conn.set_isolation_level(0)
        # create a cursor
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        # execute query
        cur.execute(query)
        # close the communication with the PostgreSQL
        cur.close()
    except psycopg2.ProgrammingError as err:
        # Best-effort execution: report the problem but do not abort the
        # caller.  (The dead "pass" after print() was removed.)
        print(err)
    finally:
        if conn is not None:
            conn.commit()
            conn.close()
def generate_remove_columns_query(output_params):
    """Build an ALTER TABLE statement dropping every given output column.

    Args:
        output_params (list): column names to drop from simulation_results

    Returns:
        str: the assembled ALTER TABLE ... DROP COLUMN ... statement
    """
    # One DROP COLUMN clause per user-specified output parameter.
    drop_clauses = ['DROP COLUMN "' + column + '"' for column in output_params]
    return 'ALTER TABLE simulation_results ' + ",".join(drop_clauses)
def truncate_database(db_config_params):
    """Drop all output-parameter columns and reclaim the disk space.

    Args:
        db_config_params (dict): contains the connection parameters of the database
    """
    output_params = get_output_params_names(db_config_params)
    query = generate_remove_columns_query(output_params)
    # The original called the undefined name ``execute_query`` (a NameError
    # at runtime); ``execute_vacuum`` is this module's generic statement
    # executor, so use it for the ALTER TABLE as well.
    execute_vacuum(db_config_params, query)
    execute_vacuum(db_config_params, "VACUUM FULL")
|
#!/usr/bin/env python
# coding: utf-8
# # Spherical and Cylindrical Coordinates
# 
# <i>Caption</i>: The spherical coordinate system (red axes) uses radius r (the distance from the origin which is often the center of the body), theta $\theta$ (the angle between the x and y axes), and phi $\phi$ [the angle between the midplane (z = 0) and the z axis]. Alternatively, the cylindrical coordinate system uses cylindrical radius r$_{xy}$ (the distance perpendicular from the rotational axis), $\theta$, and z (distance from the midplane or xy plane). Both coordinate systems are often more useful than the Cartesian coordinate system (xyz, grey axes) to describe locations within or about ellipsoidal bodies, especially those that rotate like planets. Credit: G. O. Hollyday.
# Typically, we are used to dealing with xyz positions in three-dimensional space. It is easy to understand objects in terms of their x, y, and z dimensions (width, height, and depth, for example). The xyz coordinate system is what is known as <i>Cartesian</i> coordinates.
#
# However, it is not always easy to describe objects with Cartesian coordinates. For example, it is not intuitive to describe a sphere in terms of its xyz coordinates (would you know whether a certain location was on the surface or interior of a sphere based on an xyz coordinate?). It is much easier to describe a location on a sphere using a radius (the distance from the center of the object to the location) and angles (is the location on the back or the front of the sphere? Is the location on top or on bottom of the sphere?). The three-dimensional coordinate system that uses radii and angles to describe locations in space is called <i>spherical</i> coordinates.
#
# Spherical coordinates use r, $\theta$, and $\phi$ (see the image above).
#
# Radius r is the distance from the center of an object, or the origin (0, 0, 0) of the xyz axes, to a point in three-dimensional space. The radius is equal to
#
# $$r = \sqrt{x^2 + y^2 + z^2}$$
#
# On Earth, this would describe whether someone were on an airplane (above the surface) or on the surface (would vary depending on elevation, i.e. mountain, sea level, or scuba diving in the ocean).
#
# Theta $\theta$ is the angle between the midplane (z = 0; perpendicular to rotational axis) and the radius. Theta is related to z via $z = r\sin\theta$. In other words, theta describes how far from the midplane and in what direction off the midplane a location is. In relation to Earth, theta would describe whether someone is in the Northern or Southern Hemisphere and at which latitude.
#
# Phi $\phi$ is the angle about the z axis. It describes a position in the xy plane (midplane). Phi is related to x and y via $\tan\phi = y/x$. Phi is the equivalent of longitude on Earth (whether someone is in the Western or Eastern Hemisphere).
#
# Another handy coordinate is r$_{xy}$ (cylindrical radius, sometimes referred to as s elsewhere). The cylindrical radius is the distance from the rotational axis (z) to a point in an xy plane, and is equal to
#
# $$r_{xy} = \sqrt{x^2 + y^2}$$
#
# Cylindrical radius is part of the <i>cylindrical</i> coordinate system (r$_{xy}$, $\theta$, and z). It is useful for describing objects that are more oblate or disk-like. If an object is axis-symmetric in its xy plane, then it is easier to describe a particular <i>annulus</i> (a cylindrical shell at a given cylindrical radius) instead of all the x,y points in that annulus. If we use cylindrical radius as our horizontal axis and z as our vertical axis, we can plot a synestia in a 2-D plane instead of a 3-D figure. It is much easier to analyze simulation data from the interior of a synestia this way.
#
# It is much easier to tell via spherical coordinates where a point in space is located relative to a spherical object. For example, if you were told that a planetary body has a radius of 1000 km and that there was an interesting geological feature at (r, $\theta$, $\phi$) = (1010 km, 10$^\circ$, 90$^\circ$), it would be easier to determine the following:
#
# <i>Is this feature below or above the surface of the planetary body?</i>
# We'll use radius r to answer this question. It is above the surface by 10 km. The feature could either have a high elevation (e.g. mountain) or is orbiting the planetary body.
#
# <i>Is this feature closer to the equator or the poles of the planetary body?</i>
# We'll use theta $\theta$ to answer this question. It is closer to the equator (0$^\circ$); it is just above the equator by 10$^\circ$.
#
# <i>Is this feature on the near or far side of the planetary body? (Perhaps we were observing this object and want to know whether the feature is on the near side, the surface that we are currently viewing, or whether we will have to wait for the body to rotate so we can observe the feature)</i>
# We'll use phi $\phi$ to answer this question. It is on the far side. The right side of the near side is 0$^\circ$, so 90$^\circ$ would mean the feature is near the center of the far side.
|
# Version: 5.1
# Architecture: i386
import vstruct
from vstruct.primitives import *
POLICY_AUDIT_EVENT_TYPE = v_enum()
POLICY_AUDIT_EVENT_TYPE.AuditCategorySystem = 0
POLICY_AUDIT_EVENT_TYPE.AuditCategoryLogon = 1
POLICY_AUDIT_EVENT_TYPE.AuditCategoryObjectAccess = 2
POLICY_AUDIT_EVENT_TYPE.AuditCategoryPrivilegeUse = 3
POLICY_AUDIT_EVENT_TYPE.AuditCategoryDetailedTracking = 4
POLICY_AUDIT_EVENT_TYPE.AuditCategoryPolicyChange = 5
POLICY_AUDIT_EVENT_TYPE.AuditCategoryAccountManagement = 6
POLICY_AUDIT_EVENT_TYPE.AuditCategoryDirectoryServiceAccess = 7
POLICY_AUDIT_EVENT_TYPE.AuditCategoryAccountLogon = 8
KINTERRUPT_MODE = v_enum()
KINTERRUPT_MODE.LevelSensitive = 0
KINTERRUPT_MODE.Latched = 1
ARBITER_REQUEST_SOURCE = v_enum()
ARBITER_REQUEST_SOURCE.ArbiterRequestUndefined = -1
ARBITER_REQUEST_SOURCE.ArbiterRequestLegacyReported = 0
ARBITER_REQUEST_SOURCE.ArbiterRequestHalReported = 1
ARBITER_REQUEST_SOURCE.ArbiterRequestLegacyAssigned = 2
ARBITER_REQUEST_SOURCE.ArbiterRequestPnpDetected = 3
ARBITER_REQUEST_SOURCE.ArbiterRequestPnpEnumerated = 4
DEVICE_RELATION_TYPE = v_enum()
DEVICE_RELATION_TYPE.BusRelations = 0
DEVICE_RELATION_TYPE.EjectionRelations = 1
DEVICE_RELATION_TYPE.PowerRelations = 2
DEVICE_RELATION_TYPE.RemovalRelations = 3
DEVICE_RELATION_TYPE.TargetDeviceRelation = 4
DEVICE_RELATION_TYPE.SingleBusRelations = 5
IO_ALLOCATION_ACTION = v_enum()
IO_ALLOCATION_ACTION.KeepObject = 1
IO_ALLOCATION_ACTION.DeallocateObject = 2
IO_ALLOCATION_ACTION.DeallocateObjectKeepRegisters = 3
BUS_QUERY_ID_TYPE = v_enum()
BUS_QUERY_ID_TYPE.BusQueryDeviceID = 0
BUS_QUERY_ID_TYPE.BusQueryHardwareIDs = 1
BUS_QUERY_ID_TYPE.BusQueryCompatibleIDs = 2
BUS_QUERY_ID_TYPE.BusQueryInstanceID = 3
BUS_QUERY_ID_TYPE.BusQueryDeviceSerialNumber = 4
MMSYSTEM_PTE_POOL_TYPE = v_enum()
MMSYSTEM_PTE_POOL_TYPE.SystemPteSpace = 0
MMSYSTEM_PTE_POOL_TYPE.NonPagedPoolExpansion = 1
MMSYSTEM_PTE_POOL_TYPE.MaximumPtePoolTypes = 2
POP_POLICY_DEVICE_TYPE = v_enum()
POP_POLICY_DEVICE_TYPE.PolicyDeviceSystemButton = 0
POP_POLICY_DEVICE_TYPE.PolicyDeviceThermalZone = 1
POP_POLICY_DEVICE_TYPE.PolicyDeviceBattery = 2
POP_POLICY_DEVICE_TYPE.PolicyInitiatePowerActionAPI = 3
POP_POLICY_DEVICE_TYPE.PolicySetPowerStateAPI = 4
POP_POLICY_DEVICE_TYPE.PolicyImmediateDozeS4 = 5
POP_POLICY_DEVICE_TYPE.PolicySystemIdle = 6
MEMORY_CACHING_TYPE = v_enum()
MEMORY_CACHING_TYPE.MmNonCached = 0
MEMORY_CACHING_TYPE.MmCached = 1
MEMORY_CACHING_TYPE.MmWriteCombined = 2
MEMORY_CACHING_TYPE.MmHardwareCoherentCached = 3
MEMORY_CACHING_TYPE.MmNonCachedUnordered = 4
MEMORY_CACHING_TYPE.MmUSWCCached = 5
MEMORY_CACHING_TYPE.MmMaximumCacheType = 6
NT_PRODUCT_TYPE = v_enum()
NT_PRODUCT_TYPE.NtProductWinNt = 1
NT_PRODUCT_TYPE.NtProductLanManNt = 2
NT_PRODUCT_TYPE.NtProductServer = 3
DEVICE_POWER_STATE = v_enum()
DEVICE_POWER_STATE.PowerDeviceUnspecified = 0
DEVICE_POWER_STATE.PowerDeviceD0 = 1
DEVICE_POWER_STATE.PowerDeviceD1 = 2
DEVICE_POWER_STATE.PowerDeviceD2 = 3
DEVICE_POWER_STATE.PowerDeviceD3 = 4
DEVICE_POWER_STATE.PowerDeviceMaximum = 5
PF_SCENARIO_TYPE = v_enum()
PF_SCENARIO_TYPE.PfApplicationLaunchScenarioType = 0
PF_SCENARIO_TYPE.PfSystemBootScenarioType = 1
PF_SCENARIO_TYPE.PfMaxScenarioType = 2
TOKEN_TYPE = v_enum()
TOKEN_TYPE.TokenPrimary = 1
TOKEN_TYPE.TokenImpersonation = 2
VI_DEADLOCK_RESOURCE_TYPE = v_enum()
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockUnknown = 0
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockMutex = 1
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockFastMutex = 2
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockFastMutexUnsafe = 3
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockSpinLock = 4
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockQueuedSpinLock = 5
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockTypeMaximum = 6
FSINFOCLASS = v_enum()
FSINFOCLASS.FileFsVolumeInformation = 1
FSINFOCLASS.FileFsLabelInformation = 2
FSINFOCLASS.FileFsSizeInformation = 3
FSINFOCLASS.FileFsDeviceInformation = 4
FSINFOCLASS.FileFsAttributeInformation = 5
FSINFOCLASS.FileFsControlInformation = 6
FSINFOCLASS.FileFsFullSizeInformation = 7
FSINFOCLASS.FileFsObjectIdInformation = 8
FSINFOCLASS.FileFsDriverPathInformation = 9
FSINFOCLASS.FileFsMaximumInformation = 10
ARBITER_ACTION = v_enum()
ARBITER_ACTION.ArbiterActionTestAllocation = 0
ARBITER_ACTION.ArbiterActionRetestAllocation = 1
ARBITER_ACTION.ArbiterActionCommitAllocation = 2
ARBITER_ACTION.ArbiterActionRollbackAllocation = 3
ARBITER_ACTION.ArbiterActionQueryAllocatedResources = 4
ARBITER_ACTION.ArbiterActionWriteReservedResources = 5
ARBITER_ACTION.ArbiterActionQueryConflict = 6
ARBITER_ACTION.ArbiterActionQueryArbitrate = 7
ARBITER_ACTION.ArbiterActionAddReserved = 8
ARBITER_ACTION.ArbiterActionBootAllocation = 9
POOL_TYPE = v_enum()
POOL_TYPE.NonPagedPool = 0
POOL_TYPE.PagedPool = 1
POOL_TYPE.NonPagedPoolMustSucceed = 2
POOL_TYPE.DontUseThisType = 3
POOL_TYPE.NonPagedPoolCacheAligned = 4
POOL_TYPE.PagedPoolCacheAligned = 5
POOL_TYPE.NonPagedPoolCacheAlignedMustS = 6
POOL_TYPE.MaxPoolType = 7
POOL_TYPE.NonPagedPoolSession = 32
POOL_TYPE.PagedPoolSession = 33
POOL_TYPE.NonPagedPoolMustSucceedSession = 34
POOL_TYPE.DontUseThisTypeSession = 35
POOL_TYPE.NonPagedPoolCacheAlignedSession = 36
POOL_TYPE.PagedPoolCacheAlignedSession = 37
POOL_TYPE.NonPagedPoolCacheAlignedMustSSession = 38
PCI_DISPATCH_STYLE = v_enum()
PCI_DISPATCH_STYLE.IRP_COMPLETE = 0
PCI_DISPATCH_STYLE.IRP_DOWNWARD = 1
PCI_DISPATCH_STYLE.IRP_UPWARD = 2
PCI_DISPATCH_STYLE.IRP_DISPATCH = 3
MODE = v_enum()
MODE.KernelMode = 0
MODE.UserMode = 1
MODE.MaximumMode = 2
FS_FILTER_SECTION_SYNC_TYPE = v_enum()
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeOther = 0
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeCreateSection = 1
OB_OPEN_REASON = v_enum()
OB_OPEN_REASON.ObCreateHandle = 0
OB_OPEN_REASON.ObOpenHandle = 1
OB_OPEN_REASON.ObDuplicateHandle = 2
OB_OPEN_REASON.ObInheritHandle = 3
OB_OPEN_REASON.ObMaxOpenReason = 4
CPU_VENDORS = v_enum()
CPU_VENDORS.CPU_NONE = 0
CPU_VENDORS.CPU_INTEL = 1
CPU_VENDORS.CPU_AMD = 2
CPU_VENDORS.CPU_CYRIX = 3
CPU_VENDORS.CPU_TRANSMETA = 4
CPU_VENDORS.CPU_CENTAUR = 5
CPU_VENDORS.CPU_RISE = 6
CPU_VENDORS.CPU_UNKNOWN = 7
DEVICE_TEXT_TYPE = v_enum()
DEVICE_TEXT_TYPE.DeviceTextDescription = 0
DEVICE_TEXT_TYPE.DeviceTextLocationInformation = 1
POWER_STATE_TYPE = v_enum()
POWER_STATE_TYPE.SystemPowerState = 0
POWER_STATE_TYPE.DevicePowerState = 1
BUS_DATA_TYPE = v_enum()
BUS_DATA_TYPE.ConfigurationSpaceUndefined = -1
BUS_DATA_TYPE.Cmos = 0
BUS_DATA_TYPE.EisaConfiguration = 1
BUS_DATA_TYPE.Pos = 2
BUS_DATA_TYPE.CbusConfiguration = 3
BUS_DATA_TYPE.PCIConfiguration = 4
BUS_DATA_TYPE.VMEConfiguration = 5
BUS_DATA_TYPE.NuBusConfiguration = 6
BUS_DATA_TYPE.PCMCIAConfiguration = 7
BUS_DATA_TYPE.MPIConfiguration = 8
BUS_DATA_TYPE.MPSAConfiguration = 9
BUS_DATA_TYPE.PNPISAConfiguration = 10
BUS_DATA_TYPE.SgiInternalConfiguration = 11
BUS_DATA_TYPE.MaximumBusDataType = 12
LSA_FOREST_TRUST_RECORD_TYPE = v_enum()
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelName = 0
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelNameEx = 1
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustDomainInfo = 2
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustRecordTypeLast = 2
FILE_INFORMATION_CLASS = v_enum()
FILE_INFORMATION_CLASS.FileDirectoryInformation = 1
FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 2
FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 3
FILE_INFORMATION_CLASS.FileBasicInformation = 4
FILE_INFORMATION_CLASS.FileStandardInformation = 5
FILE_INFORMATION_CLASS.FileInternalInformation = 6
FILE_INFORMATION_CLASS.FileEaInformation = 7
FILE_INFORMATION_CLASS.FileAccessInformation = 8
FILE_INFORMATION_CLASS.FileNameInformation = 9
FILE_INFORMATION_CLASS.FileRenameInformation = 10
FILE_INFORMATION_CLASS.FileLinkInformation = 11
FILE_INFORMATION_CLASS.FileNamesInformation = 12
FILE_INFORMATION_CLASS.FileDispositionInformation = 13
FILE_INFORMATION_CLASS.FilePositionInformation = 14
FILE_INFORMATION_CLASS.FileFullEaInformation = 15
FILE_INFORMATION_CLASS.FileModeInformation = 16
FILE_INFORMATION_CLASS.FileAlignmentInformation = 17
FILE_INFORMATION_CLASS.FileAllInformation = 18
FILE_INFORMATION_CLASS.FileAllocationInformation = 19
FILE_INFORMATION_CLASS.FileEndOfFileInformation = 20
FILE_INFORMATION_CLASS.FileAlternateNameInformation = 21
FILE_INFORMATION_CLASS.FileStreamInformation = 22
FILE_INFORMATION_CLASS.FilePipeInformation = 23
FILE_INFORMATION_CLASS.FilePipeLocalInformation = 24
FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 25
FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 26
FILE_INFORMATION_CLASS.FileMailslotSetInformation = 27
FILE_INFORMATION_CLASS.FileCompressionInformation = 28
FILE_INFORMATION_CLASS.FileObjectIdInformation = 29
FILE_INFORMATION_CLASS.FileCompletionInformation = 30
FILE_INFORMATION_CLASS.FileMoveClusterInformation = 31
FILE_INFORMATION_CLASS.FileQuotaInformation = 32
FILE_INFORMATION_CLASS.FileReparsePointInformation = 33
FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 34
FILE_INFORMATION_CLASS.FileAttributeTagInformation = 35
FILE_INFORMATION_CLASS.FileTrackingInformation = 36
FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 37
FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 38
FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 39
FILE_INFORMATION_CLASS.FileShortNameInformation = 40
FILE_INFORMATION_CLASS.FileMaximumInformation = 41
EXCEPTION_DISPOSITION = v_enum()
EXCEPTION_DISPOSITION.ExceptionContinueExecution = 0
EXCEPTION_DISPOSITION.ExceptionContinueSearch = 1
EXCEPTION_DISPOSITION.ExceptionNestedException = 2
EXCEPTION_DISPOSITION.ExceptionCollidedUnwind = 3
PNP_VETO_TYPE = v_enum()
PNP_VETO_TYPE.PNP_VetoTypeUnknown = 0
PNP_VETO_TYPE.PNP_VetoLegacyDevice = 1
PNP_VETO_TYPE.PNP_VetoPendingClose = 2
PNP_VETO_TYPE.PNP_VetoWindowsApp = 3
PNP_VETO_TYPE.PNP_VetoWindowsService = 4
PNP_VETO_TYPE.PNP_VetoOutstandingOpen = 5
PNP_VETO_TYPE.PNP_VetoDevice = 6
PNP_VETO_TYPE.PNP_VetoDriver = 7
PNP_VETO_TYPE.PNP_VetoIllegalDeviceRequest = 8
PNP_VETO_TYPE.PNP_VetoInsufficientPower = 9
PNP_VETO_TYPE.PNP_VetoNonDisableable = 10
PNP_VETO_TYPE.PNP_VetoLegacyDriver = 11
PNP_VETO_TYPE.PNP_VetoInsufficientRights = 12
PCI_SIGNATURE = v_enum()
PCI_SIGNATURE.PciPdoExtensionType = 1768116272
PCI_SIGNATURE.PciFdoExtensionType = 1768116273
PCI_SIGNATURE.PciArb_Io = 1768116274
PCI_SIGNATURE.PciArb_Memory = 1768116275
PCI_SIGNATURE.PciArb_Interrupt = 1768116276
PCI_SIGNATURE.PciArb_BusNumber = 1768116277
PCI_SIGNATURE.PciTrans_Interrupt = 1768116278
PCI_SIGNATURE.PciInterface_BusHandler = 1768116279
PCI_SIGNATURE.PciInterface_IntRouteHandler = 1768116280
PCI_SIGNATURE.PciInterface_PciCb = 1768116281
PCI_SIGNATURE.PciInterface_LegacyDeviceDetection = 1768116282
PCI_SIGNATURE.PciInterface_PmeHandler = 1768116283
PCI_SIGNATURE.PciInterface_DevicePresent = 1768116284
PCI_SIGNATURE.PciInterface_NativeIde = 1768116285
PCI_SIGNATURE.PciInterface_AgpTarget = 1768116286
SECURITY_OPERATION_CODE = v_enum()
SECURITY_OPERATION_CODE.SetSecurityDescriptor = 0
SECURITY_OPERATION_CODE.QuerySecurityDescriptor = 1
SECURITY_OPERATION_CODE.DeleteSecurityDescriptor = 2
SECURITY_OPERATION_CODE.AssignSecurityDescriptor = 3
# Auto-generated constant tables mirroring Windows NT kernel/SDK enumerations.
# The numeric values must match the native (compiler-assigned) definitions
# exactly — do not renumber or reorder.
PP_NPAGED_LOOKASIDE_NUMBER = v_enum()
PP_NPAGED_LOOKASIDE_NUMBER.LookasideSmallIrpList = 0
PP_NPAGED_LOOKASIDE_NUMBER.LookasideLargeIrpList = 1
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMdlList = 2
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCreateInfoList = 3
PP_NPAGED_LOOKASIDE_NUMBER.LookasideNameBufferList = 4
PP_NPAGED_LOOKASIDE_NUMBER.LookasideTwilightList = 5
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCompletionList = 6
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMaximumList = 7
SECURITY_IMPERSONATION_LEVEL = v_enum()
SECURITY_IMPERSONATION_LEVEL.SecurityAnonymous = 0
SECURITY_IMPERSONATION_LEVEL.SecurityIdentification = 1
SECURITY_IMPERSONATION_LEVEL.SecurityImpersonation = 2
SECURITY_IMPERSONATION_LEVEL.SecurityDelegation = 3
DEVICE_USAGE_NOTIFICATION_TYPE = v_enum()
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeUndefined = 0
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePaging = 1
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeHibernation = 2
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeDumpFile = 3
PROXY_CLASS = v_enum()
PROXY_CLASS.ProxyFull = 0
PROXY_CLASS.ProxyService = 1
PROXY_CLASS.ProxyTree = 2
PROXY_CLASS.ProxyDirectory = 3
PLUGPLAY_EVENT_CATEGORY = v_enum()
PLUGPLAY_EVENT_CATEGORY.HardwareProfileChangeEvent = 0
PLUGPLAY_EVENT_CATEGORY.TargetDeviceChangeEvent = 1
PLUGPLAY_EVENT_CATEGORY.DeviceClassChangeEvent = 2
PLUGPLAY_EVENT_CATEGORY.CustomDeviceEvent = 3
PLUGPLAY_EVENT_CATEGORY.DeviceInstallEvent = 4
PLUGPLAY_EVENT_CATEGORY.DeviceArrivalEvent = 5
PLUGPLAY_EVENT_CATEGORY.PowerEvent = 6
PLUGPLAY_EVENT_CATEGORY.VetoEvent = 7
PLUGPLAY_EVENT_CATEGORY.BlockedDriverEvent = 8
PLUGPLAY_EVENT_CATEGORY.MaxPlugEventCategory = 9
# NOTE: InterfaceTypeUndefined is -1 in the native enum; keep the signed value.
INTERFACE_TYPE = v_enum()
INTERFACE_TYPE.InterfaceTypeUndefined = -1
INTERFACE_TYPE.Internal = 0
INTERFACE_TYPE.Isa = 1
INTERFACE_TYPE.Eisa = 2
INTERFACE_TYPE.MicroChannel = 3
INTERFACE_TYPE.TurboChannel = 4
INTERFACE_TYPE.PCIBus = 5
INTERFACE_TYPE.VMEBus = 6
INTERFACE_TYPE.NuBus = 7
INTERFACE_TYPE.PCMCIABus = 8
INTERFACE_TYPE.CBus = 9
INTERFACE_TYPE.MPIBus = 10
INTERFACE_TYPE.MPSABus = 11
INTERFACE_TYPE.ProcessorInternal = 12
INTERFACE_TYPE.InternalPowerBus = 13
INTERFACE_TYPE.PNPISABus = 14
INTERFACE_TYPE.PNPBus = 15
INTERFACE_TYPE.MaximumInterfaceType = 16
KWAIT_REASON = v_enum()
KWAIT_REASON.Executive = 0
KWAIT_REASON.FreePage = 1
KWAIT_REASON.PageIn = 2
KWAIT_REASON.PoolAllocation = 3
KWAIT_REASON.DelayExecution = 4
KWAIT_REASON.Suspended = 5
KWAIT_REASON.UserRequest = 6
KWAIT_REASON.WrExecutive = 7
KWAIT_REASON.WrFreePage = 8
KWAIT_REASON.WrPageIn = 9
KWAIT_REASON.WrPoolAllocation = 10
KWAIT_REASON.WrDelayExecution = 11
KWAIT_REASON.WrSuspended = 12
KWAIT_REASON.WrUserRequest = 13
KWAIT_REASON.WrEventPair = 14
KWAIT_REASON.WrQueue = 15
KWAIT_REASON.WrLpcReceive = 16
KWAIT_REASON.WrLpcReply = 17
KWAIT_REASON.WrVirtualMemory = 18
KWAIT_REASON.WrPageOut = 19
KWAIT_REASON.WrRendezvous = 20
KWAIT_REASON.Spare2 = 21
KWAIT_REASON.Spare3 = 22
KWAIT_REASON.Spare4 = 23
KWAIT_REASON.Spare5 = 24
KWAIT_REASON.Spare6 = 25
KWAIT_REASON.WrKernel = 26
KWAIT_REASON.MaximumWaitReason = 27
ALTERNATIVE_ARCHITECTURE_TYPE = v_enum()
ALTERNATIVE_ARCHITECTURE_TYPE.StandardDesign = 0
ALTERNATIVE_ARCHITECTURE_TYPE.NEC98x86 = 1
ALTERNATIVE_ARCHITECTURE_TYPE.EndAlternatives = 2
MMLISTS = v_enum()
MMLISTS.ZeroedPageList = 0
MMLISTS.FreePageList = 1
MMLISTS.StandbyPageList = 2
MMLISTS.ModifiedPageList = 3
MMLISTS.ModifiedNoWritePageList = 4
MMLISTS.BadPageList = 5
MMLISTS.ActiveAndValid = 6
MMLISTS.TransitionPage = 7
MEMORY_TYPE = v_enum()
MEMORY_TYPE.MemoryExceptionBlock = 0
MEMORY_TYPE.MemorySystemBlock = 1
MEMORY_TYPE.MemoryFree = 2
MEMORY_TYPE.MemoryBad = 3
MEMORY_TYPE.MemoryLoadedProgram = 4
MEMORY_TYPE.MemoryFirmwareTemporary = 5
MEMORY_TYPE.MemoryFirmwarePermanent = 6
MEMORY_TYPE.MemoryFreeContiguous = 7
MEMORY_TYPE.MemorySpecialMemory = 8
MEMORY_TYPE.MemoryMaximum = 9
PS_QUOTA_TYPE = v_enum()
PS_QUOTA_TYPE.PsNonPagedPool = 0
PS_QUOTA_TYPE.PsPagedPool = 1
PS_QUOTA_TYPE.PsPageFile = 2
PS_QUOTA_TYPE.PsQuotaTypes = 3
# Bit-flag constants (powers of two), not a sequential enumeration.
ReplacesCorHdrNumericDefines = v_enum()
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_ILONLY = 1
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_32BITREQUIRED = 2
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_IL_LIBRARY = 4
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_STRONGNAMESIGNED = 8
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_TRACKDEBUGDATA = 65536
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR_V2 = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MINOR = 0
ReplacesCorHdrNumericDefines.COR_DELETED_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.COR_VTABLEGAP_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.NATIVE_TYPE_MAX_CB = 1
ReplacesCorHdrNumericDefines.COR_ILMETHOD_SECT_SMALL_MAX_DATASIZE = 255
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_METHODRVA = 1
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_EHRVA = 2
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_BASICBLOCK = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_32BIT = 1
ReplacesCorHdrNumericDefines.COR_VTABLE_64BIT = 2
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED = 4
ReplacesCorHdrNumericDefines.COR_VTABLE_CALL_MOST_DERIVED = 16
ReplacesCorHdrNumericDefines.IMAGE_COR_EATJ_THUNK_SIZE = 32
ReplacesCorHdrNumericDefines.MAX_CLASS_NAME = 1024
ReplacesCorHdrNumericDefines.MAX_PACKAGE_NAME = 1024
ARBITER_RESULT = v_enum()
ARBITER_RESULT.ArbiterResultUndefined = -1
ARBITER_RESULT.ArbiterResultSuccess = 0
ARBITER_RESULT.ArbiterResultExternalConflict = 1
ARBITER_RESULT.ArbiterResultNullRequest = 2
SYSTEM_POWER_STATE = v_enum()
SYSTEM_POWER_STATE.PowerSystemUnspecified = 0
SYSTEM_POWER_STATE.PowerSystemWorking = 1
SYSTEM_POWER_STATE.PowerSystemSleeping1 = 2
SYSTEM_POWER_STATE.PowerSystemSleeping2 = 3
SYSTEM_POWER_STATE.PowerSystemSleeping3 = 4
SYSTEM_POWER_STATE.PowerSystemHibernate = 5
SYSTEM_POWER_STATE.PowerSystemShutdown = 6
SYSTEM_POWER_STATE.PowerSystemMaximum = 7
# Only a single member was emitted for this enum by the generator.
MEMORY_CACHING_TYPE_ORIG = v_enum()
MEMORY_CACHING_TYPE_ORIG.MmFrameBufferCached = 2
POWER_ACTION = v_enum()
POWER_ACTION.PowerActionNone = 0
POWER_ACTION.PowerActionReserved = 1
POWER_ACTION.PowerActionSleep = 2
POWER_ACTION.PowerActionHibernate = 3
POWER_ACTION.PowerActionShutdown = 4
POWER_ACTION.PowerActionShutdownReset = 5
POWER_ACTION.PowerActionShutdownOff = 6
POWER_ACTION.PowerActionWarmEject = 7
# Values are offset by 0x300 (768) in the native definition.
PNP_DEVNODE_STATE = v_enum()
PNP_DEVNODE_STATE.DeviceNodeUnspecified = 768
PNP_DEVNODE_STATE.DeviceNodeUninitialized = 769
PNP_DEVNODE_STATE.DeviceNodeInitialized = 770
PNP_DEVNODE_STATE.DeviceNodeDriversAdded = 771
PNP_DEVNODE_STATE.DeviceNodeResourcesAssigned = 772
PNP_DEVNODE_STATE.DeviceNodeStartPending = 773
PNP_DEVNODE_STATE.DeviceNodeStartCompletion = 774
PNP_DEVNODE_STATE.DeviceNodeStartPostWork = 775
PNP_DEVNODE_STATE.DeviceNodeStarted = 776
PNP_DEVNODE_STATE.DeviceNodeQueryStopped = 777
PNP_DEVNODE_STATE.DeviceNodeStopped = 778
PNP_DEVNODE_STATE.DeviceNodeRestartCompletion = 779
PNP_DEVNODE_STATE.DeviceNodeEnumeratePending = 780
PNP_DEVNODE_STATE.DeviceNodeEnumerateCompletion = 781
PNP_DEVNODE_STATE.DeviceNodeAwaitingQueuedDeletion = 782
PNP_DEVNODE_STATE.DeviceNodeAwaitingQueuedRemoval = 783
PNP_DEVNODE_STATE.DeviceNodeQueryRemoved = 784
PNP_DEVNODE_STATE.DeviceNodeRemovePendingCloses = 785
PNP_DEVNODE_STATE.DeviceNodeRemoved = 786
PNP_DEVNODE_STATE.DeviceNodeDeletePendingCloses = 787
PNP_DEVNODE_STATE.DeviceNodeDeleted = 788
PROFILE_STATUS = v_enum()
PROFILE_STATUS.DOCK_NOTDOCKDEVICE = 0
PROFILE_STATUS.DOCK_QUIESCENT = 1
PROFILE_STATUS.DOCK_ARRIVING = 2
PROFILE_STATUS.DOCK_DEPARTING = 3
PROFILE_STATUS.DOCK_EJECTIRP_COMPLETED = 4
MI_PFN_CACHE_ATTRIBUTE = v_enum()
MI_PFN_CACHE_ATTRIBUTE.MiNonCached = 0
MI_PFN_CACHE_ATTRIBUTE.MiCached = 1
MI_PFN_CACHE_ATTRIBUTE.MiWriteCombined = 2
MI_PFN_CACHE_ATTRIBUTE.MiNotMapped = 3
class KEXECUTE_OPTIONS(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows KEXECUTE_OPTIONS structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExecuteDisable = v_uint8()
class PCI_PMC(vstruct.VStruct):
    """Auto-generated vstruct layout for the PCI power-management capabilities (PCI_PMC) structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint8()
        self.Support = PM_SUPPORT()
class _unnamed_14487(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_14487)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = LIST_ENTRY()
class _unnamed_14486(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_14486)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UserData = v_ptr32()
        self.Owner = v_ptr32()
class _unnamed_16779(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16779)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EndingOffset = v_ptr32()
        self.ResourceToRelease = v_ptr32()
class SEGMENT_OBJECT(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SEGMENT_OBJECT structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseAddress = v_ptr32()
        self.TotalNumberOfPtes = v_uint32()
        self.SizeOfSegment = LARGE_INTEGER()
        self.NonExtendedPtes = v_uint32()
        self.ImageCommitment = v_uint32()
        self.ControlArea = v_ptr32()
        self.Subsection = v_ptr32()
        self.LargeControlArea = v_ptr32()
        self.MmSectionFlags = v_ptr32()
        self.MmSubSectionFlags = v_ptr32()
        # Explicit trailing padding emitted by the generator to match native size.
        self._pad0030 = v_bytes(size=4)
class DUAL(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows DUAL structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Map = v_ptr32()
        self.SmallDir = v_ptr32()
        self.Guard = v_uint32()
        self.FreeDisplay = vstruct.VArray([ RTL_BITMAP() for i in xrange(24) ])
        self.FreeSummary = v_uint32()
        self.FreeBins = LIST_ENTRY()
class SID(vstruct.VStruct):
    """Auto-generated vstruct layout for a Windows security identifier (SID); SubAuthority is a variable-length array declared here with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint8()
        self.SubAuthorityCount = v_uint8()
        self.IdentifierAuthority = SID_IDENTIFIER_AUTHORITY()
        self.SubAuthority = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class MMPTE_HARDWARE(vstruct.VStruct):
    """Auto-generated layout for MMPTE_HARDWARE; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class PCI_FUNCTION_RESOURCES(vstruct.VStruct):
    """Auto-generated vstruct layout for the PCI_FUNCTION_RESOURCES structure (7 limit/current descriptor pairs)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Limit = vstruct.VArray([ IO_RESOURCE_DESCRIPTOR() for i in xrange(7) ])
        self.Current = vstruct.VArray([ CM_PARTIAL_RESOURCE_DESCRIPTOR() for i in xrange(7) ])
class _unnamed_13153(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_13153)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EntireFrame = v_uint32()
class DBGKD_SET_SPECIAL_CALL64(vstruct.VStruct):
    """Auto-generated vstruct layout for the 64-bit kernel-debugger DBGKD_SET_SPECIAL_CALL message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SpecialCall = v_uint64()
class _unnamed_13092(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_13092)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Bytes = _unnamed_14544()
class KTSS(vstruct.VStruct):
    """Auto-generated vstruct layout for the x86 task-state segment (KTSS); field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Backlink = v_uint16()
        self.Reserved0 = v_uint16()
        self.Esp0 = v_uint32()
        self.Ss0 = v_uint16()
        self.Reserved1 = v_uint16()
        self.NotUsed1 = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.CR3 = v_uint32()
        self.Eip = v_uint32()
        self.EFlags = v_uint32()
        self.Eax = v_uint32()
        self.Ecx = v_uint32()
        self.Edx = v_uint32()
        self.Ebx = v_uint32()
        self.Esp = v_uint32()
        self.Ebp = v_uint32()
        self.Esi = v_uint32()
        self.Edi = v_uint32()
        self.Es = v_uint16()
        self.Reserved2 = v_uint16()
        self.Cs = v_uint16()
        self.Reserved3 = v_uint16()
        self.Ss = v_uint16()
        self.Reserved4 = v_uint16()
        self.Ds = v_uint16()
        self.Reserved5 = v_uint16()
        self.Fs = v_uint16()
        self.Reserved6 = v_uint16()
        self.Gs = v_uint16()
        self.Reserved7 = v_uint16()
        self.LDT = v_uint16()
        self.Reserved8 = v_uint16()
        self.Flags = v_uint16()
        self.IoMapBase = v_uint16()
        self.IoMaps = vstruct.VArray([ KiIoAccessMap() for i in xrange(1) ])
        self.IntDirectionMap = vstruct.VArray([ v_uint8() for i in xrange(32) ])
class CURDIR(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows CURDIR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DosPath = UNICODE_STRING()
        self.Handle = v_ptr32()
class DBGKD_GET_INTERNAL_BREAKPOINT32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger DBGKD_GET_INTERNAL_BREAKPOINT message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakpointAddress = v_uint32()
        self.Flags = v_uint32()
        self.Calls = v_uint32()
        self.MaxCallsPerPeriod = v_uint32()
        self.MinInstructions = v_uint32()
        self.MaxInstructions = v_uint32()
        self.TotalInstructions = v_uint32()
class DBGKD_MANIPULATE_STATE32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger DBGKD_MANIPULATE_STATE message; `u` is the per-API payload union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ApiNumber = v_uint32()
        self.ProcessorLevel = v_uint16()
        self.Processor = v_uint16()
        self.ReturnStatus = v_uint32()
        self.u = _unnamed_11882()
class _unnamed_11075(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_11075)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self._pad0028 = v_bytes(size=32)
class PROCESSOR_POWER_POLICY(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows PROCESSOR_POWER_POLICY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint32()
        self.DynamicThrottle = v_uint8()
        self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.DisableCStates = v_uint32()
        self.PolicyCount = v_uint32()
        self.Policy = vstruct.VArray([ PROCESSOR_POWER_POLICY_INFO() for i in xrange(3) ])
class _unnamed_11597(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_11597)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Long = v_uint32()
class _unnamed_12520(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_12520)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LongFlags = v_uint32()
class BITMAP_RANGE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows BITMAP_RANGE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Links = LIST_ENTRY()
        self.BasePage = v_uint64()
        self.FirstDirtyPage = v_uint32()
        self.LastDirtyPage = v_uint32()
        self.DirtyPages = v_uint32()
        self.Bitmap = v_ptr32()
class HARDWARE_PTE(vstruct.VStruct):
    """Auto-generated layout for HARDWARE_PTE; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class HANDLE_TABLE_ENTRY_INFO(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows HANDLE_TABLE_ENTRY_INFO structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AuditMask = v_uint32()
class DBGKD_WRITE_MEMORY32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger DBGKD_WRITE_MEMORY message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TargetBaseAddress = v_uint32()
        self.TransferCount = v_uint32()
        self.ActualBytesWritten = v_uint32()
class _unnamed_13252(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_13252)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualAddress = v_ptr32()
class PCI_INTERFACE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows PCI_INTERFACE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_ptr32()
        self.MinSize = v_uint16()
        self.MinVersion = v_uint16()
        self.MaxVersion = v_uint16()
        self.Flags = v_uint16()
        self.ReferenceCount = v_uint32()
        self.Signature = v_uint32()
        self.Constructor = v_ptr32()
        self.Initializer = v_ptr32()
class _unnamed_16629(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded member (_unnamed_16629); DeviceId is a variable-length WCHAR array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceId = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class MMWSLENTRY(vstruct.VStruct):
    """Auto-generated layout for MMWSLENTRY; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class _unnamed_12976(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_12976)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AsynchronousParameters = _unnamed_14745()
class CM_PARTIAL_RESOURCE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_PARTIAL_RESOURCE_LIST; PartialDescriptors is a variable-length array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Revision = v_uint16()
        self.Count = v_uint32()
        self.PartialDescriptors = vstruct.VArray([ CM_PARTIAL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class DBGKD_RESTORE_BREAKPOINT(vstruct.VStruct):
    """Auto-generated vstruct layout for the kernel-debugger DBGKD_RESTORE_BREAKPOINT message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakPointHandle = v_uint32()
class DEVICE_CAPABILITIES(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows DEVICE_CAPABILITIES structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.DeviceD1 = v_uint32()
        self.Address = v_uint32()
        self.UINumber = v_uint32()
        self.DeviceState = vstruct.VArray([ DEVICE_POWER_STATE() for i in xrange(7) ])
        self.SystemWake = v_uint32()
        self.DeviceWake = v_uint32()
        self.D1Latency = v_uint32()
        self.D2Latency = v_uint32()
        self.D3Latency = v_uint32()
class _unnamed_12973(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_12973)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MasterIrp = v_ptr32()
class _unnamed_16624(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded member (_unnamed_16624); SymbolicLinkName is a variable-length WCHAR array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClassGuid = GUID()
        self.SymbolicLinkName = vstruct.VArray([ v_uint16() for i in xrange(1) ])
        self._pad0014 = v_bytes(size=2)
class _unnamed_16310(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16310)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.MinBusNumber = v_uint32()
        self.MaxBusNumber = v_uint32()
        self.Reserved = v_uint32()
class _unnamed_16315(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16315)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Priority = v_uint32()
        self.Reserved1 = v_uint32()
        self.Reserved2 = v_uint32()
class EXCEPTION_RECORD64(vstruct.VStruct):
    """Auto-generated vstruct layout for the 64-bit Windows EXCEPTION_RECORD structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionCode = v_uint32()
        self.ExceptionFlags = v_uint32()
        self.ExceptionRecord = v_uint64()
        self.ExceptionAddress = v_uint64()
        self.NumberParameters = v_uint32()
        self.unusedAlignment = v_uint32()
        self.ExceptionInformation = vstruct.VArray([ v_uint64() for i in xrange(15) ])
class _unnamed_16250(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16250)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ProviderId = v_uint32()
        self.DataPath = v_ptr32()
        self.BufferSize = v_uint32()
        self.Buffer = v_ptr32()
class KPROCESS(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows kernel KPROCESS object; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.ProfileListHead = LIST_ENTRY()
        self.DirectoryTableBase = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.LdtDescriptor = KGDTENTRY()
        self.Int21Descriptor = KIDTENTRY()
        self.IopmOffset = v_uint16()
        self.Iopl = v_uint8()
        self.Unused = v_uint8()
        self.ActiveProcessors = v_uint32()
        self.KernelTime = v_uint32()
        self.UserTime = v_uint32()
        self.ReadyListHead = LIST_ENTRY()
        self.SwapListEntry = SINGLE_LIST_ENTRY()
        self.VdmTrapcHandler = v_ptr32()
        self.ThreadListHead = LIST_ENTRY()
        self.ProcessLock = v_uint32()
        self.Affinity = v_uint32()
        self.StackCount = v_uint16()
        self.BasePriority = v_uint8()
        self.ThreadQuantum = v_uint8()
        self.AutoAlignment = v_uint8()
        self.State = v_uint8()
        self.ThreadSeed = v_uint8()
        self.DisableBoost = v_uint8()
        self.PowerState = v_uint8()
        self.DisableQuantum = v_uint8()
        self.IdealNode = v_uint8()
        self.Flags = KEXECUTE_OPTIONS()
class DEVICE_OBJECT_POWER_EXTENSION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows DEVICE_OBJECT_POWER_EXTENSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdleCount = v_uint32()
        self.ConservationIdleTime = v_uint32()
        self.PerformanceIdleTime = v_uint32()
        self.DeviceObject = v_ptr32()
        self.IdleList = LIST_ENTRY()
        self.DeviceType = v_uint8()
        self._pad001c = v_bytes(size=3)
        self.State = v_uint32()
        self.NotifySourceList = LIST_ENTRY()
        self.NotifyTargetList = LIST_ENTRY()
        self.PowerChannelSummary = POWER_CHANNEL_SUMMARY()
        self.Volume = LIST_ENTRY()
class MMPTE_LIST(vstruct.VStruct):
    """Auto-generated layout for MMPTE_LIST; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class HEAP_TAG_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows HEAP_TAG_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Allocs = v_uint32()
        self.Frees = v_uint32()
        self.Size = v_uint32()
        self.TagIndex = v_uint16()
        self.CreatorBackTraceIndex = v_uint16()
        self.TagName = vstruct.VArray([ v_uint16() for i in xrange(24) ])
class VI_POOL_ENTRY_INUSE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows VI_POOL_ENTRY_INUSE structure (Driver Verifier pool tracking)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualAddress = v_ptr32()
        self.CallingAddress = v_ptr32()
        self.NumberOfBytes = v_uint32()
        self.Tag = v_uint32()
class HEAP_LOOKASIDE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows HEAP_LOOKASIDE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = SLIST_HEADER()
        self.Depth = v_uint16()
        self.MaximumDepth = v_uint16()
        self.TotalAllocates = v_uint32()
        self.AllocateMisses = v_uint32()
        self.TotalFrees = v_uint32()
        self.FreeMisses = v_uint32()
        self.LastTotalAllocates = v_uint32()
        self.LastAllocateMisses = v_uint32()
        self.Counters = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self._pad0030 = v_bytes(size=4)
class MMPTE_TRANSITION(vstruct.VStruct):
    """Auto-generated layout for MMPTE_TRANSITION; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class _unnamed_16247(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16247)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocatedResources = v_ptr32()
        self.AllocatedResourcesTranslated = v_ptr32()
class OBJECT_HANDLE_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows OBJECT_HANDLE_INFORMATION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HandleAttributes = v_uint32()
        self.GrantedAccess = v_uint32()
class OWNER_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows OWNER_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OwnerThread = v_uint32()
        self.OwnerCount = v_uint32()
class DEVOBJ_EXTENSION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows DEVOBJ_EXTENSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.PowerFlags = v_uint32()
        self.Dope = v_ptr32()
        self.ExtensionFlags = v_uint32()
        self.DeviceNode = v_ptr32()
        self.AttachedTo = v_ptr32()
        self.StartIoCount = v_uint32()
        self.StartIoKey = v_uint32()
        self.StartIoFlags = v_uint32()
        self.Vpb = v_ptr32()
class _unnamed_14357(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_14357)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.bits = _unnamed_16509()
class ARBITER_ALLOCATION_STATE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows ARBITER_ALLOCATION_STATE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = v_uint64()
        self.End = v_uint64()
        self.CurrentMinimum = v_uint64()
        self.CurrentMaximum = v_uint64()
        self.Entry = v_ptr32()
        self.CurrentAlternative = v_ptr32()
        self.AlternativeCount = v_uint32()
        self.Alternatives = v_ptr32()
        self.Flags = v_uint16()
        self.RangeAttributes = v_uint8()
        self.RangeAvailableAttributes = v_uint8()
        self.WorkSpace = v_uint32()
class DBGKD_SET_INTERNAL_BREAKPOINT64(vstruct.VStruct):
    """Auto-generated vstruct layout for the 64-bit kernel-debugger DBGKD_SET_INTERNAL_BREAKPOINT message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakpointAddress = v_uint64()
        self.Flags = v_uint32()
        self._pad0010 = v_bytes(size=4)
class _unnamed_16089(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16089)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.EaList = v_ptr32()
        self.EaListLength = v_uint32()
        self.EaIndex = v_uint32()
class MM_DRIVER_VERIFIER_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows MM_DRIVER_VERIFIER_DATA structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint32()
        self.RaiseIrqls = v_uint32()
        self.AcquireSpinLocks = v_uint32()
        self.SynchronizeExecutions = v_uint32()
        self.AllocationsAttempted = v_uint32()
        self.AllocationsSucceeded = v_uint32()
        self.AllocationsSucceededSpecialPool = v_uint32()
        self.AllocationsWithNoTag = v_uint32()
        self.TrimRequests = v_uint32()
        self.Trims = v_uint32()
        self.AllocationsFailed = v_uint32()
        self.AllocationsFailedDeliberately = v_uint32()
        self.Loads = v_uint32()
        self.Unloads = v_uint32()
        self.UnTrackedPool = v_uint32()
        self.UserTrims = v_uint32()
        self.CurrentPagedPoolAllocations = v_uint32()
        self.CurrentNonPagedPoolAllocations = v_uint32()
        self.PeakPagedPoolAllocations = v_uint32()
        self.PeakNonPagedPoolAllocations = v_uint32()
        self.PagedBytes = v_uint32()
        self.NonPagedBytes = v_uint32()
        self.PeakPagedBytes = v_uint32()
        self.PeakNonPagedBytes = v_uint32()
        self.BurstAllocationsFailedDeliberately = v_uint32()
        self.SessionTrims = v_uint32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(2) ])
class PI_BUS_EXTENSION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows PI_BUS_EXTENSION structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.NumberCSNs = v_uint32()
        self.ReadDataPort = v_ptr32()
        self.DataPortMapped = v_uint8()
        self._pad0010 = v_bytes(size=3)
        self.AddressPort = v_ptr32()
        self.AddrPortMapped = v_uint8()
        self._pad0018 = v_bytes(size=3)
        self.CommandPort = v_ptr32()
        self.CmdPortMapped = v_uint8()
        self._pad0020 = v_bytes(size=3)
        self.NextSlotNumber = v_uint32()
        self.DeviceList = SINGLE_LIST_ENTRY()
        self.CardList = SINGLE_LIST_ENTRY()
        self.PhysicalBusDevice = v_ptr32()
        self.FunctionalBusDevice = v_ptr32()
        self.AttachedDevice = v_ptr32()
        self.BusNumber = v_uint32()
        self.SystemPowerState = v_uint32()
        self.DevicePowerState = v_uint32()
class MAILSLOT_CREATE_PARAMETERS(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows MAILSLOT_CREATE_PARAMETERS structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MailslotQuota = v_uint32()
        self.MaximumMessageSize = v_uint32()
        self.ReadTimeout = LARGE_INTEGER()
        self.TimeoutSpecified = v_uint8()
        self._pad0018 = v_bytes(size=7)
class FS_FILTER_CALLBACK_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows FS_FILTER_CALLBACK_DATA structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfFsFilterCallbackData = v_uint32()
        self.Operation = v_uint8()
        self.Reserved = v_uint8()
        self._pad0008 = v_bytes(size=2)
        self.DeviceObject = v_ptr32()
        self.FileObject = v_ptr32()
        self.Parameters = FS_FILTER_PARAMETERS()
class ACCESS_STATE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows ACCESS_STATE structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OperationID = LUID()
        self.SecurityEvaluated = v_uint8()
        self.GenerateAudit = v_uint8()
        self.GenerateOnClose = v_uint8()
        self.PrivilegesAllocated = v_uint8()
        self.Flags = v_uint32()
        self.RemainingDesiredAccess = v_uint32()
        self.PreviouslyGrantedAccess = v_uint32()
        self.OriginalDesiredAccess = v_uint32()
        self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT()
        self.SecurityDescriptor = v_ptr32()
        self.AuxData = v_ptr32()
        self.Privileges = _unnamed_14065()
        self.AuditPrivileges = v_uint8()
        self._pad0064 = v_bytes(size=3)
        self.ObjectName = UNICODE_STRING()
        self.ObjectTypeName = UNICODE_STRING()
class FILE_STANDARD_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows FILE_STANDARD_INFORMATION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocationSize = LARGE_INTEGER()
        self.EndOfFile = LARGE_INTEGER()
        self.NumberOfLinks = v_uint32()
        self.DeletePending = v_uint8()
        self.Directory = v_uint8()
        self._pad0018 = v_bytes(size=2)
class EX_PUSH_LOCK_CACHE_AWARE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows EX_PUSH_LOCK_CACHE_AWARE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Locks = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class POOL_BLOCK_HEAD(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows POOL_BLOCK_HEAD structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = POOL_HEADER()
        self.List = LIST_ENTRY()
class DBGKD_SET_SPECIAL_CALL32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger DBGKD_SET_SPECIAL_CALL message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SpecialCall = v_uint32()
class SYSTEM_POWER_LEVEL(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SYSTEM_POWER_LEVEL structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Enable = v_uint8()
        self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.BatteryLevel = v_uint32()
        self.PowerPolicy = POWER_ACTION_POLICY()
        self.MinSystemState = v_uint32()
class DBGKD_LOAD_SYMBOLS32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger DBGKD_LOAD_SYMBOLS message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PathNameLength = v_uint32()
        self.BaseOfDll = v_uint32()
        self.ProcessId = v_uint32()
        self.CheckSum = v_uint32()
        self.SizeOfImage = v_uint32()
        self.UnloadSymbols = v_uint8()
        self._pad0018 = v_bytes(size=3)
class DBGKM_EXCEPTION32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit DBGKM_EXCEPTION debug message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionRecord = EXCEPTION_RECORD32()
        self.FirstChance = v_uint32()
class PAGEFAULT_HISTORY(vstruct.VStruct):
    """Auto-generated vstruct layout for PAGEFAULT_HISTORY; WatchInfo is a variable-length array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CurrentIndex = v_uint32()
        self.MaxIndex = v_uint32()
        self.SpinLock = v_uint32()
        self.Reserved = v_ptr32()
        self.WatchInfo = vstruct.VArray([ PROCESS_WS_WATCH_INFORMATION() for i in xrange(1) ])
class _unnamed_16107(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16107)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FsInformationClass = v_uint32()
class WNODE_HEADER(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows WMI WNODE_HEADER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BufferSize = v_uint32()
        self.ProviderId = v_uint32()
        self.HistoricalContext = v_uint64()
        self.CountLost = v_uint32()
        self._pad0018 = v_bytes(size=4)
        self.Guid = GUID()
        self.ClientContext = v_uint32()
        self.Flags = v_uint32()
class PROCESS_WS_WATCH_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows PROCESS_WS_WATCH_INFORMATION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FaultingPc = v_ptr32()
        self.FaultingVa = v_ptr32()
class SECTION_OBJECT_POINTERS(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SECTION_OBJECT_POINTERS structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSectionObject = v_ptr32()
        self.SharedCacheMap = v_ptr32()
        self.ImageSectionObject = v_ptr32()
class MDL(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows memory descriptor list (MDL) header."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Size = v_uint16()
        self.MdlFlags = v_uint16()
        self.Process = v_ptr32()
        self.MappedSystemVa = v_ptr32()
        self.StartVa = v_ptr32()
        self.ByteCount = v_uint32()
        self.ByteOffset = v_uint32()
class KTRAP_FRAME(vstruct.VStruct):
    """Auto-generated vstruct layout for the x86 Windows KTRAP_FRAME; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DbgEbp = v_uint32()
        self.DbgEip = v_uint32()
        self.DbgArgMark = v_uint32()
        self.DbgArgPointer = v_uint32()
        self.TempSegCs = v_uint32()
        self.TempEsp = v_uint32()
        self.Dr0 = v_uint32()
        self.Dr1 = v_uint32()
        self.Dr2 = v_uint32()
        self.Dr3 = v_uint32()
        self.Dr6 = v_uint32()
        self.Dr7 = v_uint32()
        self.SegGs = v_uint32()
        self.SegEs = v_uint32()
        self.SegDs = v_uint32()
        self.Edx = v_uint32()
        self.Ecx = v_uint32()
        self.Eax = v_uint32()
        self.PreviousPreviousMode = v_uint32()
        self.ExceptionList = v_ptr32()
        self.SegFs = v_uint32()
        self.Edi = v_uint32()
        self.Esi = v_uint32()
        self.Ebx = v_uint32()
        self.Ebp = v_uint32()
        self.ErrCode = v_uint32()
        self.Eip = v_uint32()
        self.SegCs = v_uint32()
        self.EFlags = v_uint32()
        self.HardwareEsp = v_uint32()
        self.HardwareSegSs = v_uint32()
        self.V86Es = v_uint32()
        self.V86Ds = v_uint32()
        self.V86Fs = v_uint32()
        self.V86Gs = v_uint32()
class CM_INDEX_HINT_BLOCK(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_INDEX_HINT_BLOCK; HashKey is a variable-length array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.HashKey = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class SEP_AUDIT_POLICY(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SEP_AUDIT_POLICY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PolicyElements = SEP_AUDIT_POLICY_CATEGORIES()
class MMPTE_SOFTWARE(vstruct.VStruct):
    """Auto-generated layout for MMPTE_SOFTWARE; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class IO_TIMER(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows IO_TIMER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.TimerFlag = v_uint16()
        self.TimerList = LIST_ENTRY()
        self.TimerRoutine = v_ptr32()
        self.Context = v_ptr32()
        self.DeviceObject = v_ptr32()
class Wx86ThreadState(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows Wx86ThreadState structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CallBx86Eip = v_ptr32()
        self.DeallocationCpu = v_ptr32()
        self.UseKnownWx86Dll = v_uint8()
        self.OleStubInvoked = v_uint8()
        self._pad000c = v_bytes(size=2)
class _unnamed_12112(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_12112)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FreeListsInUseTerminate = v_uint16()
class _unnamed_12111(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_12111)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FreeListsInUseUlong = vstruct.VArray([ v_uint32() for i in xrange(4) ])
class _unnamed_16218(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_16218)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceTextType = v_uint32()
        self.LocaleId = v_uint32()
class MM_SESSION_SPACE_FLAGS(vstruct.VStruct):
    """Auto-generated layout for MM_SESSION_SPACE_FLAGS; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Initialized = v_uint32()
class _unnamed_14629(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_14629)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.type0 = PCI_HEADER_TYPE_0()
class EVENT_COUNTER(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows EVENT_COUNTER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = SINGLE_LIST_ENTRY()
        self.RefCount = v_uint32()
        self.Event = KEVENT()
class SECURITY_DESCRIPTOR(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SECURITY_DESCRIPTOR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint8()
        self.Sbz1 = v_uint8()
        self.Control = v_uint16()
        self.Owner = v_ptr32()
        self.Group = v_ptr32()
        self.Sacl = v_ptr32()
        self.Dacl = v_ptr32()
class SECURITY_TOKEN_AUDIT_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows SECURITY_TOKEN_AUDIT_DATA structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.GrantMask = v_uint32()
        self.DenyMask = v_uint32()
class EX_WORK_QUEUE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows EX_WORK_QUEUE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WorkerQueue = KQUEUE()
        self.DynamicThreadCount = v_uint32()
        self.WorkItemsProcessed = v_uint32()
        self.WorkItemsProcessedLastPass = v_uint32()
        self.QueueDepthLastPass = v_uint32()
        self.Info = EX_QUEUE_WORKER_INFO()
class OBJECT_TYPE_INITIALIZER(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows OBJECT_TYPE_INITIALIZER structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.UseDefaultObject = v_uint8()
        self.CaseInsensitive = v_uint8()
        self.InvalidAttributes = v_uint32()
        self.GenericMapping = GENERIC_MAPPING()
        self.ValidAccessMask = v_uint32()
        self.SecurityRequired = v_uint8()
        self.MaintainHandleCount = v_uint8()
        self.MaintainTypeList = v_uint8()
        self._pad0020 = v_bytes(size=1)
        self.PoolType = v_uint32()
        self.DefaultPagedPoolCharge = v_uint32()
        self.DefaultNonPagedPoolCharge = v_uint32()
        self.DumpProcedure = v_ptr32()
        self.OpenProcedure = v_ptr32()
        self.CloseProcedure = v_ptr32()
        self.DeleteProcedure = v_ptr32()
        self.ParseProcedure = v_ptr32()
        self.SecurityProcedure = v_ptr32()
        self.QueryNameProcedure = v_ptr32()
        self.OkayToCloseProcedure = v_ptr32()
class VACB_LEVEL_REFERENCE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows VACB_LEVEL_REFERENCE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Reference = v_uint32()
        self.SpecialReference = v_uint32()
class _unnamed_16627(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded member (_unnamed_16627); DeviceIds is a variable-length WCHAR array declared with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceIds = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class HEAP_ENTRY_EXTRA(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows HEAP_ENTRY_EXTRA structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocatorBackTraceIndex = v_uint16()
        self.TagIndex = v_uint16()
        self.Settable = v_uint32()
class POP_DEVICE_SYS_STATE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows POP_DEVICE_SYS_STATE structure; field order defines the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IrpMinor = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.SystemState = v_uint32()
        self.Event = KEVENT()
        self.SpinLock = v_uint32()
        self.Thread = v_ptr32()
        self.GetNewDeviceList = v_uint8()
        self._pad0024 = v_bytes(size=3)
        self.Order = PO_DEVICE_NOTIFY_ORDER()
        self.Status = v_uint32()
        self.FailedDevice = v_ptr32()
        self.Waking = v_uint8()
        self.Cancelled = v_uint8()
        self.IgnoreErrors = v_uint8()
        self.IgnoreNotImplemented = v_uint8()
        self.WaitAny = v_uint8()
        self.WaitAll = v_uint8()
        self._pad027c = v_bytes(size=2)
        self.PresentIrpQueue = LIST_ENTRY()
        self.Head = POP_DEVICE_POWER_IRP()
        self.PowerIrpState = vstruct.VArray([ POP_DEVICE_POWER_IRP() for i in xrange(20) ])
class VI_DEADLOCK_RESOURCE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Driver Verifier VI_DEADLOCK_RESOURCE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.NodeCount = v_uint32()
        self.ResourceAddress = v_ptr32()
        self.ThreadOwner = v_ptr32()
        self.ResourceList = LIST_ENTRY()
        self.HashChainList = LIST_ENTRY()
        self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
        self.LastAcquireTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
        self.LastReleaseTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
class HEAP_PSEUDO_TAG_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows HEAP_PSEUDO_TAG_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Allocs = v_uint32()
        self.Frees = v_uint32()
        self.Size = v_uint32()
class _unnamed_13834(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded struct/union member (_unnamed_13834)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Generic = _unnamed_14637()
class CM_KEY_REFERENCE(vstruct.VStruct):
    """Auto-generated vstruct layout for the Windows CM_KEY_REFERENCE structure (registry key cell reference)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.KeyCell = v_uint32()
        self.KeyHive = v_ptr32()
class MMSECTION_FLAGS(vstruct.VStruct):
    """Auto-generated layout for MMSECTION_FLAGS; the native bitfields are collapsed into one 32-bit value by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BeingDeleted = v_uint32()
class IA64_DBGKD_CONTROL_SET(vstruct.VStruct):
    """Auto-generated vstruct layout for the IA-64 kernel-debugger DBGKD_CONTROL_SET structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Continue = v_uint32()
        self.CurrentSymbolStart = v_uint64()
        self.CurrentSymbolEnd = v_uint64()
class DBGKD_GET_INTERNAL_BREAKPOINT64(vstruct.VStruct):
    """Auto-generated vstruct layout for DBGKD_GET_INTERNAL_BREAKPOINT64."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakpointAddress = v_uint64()
        self.Flags = v_uint32()
        self.Calls = v_uint32()
        self.MaxCallsPerPeriod = v_uint32()
        self.MinInstructions = v_uint32()
        self.MaxInstructions = v_uint32()
        self.TotalInstructions = v_uint32()
class PROCESSOR_POWER_POLICY_INFO(vstruct.VStruct):
    """Auto-generated vstruct layout for PROCESSOR_POWER_POLICY_INFO."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TimeCheck = v_uint32()
        self.DemoteLimit = v_uint32()
        self.PromoteLimit = v_uint32()
        self.DemotePercent = v_uint8()
        self.PromotePercent = v_uint8()
        self.Spare = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.AllowDemotion = v_uint32()
class _unnamed_16213(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single IdType field)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdType = v_uint32()
class POP_POWER_ACTION(vstruct.VStruct):
    """Auto-generated vstruct layout for POP_POWER_ACTION (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Updates = v_uint8()
        self.State = v_uint8()
        self.Shutdown = v_uint8()
        self._pad0004 = v_bytes(size=1)
        self.Action = v_uint32()
        self.LightestState = v_uint32()
        self.Flags = v_uint32()
        self.Status = v_uint32()
        self.IrpMinor = v_uint8()
        self._pad0018 = v_bytes(size=3)
        self.SystemState = v_uint32()
        self.NextSystemState = v_uint32()
        self.ShutdownBugCode = v_ptr32()
        self.DevState = v_ptr32()
        self.HiberContext = v_ptr32()
        self.LastWakeState = v_uint32()
        self.WakeTime = v_uint64()
        self.SleepTime = v_uint64()
class OBJECT_CREATE_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for OBJECT_CREATE_INFORMATION."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Attributes = v_uint32()
        self.RootDirectory = v_ptr32()
        self.ParseContext = v_ptr32()
        self.ProbeMode = v_uint8()
        self._pad0010 = v_bytes(size=3)
        self.PagedPoolCharge = v_uint32()
        self.NonPagedPoolCharge = v_uint32()
        self.SecurityDescriptorCharge = v_uint32()
        self.SecurityDescriptor = v_ptr32()
        self.SecurityQos = v_ptr32()
        self.SecurityQualityOfService = SECURITY_QUALITY_OF_SERVICE()
class OBJECT_HEADER_CREATOR_INFO(vstruct.VStruct):
    """Auto-generated vstruct layout for OBJECT_HEADER_CREATOR_INFO."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TypeList = LIST_ENTRY()
        self.CreatorUniqueProcess = v_ptr32()
        self.CreatorBackTraceIndex = v_uint16()
        self.Reserved = v_uint16()
class PAGED_LOOKASIDE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for PAGED_LOOKASIDE_LIST (trailing pad to 0x100)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.L = GENERAL_LOOKASIDE()
        self.Lock__ObsoleteButDoNotDelete = FAST_MUTEX()
        self._pad0100 = v_bytes(size=96)
class HEAP_STOP_ON_TAG(vstruct.VStruct):
    """Auto-generated vstruct layout for HEAP_STOP_ON_TAG (single packed uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HeapAndTagIndex = v_uint32()
class PO_NOTIFY_ORDER_LEVEL(vstruct.VStruct):
    """Auto-generated vstruct layout for PO_NOTIFY_ORDER_LEVEL."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LevelReady = KEVENT()
        self.DeviceCount = v_uint32()
        self.ActiveCount = v_uint32()
        self.WaitSleep = LIST_ENTRY()
        self.ReadySleep = LIST_ENTRY()
        self.Pending = LIST_ENTRY()
        self.Complete = LIST_ENTRY()
        self.ReadyS0 = LIST_ENTRY()
        self.WaitS0 = LIST_ENTRY()
class RTL_BITMAP(vstruct.VStruct):
    """Auto-generated vstruct layout for RTL_BITMAP (bit count plus buffer pointer)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfBitMap = v_uint32()
        self.Buffer = v_ptr32()
class LARGE_INTEGER(vstruct.VStruct):
    """Auto-generated vstruct layout for LARGE_INTEGER (64-bit value as low/high uint32 pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class _unnamed_12162(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (critical section plus pad)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CriticalSection = RTL_CRITICAL_SECTION()
        self._pad0038 = v_bytes(size=32)
class NPAGED_LOOKASIDE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for NPAGED_LOOKASIDE_LIST (trailing pad to 0x100)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.L = GENERAL_LOOKASIDE()
        self.Lock__ObsoleteButDoNotDelete = v_uint32()
        self._pad0100 = v_bytes(size=124)
class _unnamed_11794(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (ReadMemory plus pad)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ReadMemory = DBGKD_READ_MEMORY64()
        self._pad0028 = v_bytes(size=24)
class KLOCK_QUEUE_HANDLE(vstruct.VStruct):
    """Auto-generated vstruct layout for KLOCK_QUEUE_HANDLE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LockQueue = KSPIN_LOCK_QUEUE()
        self.OldIrql = v_uint8()
        self._pad000c = v_bytes(size=3)
class VPB(vstruct.VStruct):
    """Auto-generated vstruct layout for VPB (volume parameter block; field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.Flags = v_uint16()
        self.VolumeLabelLength = v_uint16()
        self.DeviceObject = v_ptr32()
        self.RealDevice = v_ptr32()
        self.SerialNumber = v_uint32()
        self.ReferenceCount = v_uint32()
        self.VolumeLabel = vstruct.VArray([ v_uint16() for i in xrange(32) ])
class SEGMENT(vstruct.VStruct):
    """Auto-generated vstruct layout for SEGMENT (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlArea = v_ptr32()
        self.TotalNumberOfPtes = v_uint32()
        self.NonExtendedPtes = v_uint32()
        self.WritableUserReferences = v_uint32()
        self.SizeOfSegment = v_uint64()
        self.SegmentPteTemplate = MMPTE()
        self.NumberOfCommittedPages = v_uint32()
        self.ExtendInfo = v_ptr32()
        self.SystemImageBase = v_ptr32()
        self.BasedAddress = v_ptr32()
        self.u1 = _unnamed_12605()
        self.u2 = _unnamed_12606()
        self.PrototypePte = v_ptr32()
        self.ThePtes = vstruct.VArray([ MMPTE() for i in xrange(1) ])
        self._pad0040 = v_bytes(size=4)
class _unnamed_15247(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (TestAllocation plus pad)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TestAllocation = _unnamed_16554()
        self._pad0010 = v_bytes(size=4)
class PP_LOOKASIDE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for PP_LOOKASIDE_LIST (two pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.P = v_ptr32()
        self.L = v_ptr32()
class OBJECT_NAME_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for OBJECT_NAME_INFORMATION (single UNICODE_STRING)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Name = UNICODE_STRING()
class IO_RESOURCE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for IO_RESOURCE_LIST.

    Descriptors is declared with one element; the on-disk/in-memory structure is
    variable-length (Count entries) — presumably handled by the consumer. TODO confirm.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Revision = v_uint16()
        self.Count = v_uint32()
        self.Descriptors = vstruct.VArray([ IO_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class _unnamed_16445(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (page-range with checksum)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PageNo = v_uint32()
        self.StartPage = v_uint32()
        self.EndPage = v_uint32()
        self.CheckSum = v_uint32()
class _unnamed_16446(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (linked table header)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.NextTable = v_uint32()
        self.CheckSum = v_uint32()
        self.EntryCount = v_uint32()
class PRIVATE_CACHE_MAP_FLAGS(vstruct.VStruct):
    """Auto-generated vstruct layout for PRIVATE_CACHE_MAP_FLAGS (bitfields collapsed into one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DontUse = v_uint32()
class FS_FILTER_PARAMETERS(vstruct.VStruct):
    """Auto-generated vstruct layout for FS_FILTER_PARAMETERS (union collapsed to largest member plus pad)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AcquireForModifiedPageWriter = _unnamed_16779()
        self._pad0014 = v_bytes(size=12)
class HEAP(vstruct.VStruct):
    """Auto-generated vstruct layout for HEAP (NT heap header; field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Entry = HEAP_ENTRY()
        self.Signature = v_uint32()
        self.Flags = v_uint32()
        self.ForceFlags = v_uint32()
        self.VirtualMemoryThreshold = v_uint32()
        self.SegmentReserve = v_uint32()
        self.SegmentCommit = v_uint32()
        self.DeCommitFreeBlockThreshold = v_uint32()
        self.DeCommitTotalFreeThreshold = v_uint32()
        self.TotalFreeSize = v_uint32()
        self.MaximumAllocationSize = v_uint32()
        self.ProcessHeapsListIndex = v_uint16()
        self.HeaderValidateLength = v_uint16()
        self.HeaderValidateCopy = v_ptr32()
        self.NextAvailableTagIndex = v_uint16()
        self.MaximumTagIndex = v_uint16()
        self.TagEntries = v_ptr32()
        self.UCRSegments = v_ptr32()
        self.UnusedUnCommittedRanges = v_ptr32()
        self.AlignRound = v_uint32()
        self.AlignMask = v_uint32()
        self.VirtualAllocdBlocks = LIST_ENTRY()
        self.Segments = vstruct.VArray([ v_ptr32() for i in xrange(64) ])
        self.u = _unnamed_12111()
        self.u2 = _unnamed_12112()
        self.AllocatorBackTraceIndex = v_uint16()
        self.NonDedicatedListLength = v_uint32()
        self.LargeBlocksIndex = v_ptr32()
        self.PseudoTagEntries = v_ptr32()
        self.FreeLists = vstruct.VArray([ LIST_ENTRY() for i in xrange(128) ])
        self.LockVariable = v_ptr32()
        self.CommitRoutine = v_ptr32()
        self.FrontEndHeap = v_ptr32()
        self.FrontHeapLockCount = v_uint16()
        self.FrontEndHeapType = v_uint8()
        self.LastSegmentIndex = v_uint8()
class HANDLE_TRACE_DEBUG_INFO(vstruct.VStruct):
    """Auto-generated vstruct layout for HANDLE_TRACE_DEBUG_INFO (4096-entry trace table)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CurrentStackIndex = v_uint32()
        self.TraceDb = vstruct.VArray([ HANDLE_TRACE_DB_ENTRY() for i in xrange(4096) ])
class PRIVILEGE_SET(vstruct.VStruct):
    """Auto-generated vstruct layout for PRIVILEGE_SET.

    Privilege is declared with one element; the real structure carries
    PrivilegeCount entries — presumably resized by the consumer. TODO confirm.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegeCount = v_uint32()
        self.Control = v_uint32()
        self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(1) ])
class CM_RESOURCE_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_RESOURCE_LIST (Count followed by one-element List stub)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.List = vstruct.VArray([ CM_FULL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class EPROCESS(vstruct.VStruct):
    """Auto-generated vstruct layout for EPROCESS (executive process object).

    Field definition order fixes the binary offsets; `_padNNNN` members are
    alignment filler named by the hex offset they pad up to. Do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Pcb = KPROCESS()
        self.ProcessLock = EX_PUSH_LOCK()
        self.CreateTime = LARGE_INTEGER()
        self.ExitTime = LARGE_INTEGER()
        self.RundownProtect = EX_RUNDOWN_REF()
        self.UniqueProcessId = v_ptr32()
        self.ActiveProcessLinks = LIST_ENTRY()
        self.QuotaUsage = vstruct.VArray([ v_uint32() for i in xrange(3) ])
        self.QuotaPeak = vstruct.VArray([ v_uint32() for i in xrange(3) ])
        self.CommitCharge = v_uint32()
        self.PeakVirtualSize = v_uint32()
        self.VirtualSize = v_uint32()
        self.SessionProcessLinks = LIST_ENTRY()
        self.DebugPort = v_ptr32()
        self.ExceptionPort = v_ptr32()
        self.ObjectTable = v_ptr32()
        self.Token = EX_FAST_REF()
        self.WorkingSetLock = FAST_MUTEX()
        self.WorkingSetPage = v_uint32()
        self.AddressCreationLock = FAST_MUTEX()
        self.HyperSpaceLock = v_uint32()
        self.ForkInProgress = v_ptr32()
        self.HardwareTrigger = v_uint32()
        self.VadRoot = v_ptr32()
        self.VadHint = v_ptr32()
        self.CloneRoot = v_ptr32()
        self.NumberOfPrivatePages = v_uint32()
        self.NumberOfLockedPages = v_uint32()
        self.Win32Process = v_ptr32()
        self.Job = v_ptr32()
        self.SectionObject = v_ptr32()
        self.SectionBaseAddress = v_ptr32()
        self.QuotaBlock = v_ptr32()
        self.WorkingSetWatch = v_ptr32()
        self.Win32WindowStation = v_ptr32()
        self.InheritedFromUniqueProcessId = v_ptr32()
        self.LdtInformation = v_ptr32()
        self.VadFreeHint = v_ptr32()
        self.VdmObjects = v_ptr32()
        self.DeviceMap = v_ptr32()
        self.PhysicalVadList = LIST_ENTRY()
        self.PageDirectoryPte = HARDWARE_PTE()
        self._pad0170 = v_bytes(size=4)
        self.Session = v_ptr32()
        self.ImageFileName = vstruct.VArray([ v_uint8() for i in xrange(16) ])
        self.JobLinks = LIST_ENTRY()
        self.LockedPagesList = v_ptr32()
        self.ThreadListHead = LIST_ENTRY()
        self.SecurityPort = v_ptr32()
        self.PaeTop = v_ptr32()
        self.ActiveThreads = v_uint32()
        self.GrantedAccess = v_uint32()
        self.DefaultHardErrorProcessing = v_uint32()
        self.LastThreadExitStatus = v_uint32()
        self.Peb = v_ptr32()
        self.PrefetchTrace = EX_FAST_REF()
        self.ReadOperationCount = LARGE_INTEGER()
        self.WriteOperationCount = LARGE_INTEGER()
        self.OtherOperationCount = LARGE_INTEGER()
        self.ReadTransferCount = LARGE_INTEGER()
        self.WriteTransferCount = LARGE_INTEGER()
        self.OtherTransferCount = LARGE_INTEGER()
        self.CommitChargeLimit = v_uint32()
        self.CommitChargePeak = v_uint32()
        self.AweInfo = v_ptr32()
        self.SeAuditProcessCreationInfo = SE_AUDIT_PROCESS_CREATION_INFO()
        self.Vm = MMSUPPORT()
        self.LastFaultCount = v_uint32()
        self.ModifiedPageCount = v_uint32()
        self.NumberOfVads = v_uint32()
        self.JobStatus = v_uint32()
        self.Flags = v_uint32()
        self.ExitStatus = v_uint32()
        self.NextPageColor = v_uint16()
        self.SubSystemMinorVersion = v_uint8()
        self.SubSystemMajorVersion = v_uint8()
        self.PriorityClass = v_uint8()
        self.WorkingSetAcquiredUnsafe = v_uint8()
        self._pad0258 = v_bytes(size=2)
        self.Cookie = v_uint32()
        self._pad0260 = v_bytes(size=4)
class PHYSICAL_MEMORY_RUN(vstruct.VStruct):
    """Auto-generated vstruct layout for PHYSICAL_MEMORY_RUN (base page plus page count)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BasePage = v_uint32()
        self.PageCount = v_uint32()
class CM_KEY_BODY(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_KEY_BODY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.KeyControlBlock = v_ptr32()
        self.NotifyBlock = v_ptr32()
        self.ProcessID = v_ptr32()
        self.Callers = v_uint32()
        self.CallerAddress = vstruct.VArray([ v_ptr32() for i in xrange(10) ])
        self.KeyBodyList = LIST_ENTRY()
class KMUTANT(vstruct.VStruct):
    """Auto-generated vstruct layout for KMUTANT (kernel mutex object)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.MutantListEntry = LIST_ENTRY()
        self.OwnerThread = v_ptr32()
        self.Abandoned = v_uint8()
        self.ApcDisable = v_uint8()
        self._pad0020 = v_bytes(size=2)
class FX_SAVE_AREA(vstruct.VStruct):
    """Auto-generated vstruct layout for FX_SAVE_AREA (FP/FX register save area)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.U = _unnamed_10880()
        self.NpxSavedCpu = v_uint32()
        self.Cr0NpxState = v_uint32()
class POWER_SEQUENCE(vstruct.VStruct):
    """Auto-generated vstruct layout for POWER_SEQUENCE (D1/D2/D3 sequence counters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SequenceD1 = v_uint32()
        self.SequenceD2 = v_uint32()
        self.SequenceD3 = v_uint32()
class KTIMER(vstruct.VStruct):
    """Auto-generated vstruct layout for KTIMER (kernel timer object)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.DueTime = ULARGE_INTEGER()
        self.TimerListEntry = LIST_ENTRY()
        self.Dpc = v_ptr32()
        self.Period = v_uint32()
class MM_PAGED_POOL_INFO(vstruct.VStruct):
    """Auto-generated vstruct layout for MM_PAGED_POOL_INFO."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PagedPoolAllocationMap = v_ptr32()
        self.EndOfPagedPoolBitmap = v_ptr32()
        self.PagedPoolLargeSessionAllocationMap = v_ptr32()
        self.FirstPteForPagedPool = v_ptr32()
        self.LastPteForPagedPool = v_ptr32()
        self.NextPdeForPagedPoolExpansion = v_ptr32()
        self.PagedPoolHint = v_uint32()
        self.PagedPoolCommit = v_uint32()
        self.AllocatedPagedPool = v_uint32()
class HIVE_LIST_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for HIVE_LIST_ENTRY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Name = v_ptr32()
        self.BaseName = v_ptr32()
        self.CmHive = v_ptr32()
        self.Flags = v_uint32()
        self.CmHive2 = v_ptr32()
        self.ThreadFinished = v_uint8()
        self.ThreadStarted = v_uint8()
        self.Allocate = v_uint8()
        self._pad0018 = v_bytes(size=1)
class CM_PARTIAL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_PARTIAL_RESOURCE_DESCRIPTOR (union member `u` collapsed)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.ShareDisposition = v_uint8()
        self.Flags = v_uint16()
        self.u = _unnamed_13834()
class RTLP_RANGE_LIST_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for RTLP_RANGE_LIST_ENTRY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = v_uint64()
        self.End = v_uint64()
        self.Allocated = _unnamed_14486()
        self.Attributes = v_uint8()
        self.PublicFlags = v_uint8()
        self.PrivateFlags = v_uint16()
        self.ListEntry = LIST_ENTRY()
        self._pad0028 = v_bytes(size=4)
class _unnamed_14765(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (IRP overlay-style fields)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceQueueEntry = KDEVICE_QUEUE_ENTRY()
        self.Thread = v_ptr32()
        self.AuxiliaryBuffer = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.CurrentStackLocation = v_ptr32()
        self.OriginalFileObject = v_ptr32()
class _unnamed_14762(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single Create member)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Create = _unnamed_15988()
class _unnamed_13383(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single CellData member)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CellData = CELL_DATA()
class MMVAD_LONG(vstruct.VStruct):
    """Auto-generated vstruct layout for MMVAD_LONG (long-form virtual address descriptor)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartingVpn = v_uint32()
        self.EndingVpn = v_uint32()
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()
        self.u = _unnamed_14102()
        self.ControlArea = v_ptr32()
        self.FirstPrototypePte = v_ptr32()
        self.LastContiguousPte = v_ptr32()
        self.u2 = _unnamed_14103()
        self.u3 = _unnamed_14104()
        self.u4 = _unnamed_14105()
class CM_VIEW_OF_FILE(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_VIEW_OF_FILE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LRUViewList = LIST_ENTRY()
        self.PinViewList = LIST_ENTRY()
        self.FileOffset = v_uint32()
        self.Size = v_uint32()
        self.ViewAddress = v_ptr32()
        self.Bcb = v_ptr32()
        self.UseCount = v_uint32()
class _unnamed_16143(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (SID list parameters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.StartSid = v_ptr32()
        self.SidList = v_ptr32()
        self.SidListLength = v_uint32()
class CM_FULL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_FULL_RESOURCE_DESCRIPTOR."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.PartialResourceList = CM_PARTIAL_RESOURCE_LIST()
class DBGKD_WRITE_MEMORY64(vstruct.VStruct):
    """Auto-generated vstruct layout for DBGKD_WRITE_MEMORY64."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TargetBaseAddress = v_uint64()
        self.TransferCount = v_uint32()
        self.ActualBytesWritten = v_uint32()
class DBGKD_GET_VERSION64(vstruct.VStruct):
    """Auto-generated vstruct layout for DBGKD_GET_VERSION64 (kernel debugger version reply)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MajorVersion = v_uint16()
        self.MinorVersion = v_uint16()
        self.ProtocolVersion = v_uint16()
        self.Flags = v_uint16()
        self.MachineType = v_uint16()
        self.MaxPacketType = v_uint8()
        self.MaxStateChange = v_uint8()
        self.MaxManipulate = v_uint8()
        self.Simulation = v_uint8()
        self.Unused = vstruct.VArray([ v_uint16() for i in xrange(1) ])
        self.KernBase = v_uint64()
        self.PsLoadedModuleList = v_uint64()
        self.DebuggerDataList = v_uint64()
class _unnamed_16069(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (file-query parameters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileName = v_ptr32()
        self.FileInformationClass = v_uint32()
        self.FileIndex = v_uint32()
class FAST_IO_DISPATCH(vstruct.VStruct):
    """Auto-generated vstruct layout for FAST_IO_DISPATCH (table of fast-I/O routine pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfFastIoDispatch = v_uint32()
        self.FastIoCheckIfPossible = v_ptr32()
        self.FastIoRead = v_ptr32()
        self.FastIoWrite = v_ptr32()
        self.FastIoQueryBasicInfo = v_ptr32()
        self.FastIoQueryStandardInfo = v_ptr32()
        self.FastIoLock = v_ptr32()
        self.FastIoUnlockSingle = v_ptr32()
        self.FastIoUnlockAll = v_ptr32()
        self.FastIoUnlockAllByKey = v_ptr32()
        self.FastIoDeviceControl = v_ptr32()
        self.AcquireFileForNtCreateSection = v_ptr32()
        self.ReleaseFileForNtCreateSection = v_ptr32()
        self.FastIoDetachDevice = v_ptr32()
        self.FastIoQueryNetworkOpenInfo = v_ptr32()
        self.AcquireForModWrite = v_ptr32()
        self.MdlRead = v_ptr32()
        self.MdlReadComplete = v_ptr32()
        self.PrepareMdlWrite = v_ptr32()
        self.MdlWriteComplete = v_ptr32()
        self.FastIoReadCompressed = v_ptr32()
        self.FastIoWriteCompressed = v_ptr32()
        self.MdlReadCompleteCompressed = v_ptr32()
        self.MdlWriteCompleteCompressed = v_ptr32()
        self.FastIoQueryOpen = v_ptr32()
        self.ReleaseForModWrite = v_ptr32()
        self.AcquireForCcFlush = v_ptr32()
        self.ReleaseForCcFlush = v_ptr32()
class CM_KEY_CONTROL_BLOCK(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_KEY_CONTROL_BLOCK."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.RefCount = v_uint32()
        self.ExtFlags = v_uint32()
        self.KeyHash = CM_KEY_HASH()
        self.ParentKcb = v_ptr32()
        self.NameBlock = v_ptr32()
        self.CachedSecurity = v_ptr32()
        self.ValueCache = CACHED_CHILD_LIST()
        self.IndexHint = v_ptr32()
        self.KeyBodyListHead = LIST_ENTRY()
        self.KcbLastWriteTime = LARGE_INTEGER()
        self.KcbMaxNameLen = v_uint16()
        self.KcbMaxValueNameLen = v_uint16()
        self.KcbMaxValueDataLen = v_uint32()
        self.KcbUserFlags = v_uint32()
        self._pad0050 = v_bytes(size=4)
class MMVAD_FLAGS(vstruct.VStruct):
    """Auto-generated vstruct layout for MMVAD_FLAGS (bitfields collapsed into one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CommitCharge = v_uint32()
class MMWSL(vstruct.VStruct):
    """Auto-generated vstruct layout for MMWSL (working set list; field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Quota = v_uint32()
        self.FirstFree = v_uint32()
        self.FirstDynamic = v_uint32()
        self.LastEntry = v_uint32()
        self.NextSlot = v_uint32()
        self.Wsle = v_ptr32()
        self.LastInitializedWsle = v_uint32()
        self.NonDirectCount = v_uint32()
        self.HashTable = v_ptr32()
        self.HashTableSize = v_uint32()
        self.NumberOfCommittedPageTables = v_uint32()
        self.HashTableStart = v_ptr32()
        self.HighestPermittedHashAddress = v_ptr32()
        self.NumberOfImageWaiters = v_uint32()
        self.VadBitMapHint = v_uint32()
        self.UsedPageTableEntries = vstruct.VArray([ v_uint16() for i in xrange(768) ])
        self.CommittedPageTables = vstruct.VArray([ v_uint32() for i in xrange(24) ])
class DBGKD_CONTINUE(vstruct.VStruct):
    """Auto-generated vstruct layout for DBGKD_CONTINUE (single continue-status field)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContinueStatus = v_uint32()
class _unnamed_14102(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (flags as one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LongFlags = v_uint32()
class _unnamed_14103(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (flags as one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LongFlags2 = v_uint32()
class SUPPORTED_RANGES(vstruct.VStruct):
    """Auto-generated vstruct layout for SUPPORTED_RANGES (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Sorted = v_uint8()
        self.Reserved = v_uint8()
        self.NoIO = v_uint32()
        self.IO = SUPPORTED_RANGE()
        self.NoMemory = v_uint32()
        self._pad0030 = v_bytes(size=4)
        self.Memory = SUPPORTED_RANGE()
        self.NoPrefetchMemory = v_uint32()
        self._pad0058 = v_bytes(size=4)
        self.PrefetchMemory = SUPPORTED_RANGE()
        self.NoDma = v_uint32()
        self._pad0080 = v_bytes(size=4)
        self.Dma = SUPPORTED_RANGE()
class WORK_QUEUE_ITEM(vstruct.VStruct):
    """Auto-generated vstruct layout for WORK_QUEUE_ITEM."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.List = LIST_ENTRY()
        self.WorkerRoutine = v_ptr32()
        self.Parameter = v_ptr32()
class _unnamed_14104(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single LIST_ENTRY)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.List = LIST_ENTRY()
class _unnamed_14105(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single Banked pointer)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Banked = v_ptr32()
class EPROCESS_QUOTA_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for EPROCESS_QUOTA_ENTRY (four 32-bit quota counters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Usage = v_uint32()
        self.Limit = v_uint32()
        self.Peak = v_uint32()
        self.Return = v_uint32()
class KSPECIAL_REGISTERS(vstruct.VStruct):
    """Auto-generated vstruct layout for KSPECIAL_REGISTERS (x86 control/debug register snapshot)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Cr0 = v_uint32()
        self.Cr2 = v_uint32()
        self.Cr3 = v_uint32()
        self.Cr4 = v_uint32()
        self.KernelDr0 = v_uint32()
        self.KernelDr1 = v_uint32()
        self.KernelDr2 = v_uint32()
        self.KernelDr3 = v_uint32()
        self.KernelDr6 = v_uint32()
        self.KernelDr7 = v_uint32()
        self.Gdtr = DESCRIPTOR()
        self.Idtr = DESCRIPTOR()
        self.Tr = v_uint16()
        self.Ldtr = v_uint16()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(6) ])
class KINTERRUPT(vstruct.VStruct):
    """Auto-generated vstruct layout for KINTERRUPT (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.InterruptListEntry = LIST_ENTRY()
        self.ServiceRoutine = v_ptr32()
        self.ServiceContext = v_ptr32()
        self.SpinLock = v_uint32()
        self.TickCount = v_uint32()
        self.ActualLock = v_ptr32()
        self.DispatchAddress = v_ptr32()
        self.Vector = v_uint32()
        self.Irql = v_uint8()
        self.SynchronizeIrql = v_uint8()
        self.FloatingSave = v_uint8()
        self.Connected = v_uint8()
        self.Number = v_uint8()
        self.ShareVector = v_uint8()
        self._pad0030 = v_bytes(size=2)
        self.Mode = v_uint32()
        self.ServiceCount = v_uint32()
        self.DispatchCount = v_uint32()
        self.DispatchCode = vstruct.VArray([ v_uint32() for i in xrange(106) ])
class RTL_CRITICAL_SECTION(vstruct.VStruct):
    """Auto-generated vstruct layout for RTL_CRITICAL_SECTION."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DebugInfo = v_ptr32()
        self.LockCount = v_uint32()
        self.RecursionCount = v_uint32()
        self.OwningThread = v_ptr32()
        self.LockSemaphore = v_ptr32()
        self.SpinCount = v_uint32()
class _unnamed_16782(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (five generic pointer arguments)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Argument1 = v_ptr32()
        self.Argument2 = v_ptr32()
        self.Argument3 = v_ptr32()
        self.Argument4 = v_ptr32()
        self.Argument5 = v_ptr32()
class _unnamed_16780(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single resource pointer)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ResourceToRelease = v_ptr32()
class _unnamed_16781(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (sync type and page protection)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SyncType = v_uint32()
        self.PageProtection = v_uint32()
class KSYSTEM_TIME(vstruct.VStruct):
    """Auto-generated vstruct layout for KSYSTEM_TIME (low part plus two high-part copies)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.High1Time = v_uint32()
        self.High2Time = v_uint32()
class PO_DEVICE_NOTIFY_ORDER(vstruct.VStruct):
    """Auto-generated vstruct layout for PO_DEVICE_NOTIFY_ORDER (eight notify-order levels)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DevNodeSequence = v_uint32()
        self.WarmEjectPdoPointer = v_ptr32()
        self.OrderLevel = vstruct.VArray([ PO_NOTIFY_ORDER_LEVEL() for i in xrange(8) ])
class _unnamed_11882(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (ReadMemory plus pad)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ReadMemory = DBGKD_READ_MEMORY32()
        self._pad0028 = v_bytes(size=28)
class FLOATING_SAVE_AREA(vstruct.VStruct):
    """Auto-generated vstruct layout for FLOATING_SAVE_AREA (x87 FPU save area)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint32()
        self.StatusWord = v_uint32()
        self.TagWord = v_uint32()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint32()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint32()
        self.RegisterArea = vstruct.VArray([ v_uint8() for i in xrange(80) ])
        self.Cr0NpxState = v_uint32()
class WMI_LOGGER_MODE(vstruct.VStruct):
    """Auto-generated vstruct layout for WMI_LOGGER_MODE (bitfields collapsed into one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SequentialFile = v_uint32()
class KQUEUE(vstruct.VStruct):
    """Auto-generated vstruct layout for KQUEUE (kernel queue object)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.EntryListHead = LIST_ENTRY()
        self.CurrentCount = v_uint32()
        self.MaximumCount = v_uint32()
        self.ThreadListHead = LIST_ENTRY()
class POOL_TRACKER_TABLE(vstruct.VStruct):
    """Auto-generated vstruct layout for POOL_TRACKER_TABLE (per-tag pool usage counters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Key = v_uint32()
        self.NonPagedAllocs = v_uint32()
        self.NonPagedFrees = v_uint32()
        self.NonPagedBytes = v_uint32()
        self.PagedAllocs = v_uint32()
        self.PagedFrees = v_uint32()
        self.PagedBytes = v_uint32()
class _unnamed_16666(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single DiskId GUID)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DiskId = GUID()
class WMI_BUFFER_STATE(vstruct.VStruct):
    """Auto-generated vstruct layout for WMI_BUFFER_STATE (bitfields collapsed into one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Free = v_uint32()
class LUID_AND_ATTRIBUTES(vstruct.VStruct):
    """Auto-generated vstruct layout for LUID_AND_ATTRIBUTES."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Luid = LUID()
        self.Attributes = v_uint32()
class _unnamed_15560(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (base/limit pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Base = v_uint32()
        self.Limit = v_uint32()
class MMMOD_WRITER_MDL_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for MMMOD_WRITER_MDL_ENTRY (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Links = LIST_ENTRY()
        self.WriteOffset = LARGE_INTEGER()
        self.u = _unnamed_15130()
        self.Irp = v_ptr32()
        self.LastPageToWrite = v_uint32()
        self.PagingListHead = v_ptr32()
        self.CurrentList = v_ptr32()
        self.PagingFile = v_ptr32()
        self.File = v_ptr32()
        self.ControlArea = v_ptr32()
        self.FileResource = v_ptr32()
        self.Mdl = MDL()
        self.Page = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class CACHED_CHILD_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout for CACHED_CHILD_LIST (count plus value-list field)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.ValueList = v_uint32()
class KTHREAD(vstruct.VStruct):
    """Auto-generated vstruct layout for KTHREAD (kernel thread object).

    Field definition order fixes the binary offsets; `_padNNNN` members are
    alignment filler named by the hex offset they pad up to. Do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.MutantListHead = LIST_ENTRY()
        self.InitialStack = v_ptr32()
        self.StackLimit = v_ptr32()
        self.Teb = v_ptr32()
        self.TlsArray = v_ptr32()
        self.KernelStack = v_ptr32()
        self.DebugActive = v_uint8()
        self.State = v_uint8()
        self.Alerted = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.Iopl = v_uint8()
        self.NpxState = v_uint8()
        self.Saturation = v_uint8()
        self.Priority = v_uint8()
        self.ApcState = KAPC_STATE()
        self.ContextSwitches = v_uint32()
        self.IdleSwapBlock = v_uint8()
        self.Spare0 = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.WaitStatus = v_uint32()
        self.WaitIrql = v_uint8()
        self.WaitMode = v_uint8()
        self.WaitNext = v_uint8()
        self.WaitReason = v_uint8()
        self.WaitBlockList = v_ptr32()
        self.WaitListEntry = LIST_ENTRY()
        self.WaitTime = v_uint32()
        self.BasePriority = v_uint8()
        self.DecrementCount = v_uint8()
        self.PriorityDecrement = v_uint8()
        self.Quantum = v_uint8()
        self.WaitBlock = vstruct.VArray([ KWAIT_BLOCK() for i in xrange(4) ])
        self.LegoData = v_ptr32()
        self.KernelApcDisable = v_uint32()
        self.UserAffinity = v_uint32()
        self.SystemAffinityActive = v_uint8()
        self.PowerState = v_uint8()
        self.NpxIrql = v_uint8()
        self.InitialNode = v_uint8()
        self.ServiceTable = v_ptr32()
        self.Queue = v_ptr32()
        self.ApcQueueLock = v_uint32()
        self._pad00f0 = v_bytes(size=4)
        self.Timer = KTIMER()
        self.QueueListEntry = LIST_ENTRY()
        self.SoftAffinity = v_uint32()
        self.Affinity = v_uint32()
        self.Preempted = v_uint8()
        self.ProcessReadyQueue = v_uint8()
        self.KernelStackResident = v_uint8()
        self.NextProcessor = v_uint8()
        self.CallbackStack = v_ptr32()
        self.Win32Thread = v_ptr32()
        self.TrapFrame = v_ptr32()
        self.ApcStatePointer = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
        self.PreviousMode = v_uint8()
        self.EnableStackSwap = v_uint8()
        self.LargeStack = v_uint8()
        self.ResourceIndex = v_uint8()
        self.KernelTime = v_uint32()
        self.UserTime = v_uint32()
        self.SavedApcState = KAPC_STATE()
        self.Alertable = v_uint8()
        self.ApcStateIndex = v_uint8()
        self.ApcQueueable = v_uint8()
        self.AutoAlignment = v_uint8()
        self.StackBase = v_ptr32()
        self.SuspendApc = KAPC()
        self.SuspendSemaphore = KSEMAPHORE()
        self.ThreadListEntry = LIST_ENTRY()
        self.FreezeCount = v_uint8()
        self.SuspendCount = v_uint8()
        self.IdealProcessor = v_uint8()
        self.DisableBoost = v_uint8()
        self._pad01c0 = v_bytes(size=4)
class _unnamed_12531(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (flags as one uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LongFlags = v_uint32()
class ADAPTER_OBJECT(vstruct.VStruct):
    """Auto-generated vstruct layout for ADAPTER_OBJECT (opaque: no fields emitted by the generator)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class _unnamed_10508(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (low/high uint32 pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout for the x86 CONTEXT record (field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContextFlags = v_uint32()
        self.Dr0 = v_uint32()
        self.Dr1 = v_uint32()
        self.Dr2 = v_uint32()
        self.Dr3 = v_uint32()
        self.Dr6 = v_uint32()
        self.Dr7 = v_uint32()
        self.FloatSave = FLOATING_SAVE_AREA()
        self.SegGs = v_uint32()
        self.SegFs = v_uint32()
        self.SegEs = v_uint32()
        self.SegDs = v_uint32()
        self.Edi = v_uint32()
        self.Esi = v_uint32()
        self.Ebx = v_uint32()
        self.Edx = v_uint32()
        self.Ecx = v_uint32()
        self.Eax = v_uint32()
        self.Ebp = v_uint32()
        self.Eip = v_uint32()
        self.SegCs = v_uint32()
        self.EFlags = v_uint32()
        self.Esp = v_uint32()
        self.SegSs = v_uint32()
        self.ExtendedRegisters = vstruct.VArray([ v_uint8() for i in xrange(512) ])
class DBGKD_GET_CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout for DBGKD_GET_CONTEXT (single unused field)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Unused = v_uint32()
class GENERIC_MAPPING(vstruct.VStruct):
    """Auto-generated vstruct layout for GENERIC_MAPPING (four access-mask words)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.GenericRead = v_uint32()
        self.GenericWrite = v_uint32()
        self.GenericExecute = v_uint32()
        self.GenericAll = v_uint32()
class DEVICE_NODE(vstruct.VStruct):
    """Auto-generated vstruct layout for DEVICE_NODE (PnP devnode; field order defines offsets)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Sibling = v_ptr32()
        self.Child = v_ptr32()
        self.Parent = v_ptr32()
        self.LastChild = v_ptr32()
        self.Level = v_uint32()
        self.Notify = v_ptr32()
        self.State = v_uint32()
        self.PreviousState = v_uint32()
        self.StateHistory = vstruct.VArray([ PNP_DEVNODE_STATE() for i in xrange(20) ])
        self.StateHistoryEntry = v_uint32()
        self.CompletionStatus = v_uint32()
        self.PendingIrp = v_ptr32()
        self.Flags = v_uint32()
        self.UserFlags = v_uint32()
        self.Problem = v_uint32()
        self.PhysicalDeviceObject = v_ptr32()
        self.ResourceList = v_ptr32()
        self.ResourceListTranslated = v_ptr32()
        self.InstancePath = UNICODE_STRING()
        self.ServiceName = UNICODE_STRING()
        self.DuplicatePDO = v_ptr32()
        self.ResourceRequirements = v_ptr32()
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.ChildInterfaceType = v_uint32()
        self.ChildBusNumber = v_uint32()
        self.ChildBusTypeIndex = v_uint16()
        self.RemovalPolicy = v_uint8()
        self.HardwareRemovalPolicy = v_uint8()
        self.TargetDeviceNotify = LIST_ENTRY()
        self.DeviceArbiterList = LIST_ENTRY()
        self.DeviceTranslatorList = LIST_ENTRY()
        self.NoTranslatorMask = v_uint16()
        self.QueryTranslatorMask = v_uint16()
        self.NoArbiterMask = v_uint16()
        self.QueryArbiterMask = v_uint16()
        self.OverUsed1 = _unnamed_12916()
        self.OverUsed2 = _unnamed_12917()
        self.BootResources = v_ptr32()
        self.CapabilityFlags = v_uint32()
        self.DockInfo = _unnamed_12918()
        self.DisableableDepends = v_uint32()
        self.PendedSetInterfaceState = LIST_ENTRY()
        self.LegacyBusListEntry = LIST_ENTRY()
        self.DriverUnloadRetryCount = v_uint32()
class RTL_ATOM_TABLE(vstruct.VStruct):
    """Auto-generated vstruct layout for RTL_ATOM_TABLE (Buckets is a one-element stub)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.CriticalSection = RTL_CRITICAL_SECTION()
        self.RtlHandleTable = RTL_HANDLE_TABLE()
        self.NumberOfBuckets = v_uint32()
        self.Buckets = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class _unnamed_15130(vstruct.VStruct):
    """Auto-generated layout for an anonymous embedded substructure (single IoStatus member)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoStatus = IO_STATUS_BLOCK()
class KUSER_SHARED_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for KUSER_SHARED_DATA (shared user/kernel data page).

    Field definition order fixes the binary offsets; `_padNNNN` members are
    alignment filler named by the hex offset they pad up to. Do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TickCountLow = v_uint32()
        self.TickCountMultiplier = v_uint32()
        self.InterruptTime = KSYSTEM_TIME()
        self.SystemTime = KSYSTEM_TIME()
        self.TimeZoneBias = KSYSTEM_TIME()
        self.ImageNumberLow = v_uint16()
        self.ImageNumberHigh = v_uint16()
        self.NtSystemRoot = vstruct.VArray([ v_uint16() for i in xrange(260) ])
        self.MaxStackTraceDepth = v_uint32()
        self.CryptoExponent = v_uint32()
        self.TimeZoneId = v_uint32()
        self.Reserved2 = vstruct.VArray([ v_uint32() for i in xrange(8) ])
        self.NtProductType = v_uint32()
        self.ProductTypeIsValid = v_uint8()
        self._pad026c = v_bytes(size=3)
        self.NtMajorVersion = v_uint32()
        self.NtMinorVersion = v_uint32()
        self.ProcessorFeatures = vstruct.VArray([ v_uint8() for i in xrange(64) ])
        self.Reserved1 = v_uint32()
        self.Reserved3 = v_uint32()
        self.TimeSlip = v_uint32()
        self.AlternativeArchitecture = v_uint32()
        self._pad02c8 = v_bytes(size=4)
        self.SystemExpirationDate = LARGE_INTEGER()
        self.SuiteMask = v_uint32()
        self.KdDebuggerEnabled = v_uint8()
        self.NXSupportPolicy = v_uint8()
        self._pad02d8 = v_bytes(size=2)
        self.ActiveConsoleId = v_uint32()
        self.DismountCount = v_uint32()
        self.ComPlusPackage = v_uint32()
        self.LastSystemRITEventTickCount = v_uint32()
        self.NumberOfPhysicalPages = v_uint32()
        self.SafeBootMode = v_uint8()
        self._pad02f0 = v_bytes(size=3)
        self.TraceLogging = v_uint32()
        self._pad02f8 = v_bytes(size=4)
        self.TestRetInstruction = v_uint64()
        self.SystemCall = v_uint32()
        self.SystemCallReturn = v_uint32()
        self.SystemCallPad = vstruct.VArray([ v_uint64() for i in xrange(3) ])
        self.TickCount = KSYSTEM_TIME()
        self._pad0330 = v_bytes(size=4)
        self.Cookie = v_uint32()
        self._pad0338 = v_bytes(size=4)
class IMAGE_ROM_OPTIONAL_HEADER(vstruct.VStruct):
    """Binary layout for IMAGE_ROM_OPTIONAL_HEADER.

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Magic = v_uint16()
        self.MajorLinkerVersion = v_uint8()
        self.MinorLinkerVersion = v_uint8()
        self.SizeOfCode = v_uint32()
        self.SizeOfInitializedData = v_uint32()
        self.SizeOfUninitializedData = v_uint32()
        self.AddressOfEntryPoint = v_uint32()
        self.BaseOfCode = v_uint32()
        self.BaseOfData = v_uint32()
        self.BaseOfBss = v_uint32()
        self.GprMask = v_uint32()
        self.CprMask = vstruct.VArray([ v_uint32() for i in range(4) ])
        self.GpValue = v_uint32()
class _unnamed_16242(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemContext = v_uint32()
        self.Type = v_uint32()
        self.State = POWER_STATE()
        self.ShutdownType = v_uint32()
class HEAP_FREE_ENTRY(vstruct.VStruct):
    """Binary layout for HEAP_FREE_ENTRY (auto-generated; do not reorder fields)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.PreviousSize = v_uint16()
        self.SmallTagIndex = v_uint8()
        self.Flags = v_uint8()
        self.UnusedBytes = v_uint8()
        self.SegmentIndex = v_uint8()
        self.FreeList = LIST_ENTRY()
class LDR_DATA_TABLE_ENTRY(vstruct.VStruct):
    """Binary layout for LDR_DATA_TABLE_ENTRY (loader module record; 32-bit pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InLoadOrderLinks = LIST_ENTRY()
        self.InMemoryOrderLinks = LIST_ENTRY()
        self.InInitializationOrderLinks = LIST_ENTRY()
        self.DllBase = v_ptr32()
        self.EntryPoint = v_ptr32()
        self.SizeOfImage = v_uint32()
        self.FullDllName = UNICODE_STRING()
        self.BaseDllName = UNICODE_STRING()
        self.Flags = v_uint32()
        self.LoadCount = v_uint16()
        self.TlsIndex = v_uint16()
        self.HashLinks = LIST_ENTRY()
        self.TimeDateStamp = v_uint32()
        self.EntryPointActivationContext = v_ptr32()
        self.PatchInformation = v_ptr32()
class MMADDRESS_LIST(vstruct.VStruct):
    """Binary layout for MMADDRESS_LIST (start/end virtual page numbers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartVpn = v_uint32()
        self.EndVpn = v_uint32()
class _unnamed_15988(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.FileAttributes = v_uint16()
        self.ShareAccess = v_uint16()
        self.EaLength = v_uint32()
class DBGKD_READ_MEMORY64(vstruct.VStruct):
    """Binary layout for DBGKD_READ_MEMORY64 (kernel-debugger read-memory packet)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TargetBaseAddress = v_uint64()
        self.TransferCount = v_uint32()
        self.ActualBytesRead = v_uint32()
class PO_MEMORY_IMAGE(vstruct.VStruct):
    """Binary layout for PO_MEMORY_IMAGE (hibernation image header).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.Version = v_uint32()
        self.CheckSum = v_uint32()
        self.LengthSelf = v_uint32()
        self.PageSelf = v_uint32()
        self.PageSize = v_uint32()
        self.ImageType = v_uint32()
        self._pad0020 = v_bytes(size=4)
        self.SystemTime = LARGE_INTEGER()
        self.InterruptTime = v_uint64()
        self.FeatureFlags = v_uint32()
        self.HiberFlags = v_uint8()
        self.spare = vstruct.VArray([ v_uint8() for i in range(3) ])
        self.NoHiberPtes = v_uint32()
        self.HiberVa = v_uint32()
        self.HiberPte = LARGE_INTEGER()
        self.NoFreePages = v_uint32()
        self.FreeMapCheck = v_uint32()
        self.WakeCheck = v_uint32()
        self.TotalPages = v_uint32()
        self.FirstTablePage = v_uint32()
        self.LastFilePage = v_uint32()
        self.PerfInfo = PO_HIBER_PERF()
class HEAP_UCR_SEGMENT(vstruct.VStruct):
    """Binary layout for HEAP_UCR_SEGMENT (uncommitted-range segment record)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.ReservedSize = v_uint32()
        self.CommittedSize = v_uint32()
        self.filler = v_uint32()
class HHIVE(vstruct.VStruct):
    """Binary layout for HHIVE (registry hive control structure).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.GetCellRoutine = v_ptr32()
        self.ReleaseCellRoutine = v_ptr32()
        self.Allocate = v_ptr32()
        self.Free = v_ptr32()
        self.FileSetSize = v_ptr32()
        self.FileWrite = v_ptr32()
        self.FileRead = v_ptr32()
        self.FileFlush = v_ptr32()
        self.BaseBlock = v_ptr32()
        self.DirtyVector = RTL_BITMAP()
        self.DirtyCount = v_uint32()
        self.DirtyAlloc = v_uint32()
        self.RealWrites = v_uint8()
        self._pad003c = v_bytes(size=3)
        self.Cluster = v_uint32()
        self.Flat = v_uint8()
        self.ReadOnly = v_uint8()
        self.Log = v_uint8()
        self._pad0044 = v_bytes(size=1)
        self.HiveFlags = v_uint32()
        self.LogSize = v_uint32()
        self.RefreshCount = v_uint32()
        self.StorageTypeCount = v_uint32()
        self.Version = v_uint32()
        self.Storage = vstruct.VArray([ DUAL() for i in range(2) ])
class TEB_ACTIVE_FRAME_CONTEXT(vstruct.VStruct):
    """Binary layout for TEB_ACTIVE_FRAME_CONTEXT."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.FrameName = v_ptr32()
class TEB(vstruct.VStruct):
    """Binary layout for the TEB (Thread Environment Block, 32-bit).

    Auto-generated; field order defines the layout and must not change.
    range() replaces xrange for Python 2/3 compatibility (identical
    behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB()
        self.EnvironmentPointer = v_ptr32()
        self.ClientId = CLIENT_ID()
        self.ActiveRpcHandle = v_ptr32()
        self.ThreadLocalStoragePointer = v_ptr32()
        self.ProcessEnvironmentBlock = v_ptr32()
        self.LastErrorValue = v_uint32()
        self.CountOfOwnedCriticalSections = v_uint32()
        self.CsrClientThread = v_ptr32()
        self.Win32ThreadInfo = v_ptr32()
        self.User32Reserved = vstruct.VArray([ v_uint32() for i in range(26) ])
        self.UserReserved = vstruct.VArray([ v_uint32() for i in range(5) ])
        self.WOW32Reserved = v_ptr32()
        self.CurrentLocale = v_uint32()
        self.FpSoftwareStatusRegister = v_uint32()
        self.SystemReserved1 = vstruct.VArray([ v_ptr32() for i in range(54) ])
        self.ExceptionCode = v_uint32()
        self.ActivationContextStack = ACTIVATION_CONTEXT_STACK()
        self.SpareBytes1 = vstruct.VArray([ v_uint8() for i in range(24) ])
        self.GdiTebBatch = GDI_TEB_BATCH()
        self.RealClientId = CLIENT_ID()
        self.GdiCachedProcessHandle = v_ptr32()
        self.GdiClientPID = v_uint32()
        self.GdiClientTID = v_uint32()
        self.GdiThreadLocalInfo = v_ptr32()
        self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in range(62) ])
        self.glDispatchTable = vstruct.VArray([ v_ptr32() for i in range(233) ])
        self.glReserved1 = vstruct.VArray([ v_uint32() for i in range(29) ])
        self.glReserved2 = v_ptr32()
        self.glSectionInfo = v_ptr32()
        self.glSection = v_ptr32()
        self.glTable = v_ptr32()
        self.glCurrentRC = v_ptr32()
        self.glContext = v_ptr32()
        self.LastStatusValue = v_uint32()
        self.StaticUnicodeString = UNICODE_STRING()
        self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in range(261) ])
        self._pad0e0c = v_bytes(size=2)
        self.DeallocationStack = v_ptr32()
        self.TlsSlots = vstruct.VArray([ v_ptr32() for i in range(64) ])
        self.TlsLinks = LIST_ENTRY()
        self.Vdm = v_ptr32()
        self.ReservedForNtRpc = v_ptr32()
        self.DbgSsReserved = vstruct.VArray([ v_ptr32() for i in range(2) ])
        self.HardErrorsAreDisabled = v_uint32()
        self.Instrumentation = vstruct.VArray([ v_ptr32() for i in range(16) ])
        self.WinSockData = v_ptr32()
        self.GdiBatchCount = v_uint32()
        self.InDbgPrint = v_uint8()
        self.FreeStackOnTermination = v_uint8()
        self.HasFiberData = v_uint8()
        self.IdealProcessor = v_uint8()
        self.Spare3 = v_uint32()
        self.ReservedForPerf = v_ptr32()
        self.ReservedForOle = v_ptr32()
        self.WaitingOnLoaderLock = v_uint32()
        self.Wx86Thread = Wx86ThreadState()
        self.TlsExpansionSlots = v_ptr32()
        self.ImpersonationLocale = v_uint32()
        self.IsImpersonating = v_uint32()
        self.NlsCache = v_ptr32()
        self.pShimData = v_ptr32()
        self.HeapVirtualAffinity = v_uint32()
        self.CurrentTransactionHandle = v_ptr32()
        self.ActiveFrame = v_ptr32()
        self.SafeThunkCall = v_uint8()
        self.BooleanSpare = vstruct.VArray([ v_uint8() for i in range(3) ])
class DRIVER_OBJECT(vstruct.VStruct):
    """Binary layout for DRIVER_OBJECT (32-bit pointers).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.Flags = v_uint32()
        self.DriverStart = v_ptr32()
        self.DriverSize = v_uint32()
        self.DriverSection = v_ptr32()
        self.DriverExtension = v_ptr32()
        self.DriverName = UNICODE_STRING()
        self.HardwareDatabase = v_ptr32()
        self.FastIoDispatch = v_ptr32()
        self.DriverInit = v_ptr32()
        self.DriverStartIo = v_ptr32()
        self.DriverUnload = v_ptr32()
        self.MajorFunction = vstruct.VArray([ v_ptr32() for i in range(28) ])
class OBJECT_SYMBOLIC_LINK(vstruct.VStruct):
    """Binary layout for OBJECT_SYMBOLIC_LINK."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LinkTarget = UNICODE_STRING()
        self.LinkTargetRemaining = UNICODE_STRING()
        self.LinkTargetObject = v_ptr32()
        self.DosDeviceDriveIndex = v_uint32()
class EJOB(vstruct.VStruct):
    """Binary layout for EJOB (executive job object; auto-generated, do not reorder)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Event = KEVENT()
        self.JobLinks = LIST_ENTRY()
        self.ProcessListHead = LIST_ENTRY()
        self.JobLock = ERESOURCE()
        self.TotalUserTime = LARGE_INTEGER()
        self.TotalKernelTime = LARGE_INTEGER()
        self.ThisPeriodTotalUserTime = LARGE_INTEGER()
        self.ThisPeriodTotalKernelTime = LARGE_INTEGER()
        self.TotalPageFaultCount = v_uint32()
        self.TotalProcesses = v_uint32()
        self.ActiveProcesses = v_uint32()
        self.TotalTerminatedProcesses = v_uint32()
        self.PerProcessUserTimeLimit = LARGE_INTEGER()
        self.PerJobUserTimeLimit = LARGE_INTEGER()
        self.LimitFlags = v_uint32()
        self.MinimumWorkingSetSize = v_uint32()
        self.MaximumWorkingSetSize = v_uint32()
        self.ActiveProcessLimit = v_uint32()
        self.Affinity = v_uint32()
        self.PriorityClass = v_uint8()
        self._pad00b0 = v_bytes(size=3)
        self.UIRestrictionsClass = v_uint32()
        self.SecurityLimitFlags = v_uint32()
        self.Token = v_ptr32()
        self.Filter = v_ptr32()
        self.EndOfJobTimeAction = v_uint32()
        self.CompletionPort = v_ptr32()
        self.CompletionKey = v_ptr32()
        self.SessionId = v_uint32()
        self.SchedulingClass = v_uint32()
        self._pad00d8 = v_bytes(size=4)
        self.ReadOperationCount = v_uint64()
        self.WriteOperationCount = v_uint64()
        self.OtherOperationCount = v_uint64()
        self.ReadTransferCount = v_uint64()
        self.WriteTransferCount = v_uint64()
        self.OtherTransferCount = v_uint64()
        self.IoInfo = IO_COUNTERS()
        self.ProcessMemoryLimit = v_uint32()
        self.JobMemoryLimit = v_uint32()
        self.PeakProcessMemoryUsed = v_uint32()
        self.PeakJobMemoryUsed = v_uint32()
        self.CurrentJobMemoryUsed = v_uint32()
        self.MemoryLimitsLock = FAST_MUTEX()
        self.JobSetLinks = LIST_ENTRY()
        self.MemberLevel = v_uint32()
        self.JobFlags = v_uint32()
        self._pad0180 = v_bytes(size=4)
class _unnamed_16023(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Key = v_uint32()
        self.ByteOffset = LARGE_INTEGER()
class DBGKD_READ_WRITE_IO_EXTENDED64(vstruct.VStruct):
    """Binary layout for DBGKD_READ_WRITE_IO_EXTENDED64."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSize = v_uint32()
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.AddressSpace = v_uint32()
        self.IoAddress = v_uint64()
        self.DataValue = v_uint32()
        self._pad0020 = v_bytes(size=4)
class IO_STATUS_BLOCK(vstruct.VStruct):
    """Binary layout for IO_STATUS_BLOCK (status + information words)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Status = v_uint32()
        self.Information = v_uint32()
class KPROCESSOR_STATE(vstruct.VStruct):
    """Binary layout for KPROCESSOR_STATE (context frame + special registers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContextFrame = CONTEXT()
        self.SpecialRegisters = KSPECIAL_REGISTERS()
class KiIoAccessMap(vstruct.VStruct):
    """Binary layout for KiIoAccessMap (direction map + I/O permission map).

    Auto-generated; range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DirectionMap = vstruct.VArray([ v_uint8() for i in range(32) ])
        self.IoMap = vstruct.VArray([ v_uint8() for i in range(8196) ])
class KAPC(vstruct.VStruct):
    """Binary layout for KAPC (asynchronous procedure call record, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.Spare0 = v_uint32()
        self.Thread = v_ptr32()
        self.ApcListEntry = LIST_ENTRY()
        self.KernelRoutine = v_ptr32()
        self.RundownRoutine = v_ptr32()
        self.NormalRoutine = v_ptr32()
        self.NormalContext = v_ptr32()
        self.SystemArgument1 = v_ptr32()
        self.SystemArgument2 = v_ptr32()
        self.ApcStateIndex = v_uint8()
        self.ApcMode = v_uint8()
        self.Inserted = v_uint8()
        self._pad0030 = v_bytes(size=1)
class POOL_TRACKER_BIG_PAGES(vstruct.VStruct):
    """Binary layout for POOL_TRACKER_BIG_PAGES."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Va = v_ptr32()
        self.Key = v_uint32()
        self.NumberOfPages = v_uint32()
class SID_IDENTIFIER_AUTHORITY(vstruct.VStruct):
    """Binary layout for SID_IDENTIFIER_AUTHORITY (6 raw bytes).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Value = vstruct.VArray([ v_uint8() for i in range(6) ])
class RTL_RANGE_LIST(vstruct.VStruct):
    """Binary layout for RTL_RANGE_LIST."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = LIST_ENTRY()
        self.Flags = v_uint32()
        self.Count = v_uint32()
        self.Stamp = v_uint32()
class LARGE_CONTROL_AREA(vstruct.VStruct):
    """Binary layout for LARGE_CONTROL_AREA (section control area variant)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Segment = v_ptr32()
        self.DereferenceList = LIST_ENTRY()
        self.NumberOfSectionReferences = v_uint32()
        self.NumberOfPfnReferences = v_uint32()
        self.NumberOfMappedViews = v_uint32()
        self.NumberOfSubsections = v_uint16()
        self.FlushInProgressCount = v_uint16()
        self.NumberOfUserReferences = v_uint32()
        self.u = _unnamed_12520()
        self.FilePointer = v_ptr32()
        self.WaitingForDeletion = v_ptr32()
        self.ModifiedWriteCount = v_uint16()
        self.NumberOfSystemCacheViews = v_uint16()
        self.StartingFrame = v_uint32()
        self.UserGlobalList = LIST_ENTRY()
        self.SessionId = v_uint32()
class VI_POOL_ENTRY(vstruct.VStruct):
    """Binary layout for VI_POOL_ENTRY (single in-use sub-record)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InUse = VI_POOL_ENTRY_INUSE()
class POOL_HEADER(vstruct.VStruct):
    """Binary layout for POOL_HEADER."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PreviousSize = v_uint16()
        self.BlockSize = v_uint16()
        self.ProcessBilled = v_ptr32()
class SHARED_CACHE_MAP(vstruct.VStruct):
    """Binary layout for SHARED_CACHE_MAP (cache manager per-file map).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NodeTypeCode = v_uint16()
        self.NodeByteSize = v_uint16()
        self.OpenCount = v_uint32()
        self.FileSize = LARGE_INTEGER()
        self.BcbList = LIST_ENTRY()
        self.SectionSize = LARGE_INTEGER()
        self.ValidDataLength = LARGE_INTEGER()
        self.ValidDataGoal = LARGE_INTEGER()
        self.InitialVacbs = vstruct.VArray([ v_ptr32() for i in range(4) ])
        self.Vacbs = v_ptr32()
        self.FileObject = v_ptr32()
        self.ActiveVacb = v_ptr32()
        self.NeedToZero = v_ptr32()
        self.ActivePage = v_uint32()
        self.NeedToZeroPage = v_uint32()
        self.ActiveVacbSpinLock = v_uint32()
        self.VacbActiveCount = v_uint32()
        self.DirtyPages = v_uint32()
        self.SharedCacheMapLinks = LIST_ENTRY()
        self.Flags = v_uint32()
        self.Status = v_uint32()
        self.Mbcb = v_ptr32()
        self.Section = v_ptr32()
        self.CreateEvent = v_ptr32()
        self.WaitOnActiveCount = v_ptr32()
        self.PagesToWrite = v_uint32()
        self.BeyondLastFlush = v_uint64()
        self.Callbacks = v_ptr32()
        self.LazyWriteContext = v_ptr32()
        self.PrivateList = LIST_ENTRY()
        self.LogHandle = v_ptr32()
        self.FlushToLsnRoutine = v_ptr32()
        self.DirtyPageThreshold = v_uint32()
        self.LazyWritePassCount = v_uint32()
        self.UninitializeEvent = v_ptr32()
        self.NeedToZeroVacb = v_ptr32()
        self.BcbSpinLock = v_uint32()
        self.Reserved = v_ptr32()
        self.Event = KEVENT()
        self.VacbPushLock = EX_PUSH_LOCK()
        self._pad00d8 = v_bytes(size=4)
        self.PrivateCacheMap = PRIVATE_CACHE_MAP()
class TRACE_ENABLE_FLAG_EXTENSION(vstruct.VStruct):
    """Binary layout for TRACE_ENABLE_FLAG_EXTENSION."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint16()
        self.Length = v_uint8()
        self.Flag = v_uint8()
class MI_VERIFIER_POOL_HEADER(vstruct.VStruct):
    """Binary layout for MI_VERIFIER_POOL_HEADER."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListIndex = v_uint32()
        self.Verifier = v_ptr32()
class MMBANKED_SECTION(vstruct.VStruct):
    """Binary layout for MMBANKED_SECTION.

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BasePhysicalPage = v_uint32()
        self.BasedPte = v_ptr32()
        self.BankSize = v_uint32()
        self.BankShift = v_uint32()
        self.BankedRoutine = v_ptr32()
        self.Context = v_ptr32()
        self.CurrentMappedPte = v_ptr32()
        self.BankTemplate = vstruct.VArray([ MMPTE() for i in range(1) ])
class PCI_POWER_STATE(vstruct.VStruct):
    """Binary layout for PCI_POWER_STATE.

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CurrentSystemState = v_uint32()
        self.CurrentDeviceState = v_uint32()
        self.SystemWakeLevel = v_uint32()
        self.DeviceWakeLevel = v_uint32()
        self.SystemStateMapping = vstruct.VArray([ DEVICE_POWER_STATE() for i in range(7) ])
        self.WaitWakeIrp = v_ptr32()
        self.SavedCancelRoutine = v_ptr32()
        self.Paging = v_uint32()
        self.Hibernate = v_uint32()
        self.CrashDump = v_uint32()
class RTL_CRITICAL_SECTION_DEBUG(vstruct.VStruct):
    """Binary layout for RTL_CRITICAL_SECTION_DEBUG.

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.CreatorBackTraceIndex = v_uint16()
        self.CriticalSection = v_ptr32()
        self.ProcessLocksList = LIST_ENTRY()
        self.EntryCount = v_uint32()
        self.ContentionCount = v_uint32()
        self.Spare = vstruct.VArray([ v_uint32() for i in range(2) ])
class PNP_DEVICE_EVENT_ENTRY(vstruct.VStruct):
    """Binary layout for PNP_DEVICE_EVENT_ENTRY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.Argument = v_uint32()
        self.CallerEvent = v_ptr32()
        self.Callback = v_ptr32()
        self.Context = v_ptr32()
        self.VetoType = v_ptr32()
        self.VetoName = v_ptr32()
        self.Data = PLUGPLAY_EVENT_BLOCK()
class ARBITER_CONFLICT_INFO(vstruct.VStruct):
    """Binary layout for ARBITER_CONFLICT_INFO (owner + 64-bit address range)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OwningObject = v_ptr32()
        self._pad0008 = v_bytes(size=4)
        self.Start = v_uint64()
        self.End = v_uint64()
class SID_AND_ATTRIBUTES(vstruct.VStruct):
    """Binary layout for SID_AND_ATTRIBUTES."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Sid = v_ptr32()
        self.Attributes = v_uint32()
class VI_DEADLOCK_GLOBALS(vstruct.VStruct):
    """Binary layout for VI_DEADLOCK_GLOBALS (driver-verifier deadlock state).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Nodes = vstruct.VArray([ v_uint32() for i in range(2) ])
        self.Resources = vstruct.VArray([ v_uint32() for i in range(2) ])
        self.Threads = vstruct.VArray([ v_uint32() for i in range(2) ])
        self.TimeAcquire = v_uint64()
        self.TimeRelease = v_uint64()
        self.BytesAllocated = v_uint32()
        self.ResourceDatabase = v_ptr32()
        self.ThreadDatabase = v_ptr32()
        self.AllocationFailures = v_uint32()
        self.NodesTrimmedBasedOnAge = v_uint32()
        self.NodesTrimmedBasedOnCount = v_uint32()
        self.NodesSearched = v_uint32()
        self.MaxNodesSearched = v_uint32()
        self.SequenceNumber = v_uint32()
        self.RecursionDepthLimit = v_uint32()
        self.SearchedNodesLimit = v_uint32()
        self.DepthLimitHits = v_uint32()
        self.SearchLimitHits = v_uint32()
        self.ABC_ACB_Skipped = v_uint32()
        self.FreeResourceList = LIST_ENTRY()
        self.FreeThreadList = LIST_ENTRY()
        self.FreeNodeList = LIST_ENTRY()
        self.FreeResourceCount = v_uint32()
        self.FreeThreadCount = v_uint32()
        self.FreeNodeCount = v_uint32()
        self.Instigator = v_ptr32()
        self.NumberOfParticipants = v_uint32()
        self.Participant = vstruct.VArray([ v_ptr32() for i in range(32) ])
        self.CacheReductionInProgress = v_uint32()
class TOKEN(vstruct.VStruct):
    """Binary layout for TOKEN (security access token; auto-generated, do not reorder)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TokenSource = TOKEN_SOURCE()
        self.TokenId = LUID()
        self.AuthenticationId = LUID()
        self.ParentTokenId = LUID()
        self.ExpirationTime = LARGE_INTEGER()
        self.TokenLock = v_ptr32()
        self._pad0038 = v_bytes(size=4)
        self.AuditPolicy = SEP_AUDIT_POLICY()
        self.ModifiedId = LUID()
        self.SessionId = v_uint32()
        self.UserAndGroupCount = v_uint32()
        self.RestrictedSidCount = v_uint32()
        self.PrivilegeCount = v_uint32()
        self.VariableLength = v_uint32()
        self.DynamicCharged = v_uint32()
        self.DynamicAvailable = v_uint32()
        self.DefaultOwnerIndex = v_uint32()
        self.UserAndGroups = v_ptr32()
        self.RestrictedSids = v_ptr32()
        self.PrimaryGroup = v_ptr32()
        self.Privileges = v_ptr32()
        self.DynamicPart = v_ptr32()
        self.DefaultDacl = v_ptr32()
        self.TokenType = v_uint32()
        self.ImpersonationLevel = v_uint32()
        self.TokenFlags = v_uint32()
        self.TokenInUse = v_uint8()
        self._pad0090 = v_bytes(size=3)
        self.ProxyData = v_ptr32()
        self.AuditData = v_ptr32()
        self.OriginatingLogonSession = LUID()
        self.VariablePart = v_uint32()
        self._pad00a8 = v_bytes(size=4)
class MMCOLOR_TABLES(vstruct.VStruct):
    """Binary layout for MMCOLOR_TABLES."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint32()
        self.Blink = v_ptr32()
        self.Count = v_uint32()
class DISPATCHER_HEADER(vstruct.VStruct):
    """Binary layout for DISPATCHER_HEADER (common header of waitable kernel objects)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.Absolute = v_uint8()
        self.Size = v_uint8()
        self.Inserted = v_uint8()
        self.SignalState = v_uint32()
        self.WaitListHead = LIST_ENTRY()
class _unnamed_16509(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name): single DeviceNumber field.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceNumber = v_uint32()
class _unnamed_16110(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.FsControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()
class _unnamed_16505(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name): Mbr sub-record + trailing pad.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Mbr = _unnamed_16663()
        self._pad0010 = v_bytes(size=8)
class DBGKD_READ_WRITE_IO64(vstruct.VStruct):
    """Binary layout for DBGKD_READ_WRITE_IO64."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoAddress = v_uint64()
        self.DataSize = v_uint32()
        self.DataValue = v_uint32()
class PROCESSOR_POWER_STATE(vstruct.VStruct):
    """Binary layout for PROCESSOR_POWER_STATE.

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdleFunction = v_ptr32()
        self.Idle0KernelTimeLimit = v_uint32()
        self.Idle0LastTime = v_uint32()
        self.IdleHandlers = v_ptr32()
        self.IdleState = v_ptr32()
        self.IdleHandlersCount = v_uint32()
        self.LastCheck = v_uint64()
        self.IdleTimes = PROCESSOR_IDLE_TIMES()
        self.IdleTime1 = v_uint32()
        self.PromotionCheck = v_uint32()
        self.IdleTime2 = v_uint32()
        self.CurrentThrottle = v_uint8()
        self.ThermalThrottleLimit = v_uint8()
        self.CurrentThrottleIndex = v_uint8()
        self.ThermalThrottleIndex = v_uint8()
        self.LastKernelUserTime = v_uint32()
        self.LastIdleThreadKernelTime = v_uint32()
        self.PackageIdleStartTime = v_uint32()
        self.PackageIdleTime = v_uint32()
        self.DebugCount = v_uint32()
        self.LastSysTime = v_uint32()
        self.TotalIdleStateTime = vstruct.VArray([ v_uint64() for i in range(3) ])
        self.TotalIdleTransitions = vstruct.VArray([ v_uint32() for i in range(3) ])
        self._pad0090 = v_bytes(size=4)
        self.PreviousC3StateTime = v_uint64()
        self.KneeThrottleIndex = v_uint8()
        self.ThrottleLimitIndex = v_uint8()
        self.PerfStatesCount = v_uint8()
        self.ProcessorMinThrottle = v_uint8()
        self.ProcessorMaxThrottle = v_uint8()
        self.EnableIdleAccounting = v_uint8()
        self.LastC3Percentage = v_uint8()
        self.LastAdjustedBusyPercentage = v_uint8()
        self.PromotionCount = v_uint32()
        self.DemotionCount = v_uint32()
        self.ErrorCount = v_uint32()
        self.RetryCount = v_uint32()
        self.Flags = v_uint32()
        self._pad00b8 = v_bytes(size=4)
        self.PerfCounterFrequency = LARGE_INTEGER()
        self.PerfTickCount = v_uint32()
        self._pad00c8 = v_bytes(size=4)
        self.PerfTimer = KTIMER()
        self.PerfDpc = KDPC()
        self.PerfStates = v_ptr32()
        self.PerfSetThrottle = v_ptr32()
        self.LastC3KernelUserTime = v_uint32()
        self.LastPackageIdleTime = v_uint32()
class SECURITY_CLIENT_CONTEXT(vstruct.VStruct):
    """Binary layout for SECURITY_CLIENT_CONTEXT."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityQos = SECURITY_QUALITY_OF_SERVICE()
        self.ClientToken = v_ptr32()
        self.DirectlyAccessClientToken = v_uint8()
        self.DirectAccessEffectiveOnly = v_uint8()
        self.ServerIsRemote = v_uint8()
        self._pad0014 = v_bytes(size=1)
        self.ClientTokenControl = TOKEN_CONTROL()
class DBGKD_SEARCH_MEMORY(vstruct.VStruct):
    """Binary layout for DBGKD_SEARCH_MEMORY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SearchAddress = v_uint64()
        self.SearchLength = v_uint64()
        self.PatternLength = v_uint32()
        self._pad0018 = v_bytes(size=4)
class DESCRIPTOR(vstruct.VStruct):
    """Binary layout for DESCRIPTOR (pad/limit/base triple)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Pad = v_uint16()
        self.Limit = v_uint16()
        self.Base = v_uint32()
class DBGKD_MANIPULATE_STATE64(vstruct.VStruct):
    """Binary layout for DBGKD_MANIPULATE_STATE64 (header + per-API union `u`)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ApiNumber = v_uint32()
        self.ProcessorLevel = v_uint16()
        self.Processor = v_uint16()
        self.ReturnStatus = v_uint32()
        self._pad0010 = v_bytes(size=4)
        self.u = _unnamed_11794()
class LPCP_PORT_QUEUE(vstruct.VStruct):
    """Binary layout for LPCP_PORT_QUEUE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NonPagedPortQueue = v_ptr32()
        self.Semaphore = v_ptr32()
        self.ReceiveHead = LIST_ENTRY()
class DBGKD_LOAD_SYMBOLS64(vstruct.VStruct):
    """Binary layout for DBGKD_LOAD_SYMBOLS64."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PathNameLength = v_uint32()
        self._pad0008 = v_bytes(size=4)
        self.BaseOfDll = v_uint64()
        self.ProcessId = v_uint64()
        self.CheckSum = v_uint32()
        self.SizeOfImage = v_uint32()
        self.UnloadSymbols = v_uint8()
        self._pad0028 = v_bytes(size=7)
class CACHE_UNINITIALIZE_EVENT(vstruct.VStruct):
    """Binary layout for CACHE_UNINITIALIZE_EVENT (next pointer + embedded KEVENT)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Event = KEVENT()
class SECURITY_QUALITY_OF_SERVICE(vstruct.VStruct):
    """Binary layout for SECURITY_QUALITY_OF_SERVICE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.ImpersonationLevel = v_uint32()
        self.ContextTrackingMode = v_uint8()
        self.EffectiveOnly = v_uint8()
        self._pad000c = v_bytes(size=2)
class COMPRESSED_DATA_INFO(vstruct.VStruct):
    """Binary layout for COMPRESSED_DATA_INFO.

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CompressionFormatAndEngine = v_uint16()
        self.CompressionUnitShift = v_uint8()
        self.ChunkShift = v_uint8()
        self.ClusterShift = v_uint8()
        self.Reserved = v_uint8()
        self.NumberOfChunks = v_uint16()
        self.CompressedChunkSizes = vstruct.VArray([ v_uint32() for i in range(1) ])
class _unnamed_14650(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = v_uint32()
        self.Length = v_uint32()
        self.Reserved = v_uint32()
class RTL_HANDLE_TABLE(vstruct.VStruct):
    """Binary layout for RTL_HANDLE_TABLE.

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MaximumNumberOfHandles = v_uint32()
        self.SizeOfHandleTableEntry = v_uint32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in range(2) ])
        self.FreeHandles = v_ptr32()
        self.CommittedHandles = v_ptr32()
        self.UnCommittedHandles = v_ptr32()
        self.MaxReservedHandles = v_ptr32()
class _unnamed_14654(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSize = v_uint32()
        self.Reserved1 = v_uint32()
        self.Reserved2 = v_uint32()
class CMHIVE(vstruct.VStruct):
    """Binary layout for CMHIVE (configuration-manager hive wrapper around HHIVE).

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Hive = HHIVE()
        self.FileHandles = vstruct.VArray([ v_ptr32() for i in range(3) ])
        self.NotifyList = LIST_ENTRY()
        self.HiveList = LIST_ENTRY()
        self.HiveLock = v_ptr32()
        self.ViewLock = v_ptr32()
        self.LRUViewListHead = LIST_ENTRY()
        self.PinViewListHead = LIST_ENTRY()
        self.FileObject = v_ptr32()
        self.FileFullPath = UNICODE_STRING()
        self.FileUserName = UNICODE_STRING()
        self.MappedViews = v_uint16()
        self.PinnedViews = v_uint16()
        self.UseCount = v_uint32()
        self.SecurityCount = v_uint32()
        self.SecurityCacheSize = v_uint32()
        self.SecurityHitHint = v_uint32()
        self.SecurityCache = v_ptr32()
        self.SecurityHash = vstruct.VArray([ LIST_ENTRY() for i in range(64) ])
        self.UnloadEvent = v_ptr32()
        self.RootKcb = v_ptr32()
        self.Frozen = v_uint8()
        self._pad047c = v_bytes(size=3)
        self.UnloadWorkItem = v_ptr32()
        self.GrowOnlyMode = v_uint8()
        self._pad0484 = v_bytes(size=3)
        self.GrowOffset = v_uint32()
        self.KcbConvertListHead = LIST_ENTRY()
        self.KnodeConvertListHead = LIST_ENTRY()
        self.CellRemapArray = v_ptr32()
class POP_SHUTDOWN_BUG_CHECK(vstruct.VStruct):
    """Binary layout for POP_SHUTDOWN_BUG_CHECK (bugcheck code + four parameters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Code = v_uint32()
        self.Parameter1 = v_uint32()
        self.Parameter2 = v_uint32()
        self.Parameter3 = v_uint32()
        self.Parameter4 = v_uint32()
class SECTION_OBJECT(vstruct.VStruct):
    """Binary layout for SECTION_OBJECT."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartingVa = v_ptr32()
        self.EndingVa = v_ptr32()
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()
        self.Segment = v_ptr32()
class LUID(vstruct.VStruct):
    """Binary layout for LUID (locally unique identifier: low/high 32-bit parts)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class OBJECT_HEADER(vstruct.VStruct):
    """Binary layout for OBJECT_HEADER (object-manager header preceding the body)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PointerCount = v_uint32()
        self.HandleCount = v_uint32()
        self.Type = v_ptr32()
        self.NameInfoOffset = v_uint8()
        self.HandleInfoOffset = v_uint8()
        self.QuotaInfoOffset = v_uint8()
        self.Flags = v_uint8()
        self.ObjectCreateInfo = v_ptr32()
        self.SecurityDescriptor = v_ptr32()
        self.Body = QUAD()
class PCI_MN_DISPATCH_TABLE(vstruct.VStruct):
    """Binary layout for PCI_MN_DISPATCH_TABLE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DispatchStyle = v_uint32()
        self.DispatchFunction = v_ptr32()
class PCI_HEADER_TYPE_2(vstruct.VStruct):
    """Binary layout for PCI_HEADER_TYPE_2 (CardBus bridge config header tail).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SocketRegistersBaseAddress = v_uint32()
        self.CapabilitiesPtr = v_uint8()
        self.Reserved = v_uint8()
        self.SecondaryStatus = v_uint16()
        self.PrimaryBus = v_uint8()
        self.SecondaryBus = v_uint8()
        self.SubordinateBus = v_uint8()
        self.SecondaryLatency = v_uint8()
        self.Range = vstruct.VArray([ _unnamed_15560() for i in range(4) ])
        self.InterruptLine = v_uint8()
        self.InterruptPin = v_uint8()
        self.BridgeControl = v_uint16()
class PCI_HEADER_TYPE_1(vstruct.VStruct):
    """Binary layout for PCI_HEADER_TYPE_1 (PCI-to-PCI bridge config header tail).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseAddresses = vstruct.VArray([ v_uint32() for i in range(2) ])
        self.PrimaryBus = v_uint8()
        self.SecondaryBus = v_uint8()
        self.SubordinateBus = v_uint8()
        self.SecondaryLatency = v_uint8()
        self.IOBase = v_uint8()
        self.IOLimit = v_uint8()
        self.SecondaryStatus = v_uint16()
        self.MemoryBase = v_uint16()
        self.MemoryLimit = v_uint16()
        self.PrefetchBase = v_uint16()
        self.PrefetchLimit = v_uint16()
        self.PrefetchBaseUpper32 = v_uint32()
        self.PrefetchLimitUpper32 = v_uint32()
        self.IOBaseUpper16 = v_uint16()
        self.IOLimitUpper16 = v_uint16()
        self.CapabilitiesPtr = v_uint8()
        self.Reserved1 = vstruct.VArray([ v_uint8() for i in range(3) ])
        self.ROMBaseAddress = v_uint32()
        self.InterruptLine = v_uint8()
        self.InterruptPin = v_uint8()
        self.BridgeControl = v_uint16()
class PCI_HEADER_TYPE_0(vstruct.VStruct):
    """Binary layout for PCI_HEADER_TYPE_0 (standard device config header tail).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseAddresses = vstruct.VArray([ v_uint32() for i in range(6) ])
        self.CIS = v_uint32()
        self.SubVendorID = v_uint16()
        self.SubSystemID = v_uint16()
        self.ROMBaseAddress = v_uint32()
        self.CapabilitiesPtr = v_uint8()
        self.Reserved1 = vstruct.VArray([ v_uint8() for i in range(3) ])
        self.Reserved2 = v_uint32()
        self.InterruptLine = v_uint8()
        self.InterruptPin = v_uint8()
        self.MinimumGrant = v_uint8()
        self.MaximumLatency = v_uint8()
class MMPFN(vstruct.VStruct):
    """Binary layout for MMPFN (page-frame-number database entry; union-heavy)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u1 = _unnamed_13150()
        self.PteAddress = v_ptr32()
        self.u2 = _unnamed_13151()
        self.u3 = _unnamed_13152()
        self.OriginalPte = MMPTE()
        self.u4 = _unnamed_13153()
class OBJECT_DUMP_CONTROL(vstruct.VStruct):
    """Binary layout for OBJECT_DUMP_CONTROL."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Stream = v_ptr32()
        self.Detail = v_uint32()
class CACHE_MANAGER_CALLBACKS(vstruct.VStruct):
    """Binary layout for CACHE_MANAGER_CALLBACKS (four callback pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AcquireForLazyWrite = v_ptr32()
        self.ReleaseFromLazyWrite = v_ptr32()
        self.AcquireForReadAhead = v_ptr32()
        self.ReleaseFromReadAhead = v_ptr32()
class DBGKD_CONTINUE2(vstruct.VStruct):
    """Binary layout for DBGKD_CONTINUE2."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContinueStatus = v_uint32()
        self.ControlSet = X86_DBGKD_CONTROL_SET()
        self._pad0020 = v_bytes(size=12)
class HANDLE_TRACE_DB_ENTRY(vstruct.VStruct):
    """Binary layout for HANDLE_TRACE_DB_ENTRY (handle trace + 16-frame stack).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientId = CLIENT_ID()
        self.Handle = v_ptr32()
        self.Type = v_uint32()
        self.StackTrace = vstruct.VArray([ v_ptr32() for i in range(16) ])
class LPCP_NONPAGED_PORT_QUEUE(vstruct.VStruct):
    """Binary layout for LPCP_NONPAGED_PORT_QUEUE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Semaphore = KSEMAPHORE()
        self.BackPointer = v_ptr32()
class DEVICE_RELATIONS(vstruct.VStruct):
    """Binary layout for DEVICE_RELATIONS (count + variable pointer array stub).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.Objects = vstruct.VArray([ v_ptr32() for i in range(1) ])
class _unnamed_14532(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name): single Port sub-record.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = _unnamed_16299()
class BATTERY_REPORTING_SCALE(vstruct.VStruct):
    """Binary layout for BATTERY_REPORTING_SCALE."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Granularity = v_uint32()
        self.Capacity = v_uint32()
class MMPAGING_FILE(vstruct.VStruct):
    """Binary layout for MMPAGING_FILE.

    Auto-generated; field order defines the layout.  range() replaces
    xrange for Python 2/3 compatibility (same behavior on Python 2).
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint32()
        self.MaximumSize = v_uint32()
        self.MinimumSize = v_uint32()
        self.FreeSpace = v_uint32()
        self.CurrentUsage = v_uint32()
        self.PeakUsage = v_uint32()
        self.Hint = v_uint32()
        self.HighestPage = v_uint32()
        self.Entry = vstruct.VArray([ v_ptr32() for i in range(2) ])
        self.Bitmap = v_ptr32()
        self.File = v_ptr32()
        self.PageFileName = UNICODE_STRING()
        self.PageFileNumber = v_uint32()
        self.Extended = v_uint8()
        self.HintSetToZero = v_uint8()
        self.BootPartition = v_uint8()
        self._pad0040 = v_bytes(size=1)
        self.FileHandle = v_ptr32()
class _unnamed_16200(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name); field order defines the layout.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WhichSpace = v_uint32()
        self.Buffer = v_ptr32()
        self.Offset = v_uint32()
        self.Length = v_uint32()
class STRING(vstruct.VStruct):
    """Binary layout for STRING (counted ANSI string descriptor)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.MaximumLength = v_uint16()
        self.Buffer = v_ptr32()
class _unnamed_16205(vstruct.VStruct):
    # Anonymous sub-structure (auto-generated name): single Lock byte.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint8()
class FNSAVE_FORMAT(vstruct.VStruct):
    """Binary layout for FNSAVE_FORMAT (x87 FPU state save area).

    range() replaces xrange for Python 2/3 compatibility.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint32()
        self.StatusWord = v_uint32()
        self.TagWord = v_uint32()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint32()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint32()
        self.RegisterArea = vstruct.VArray([ v_uint8() for i in range(80) ])
class CMP_OFFSET_ARRAY(vstruct.VStruct):
    """Binary layout for CMP_OFFSET_ARRAY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FileOffset = v_uint32()
        self.DataBuffer = v_ptr32()
        self.DataLength = v_uint32()
class CM_KEY_VALUE(vstruct.VStruct):
    """Vstruct layout for the registry CM_KEY_VALUE cell structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint16()
        self.NameLength = v_uint16()
        self.DataLength = v_uint32()
        self.Data = v_uint32()
        self.Type = v_uint32()
        self.Flags = v_uint16()
        self.Spare = v_uint16()
        self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
        self._pad0018 = v_bytes(size=2)
class MMVAD_FLAGS2(vstruct.VStruct):
    """Vstruct layout for the MMVAD_FLAGS2 bitfield (exposed here as one 32-bit value)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FileOffset = v_uint32()
class LIST_ENTRY32(vstruct.VStruct):
    """Vstruct layout for a 32-bit doubly linked list entry."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint32()
        self.Blink = v_uint32()
class MMWSLE(vstruct.VStruct):
    """Vstruct layout for the MMWSLE working-set list entry (union member u1)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u1 = _unnamed_13252()
class DBGKD_BREAKPOINTEX(vstruct.VStruct):
    """Vstruct layout for the kernel-debugger DBGKD_BREAKPOINTEX message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakPointCount = v_uint32()
        self.ContinueStatus = v_uint32()
class FILE_NETWORK_OPEN_INFORMATION(vstruct.VStruct):
    """Vstruct layout for the Windows FILE_NETWORK_OPEN_INFORMATION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LastAccessTime = LARGE_INTEGER()
        self.LastWriteTime = LARGE_INTEGER()
        self.ChangeTime = LARGE_INTEGER()
        self.AllocationSize = LARGE_INTEGER()
        self.EndOfFile = LARGE_INTEGER()
        self.FileAttributes = v_uint32()
        self._pad0038 = v_bytes(size=4)
class PCI_SECONDARY_EXTENSION(vstruct.VStruct):
    """Vstruct layout for the PCI driver's PCI_SECONDARY_EXTENSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.List = SINGLE_LIST_ENTRY()
        self.ExtensionType = v_uint32()
        self.Destructor = v_ptr32()
class DBGKD_QUERY_MEMORY(vstruct.VStruct):
    """Vstruct layout for the kernel-debugger DBGKD_QUERY_MEMORY message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Address = v_uint64()
        self.Reserved = v_uint64()
        self.AddressSpace = v_uint32()
        self.Flags = v_uint32()
class PCI_SLOT_NUMBER(vstruct.VStruct):
    """Vstruct layout for the Windows PCI_SLOT_NUMBER union wrapper."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u = _unnamed_14357()
class _unnamed_16115(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name).

    NOTE(review): Length is declared v_ptr32 (same size as v_uint32 on
    32-bit); preserved as generated.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_ptr32()
        self.Key = v_uint32()
        self.ByteOffset = LARGE_INTEGER()
class KDEVICE_QUEUE_ENTRY(vstruct.VStruct):
    """Vstruct layout for the Windows KDEVICE_QUEUE_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceListEntry = LIST_ENTRY()
        self.SortKey = v_uint32()
        self.Inserted = v_uint8()
        self._pad0010 = v_bytes(size=3)
class LIST_ENTRY64(vstruct.VStruct):
    """Vstruct layout for a 64-bit doubly linked list entry."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint64()
        self.Blink = v_uint64()
class MMPTE_SUBSECTION(vstruct.VStruct):
    """Vstruct layout for the MMPTE_SUBSECTION PTE form (exposed as one 32-bit value)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Valid = v_uint32()
class PO_DEVICE_NOTIFY(vstruct.VStruct):
    """Vstruct layout for the power-manager PO_DEVICE_NOTIFY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Link = LIST_ENTRY()
        self.TargetDevice = v_ptr32()
        self.WakeNeeded = v_uint8()
        self.OrderLevel = v_uint8()
        self._pad0010 = v_bytes(size=2)
        self.DeviceObject = v_ptr32()
        self.Node = v_ptr32()
        self.DeviceName = v_ptr32()
        self.DriverName = v_ptr32()
        self.ChildCount = v_uint32()
        self.ActiveChild = v_uint32()
class HMAP_DIRECTORY(vstruct.VStruct):
    """Vstruct layout for the registry hive HMAP_DIRECTORY (1024 table pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Directory = vstruct.VArray([ v_ptr32() for i in xrange(1024) ])
class _unnamed_13150(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint32()
class _unnamed_13151(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Blink = v_uint32()
class _unnamed_13152(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.e1 = MMPFNENTRY()
class HEAP_STOP_ON_VALUES(vstruct.VStruct):
    """Vstruct layout for the heap-debugging HEAP_STOP_ON_VALUES structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocAddress = v_uint32()
        self.AllocTag = HEAP_STOP_ON_TAG()
        self.ReAllocAddress = v_uint32()
        self.ReAllocTag = HEAP_STOP_ON_TAG()
        self.FreeAddress = v_uint32()
        self.FreeTag = HEAP_STOP_ON_TAG()
class WMI_BUFFER_HEADER(vstruct.VStruct):
    """Vstruct layout for the WMI_BUFFER_HEADER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Wnode = WNODE_HEADER()
        self.Offset = v_uint32()
        self.EventsLost = v_uint32()
        self.InstanceGuid = GUID()
class RTL_HANDLE_TABLE_ENTRY(vstruct.VStruct):
    """Vstruct layout for the RTL_HANDLE_TABLE_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
class ARBITER_ALTERNATIVE(vstruct.VStruct):
    """Vstruct layout for the resource-arbiter ARBITER_ALTERNATIVE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Minimum = v_uint64()
        self.Maximum = v_uint64()
        self.Length = v_uint32()
        self.Alignment = v_uint32()
        self.Priority = v_uint32()
        self.Flags = v_uint32()
        self.Descriptor = v_ptr32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
class EX_FAST_REF(vstruct.VStruct):
    """Vstruct layout for the EX_FAST_REF fast-reference pointer union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Object = v_ptr32()
class INTERLOCK_SEQ(vstruct.VStruct):
    """Vstruct layout for the low-fragmentation-heap INTERLOCK_SEQ structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Depth = v_uint16()
        self.FreeEntryOffset = v_uint16()
        self.Sequence = v_uint32()
class HMAP_TABLE(vstruct.VStruct):
    """Vstruct layout for the registry hive HMAP_TABLE (512 HMAP_ENTRY cells)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Table = vstruct.VArray([ HMAP_ENTRY() for i in xrange(512) ])
class KSPIN_LOCK_QUEUE(vstruct.VStruct):
    """Vstruct layout for the queued spinlock KSPIN_LOCK_QUEUE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Lock = v_ptr32()
class _unnamed_12918(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DockStatus = v_uint32()
        self.ListEntry = LIST_ENTRY()
        self.SerialNumber = v_ptr32()
class FS_FILTER_CALLBACKS(vstruct.VStruct):
    """Vstruct layout for the file-system filter FS_FILTER_CALLBACKS routine table."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfFsFilterCallbacks = v_uint32()
        self.Reserved = v_uint32()
        self.PreAcquireForSectionSynchronization = v_ptr32()
        self.PostAcquireForSectionSynchronization = v_ptr32()
        self.PreReleaseForSectionSynchronization = v_ptr32()
        self.PostReleaseForSectionSynchronization = v_ptr32()
        self.PreAcquireForCcFlush = v_ptr32()
        self.PostAcquireForCcFlush = v_ptr32()
        self.PreReleaseForCcFlush = v_ptr32()
        self.PostReleaseForCcFlush = v_ptr32()
        self.PreAcquireForModifiedPageWriter = v_ptr32()
        self.PostAcquireForModifiedPageWriter = v_ptr32()
        self.PreReleaseForModifiedPageWriter = v_ptr32()
        self.PostReleaseForModifiedPageWriter = v_ptr32()
class HANDLE_TABLE_ENTRY(vstruct.VStruct):
    """Vstruct layout for the executive HANDLE_TABLE_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Object = v_ptr32()
        self.GrantedAccess = v_uint32()
class IO_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout for the Windows IO_RESOURCE_DESCRIPTOR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Option = v_uint8()
        self.Type = v_uint8()
        self.ShareDisposition = v_uint8()
        self.Spare1 = v_uint8()
        self.Flags = v_uint16()
        self.Spare2 = v_uint16()
        self.u = _unnamed_14532()
class _unnamed_12917(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NextResourceDeviceNode = v_ptr32()
class _unnamed_12916(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LegacyDeviceNode = v_ptr32()
class THERMAL_INFORMATION(vstruct.VStruct):
    """Vstruct layout for the power-manager THERMAL_INFORMATION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ThermalStamp = v_uint32()
        self.ThermalConstant1 = v_uint32()
        self.ThermalConstant2 = v_uint32()
        self.Processors = v_uint32()
        self.SamplingPeriod = v_uint32()
        self.CurrentTemperature = v_uint32()
        self.PassiveTripPoint = v_uint32()
        self.CriticalTripPoint = v_uint32()
        self.ActiveTripPointCount = v_uint8()
        self._pad0024 = v_bytes(size=3)
        self.ActiveTripPoint = vstruct.VArray([ v_uint32() for i in xrange(10) ])
class IMAGE_OPTIONAL_HEADER(vstruct.VStruct):
    """Vstruct layout for the PE32 IMAGE_OPTIONAL_HEADER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Magic = v_uint16()
        self.MajorLinkerVersion = v_uint8()
        self.MinorLinkerVersion = v_uint8()
        self.SizeOfCode = v_uint32()
        self.SizeOfInitializedData = v_uint32()
        self.SizeOfUninitializedData = v_uint32()
        self.AddressOfEntryPoint = v_uint32()
        self.BaseOfCode = v_uint32()
        self.BaseOfData = v_uint32()
        self.ImageBase = v_uint32()
        self.SectionAlignment = v_uint32()
        self.FileAlignment = v_uint32()
        self.MajorOperatingSystemVersion = v_uint16()
        self.MinorOperatingSystemVersion = v_uint16()
        self.MajorImageVersion = v_uint16()
        self.MinorImageVersion = v_uint16()
        self.MajorSubsystemVersion = v_uint16()
        self.MinorSubsystemVersion = v_uint16()
        self.Win32VersionValue = v_uint32()
        self.SizeOfImage = v_uint32()
        self.SizeOfHeaders = v_uint32()
        self.CheckSum = v_uint32()
        self.Subsystem = v_uint16()
        self.DllCharacteristics = v_uint16()
        self.SizeOfStackReserve = v_uint32()
        self.SizeOfStackCommit = v_uint32()
        self.SizeOfHeapReserve = v_uint32()
        self.SizeOfHeapCommit = v_uint32()
        self.LoaderFlags = v_uint32()
        self.NumberOfRvaAndSizes = v_uint32()
        self.DataDirectory = vstruct.VArray([ IMAGE_DATA_DIRECTORY() for i in xrange(16) ])
class SCSI_REQUEST_BLOCK(vstruct.VStruct):
    """Vstruct placeholder for SCSI_REQUEST_BLOCK (no fields emitted by the generator)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class OBJECT_ATTRIBUTES(vstruct.VStruct):
    """Vstruct layout for the Windows OBJECT_ATTRIBUTES structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.RootDirectory = v_ptr32()
        self.ObjectName = v_ptr32()
        self.Attributes = v_uint32()
        self.SecurityDescriptor = v_ptr32()
        self.SecurityQualityOfService = v_ptr32()
class SUBSECTION(vstruct.VStruct):
    """Vstruct layout for the memory-manager SUBSECTION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlArea = v_ptr32()
        self.u = _unnamed_12531()
        self.StartingSector = v_uint32()
        self.NumberOfFullSectors = v_uint32()
        self.SubsectionBase = v_ptr32()
        self.UnusedPtes = v_uint32()
        self.PtesInSubsection = v_uint32()
        self.NextSubsection = v_ptr32()
class ETHREAD(vstruct.VStruct):
    """Vstruct layout for the executive thread object ETHREAD."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Tcb = KTHREAD()
        self.CreateTime = LARGE_INTEGER()
        self.ExitTime = LARGE_INTEGER()
        self.ExitStatus = v_uint32()
        self.PostBlockList = LIST_ENTRY()
        self.TerminationPort = v_ptr32()
        self.ActiveTimerListLock = v_uint32()
        self.ActiveTimerListHead = LIST_ENTRY()
        self.Cid = CLIENT_ID()
        self.LpcReplySemaphore = KSEMAPHORE()
        self.LpcReplyMessage = v_ptr32()
        self.ImpersonationInfo = v_ptr32()
        self.IrpList = LIST_ENTRY()
        self.TopLevelIrp = v_uint32()
        self.DeviceToVerify = v_ptr32()
        self.ThreadsProcess = v_ptr32()
        self.StartAddress = v_ptr32()
        self.Win32StartAddress = v_ptr32()
        self.ThreadListEntry = LIST_ENTRY()
        self.RundownProtect = EX_RUNDOWN_REF()
        self.ThreadLock = EX_PUSH_LOCK()
        self.LpcReplyMessageId = v_uint32()
        self.ReadClusterSize = v_uint32()
        self.GrantedAccess = v_uint32()
        self.CrossThreadFlags = v_uint32()
        self.SameThreadPassiveFlags = v_uint32()
        self.SameThreadApcFlags = v_uint32()
        self.ForwardClusterOnly = v_uint8()
        self.DisablePageFaultClustering = v_uint8()
        self._pad0258 = v_bytes(size=2)
class _unnamed_16158(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_ptr32()
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Interface = v_ptr32()
        self.InterfaceSpecificData = v_ptr32()
class FAST_MUTEX(vstruct.VStruct):
    """Vstruct layout for the executive FAST_MUTEX structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.Owner = v_ptr32()
        self.Contention = v_uint32()
        self.Event = KEVENT()
        self.OldIrql = v_uint32()
class _unnamed_16156(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
class MM_SESSION_SPACE(vstruct.VStruct):
    """Vstruct layout for the memory-manager MM_SESSION_SPACE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ReferenceCount = v_uint32()
        self.u = _unnamed_13227()
        self.SessionId = v_uint32()
        self.SessionPageDirectoryIndex = v_uint32()
        self.GlobalVirtualAddress = v_ptr32()
        self.ProcessList = LIST_ENTRY()
        self.NonPagedPoolBytes = v_uint32()
        self.PagedPoolBytes = v_uint32()
        self.NonPagedPoolAllocations = v_uint32()
        self.PagedPoolAllocations = v_uint32()
        self.NonPagablePages = v_uint32()
        self.CommittedPages = v_uint32()
        self._pad0038 = v_bytes(size=4)
        self.LastProcessSwappedOutTime = LARGE_INTEGER()
        self.PageTables = v_ptr32()
        self.PagedPoolMutex = FAST_MUTEX()
        self.PagedPoolStart = v_ptr32()
        self.PagedPoolEnd = v_ptr32()
        self.PagedPoolBasePde = v_ptr32()
        self.PagedPoolInfo = MM_PAGED_POOL_INFO()
        self.Color = v_uint32()
        self.ProcessOutSwapCount = v_uint32()
        self.ImageList = LIST_ENTRY()
        self.GlobalPteEntry = v_ptr32()
        self.CopyOnWriteCount = v_uint32()
        self.SessionPoolAllocationFailures = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.AttachCount = v_uint32()
        self.AttachEvent = KEVENT()
        self.LastProcess = v_ptr32()
        self._pad00d8 = v_bytes(size=4)
        self.Vm = MMSUPPORT()
        self.Wsle = v_ptr32()
        self.WsLock = ERESOURCE()
        self.WsListEntry = LIST_ENTRY()
        self.Session = MMSESSION()
        self.Win32KDriverObject = DRIVER_OBJECT()
        self.WorkingSetLockOwner = v_ptr32()
        self.PagedPool = POOL_DESCRIPTOR()
        self.ProcessReferenceToSession = v_uint32()
        self.LocaleId = v_uint32()
        self._pad1278 = v_bytes(size=4)
class CM_NAME_CONTROL_BLOCK(vstruct.VStruct):
    """Vstruct layout for the registry CM_NAME_CONTROL_BLOCK structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Compressed = v_uint8()
        self._pad0002 = v_bytes(size=1)
        self.RefCount = v_uint16()
        self.NameHash = CM_NAME_HASH()
class _unnamed_16016(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.Reserved = v_uint16()
        self.ShareAccess = v_uint16()
        self.Parameters = v_ptr32()
class _unnamed_13534(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint32()
class KDEVICE_QUEUE(vstruct.VStruct):
    """Vstruct layout for the kernel KDEVICE_QUEUE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceListHead = LIST_ENTRY()
        self.Lock = v_uint32()
        self.Busy = v_uint8()
        self._pad0014 = v_bytes(size=3)
class IO_COUNTERS(vstruct.VStruct):
    """Vstruct layout for the Windows IO_COUNTERS statistics structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ReadOperationCount = v_uint64()
        self.WriteOperationCount = v_uint64()
        self.OtherOperationCount = v_uint64()
        self.ReadTransferCount = v_uint64()
        self.WriteTransferCount = v_uint64()
        self.OtherTransferCount = v_uint64()
class _unnamed_16380(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataLength = v_uint16()
        self.TotalLength = v_uint16()
class PCI_BUS_INTERFACE_STANDARD(vstruct.VStruct):
    """Vstruct layout for the PCI_BUS_INTERFACE_STANDARD callback table."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Context = v_ptr32()
        self.InterfaceReference = v_ptr32()
        self.InterfaceDereference = v_ptr32()
        self.ReadConfig = v_ptr32()
        self.WriteConfig = v_ptr32()
        self.PinToLine = v_ptr32()
        self.LineToPin = v_ptr32()
class PORT_MESSAGE(vstruct.VStruct):
    """Vstruct layout for the LPC PORT_MESSAGE header."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u1 = _unnamed_15734()
        self.u2 = _unnamed_15735()
        self.ClientId = CLIENT_ID()
        self.MessageId = v_uint32()
        self.ClientViewSize = v_uint32()
class _unnamed_16385(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.DataInfoOffset = v_uint16()
class PCI_COMMON_CONFIG(vstruct.VStruct):
    """Vstruct layout for the PCI configuration space header PCI_COMMON_CONFIG."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VendorID = v_uint16()
        self.DeviceID = v_uint16()
        self.Command = v_uint16()
        self.Status = v_uint16()
        self.RevisionID = v_uint8()
        self.ProgIf = v_uint8()
        self.SubClass = v_uint8()
        self.BaseClass = v_uint8()
        self.CacheLineSize = v_uint8()
        self.LatencyTimer = v_uint8()
        self.HeaderType = v_uint8()
        self.BIST = v_uint8()
        self.u = _unnamed_14629()
        self.DeviceSpecific = vstruct.VArray([ v_uint8() for i in xrange(192) ])
class IO_SECURITY_CONTEXT(vstruct.VStruct):
    """Vstruct layout for the I/O manager IO_SECURITY_CONTEXT structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityQos = v_ptr32()
        self.AccessState = v_ptr32()
        self.DesiredAccess = v_uint32()
        self.FullCreateOptions = v_uint32()
class TERMINATION_PORT(vstruct.VStruct):
    """Vstruct layout for the TERMINATION_PORT singly linked node."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Port = v_ptr32()
class IO_CLIENT_EXTENSION(vstruct.VStruct):
    """Vstruct layout for the driver IO_CLIENT_EXTENSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NextExtension = v_ptr32()
        self.ClientIdentificationAddress = v_ptr32()
class INITIAL_PRIVILEGE_SET(vstruct.VStruct):
    """Vstruct layout for the security INITIAL_PRIVILEGE_SET structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegeCount = v_uint32()
        self.Control = v_uint32()
        self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(3) ])
class PCI_LOCK(vstruct.VStruct):
    """Vstruct layout for the PCI driver PCI_LOCK structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Atom = v_uint32()
        self.OldIrql = v_uint8()
        self._pad0008 = v_bytes(size=3)
class POOL_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout for the executive pool POOL_DESCRIPTOR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PoolType = v_uint32()
        self.PoolIndex = v_uint32()
        self.RunningAllocs = v_uint32()
        self.RunningDeAllocs = v_uint32()
        self.TotalPages = v_uint32()
        self.TotalBigPages = v_uint32()
        self.Threshold = v_uint32()
        self.LockAddress = v_ptr32()
        self.PendingFrees = v_ptr32()
        self.PendingFreeDepth = v_uint32()
        self.ListHeads = vstruct.VArray([ LIST_ENTRY() for i in xrange(512) ])
class DBGKD_QUERY_SPECIAL_CALLS(vstruct.VStruct):
    """Vstruct layout for the kernel-debugger DBGKD_QUERY_SPECIAL_CALLS message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumberOfSpecialCalls = v_uint32()
class HEAP_UNCOMMMTTED_RANGE(vstruct.VStruct):
    """Vstruct layout for the heap uncommitted-range tracking structure.

    NOTE(review): the misspelled name (UNCOMMMTTED) matches the source
    symbol and is kept as-is; renaming would break lookups by symbol name.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Address = v_uint32()
        self.Size = v_uint32()
        self.filler = v_uint32()
class HMAP_ENTRY(vstruct.VStruct):
    """Vstruct layout for the registry hive HMAP_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BlockAddress = v_uint32()
        self.BinAddress = v_uint32()
        self.CmView = v_ptr32()
        self.MemAlloc = v_uint32()
class DUMP_STACK_CONTEXT(vstruct.VStruct):
    """Vstruct layout for the crash-dump DUMP_STACK_CONTEXT structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Init = DUMP_INITIALIZATION_CONTEXT()
        self.PartitionOffset = LARGE_INTEGER()
        self.DumpPointers = v_ptr32()
        self.PointersLength = v_uint32()
        self.ModulePrefix = v_ptr32()
        self.DriverList = LIST_ENTRY()
        self.InitMsg = STRING()
        self.ProgMsg = STRING()
        self.DoneMsg = STRING()
        self.FileObject = v_ptr32()
        self.UsageType = v_uint32()
        self._pad00b0 = v_bytes(size=4)
class PNP_DEVICE_EVENT_LIST(vstruct.VStruct):
    """Vstruct layout for the PnP PNP_DEVICE_EVENT_LIST structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Status = v_uint32()
        self.EventQueueMutex = KMUTANT()
        self.Lock = FAST_MUTEX()
        self.List = LIST_ENTRY()
class PROCESSOR_IDLE_TIMES(vstruct.VStruct):
    """Vstruct layout for the power-manager PROCESSOR_IDLE_TIMES structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartTime = v_uint64()
        self.EndTime = v_uint64()
        self.IdleHandlerReserved = vstruct.VArray([ v_uint32() for i in xrange(4) ])
class KWAIT_BLOCK(vstruct.VStruct):
    """Vstruct layout for the kernel KWAIT_BLOCK structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WaitListEntry = LIST_ENTRY()
        self.Thread = v_ptr32()
        self.Object = v_ptr32()
        self.NextWaitBlock = v_ptr32()
        self.WaitKey = v_uint16()
        self.WaitType = v_uint16()
class DBGKD_READ_WRITE_IO32(vstruct.VStruct):
    """Vstruct layout for the kernel-debugger DBGKD_READ_WRITE_IO32 message."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSize = v_uint32()
        self.IoAddress = v_uint32()
        self.DataValue = v_uint32()
class POP_HIBER_CONTEXT(vstruct.VStruct):
    """Vstruct layout for the power-manager hibernation POP_HIBER_CONTEXT structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WriteToFile = v_uint8()
        self.ReserveLoaderMemory = v_uint8()
        self.ReserveFreeMemory = v_uint8()
        self.VerifyOnWake = v_uint8()
        self.Reset = v_uint8()
        self.HiberFlags = v_uint8()
        self.LinkFile = v_uint8()
        self._pad0008 = v_bytes(size=1)
        self.LinkFileHandle = v_ptr32()
        self.Lock = v_uint32()
        self.MapFrozen = v_uint8()
        self._pad0014 = v_bytes(size=3)
        self.MemoryMap = RTL_BITMAP()
        self.ClonedRanges = LIST_ENTRY()
        self.ClonedRangeCount = v_uint32()
        self.NextCloneRange = v_ptr32()
        self.NextPreserve = v_uint32()
        self.LoaderMdl = v_ptr32()
        self.Clones = v_ptr32()
        self.NextClone = v_ptr32()
        self.NoClones = v_uint32()
        self.Spares = v_ptr32()
        self._pad0048 = v_bytes(size=4)
        self.PagesOut = v_uint64()
        self.IoPage = v_ptr32()
        self.CurrentMcb = v_ptr32()
        self.DumpStack = v_ptr32()
        self.WakeState = v_ptr32()
        self.NoRanges = v_uint32()
        self.HiberVa = v_uint32()
        self.HiberPte = LARGE_INTEGER()
        self.Status = v_uint32()
        self.MemoryImage = v_ptr32()
        self.TableHead = v_ptr32()
        self.CompressionWorkspace = v_ptr32()
        self.CompressedWriteBuffer = v_ptr32()
        self.PerformanceStats = v_ptr32()
        self.CompressionBlock = v_ptr32()
        self.DmaIO = v_ptr32()
        self.TemporaryHeap = v_ptr32()
        self._pad0098 = v_bytes(size=4)
        self.PerfInfo = PO_HIBER_PERF()
class _unnamed_16128(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityInformation = v_uint32()
        self.SecurityDescriptor = v_ptr32()
class PS_JOB_TOKEN_FILTER(vstruct.VStruct):
    """Vstruct layout for the job-object PS_JOB_TOKEN_FILTER structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CapturedSidCount = v_uint32()
        self.CapturedSids = v_ptr32()
        self.CapturedSidsLength = v_uint32()
        self.CapturedGroupCount = v_uint32()
        self.CapturedGroups = v_ptr32()
        self.CapturedGroupsLength = v_uint32()
        self.CapturedPrivilegeCount = v_uint32()
        self.CapturedPrivileges = v_ptr32()
        self.CapturedPrivilegesLength = v_uint32()
class CALL_HASH_ENTRY(vstruct.VStruct):
    """Vstruct layout for the CALL_HASH_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.CallersAddress = v_ptr32()
        self.CallersCaller = v_ptr32()
        self.CallCount = v_uint32()
class _unnamed_16125(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityInformation = v_uint32()
        self.Length = v_uint32()
class TOKEN_CONTROL(vstruct.VStruct):
    """Vstruct layout for the security TOKEN_CONTROL structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TokenId = LUID()
        self.AuthenticationId = LUID()
        self.ModifiedId = LUID()
        self.TokenSource = TOKEN_SOURCE()
class _unnamed_16120(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.IoControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()
class _unnamed_16554(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ArbitrationList = v_ptr32()
        self.AllocateFromCount = v_uint32()
        self.AllocateFrom = v_ptr32()
class PCI_COMMON_EXTENSION(vstruct.VStruct):
    """Vstruct layout for the PCI driver PCI_COMMON_EXTENSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.ExtensionType = v_uint32()
        self.IrpDispatchTable = v_ptr32()
        self.DeviceState = v_uint8()
        self.TentativeNextState = v_uint8()
        self._pad0010 = v_bytes(size=2)
        self.SecondaryExtLock = KEVENT()
class HEAP_USERDATA_HEADER(vstruct.VStruct):
    """Vstruct layout for the low-fragmentation-heap HEAP_USERDATA_HEADER."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SFreeListEntry = SINGLE_LIST_ENTRY()
        self.HeapHandle = v_ptr32()
        self.SizeIndex = v_uint32()
        self.Signature = v_uint32()
class _unnamed_16559(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ArbitrationList = v_ptr32()
class RTL_DRIVE_LETTER_CURDIR(vstruct.VStruct):
    """Vstruct layout for the RTL_DRIVE_LETTER_CURDIR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint16()
        self.Length = v_uint16()
        self.TimeStamp = v_uint32()
        self.DosPath = STRING()
class ULARGE_INTEGER(vstruct.VStruct):
    """Vstruct layout for the 64-bit unsigned ULARGE_INTEGER (low/high pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class _unnamed_15734(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.s1 = _unnamed_16380()
class _unnamed_15735(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.s2 = _unnamed_16385()
class TEB_ACTIVE_FRAME(vstruct.VStruct):
    """Vstruct layout for the TEB_ACTIVE_FRAME structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.Previous = v_ptr32()
        self.Context = v_ptr32()
class ETIMER(vstruct.VStruct):
    """Vstruct layout for the executive timer object ETIMER."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.KeTimer = KTIMER()
        self.TimerApc = KAPC()
        self.TimerDpc = KDPC()
        self.ActiveTimerListEntry = LIST_ENTRY()
        self.Lock = v_uint32()
        self.Period = v_uint32()
        self.ApcAssociated = v_uint8()
        self.WakeTimer = v_uint8()
        self._pad008c = v_bytes(size=2)
        self.WakeTimerListEntry = LIST_ENTRY()
        self._pad0098 = v_bytes(size=4)
class GENERAL_LOOKASIDE(vstruct.VStruct):
    """Vstruct layout for the GENERAL_LOOKASIDE list structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = SLIST_HEADER()
        self.Depth = v_uint16()
        self.MaximumDepth = v_uint16()
        self.TotalAllocates = v_uint32()
        self.AllocateMisses = v_uint32()
        self.TotalFrees = v_uint32()
        self.FreeMisses = v_uint32()
        self.Type = v_uint32()
        self.Tag = v_uint32()
        self.Size = v_uint32()
        self.Allocate = v_ptr32()
        self.Free = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.LastTotalAllocates = v_uint32()
        self.LastAllocateMisses = v_uint32()
        self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self._pad0080 = v_bytes(size=56)
class PHYSICAL_MEMORY_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout for the PHYSICAL_MEMORY_DESCRIPTOR structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumberOfRuns = v_uint32()
        self.NumberOfPages = v_uint32()
        self.Run = vstruct.VArray([ PHYSICAL_MEMORY_RUN() for i in xrange(1) ])
class ARBITER_ORDERING_LIST(vstruct.VStruct):
    """Vstruct layout for the resource-arbiter ARBITER_ORDERING_LIST structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint16()
        self.Maximum = v_uint16()
        self.Orderings = v_ptr32()
class OBJECT_DIRECTORY_ENTRY(vstruct.VStruct):
    """Vstruct layout for the object-manager OBJECT_DIRECTORY_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ChainLink = v_ptr32()
        self.Object = v_ptr32()
class CM_KEY_HASH(vstruct.VStruct):
    """Vstruct layout for the registry CM_KEY_HASH structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ConvKey = v_uint32()
        self.NextHash = v_ptr32()
        self.KeyHive = v_ptr32()
        self.KeyCell = v_uint32()
class ARBITER_LIST_ENTRY(vstruct.VStruct):
    """Vstruct layout for the resource-arbiter ARBITER_LIST_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.AlternativeCount = v_uint32()
        self.Alternatives = v_ptr32()
        self.PhysicalDeviceObject = v_ptr32()
        self.RequestSource = v_uint32()
        self.Flags = v_uint32()
        self.WorkSpace = v_uint32()
        self.InterfaceType = v_uint32()
        self.SlotNumber = v_uint32()
        self.BusNumber = v_uint32()
        self.Assignment = v_ptr32()
        self.SelectedAlternative = v_ptr32()
        self.Result = v_uint32()
class PROCESSOR_PERF_STATE(vstruct.VStruct):
    """Vstruct layout for the power-manager PROCESSOR_PERF_STATE structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PercentFrequency = v_uint8()
        self.MinCapacity = v_uint8()
        self.Power = v_uint16()
        self.IncreaseLevel = v_uint8()
        self.DecreaseLevel = v_uint8()
        self.Flags = v_uint16()
        self.IncreaseTime = v_uint32()
        self.DecreaseTime = v_uint32()
        self.IncreaseCount = v_uint32()
        self.DecreaseCount = v_uint32()
        self.PerformanceTime = v_uint64()
class KGDTENTRY(vstruct.VStruct):
    """Vstruct layout for the x86 GDT entry KGDTENTRY."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LimitLow = v_uint16()
        self.BaseLow = v_uint16()
        self.HighWord = _unnamed_13092()
class MMPFNENTRY(vstruct.VStruct):
    """Vstruct layout for the MMPFNENTRY bitfield (exposed as one 32-bit value)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Modified = v_uint32()
class NT_TIB(vstruct.VStruct):
    """Vstruct layout for the thread information block NT_TIB."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionList = v_ptr32()
        self.StackBase = v_ptr32()
        self.StackLimit = v_ptr32()
        self.SubSystemTib = v_ptr32()
        self.FiberData = v_ptr32()
        self.ArbitraryUserPointer = v_ptr32()
        self.Self = v_ptr32()
class POWER_STATE(vstruct.VStruct):
    """Vstruct layout for the POWER_STATE union (exposed as SystemState)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemState = v_uint32()
class UNICODE_STRING(vstruct.VStruct):
    """Vstruct layout for the counted UNICODE_STRING structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.MaximumLength = v_uint16()
        self.Buffer = v_ptr32()
class CELL_DATA(vstruct.VStruct):
    """Vstruct layout for the registry CELL_DATA union wrapper."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u = u()
class MMSESSION(vstruct.VStruct):
    """Vstruct layout for the memory-manager MMSESSION structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemSpaceViewLock = FAST_MUTEX()
        self.SystemSpaceViewLockPointer = v_ptr32()
        self.SystemSpaceViewStart = v_ptr32()
        self.SystemSpaceViewTable = v_ptr32()
        self.SystemSpaceHashSize = v_uint32()
        self.SystemSpaceHashEntries = v_uint32()
        self.SystemSpaceHashKey = v_uint32()
        self.SystemSpaceBitMap = v_ptr32()
class _unnamed_16230(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PowerState = v_uint32()
class _unnamed_16236(vstruct.VStruct):
    """Anonymous embedded structure (compiler-generated name)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PowerSequence = v_ptr32()
class PEB_FREE_BLOCK(vstruct.VStruct):
    """Vstruct layout for the PEB_FREE_BLOCK structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Size = v_uint32()
class MMFREE_POOL_ENTRY(vstruct.VStruct):
    """Vstruct layout for the memory-manager MMFREE_POOL_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.List = LIST_ENTRY()
        self.Size = v_uint32()
        self.Signature = v_uint32()
        self.Owner = v_ptr32()
self.Owner = v_ptr32()
class EPROCESS_QUOTA_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.QuotaEntry = vstruct.VArray([ EPROCESS_QUOTA_ENTRY() for i in xrange(3) ])
self.QuotaList = LIST_ENTRY()
self.ReferenceCount = v_uint32()
self.ProcessCount = v_uint32()
class FXSAVE_FORMAT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlWord = v_uint16()
self.StatusWord = v_uint16()
self.TagWord = v_uint16()
self.ErrorOpcode = v_uint16()
self.ErrorOffset = v_uint32()
self.ErrorSelector = v_uint32()
self.DataOffset = v_uint32()
self.DataSelector = v_uint32()
self.MXCsr = v_uint32()
self.MXCsrMask = v_uint32()
self.RegisterArea = vstruct.VArray([ v_uint8() for i in xrange(128) ])
self.Reserved3 = vstruct.VArray([ v_uint8() for i in xrange(128) ])
self.Reserved4 = vstruct.VArray([ v_uint8() for i in xrange(224) ])
self.Align16Byte = vstruct.VArray([ v_uint8() for i in xrange(8) ])
class BUS_HANDLER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.InterfaceType = v_uint32()
self.ConfigurationType = v_uint32()
self.BusNumber = v_uint32()
self.DeviceObject = v_ptr32()
self.ParentHandler = v_ptr32()
self.BusData = v_ptr32()
self.DeviceControlExtensionSize = v_uint32()
self.BusAddresses = v_ptr32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.GetBusData = v_ptr32()
self.SetBusData = v_ptr32()
self.AdjustResourceList = v_ptr32()
self.AssignSlotResources = v_ptr32()
self.GetInterruptVector = v_ptr32()
self.TranslateBusAddress = v_ptr32()
self.Spare1 = v_ptr32()
self.Spare2 = v_ptr32()
self.Spare3 = v_ptr32()
self.Spare4 = v_ptr32()
self.Spare5 = v_ptr32()
self.Spare6 = v_ptr32()
self.Spare7 = v_ptr32()
self.Spare8 = v_ptr32()
class OBJECT_HEADER_NAME_INFO(vstruct.VStruct):
    """Vstruct layout for the object-manager OBJECT_HEADER_NAME_INFO structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Directory = v_ptr32()
        self.Name = UNICODE_STRING()
        self.QueryReferences = v_uint32()
class PEB(vstruct.VStruct):
    """Vstruct layout for the 32-bit Process Environment Block (PEB)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InheritedAddressSpace = v_uint8()
        self.ReadImageFileExecOptions = v_uint8()
        self.BeingDebugged = v_uint8()
        self.SpareBool = v_uint8()
        self.Mutant = v_ptr32()
        self.ImageBaseAddress = v_ptr32()
        self.Ldr = v_ptr32()
        self.ProcessParameters = v_ptr32()
        self.SubSystemData = v_ptr32()
        self.ProcessHeap = v_ptr32()
        self.FastPebLock = v_ptr32()
        self.FastPebLockRoutine = v_ptr32()
        self.FastPebUnlockRoutine = v_ptr32()
        self.EnvironmentUpdateCount = v_uint32()
        self.KernelCallbackTable = v_ptr32()
        self.SystemReserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.AtlThunkSListPtr32 = v_uint32()
        self.FreeList = v_ptr32()
        self.TlsExpansionCounter = v_uint32()
        self.TlsBitmap = v_ptr32()
        self.TlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.ReadOnlySharedMemoryBase = v_ptr32()
        self.ReadOnlySharedMemoryHeap = v_ptr32()
        self.ReadOnlyStaticServerData = v_ptr32()
        self.AnsiCodePageData = v_ptr32()
        self.OemCodePageData = v_ptr32()
        self.UnicodeCaseTableData = v_ptr32()
        self.NumberOfProcessors = v_uint32()
        self.NtGlobalFlag = v_uint32()
        self._pad0070 = v_bytes(size=4)
        self.CriticalSectionTimeout = LARGE_INTEGER()
        self.HeapSegmentReserve = v_uint32()
        self.HeapSegmentCommit = v_uint32()
        self.HeapDeCommitTotalFreeThreshold = v_uint32()
        self.HeapDeCommitFreeBlockThreshold = v_uint32()
        self.NumberOfHeaps = v_uint32()
        self.MaximumNumberOfHeaps = v_uint32()
        self.ProcessHeaps = v_ptr32()
        self.GdiSharedHandleTable = v_ptr32()
        self.ProcessStarterHelper = v_ptr32()
        self.GdiDCAttributeList = v_uint32()
        self.LoaderLock = v_ptr32()
        self.OSMajorVersion = v_uint32()
        self.OSMinorVersion = v_uint32()
        self.OSBuildNumber = v_uint16()
        self.OSCSDVersion = v_uint16()
        self.OSPlatformId = v_uint32()
        self.ImageSubsystem = v_uint32()
        self.ImageSubsystemMajorVersion = v_uint32()
        self.ImageSubsystemMinorVersion = v_uint32()
        self.ImageProcessAffinityMask = v_uint32()
        self.GdiHandleBuffer = vstruct.VArray([ v_uint32() for i in xrange(34) ])
        self.PostProcessInitRoutine = v_ptr32()
        self.TlsExpansionBitmap = v_ptr32()
        self.TlsExpansionBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(32) ])
        self.SessionId = v_uint32()
        self.AppCompatFlags = ULARGE_INTEGER()
        self.AppCompatFlagsUser = ULARGE_INTEGER()
        self.pShimData = v_ptr32()
        self.AppCompatInfo = v_ptr32()
        self.CSDVersion = UNICODE_STRING()
        self.ActivationContextData = v_ptr32()
        self.ProcessAssemblyStorageMap = v_ptr32()
        self.SystemDefaultActivationContextData = v_ptr32()
        self.SystemAssemblyStorageMap = v_ptr32()
        self.MinimumStackCommit = v_uint32()
        self._pad0210 = v_bytes(size=4)
class DBGKD_ANY_CONTROL_SET(vstruct.VStruct):
    """Vstruct layout for the kernel-debugger DBGKD_ANY_CONTROL_SET union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.X86ControlSet = X86_DBGKD_CONTROL_SET()
        self._pad001c = v_bytes(size=12)
class MMSUPPORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LastTrimTime = LARGE_INTEGER()
self.Flags = MMSUPPORT_FLAGS()
self.PageFaultCount = v_uint32()
self.PeakWorkingSetSize = v_uint32()
self.WorkingSetSize = v_uint32()
self.MinimumWorkingSetSize = v_uint32()
self.MaximumWorkingSetSize = v_uint32()
self.VmWorkingSetList = v_ptr32()
self.WorkingSetExpansionLinks = LIST_ENTRY()
self.Claim = v_uint32()
self.NextEstimationSlot = v_uint32()
self.NextAgingSlot = v_uint32()
self.EstimatedAvailable = v_uint32()
self.GrowthSinceLastEstimate = v_uint32()
class HBASE_BLOCK(vstruct.VStruct):
    """Auto-generated vstruct layout for HBASE_BLOCK (registry hive base block header); field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        # Sequence1/Sequence2 pair; equality presumably signals a consistent flush — TODO confirm against hive code.
        self.Sequence1 = v_uint32()
        self.Sequence2 = v_uint32()
        self.TimeStamp = LARGE_INTEGER()
        self.Major = v_uint32()
        self.Minor = v_uint32()
        self.Type = v_uint32()
        self.Format = v_uint32()
        self.RootCell = v_uint32()
        self.Length = v_uint32()
        self.Cluster = v_uint32()
        self.FileName = vstruct.VArray([ v_uint8() for i in xrange(64) ])
        self.Reserved1 = vstruct.VArray([ v_uint32() for i in xrange(99) ])
        self.CheckSum = v_uint32()
        self.Reserved2 = vstruct.VArray([ v_uint32() for i in xrange(894) ])
        self.BootType = v_uint32()
        self.BootRecover = v_uint32()
class BUS_EXTENSION_LIST(vstruct.VStruct):
    """Auto-generated vstruct layout; two 32-bit pointers — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.BusExtension = v_ptr32()
class DBGKD_GET_SET_BUS_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for a kernel-debugger bus-data request; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BusDataType = v_uint32()
        self.BusNumber = v_uint32()
        self.SlotNumber = v_uint32()
        self.Offset = v_uint32()
        self.Length = v_uint32()
class KDPC(vstruct.VStruct):
    """Auto-generated vstruct layout for KDPC (deferred procedure call record); field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Number = v_uint8()
        self.Importance = v_uint8()
        self.DpcListEntry = LIST_ENTRY()
        self.DeferredRoutine = v_ptr32()
        self.DeferredContext = v_ptr32()
        self.SystemArgument1 = v_ptr32()
        self.SystemArgument2 = v_ptr32()
        self.Lock = v_ptr32()
class KEVENT(vstruct.VStruct):
    """Auto-generated vstruct layout for KEVENT; single embedded dispatcher header."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
class KSEMAPHORE(vstruct.VStruct):
    """Auto-generated vstruct layout for KSEMAPHORE; dispatcher header plus count limit."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.Limit = v_uint32()
class PCI_ARBITER_INSTANCE(vstruct.VStruct):
    """Auto-generated vstruct layout; field order defines byte offsets — do not reorder by hand."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = PCI_SECONDARY_EXTENSION()
        self.Interface = v_ptr32()
        self.BusFdoExtension = v_ptr32()
        # 24 UTF-16 code units (fixed-size inline name buffer).
        self.InstanceName = vstruct.VArray([ v_uint16() for i in xrange(24) ])
        self.CommonInstance = ARBITER_INSTANCE()
class PI_RESOURCE_ARBITER_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout; pads preserve the generated alignment — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceArbiterList = LIST_ENTRY()
        self.ResourceType = v_uint8()
        self._pad000c = v_bytes(size=3)
        self.ArbiterInterface = v_ptr32()
        self.Level = v_uint32()
        self.ResourceList = LIST_ENTRY()
        self.BestResourceList = LIST_ENTRY()
        self.BestConfig = LIST_ENTRY()
        self.ActiveArbiterList = LIST_ENTRY()
        self.State = v_uint8()
        self.ResourcesChanged = v_uint8()
        self._pad0038 = v_bytes(size=2)
class OBJECT_TYPE(vstruct.VStruct):
    """Auto-generated vstruct layout for OBJECT_TYPE (object-manager type descriptor); field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Mutex = ERESOURCE()
        self.TypeList = LIST_ENTRY()
        self.Name = UNICODE_STRING()
        self.DefaultObject = v_ptr32()
        self.Index = v_uint32()
        self.TotalNumberOfObjects = v_uint32()
        self.TotalNumberOfHandles = v_uint32()
        self.HighWaterNumberOfObjects = v_uint32()
        self.HighWaterNumberOfHandles = v_uint32()
        self.TypeInfo = OBJECT_TYPE_INITIALIZER()
        self.Key = v_uint32()
        self.ObjectLocks = vstruct.VArray([ ERESOURCE() for i in xrange(4) ])
class DBGKD_SET_INTERNAL_BREAKPOINT32(vstruct.VStruct):
    """Auto-generated vstruct layout; 32-bit breakpoint address plus flags."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakpointAddress = v_uint32()
        self.Flags = v_uint32()
class POP_THERMAL_ZONE(vstruct.VStruct):
    """Auto-generated vstruct layout; pads keep 8-byte members aligned — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Link = LIST_ENTRY()
        self.State = v_uint8()
        self.Flags = v_uint8()
        self.Mode = v_uint8()
        self.PendingMode = v_uint8()
        self.ActivePoint = v_uint8()
        self.PendingActivePoint = v_uint8()
        self._pad0010 = v_bytes(size=2)
        self.Throttle = v_uint32()
        self._pad0018 = v_bytes(size=4)
        self.LastTime = v_uint64()
        self.SampleRate = v_uint32()
        self.LastTemp = v_uint32()
        self.PassiveTimer = KTIMER()
        self.PassiveDpc = KDPC()
        self.OverThrottled = POP_ACTION_TRIGGER()
        self.Irp = v_ptr32()
        self.Info = THERMAL_INFORMATION()
        self._pad00d0 = v_bytes(size=4)
class POOL_HACKER(vstruct.VStruct):
    """Auto-generated vstruct layout; pool header followed by a 32-byte payload window."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = POOL_HEADER()
        self.Contents = vstruct.VArray([ v_uint32() for i in xrange(8) ])
class HANDLE_TABLE(vstruct.VStruct):
    """Auto-generated vstruct layout for HANDLE_TABLE; field order defines byte offsets — do not reorder by hand."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TableCode = v_uint32()
        self.QuotaProcess = v_ptr32()
        self.UniqueProcessId = v_ptr32()
        self.HandleTableLock = vstruct.VArray([ EX_PUSH_LOCK() for i in xrange(4) ])
        self.HandleTableList = LIST_ENTRY()
        self.HandleContentionEvent = EX_PUSH_LOCK()
        self.DebugInfo = v_ptr32()
        self.ExtraInfoPages = v_uint32()
        self.FirstFree = v_uint32()
        self.LastFree = v_uint32()
        self.NextHandleNeedingPool = v_uint32()
        self.HandleCount = v_uint32()
        self.Flags = v_uint32()
class PO_HIBER_PERF(vstruct.VStruct):
    """Auto-generated vstruct layout; hibernate performance counters — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoTicks = v_uint64()
        self.InitTicks = v_uint64()
        self.CopyTicks = v_uint64()
        self.StartCount = v_uint64()
        self.ElapsedTime = v_uint32()
        self.IoTime = v_uint32()
        self.CopyTime = v_uint32()
        self.InitTime = v_uint32()
        self.PagesWritten = v_uint32()
        self.PagesProcessed = v_uint32()
        self.BytesCopied = v_uint32()
        self.DumpCount = v_uint32()
        self.FileRuns = v_uint32()
        self._pad0048 = v_bytes(size=4)
class DEFERRED_WRITE(vstruct.VStruct):
    """Auto-generated vstruct layout for DEFERRED_WRITE; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NodeTypeCode = v_uint16()
        self.NodeByteSize = v_uint16()
        self.FileObject = v_ptr32()
        self.BytesToWrite = v_uint32()
        self.DeferredWriteLinks = LIST_ENTRY()
        self.Event = v_ptr32()
        self.PostRoutine = v_ptr32()
        self.Context1 = v_ptr32()
        self.Context2 = v_ptr32()
        self.LimitModifiedPages = v_uint8()
        self._pad0028 = v_bytes(size=3)
class ARBITER_INSTANCE(vstruct.VStruct):
    """Auto-generated vstruct layout for ARBITER_INSTANCE; mostly a table of 32-bit callback pointers — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.MutexEvent = v_ptr32()
        self.Name = v_ptr32()
        self.ResourceType = v_uint32()
        self.Allocation = v_ptr32()
        self.PossibleAllocation = v_ptr32()
        self.OrderingList = ARBITER_ORDERING_LIST()
        self.ReservedList = ARBITER_ORDERING_LIST()
        self.ReferenceCount = v_uint32()
        self.Interface = v_ptr32()
        self.AllocationStackMaxSize = v_uint32()
        self.AllocationStack = v_ptr32()
        # Function-pointer slots follow; names mirror the arbiter operations they dispatch.
        self.UnpackRequirement = v_ptr32()
        self.PackResource = v_ptr32()
        self.UnpackResource = v_ptr32()
        self.ScoreRequirement = v_ptr32()
        self.TestAllocation = v_ptr32()
        self.RetestAllocation = v_ptr32()
        self.CommitAllocation = v_ptr32()
        self.RollbackAllocation = v_ptr32()
        self.BootAllocation = v_ptr32()
        self.QueryArbitrate = v_ptr32()
        self.QueryConflict = v_ptr32()
        self.AddReserved = v_ptr32()
        self.StartArbiter = v_ptr32()
        self.PreprocessEntry = v_ptr32()
        self.AllocateEntry = v_ptr32()
        self.GetNextAllocationRange = v_ptr32()
        self.FindSuitableRange = v_ptr32()
        self.AddAllocation = v_ptr32()
        self.BacktrackAllocation = v_ptr32()
        self.OverrideConflict = v_ptr32()
        self.TransactionInProgress = v_uint8()
        self._pad008c = v_bytes(size=3)
        self.Extension = v_ptr32()
        self.BusDeviceObject = v_ptr32()
        self.ConflictCallbackContext = v_ptr32()
        self.ConflictCallback = v_ptr32()
class MMMOD_WRITER_LISTHEAD(vstruct.VStruct):
    """Auto-generated vstruct layout; list head plus its signalling event."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = LIST_ENTRY()
        self.Event = KEVENT()
class NAMED_PIPE_CREATE_PARAMETERS(vstruct.VStruct):
    """Auto-generated vstruct layout; field order defines byte offsets — trailing pad fixes the struct size."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NamedPipeType = v_uint32()
        self.ReadMode = v_uint32()
        self.CompletionMode = v_uint32()
        self.MaximumInstances = v_uint32()
        self.InboundQuota = v_uint32()
        self.OutboundQuota = v_uint32()
        self.DefaultTimeout = LARGE_INTEGER()
        self.TimeoutSpecified = v_uint8()
        self._pad0028 = v_bytes(size=7)
class POP_IDLE_HANDLER(vstruct.VStruct):
    """Auto-generated vstruct layout for POP_IDLE_HANDLER; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Latency = v_uint32()
        self.TimeCheck = v_uint32()
        self.DemoteLimit = v_uint32()
        self.PromoteLimit = v_uint32()
        self.PromoteCount = v_uint32()
        self.Demote = v_uint8()
        self.Promote = v_uint8()
        self.PromotePercent = v_uint8()
        self.DemotePercent = v_uint8()
        self.State = v_uint8()
        self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.IdleFunction = v_ptr32()
class MMSUPPORT_FLAGS(vstruct.VStruct):
    """Auto-generated vstruct layout; bitfields collapsed by the generator into one 32-bit word."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SessionSpace = v_uint32()
class HEAP_LOCK(vstruct.VStruct):
    """Auto-generated vstruct layout; wraps an anonymous generated union member."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = _unnamed_12162()
class EXCEPTION_REGISTRATION_RECORD(vstruct.VStruct):
    """Auto-generated vstruct layout; SEH chain record — next link plus handler pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Handler = v_ptr32()
class FILE_BASIC_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout for FILE_BASIC_INFORMATION; four timestamps plus attribute flags."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LastAccessTime = LARGE_INTEGER()
        self.LastWriteTime = LARGE_INTEGER()
        self.ChangeTime = LARGE_INTEGER()
        self.FileAttributes = v_uint32()
        self._pad0028 = v_bytes(size=4)
class PLUGPLAY_EVENT_BLOCK(vstruct.VStruct):
    """Auto-generated vstruct layout; trailing `u` is an anonymous generated union member."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EventGuid = GUID()
        self.EventCategory = v_uint32()
        self.Result = v_ptr32()
        self.Flags = v_uint32()
        self.TotalSize = v_uint32()
        self.DeviceObject = v_ptr32()
        self.u = _unnamed_15795()
class LIST_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout; forward/backward link pointer pair."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_ptr32()
        self.Blink = v_ptr32()
class CM_KEY_SECURITY(vstruct.VStruct):
    """Auto-generated vstruct layout for CM_KEY_SECURITY (registry security cell); field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint16()
        self.Reserved = v_uint16()
        self.Flink = v_uint32()
        self.Blink = v_uint32()
        self.ReferenceCount = v_uint32()
        self.DescriptorLength = v_uint32()
        self.Descriptor = SECURITY_DESCRIPTOR_RELATIVE()
class _unnamed_14637(vstruct.VStruct):
    """Generator-named anonymous member (likely an embedded struct/union); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length = v_uint32()
class _unnamed_14395(vstruct.VStruct):
    """Generator-named anonymous member; five byte-wide PCI-looking fields — name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Acquired = v_uint8()
        self.CacheLineSize = v_uint8()
        self.LatencyTimer = v_uint8()
        self.EnablePERR = v_uint8()
        self.EnableSERR = v_uint8()
class CLIENT_ID(vstruct.VStruct):
    """Auto-generated vstruct layout; process/thread id pointer pair."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UniqueProcess = v_ptr32()
        self.UniqueThread = v_ptr32()
class POP_ACTION_TRIGGER(vstruct.VStruct):
    """Auto-generated vstruct layout; trailing `Battery` is an anonymous generated union member."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.Flags = v_uint8()
        self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.Battery = _unnamed_13534()
class CM_CACHED_VALUE_INDEX(vstruct.VStruct):
    """Auto-generated vstruct layout; cell index plus anonymous generated data member."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CellIndex = v_uint32()
        self.Data = _unnamed_13383()
class DEVICE_MAP(vstruct.VStruct):
    """Auto-generated vstruct layout for DEVICE_MAP; 32-slot drive-type table at the end."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DosDevicesDirectory = v_ptr32()
        self.GlobalDosDevicesDirectory = v_ptr32()
        self.ReferenceCount = v_uint32()
        self.DriveMap = v_uint32()
        self.DriveType = vstruct.VArray([ v_uint8() for i in xrange(32) ])
class CONTROL_AREA(vstruct.VStruct):
    """Auto-generated vstruct layout for CONTROL_AREA; `u` is an anonymous generated union — field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Segment = v_ptr32()
        self.DereferenceList = LIST_ENTRY()
        self.NumberOfSectionReferences = v_uint32()
        self.NumberOfPfnReferences = v_uint32()
        self.NumberOfMappedViews = v_uint32()
        self.NumberOfSubsections = v_uint16()
        self.FlushInProgressCount = v_uint16()
        self.NumberOfUserReferences = v_uint32()
        self.u = _unnamed_12520()
        self.FilePointer = v_ptr32()
        self.WaitingForDeletion = v_ptr32()
        self.ModifiedWriteCount = v_uint16()
        self.NumberOfSystemCacheViews = v_uint16()
class GUID(vstruct.VStruct):
    """Auto-generated vstruct layout; standard 16-byte GUID wire format (u32, u16, u16, u8[8])."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data1 = v_uint32()
        self.Data2 = v_uint16()
        self.Data3 = v_uint16()
        self.Data4 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
class KAPC_STATE(vstruct.VStruct):
    """Auto-generated vstruct layout for KAPC_STATE; two APC list heads plus flags."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ApcListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(2) ])
        self.Process = v_ptr32()
        self.KernelApcInProgress = v_uint8()
        self.KernelApcPending = v_uint8()
        self.UserApcPending = v_uint8()
        self._pad0018 = v_bytes(size=1)
class MMVAD_SHORT(vstruct.VStruct):
    """Auto-generated vstruct layout for MMVAD_SHORT (VAD tree node); `u` is an anonymous generated union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartingVpn = v_uint32()
        self.EndingVpn = v_uint32()
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()
        self.u = _unnamed_14102()
class DBGKD_GET_VERSION32(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit kernel-debugger version reply; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MajorVersion = v_uint16()
        self.MinorVersion = v_uint16()
        self.ProtocolVersion = v_uint16()
        self.Flags = v_uint16()
        self.KernBase = v_uint32()
        self.PsLoadedModuleList = v_uint32()
        self.MachineType = v_uint16()
        self.ThCallbackStack = v_uint16()
        self.NextCallback = v_uint16()
        self.FramePointer = v_uint16()
        self.KiCallUserMode = v_uint32()
        self.KeUserCallbackDispatcher = v_uint32()
        self.BreakpointWithStatus = v_uint32()
        self.DebuggerDataList = v_uint32()
class CM_CELL_REMAP_BLOCK(vstruct.VStruct):
    """Auto-generated vstruct layout; old/new registry cell index pair."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OldCell = v_uint32()
        self.NewCell = v_uint32()
class _unnamed_14065(vstruct.VStruct):
    """Generator-named anonymous member wrapping an INITIAL_PRIVILEGE_SET; name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InitialPrivilegeSet = INITIAL_PRIVILEGE_SET()
class KIDTENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for KIDTENTRY (x86 IDT descriptor words)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint16()
        self.Selector = v_uint16()
        self.Access = v_uint16()
        self.ExtendedOffset = v_uint16()
class _unnamed_16198(vstruct.VStruct):
    """Generator-named anonymous member (single pointer field); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoResourceRequirementList = v_ptr32()
class _unnamed_16195(vstruct.VStruct):
    """Generator-named anonymous member (single pointer field); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Capabilities = v_ptr32()
class _unnamed_14640(vstruct.VStruct):
    """Generator-named anonymous member (interrupt-style Level/Vector/Affinity triple); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint32()
        self.Vector = v_uint32()
        self.Affinity = v_uint32()
class PO_MEMORY_RANGE_ARRAY(vstruct.VStruct):
    """Auto-generated vstruct layout; wraps an anonymous generated range member."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Range = _unnamed_16445()
class _unnamed_14644(vstruct.VStruct):
    """Generator-named anonymous member (DMA-style Channel/Port fields); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Channel = v_uint32()
        self.Port = v_uint32()
        self.Reserved1 = v_uint32()
class SYSTEM_POWER_POLICY(vstruct.VStruct):
    """Auto-generated vstruct layout for SYSTEM_POWER_POLICY; field order defines byte offsets — do not reorder by hand."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint32()
        self.PowerButton = POWER_ACTION_POLICY()
        self.SleepButton = POWER_ACTION_POLICY()
        self.LidClose = POWER_ACTION_POLICY()
        self.LidOpenWake = v_uint32()
        self.Reserved = v_uint32()
        self.Idle = POWER_ACTION_POLICY()
        self.IdleTimeout = v_uint32()
        self.IdleSensitivity = v_uint8()
        self.DynamicThrottle = v_uint8()
        self.Spare2 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.MinSleep = v_uint32()
        self.MaxSleep = v_uint32()
        self.ReducedLatencySleep = v_uint32()
        self.WinLogonFlags = v_uint32()
        self.Spare3 = v_uint32()
        self.DozeS4Timeout = v_uint32()
        self.BroadcastCapacityResolution = v_uint32()
        self.DischargePolicy = vstruct.VArray([ SYSTEM_POWER_LEVEL() for i in xrange(4) ])
        self.VideoTimeout = v_uint32()
        self.VideoDimDisplay = v_uint8()
        self._pad00c8 = v_bytes(size=3)
        self.VideoReserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
        self.SpindownTimeout = v_uint32()
        self.OptimizeForPower = v_uint8()
        self.FanThrottleTolerance = v_uint8()
        self.ForcedThrottle = v_uint8()
        self.MinThrottle = v_uint8()
        self.OverThrottled = POWER_ACTION_POLICY()
class IRP(vstruct.VStruct):
    """Auto-generated vstruct layout for IRP (I/O request packet); the `_unnamed_*` members are generated anonymous unions."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.MdlAddress = v_ptr32()
        self.Flags = v_uint32()
        self.AssociatedIrp = _unnamed_12973()
        self.ThreadListEntry = LIST_ENTRY()
        self.IoStatus = IO_STATUS_BLOCK()
        self.RequestorMode = v_uint8()
        self.PendingReturned = v_uint8()
        self.StackCount = v_uint8()
        self.CurrentLocation = v_uint8()
        self.Cancel = v_uint8()
        self.CancelIrql = v_uint8()
        self.ApcEnvironment = v_uint8()
        self.AllocationFlags = v_uint8()
        self.UserIosb = v_ptr32()
        self.UserEvent = v_ptr32()
        self.Overlay = _unnamed_12976()
        self.CancelRoutine = v_ptr32()
        self.UserBuffer = v_ptr32()
        self.Tail = _unnamed_12979()
class _unnamed_14648(vstruct.VStruct):
    """Generator-named anonymous member (raw 3-dword payload); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data = vstruct.VArray([ v_uint32() for i in xrange(3) ])
class _unnamed_16307(vstruct.VStruct):
    """Generator-named anonymous member (channel range); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinimumChannel = v_uint32()
        self.MaximumChannel = v_uint32()
class _unnamed_16081(vstruct.VStruct):
    """Generator-named anonymous member (file-information request parameters); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileInformationClass = v_uint32()
        self.FileObject = v_ptr32()
        self.ReplaceIfExists = v_uint8()
        self.AdvanceOnly = v_uint8()
        self._pad0010 = v_bytes(size=2)
class POWER_ACTION_POLICY(vstruct.VStruct):
    """Auto-generated vstruct layout; three 32-bit policy fields."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Action = v_uint32()
        self.Flags = v_uint32()
        self.EventCode = v_uint32()
class SECURITY_DESCRIPTOR_RELATIVE(vstruct.VStruct):
    """Auto-generated vstruct layout; Owner/Group/Sacl/Dacl are 32-bit self-relative offsets, not pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint8()
        self.Sbz1 = v_uint8()
        self.Control = v_uint16()
        self.Owner = v_uint32()
        self.Group = v_uint32()
        self.Sacl = v_uint32()
        self.Dacl = v_uint32()
class DUMP_INITIALIZATION_CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout for DUMP_INITIALIZATION_CONTEXT; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Reserved = v_uint32()
        self.MemoryBlock = v_ptr32()
        self.CommonBuffer = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
        self._pad0018 = v_bytes(size=4)
        self.PhysicalAddress = vstruct.VArray([ LARGE_INTEGER() for i in xrange(2) ])
        self.StallRoutine = v_ptr32()
        self.OpenRoutine = v_ptr32()
        self.WriteRoutine = v_ptr32()
        self.FinishRoutine = v_ptr32()
        self.AdapterObject = v_ptr32()
        self.MappedRegisterBase = v_ptr32()
        self.PortConfiguration = v_ptr32()
        self.CrashDump = v_uint8()
        self._pad0048 = v_bytes(size=3)
        self.MaximumTransferSize = v_uint32()
        self.CommonBufferSize = v_uint32()
        self.TargetAddress = v_ptr32()
        self.WritePendingRoutine = v_ptr32()
        self.PartitionStyle = v_uint32()
        self.DiskInfo = _unnamed_16505()
        self._pad0070 = v_bytes(size=4)
class FILE_GET_QUOTA_INFORMATION(vstruct.VStruct):
    """Auto-generated vstruct layout; variable-length entry header followed by an inline SID."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NextEntryOffset = v_uint32()
        self.SidLength = v_uint32()
        self.Sid = SID()
class IO_COMPLETION_CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout; completion-port pointer plus key."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = v_ptr32()
        self.Key = v_ptr32()
class _unnamed_16565(vstruct.VStruct):
    """Generator-named anonymous member (resource-conflict query parameters); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PhysicalDeviceObject = v_ptr32()
        self.ConflictingResource = v_ptr32()
        self.ConflictCount = v_ptr32()
        self.Conflicts = v_ptr32()
class DRIVER_EXTENSION(vstruct.VStruct):
    """Auto-generated vstruct layout for DRIVER_EXTENSION; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DriverObject = v_ptr32()
        self.AddDevice = v_ptr32()
        self.Count = v_uint32()
        self.ServiceKeyName = UNICODE_STRING()
        self.ClientDriverExtension = v_ptr32()
        self.FsFilterCallbacks = v_ptr32()
class TOKEN_SOURCE(vstruct.VStruct):
    """Auto-generated vstruct layout; 8-byte source-name buffer plus LUID identifier."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SourceName = vstruct.VArray([ v_uint8() for i in xrange(8) ])
        self.SourceIdentifier = LUID()
class _unnamed_16561(vstruct.VStruct):
    """Generator-named anonymous member (single pointer field); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocatedResources = v_ptr32()
class _unnamed_14549(vstruct.VStruct):
    """Generator-named anonymous member (single 32-bit field); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseMid = v_uint32()
class flags(vstruct.VStruct):
    """Auto-generated vstruct layout; generator emitted this lowercase type name — single byte flag field."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Removable = v_uint8()
class DBGKM_EXCEPTION64(vstruct.VStruct):
    """Auto-generated vstruct layout; 64-bit exception record plus first-chance flag."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionRecord = EXCEPTION_RECORD64()
        self.FirstChance = v_uint32()
        self._pad00a0 = v_bytes(size=4)
class _unnamed_14544(vstruct.VStruct):
    """Generator-named anonymous member (descriptor byte fields); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseMid = v_uint8()
        self.Flags1 = v_uint8()
        self.Flags2 = v_uint8()
        self.BaseHi = v_uint8()
class PM_SUPPORT(vstruct.VStruct):
    """Auto-generated vstruct layout; bitfields collapsed by the generator into one byte."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Rsvd2 = v_uint8()
class KPRCB(vstruct.VStruct):
    """Auto-generated vstruct layout for KPRCB (per-processor control block).

    Field order defines byte offsets; the PrcbPad* byte arrays are generated
    padding that keeps later members at their fixed offsets — do not reorder
    or resize anything by hand.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinorVersion = v_uint16()
        self.MajorVersion = v_uint16()
        self.CurrentThread = v_ptr32()
        self.NextThread = v_ptr32()
        self.IdleThread = v_ptr32()
        self.Number = v_uint8()
        self.Reserved = v_uint8()
        self.BuildType = v_uint16()
        self.SetMember = v_uint32()
        self.CpuType = v_uint8()
        self.CpuID = v_uint8()
        self.CpuStep = v_uint16()
        self.ProcessorState = KPROCESSOR_STATE()
        self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.PrcbPad0 = vstruct.VArray([ v_uint8() for i in xrange(92) ])
        self.LockQueue = vstruct.VArray([ KSPIN_LOCK_QUEUE() for i in xrange(16) ])
        self.PrcbPad1 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
        self.NpxThread = v_ptr32()
        self.InterruptCount = v_uint32()
        self.KernelTime = v_uint32()
        self.UserTime = v_uint32()
        self.DpcTime = v_uint32()
        self.DebugDpcTime = v_uint32()
        self.InterruptTime = v_uint32()
        self.AdjustDpcThreshold = v_uint32()
        self.PageColor = v_uint32()
        self.SkipTick = v_uint32()
        self.MultiThreadSetBusy = v_uint8()
        self.Spare2 = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.ParentNode = v_ptr32()
        self.MultiThreadProcessorSet = v_uint32()
        self.MultiThreadSetMaster = v_ptr32()
        self.ThreadStartCount = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.CcFastReadNoWait = v_uint32()
        self.CcFastReadWait = v_uint32()
        self.CcFastReadNotPossible = v_uint32()
        self.CcCopyReadNoWait = v_uint32()
        self.CcCopyReadWait = v_uint32()
        self.CcCopyReadNoWaitMiss = v_uint32()
        self.KeAlignmentFixupCount = v_uint32()
        self.KeContextSwitches = v_uint32()
        self.KeDcacheFlushCount = v_uint32()
        self.KeExceptionDispatchCount = v_uint32()
        self.KeFirstLevelTbFills = v_uint32()
        self.KeFloatingEmulationCount = v_uint32()
        self.KeIcacheFlushCount = v_uint32()
        self.KeSecondLevelTbFills = v_uint32()
        self.KeSystemCalls = v_uint32()
        self.SpareCounter0 = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.PPLookasideList = vstruct.VArray([ PP_LOOKASIDE_LIST() for i in xrange(16) ])
        self.PPNPagedLookasideList = vstruct.VArray([ PP_LOOKASIDE_LIST() for i in xrange(32) ])
        self.PPPagedLookasideList = vstruct.VArray([ PP_LOOKASIDE_LIST() for i in xrange(32) ])
        self.PacketBarrier = v_uint32()
        self.ReverseStall = v_uint32()
        self.IpiFrame = v_ptr32()
        self.PrcbPad2 = vstruct.VArray([ v_uint8() for i in xrange(52) ])
        self.CurrentPacket = vstruct.VArray([ v_ptr32() for i in xrange(3) ])
        self.TargetSet = v_uint32()
        self.WorkerRoutine = v_ptr32()
        self.IpiFrozen = v_uint32()
        self.PrcbPad3 = vstruct.VArray([ v_uint8() for i in xrange(40) ])
        self.RequestSummary = v_uint32()
        self.SignalDone = v_ptr32()
        self.PrcbPad4 = vstruct.VArray([ v_uint8() for i in xrange(56) ])
        self.DpcListHead = LIST_ENTRY()
        self.DpcStack = v_ptr32()
        self.DpcCount = v_uint32()
        self.DpcQueueDepth = v_uint32()
        self.DpcRoutineActive = v_uint32()
        self.DpcInterruptRequested = v_uint32()
        self.DpcLastCount = v_uint32()
        self.DpcRequestRate = v_uint32()
        self.MaximumDpcQueueDepth = v_uint32()
        self.MinimumDpcRate = v_uint32()
        self.QuantumEnd = v_uint32()
        self.PrcbPad5 = vstruct.VArray([ v_uint8() for i in xrange(16) ])
        self.DpcLock = v_uint32()
        self.PrcbPad6 = vstruct.VArray([ v_uint8() for i in xrange(28) ])
        self.CallDpc = KDPC()
        self.ChainedInterruptList = v_ptr32()
        self.LookasideIrpFloat = v_uint32()
        self.SpareFields0 = vstruct.VArray([ v_uint32() for i in xrange(6) ])
        self.VendorString = vstruct.VArray([ v_uint8() for i in xrange(13) ])
        self.InitialApicId = v_uint8()
        self.LogicalProcessorsPerPhysicalProcessor = v_uint8()
        self._pad0910 = v_bytes(size=1)
        self.MHz = v_uint32()
        self.FeatureBits = v_uint32()
        self.UpdateSignature = LARGE_INTEGER()
        self.NpxSaveArea = FX_SAVE_AREA()
        self.PowerState = PROCESSOR_POWER_STATE()
class HEAP_VIRTUAL_ALLOC_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for HEAP_VIRTUAL_ALLOC_ENTRY; field order defines byte offsets."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Entry = LIST_ENTRY()
        self.ExtraStuff = HEAP_ENTRY_EXTRA()
        self.CommitSize = v_uint32()
        self.ReserveSize = v_uint32()
        self.BusyBlock = HEAP_ENTRY()
class VI_DEADLOCK_THREAD(vstruct.VStruct):
    """Auto-generated vstruct layout for VI_DEADLOCK_THREAD (driver-verifier deadlock tracking)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Thread = v_ptr32()
        self.CurrentSpinNode = v_ptr32()
        self.CurrentOtherNode = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.NodeCount = v_uint32()
        self.PagingCount = v_uint32()
class SUPPORTED_RANGE(vstruct.VStruct):
    """Auto-generated vstruct layout; 64-bit base/limit address range in a singly linked node."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.SystemAddressSpace = v_uint32()
        self.SystemBase = v_uint64()
        self.Base = v_uint64()
        self.Limit = v_uint64()
class ARBITER_PARAMETERS(vstruct.VStruct):
    """Auto-generated vstruct layout; wraps an anonymous generated parameter union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Parameters = _unnamed_15247()
class EXCEPTION_RECORD(vstruct.VStruct):
    """Auto-generated vstruct layout for the 32-bit EXCEPTION_RECORD; 15-slot information array at the end."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionCode = v_uint32()
        self.ExceptionFlags = v_uint32()
        self.ExceptionRecord = v_ptr32()
        self.ExceptionAddress = v_ptr32()
        self.NumberParameters = v_uint32()
        self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])
class MMPTE(vstruct.VStruct):
    """Auto-generated vstruct layout; page-table entry expressed as an anonymous generated union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.u = _unnamed_11597()
class VI_DEADLOCK_NODE(vstruct.VStruct):
    """Auto-generated vstruct layout for VI_DEADLOCK_NODE; two 8-slot stack-trace pointer arrays at the end."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Parent = v_ptr32()
        self.ChildrenList = LIST_ENTRY()
        self.SiblingsList = LIST_ENTRY()
        self.ResourceList = LIST_ENTRY()
        self.Root = v_ptr32()
        self.ThreadEntry = v_ptr32()
        self.Active = v_uint32()
        self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
        self.ParentStackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
class KPCR(vstruct.VStruct):
    """Auto-generated vstruct layout for KPCR (per-processor control region); embeds the full KPRCB at the end."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB()
        self.SelfPcr = v_ptr32()
        self.Prcb = v_ptr32()
        self.Irql = v_uint8()
        self._pad0028 = v_bytes(size=3)
        self.IRR = v_uint32()
        self.IrrActive = v_uint32()
        self.IDR = v_uint32()
        self.KdVersionBlock = v_ptr32()
        self.IDT = v_ptr32()
        self.GDT = v_ptr32()
        self.TSS = v_ptr32()
        self.MajorVersion = v_uint16()
        self.MinorVersion = v_uint16()
        self.SetMember = v_uint32()
        self.StallScaleFactor = v_uint32()
        self.DebugActive = v_uint8()
        self.Number = v_uint8()
        self.Spare0 = v_uint8()
        self.SecondLevelCacheAssociativity = v_uint8()
        self.VdmAlert = v_uint32()
        self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(14) ])
        self.SecondLevelCacheSize = v_uint32()
        self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.InterruptMode = v_uint32()
        self.Spare1 = v_uint8()
        self._pad00dc = v_bytes(size=3)
        self.KernelReserved2 = vstruct.VArray([ v_uint32() for i in xrange(17) ])
        self.PrcbData = KPRCB()
class IMAGE_FILE_HEADER(vstruct.VStruct):
    """Auto-generated vstruct layout for the PE COFF file header."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Machine = v_uint16()
        self.NumberOfSections = v_uint16()
        self.TimeDateStamp = v_uint32()
        self.PointerToSymbolTable = v_uint32()
        self.NumberOfSymbols = v_uint32()
        self.SizeOfOptionalHeader = v_uint16()
        self.Characteristics = v_uint16()
class CM_KEY_INDEX(vstruct.VStruct):
    """Auto-generated vstruct layout; `List` is declared with 1 slot — a variable-length array header pattern."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint16()
        self.Count = v_uint16()
        self.List = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class IMAGE_DEBUG_DIRECTORY(vstruct.VStruct):
    """Auto-generated vstruct layout for the PE debug directory entry."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Characteristics = v_uint32()
        self.TimeDateStamp = v_uint32()
        self.MajorVersion = v_uint16()
        self.MinorVersion = v_uint16()
        self.Type = v_uint32()
        self.SizeOfData = v_uint32()
        self.AddressOfRawData = v_uint32()
        self.PointerToRawData = v_uint32()
class AMD64_DBGKD_CONTROL_SET(vstruct.VStruct):
    """Auto-generated vstruct layout; 64-bit debugger control set (cf. X86_DBGKD_CONTROL_SET below)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TraceFlag = v_uint32()
        self.Dr7 = v_uint64()
        self.CurrentSymbolStart = v_uint64()
        self.CurrentSymbolEnd = v_uint64()
class SYSPTES_HEADER(vstruct.VStruct):
    """Auto-generated vstruct layout; list head plus element count."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = LIST_ENTRY()
        self.Count = v_uint32()
class DBGKD_READ_WRITE_IO_EXTENDED32(vstruct.VStruct):
    """Auto-generated vstruct layout; 32-bit extended I/O read/write debugger request."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSize = v_uint32()
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.AddressSpace = v_uint32()
        self.IoAddress = v_uint32()
        self.DataValue = v_uint32()
class PEB_LDR_DATA(vstruct.VStruct):
    """Auto-generated vstruct layout for PEB_LDR_DATA; three loader module-list heads."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Initialized = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.SsHandle = v_ptr32()
        self.InLoadOrderModuleList = LIST_ENTRY()
        self.InMemoryOrderModuleList = LIST_ENTRY()
        self.InInitializationOrderModuleList = LIST_ENTRY()
        self.EntryInProgress = v_ptr32()
class DBGKD_WRITE_BREAKPOINT64(vstruct.VStruct):
    """Auto-generated vstruct layout; 64-bit breakpoint address, handle, trailing pad."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BreakPointAddress = v_uint64()
        self.BreakPointHandle = v_uint32()
        self._pad0010 = v_bytes(size=4)
class IMAGE_NT_HEADERS(vstruct.VStruct):
    """Auto-generated vstruct layout for the PE NT headers (signature + file header + optional header)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.FileHeader = IMAGE_FILE_HEADER()
        self.OptionalHeader = IMAGE_OPTIONAL_HEADER()
class HEAP_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for HEAP_ENTRY (8-byte heap block header)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.PreviousSize = v_uint16()
        self.SmallTagIndex = v_uint8()
        self.Flags = v_uint8()
        self.UnusedBytes = v_uint8()
        self.SegmentIndex = v_uint8()
class _unnamed_16304(vstruct.VStruct):
    """Generator-named anonymous member (interrupt vector range); name is machine-assigned."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinimumVector = v_uint32()
        self.MaximumVector = v_uint32()
class SECURITY_SUBJECT_CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout for SECURITY_SUBJECT_CONTEXT."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientToken = v_ptr32()
        self.ImpersonationLevel = v_uint32()
        self.PrimaryToken = v_ptr32()
        self.ProcessAuditId = v_ptr32()
class X86_DBGKD_CONTROL_SET(vstruct.VStruct):
    """Auto-generated vstruct layout; 32-bit debugger control set (cf. AMD64_DBGKD_CONTROL_SET above)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TraceFlag = v_uint32()
        self.Dr7 = v_uint32()
        self.CurrentSymbolStart = v_uint32()
        self.CurrentSymbolEnd = v_uint32()
class SEP_AUDIT_POLICY_OVERLAY(vstruct.VStruct):
    """Auto-generated vstruct layout; audit policy bits packed into one 64-bit word."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PolicyBits = v_uint64()
class MI_VERIFIER_DRIVER_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct layout for MI_VERIFIER_DRIVER_ENTRY (driver-verifier pool statistics)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Links = LIST_ENTRY()
        self.Loads = v_uint32()
        self.Unloads = v_uint32()
        self.BaseName = UNICODE_STRING()
        self.StartAddress = v_ptr32()
        self.EndAddress = v_ptr32()
        self.Flags = v_uint32()
        self.Signature = v_uint32()
        self.Reserved = v_uint32()
        self.VerifierPoolLock = v_uint32()
        self.PoolHash = v_ptr32()
        self.PoolHashSize = v_uint32()
        self.PoolHashFree = v_uint32()
        self.PoolHashReserved = v_uint32()
        self.CurrentPagedPoolAllocations = v_uint32()
        self.CurrentNonPagedPoolAllocations = v_uint32()
        self.PeakPagedPoolAllocations = v_uint32()
        self.PeakNonPagedPoolAllocations = v_uint32()
        self.PagedBytes = v_uint32()
        self.NonPagedBytes = v_uint32()
        self.PeakPagedBytes = v_uint32()
        self.PeakNonPagedBytes = v_uint32()
class GDI_TEB_BATCH(vstruct.VStruct):
    """Auto-generated vstruct layout; 310-dword GDI batching buffer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint32()
        self.HDC = v_uint32()
        self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])
class WMI_CLIENT_CONTEXT(vstruct.VStruct):
    """Auto-generated vstruct layout; packed 4-byte WMI client context."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ProcessorNumber = v_uint8()
        self.Alignment = v_uint8()
        self.LoggerId = v_uint16()
class MMSUBSECTION_FLAGS(vstruct.VStruct):
    """Auto-generated vstruct layout; bitfields collapsed by the generator into one 32-bit word."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ReadOnly = v_uint32()
class INTERFACE(vstruct.VStruct):
    """Auto-generated vstruct layout for the generic INTERFACE header (size/version plus ref-count callbacks)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Context = v_ptr32()
        self.InterfaceReference = v_ptr32()
        self.InterfaceDereference = v_ptr32()
class OBJECT_DIRECTORY(vstruct.VStruct):
    """Auto-generated vstruct layout for OBJECT_DIRECTORY; 37-slot hash-bucket pointer table."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HashBuckets = vstruct.VArray([ v_ptr32() for i in xrange(37) ])
        self.Lock = EX_PUSH_LOCK()
        self.DeviceMap = v_ptr32()
        self.SessionId = v_uint32()
        self.Reserved = v_uint16()
        self.SymbolicLinkUsageCount = v_uint16()
class WMI_LOGGER_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BufferSpinLock = v_uint32()
self._pad0008 = v_bytes(size=4)
self.StartTime = LARGE_INTEGER()
self.LogFileHandle = v_ptr32()
self.LoggerSemaphore = KSEMAPHORE()
self.LoggerThread = v_ptr32()
self.LoggerEvent = KEVENT()
self.FlushEvent = KEVENT()
self.LoggerStatus = v_uint32()
self.LoggerId = v_uint32()
self.BuffersAvailable = v_uint32()
self.UsePerfClock = v_uint32()
self.WriteFailureLimit = v_uint32()
self.BuffersDirty = v_uint32()
self.BuffersInUse = v_uint32()
self.SwitchingInProgress = v_uint32()
self._pad0070 = v_bytes(size=4)
self.FreeList = SLIST_HEADER()
self.FlushList = SLIST_HEADER()
self.GlobalList = SLIST_HEADER()
self.ProcessorBuffers = v_ptr32()
self.LoggerName = UNICODE_STRING()
self.LogFileName = UNICODE_STRING()
self.LogFilePattern = UNICODE_STRING()
self.NewLogFileName = UNICODE_STRING()
self.EndPageMarker = v_ptr32()
self.CollectionOn = v_uint32()
self.KernelTraceOn = v_uint32()
self.PerfLogInTransition = v_uint32()
self.RequestFlag = v_uint32()
self.EnableFlags = v_uint32()
self.MaximumFileSize = v_uint32()
self.LoggerMode = v_uint32()
self.LastFlushedBuffer = v_uint32()
self.RefCount = v_uint32()
self.FlushTimer = v_uint32()
self.FirstBufferOffset = LARGE_INTEGER()
self.ByteOffset = LARGE_INTEGER()
self.BufferAgeLimit = LARGE_INTEGER()
self.MaximumBuffers = v_uint32()
self.MinimumBuffers = v_uint32()
self.EventsLost = v_uint32()
self.BuffersWritten = v_uint32()
self.LogBuffersLost = v_uint32()
self.RealTimeBuffersLost = v_uint32()
self.BufferSize = v_uint32()
self.NumberOfBuffers = v_uint32()
self.SequencePtr = v_ptr32()
self.InstanceGuid = GUID()
self.LoggerHeader = v_ptr32()
self.GetCpuClock = v_ptr32()
self.ClientSecurityContext = SECURITY_CLIENT_CONTEXT()
self.LoggerExtension = v_ptr32()
self.ReleaseQueue = v_uint32()
self.EnableFlagExtension = TRACE_ENABLE_FLAG_EXTENSION()
self.LocalSequence = v_uint32()
self.MaximumIrql = v_uint32()
self.EnableFlagArray = v_ptr32()
self.LoggerMutex = KMUTANT()
self.MutexCount = v_uint32()
self.FileCounter = v_uint32()
self.BufferCallback = v_ptr32()
self.CallbackContext = v_ptr32()
self.PoolType = v_uint32()
self._pad01b8 = v_bytes(size=4)
self.ReferenceSystemTime = LARGE_INTEGER()
self.ReferenceTimeStamp = LARGE_INTEGER()
class IO_STACK_LOCATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorFunction = v_uint8()
self.MinorFunction = v_uint8()
self.Flags = v_uint8()
self.Control = v_uint8()
self.Parameters = _unnamed_14762()
self.DeviceObject = v_ptr32()
self.FileObject = v_ptr32()
self.CompletionRoutine = v_ptr32()
self.Context = v_ptr32()
class DBGKD_READ_WRITE_MSR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Msr = v_uint32()
self.DataValueLow = v_uint32()
self.DataValueHigh = v_uint32()
class _unnamed_14745(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserApcRoutine = v_ptr32()
self.UserApcContext = v_ptr32()
class PCI_PDO_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.ExtensionType = v_uint32()
self.IrpDispatchTable = v_ptr32()
self.DeviceState = v_uint8()
self.TentativeNextState = v_uint8()
self._pad0010 = v_bytes(size=2)
self.SecondaryExtLock = KEVENT()
self.Slot = PCI_SLOT_NUMBER()
self.PhysicalDeviceObject = v_ptr32()
self.ParentFdoExtension = v_ptr32()
self.SecondaryExtension = SINGLE_LIST_ENTRY()
self.BusInterfaceReferenceCount = v_uint32()
self.AgpInterfaceReferenceCount = v_uint32()
self.VendorId = v_uint16()
self.DeviceId = v_uint16()
self.SubsystemVendorId = v_uint16()
self.SubsystemId = v_uint16()
self.RevisionId = v_uint8()
self.ProgIf = v_uint8()
self.SubClass = v_uint8()
self.BaseClass = v_uint8()
self.AdditionalResourceCount = v_uint8()
self.AdjustedInterruptLine = v_uint8()
self.InterruptPin = v_uint8()
self.RawInterruptLine = v_uint8()
self.CapabilitiesPtr = v_uint8()
self.SavedLatencyTimer = v_uint8()
self.SavedCacheLineSize = v_uint8()
self.HeaderType = v_uint8()
self.NotPresent = v_uint8()
self.ReportedMissing = v_uint8()
self.ExpectedWritebackFailure = v_uint8()
self.NoTouchPmeEnable = v_uint8()
self.LegacyDriver = v_uint8()
self.UpdateHardware = v_uint8()
self.MovedDevice = v_uint8()
self.DisablePowerDown = v_uint8()
self.NeedsHotPlugConfiguration = v_uint8()
self.SwitchedIDEToNativeMode = v_uint8()
self.BIOSAllowsIDESwitchToNativeMode = v_uint8()
self.IoSpaceUnderNativeIdeControl = v_uint8()
self.OnDebugPath = v_uint8()
self._pad005c = v_bytes(size=3)
self.PowerState = PCI_POWER_STATE()
self.Dependent = PCI_HEADER_TYPE_DEPENDENT()
self.HackFlags = v_uint64()
self.Resources = v_ptr32()
self.BridgeFdoExtension = v_ptr32()
self.NextBridge = v_ptr32()
self.NextHashEntry = v_ptr32()
self.Lock = PCI_LOCK()
self.PowerCapabilities = PCI_PMC()
self.TargetAgpCapabilityId = v_uint8()
self._pad00c4 = v_bytes(size=1)
self.CommandEnables = v_uint16()
self.InitialCommand = v_uint16()
class IMAGE_DATA_DIRECTORY(vstruct.VStruct):
    """Auto-generated vstruct field layout for the PE IMAGE_DATA_DIRECTORY entry
    (RVA + size pair from the optional header's data-directory table)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualAddress = v_uint32()  # RVA of the directory's data
        self.Size = v_uint32()            # size in bytes
class FILE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.Vpb = v_ptr32()
self.FsContext = v_ptr32()
self.FsContext2 = v_ptr32()
self.SectionObjectPointer = v_ptr32()
self.PrivateCacheMap = v_ptr32()
self.FinalStatus = v_uint32()
self.RelatedFileObject = v_ptr32()
self.LockOperation = v_uint8()
self.DeletePending = v_uint8()
self.ReadAccess = v_uint8()
self.WriteAccess = v_uint8()
self.DeleteAccess = v_uint8()
self.SharedRead = v_uint8()
self.SharedWrite = v_uint8()
self.SharedDelete = v_uint8()
self.Flags = v_uint32()
self.FileName = UNICODE_STRING()
self.CurrentByteOffset = LARGE_INTEGER()
self.Waiters = v_uint32()
self.Busy = v_uint32()
self.LastLock = v_ptr32()
self.Lock = KEVENT()
self.Event = KEVENT()
self.CompletionContext = v_ptr32()
class MMWSLE_HASH(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows MMWSLE_HASH structure
    (working-set list entry hash bucket)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Key = v_ptr32()
        self.Index = v_uint32()
class _unnamed_16004(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.Reserved = v_uint16()
self.ShareAccess = v_uint16()
self.Parameters = v_ptr32()
class SECTION_IMAGE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TransferAddress = v_ptr32()
self.ZeroBits = v_uint32()
self.MaximumStackSize = v_uint32()
self.CommittedStackSize = v_uint32()
self.SubSystemType = v_uint32()
self.SubSystemMinorVersion = v_uint16()
self.SubSystemMajorVersion = v_uint16()
self.GpValue = v_uint32()
self.ImageCharacteristics = v_uint16()
self.DllCharacteristics = v_uint16()
self.Machine = v_uint16()
self.ImageContainsCode = v_uint8()
self.Spare1 = v_uint8()
self.LoaderFlags = v_uint32()
self.ImageFileSize = v_uint32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class HEAP_SUBSEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bucket = v_ptr32()
self.UserBlocks = v_ptr32()
self.AggregateExchg = INTERLOCK_SEQ()
self.BlockSize = v_uint16()
self.FreeThreshold = v_uint16()
self.BlockCount = v_uint16()
self.SizeIndex = v_uint8()
self.AffinityIndex = v_uint8()
self.SFreeListEntry = SINGLE_LIST_ENTRY()
self.Lock = v_uint32()
class ERESOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemResourcesList = LIST_ENTRY()
self.OwnerTable = v_ptr32()
self.ActiveCount = v_uint16()
self.Flag = v_uint16()
self.SharedWaiters = v_ptr32()
self.ExclusiveWaiters = v_ptr32()
self.OwnerThreads = vstruct.VArray([ OWNER_ENTRY() for i in xrange(2) ])
self.ContentionCount = v_uint32()
self.NumberOfSharedWaiters = v_uint16()
self.NumberOfExclusiveWaiters = v_uint16()
self.Address = v_ptr32()
self.SpinLock = v_uint32()
class MBCB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeIsInZone = v_uint16()
self.PagesToWrite = v_uint32()
self.DirtyPages = v_uint32()
self.Reserved = v_uint32()
self.BitmapRanges = LIST_ENTRY()
self.ResumeWritePage = v_uint64()
self.BitmapRange1 = BITMAP_RANGE()
self.BitmapRange2 = BITMAP_RANGE()
self.BitmapRange3 = BITMAP_RANGE()
class RTL_ATOM_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashLink = v_ptr32()
self.HandleIndex = v_uint16()
self.Atom = v_uint16()
self.ReferenceCount = v_uint16()
self.Flags = v_uint8()
self.NameLength = v_uint8()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0010 = v_bytes(size=2)
class _unnamed_12979(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Overlay = _unnamed_14765()
self._pad0030 = v_bytes(size=8)
class CHILD_LIST(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows registry CHILD_LIST
    structure (count + cell index of a key's value list)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.List = v_uint32()  # hive cell index, not a pointer
class _unnamed_16094(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
class RTL_RANGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
self.UserData = v_ptr32()
self.Owner = v_ptr32()
self.Attributes = v_uint8()
self.Flags = v_uint8()
self._pad0020 = v_bytes(size=6)
class PCI_MJ_DISPATCH_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PnpIrpMaximumMinorFunction = v_uint32()
self.PnpIrpDispatchTable = v_ptr32()
self.PowerIrpMaximumMinorFunction = v_uint32()
self.PowerIrpDispatchTable = v_ptr32()
self.SystemControlIrpDispatchStyle = v_uint32()
self.SystemControlIrpDispatchFunction = v_ptr32()
self.OtherIrpDispatchStyle = v_uint32()
self.OtherIrpDispatchFunction = v_ptr32()
class EX_PUSH_LOCK(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows EX_PUSH_LOCK structure.

    The native type is a union of bitfields over one machine word; the generator
    keeps only the first member name.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Waiting = v_uint32()  # full 32-bit lock word
class ARBITER_INTERFACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.Context = v_ptr32()
self.InterfaceReference = v_ptr32()
self.InterfaceDereference = v_ptr32()
self.ArbiterHandler = v_ptr32()
self.Flags = v_uint32()
class SLIST_HEADER(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows SLIST_HEADER structure
    (interlocked singly-linked list header, collapsed to its 64-bit alignment view)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Alignment = v_uint64()
class _unnamed_16135(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Srb = v_ptr32()
class _unnamed_16642(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BlockedDriverGuid = GUID()
class _unnamed_16131(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Vpb = v_ptr32()
self.DeviceObject = v_ptr32()
class HEAP_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.Signature = v_uint32()
self.Flags = v_uint32()
self.Heap = v_ptr32()
self.LargestUnCommittedRange = v_uint32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.UnCommittedRanges = v_ptr32()
self.AllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.LastEntryInSegment = v_ptr32()
class POP_DEVICE_POWER_IRP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Free = SINGLE_LIST_ENTRY()
self.Irp = v_ptr32()
self.Notify = v_ptr32()
self.Pending = LIST_ENTRY()
self.Complete = LIST_ENTRY()
self.Abort = LIST_ENTRY()
self.Failed = LIST_ENTRY()
class HEAP_FREE_ENTRY_EXTRA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TagIndex = v_uint16()
self.FreeBackTraceIndex = v_uint16()
class PRIVATE_CACHE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self._pad0004 = v_bytes(size=2)
self.ReadAheadMask = v_uint32()
self.FileObject = v_ptr32()
self._pad0010 = v_bytes(size=4)
self.FileOffset1 = LARGE_INTEGER()
self.BeyondLastByte1 = LARGE_INTEGER()
self.FileOffset2 = LARGE_INTEGER()
self.BeyondLastByte2 = LARGE_INTEGER()
self.ReadAheadOffset = vstruct.VArray([ LARGE_INTEGER() for i in xrange(2) ])
self.ReadAheadLength = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.ReadAheadSpinLock = v_uint32()
self.PrivateLinks = LIST_ENTRY()
self._pad0058 = v_bytes(size=4)
class SEP_AUDIT_POLICY_CATEGORIES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.System = v_uint32()
self.AccountLogon = v_uint32()
class IMAGE_SECTION_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Name = vstruct.VArray([ v_uint8() for i in xrange(8) ])
self.Misc = _unnamed_14793()
self.VirtualAddress = v_uint32()
self.SizeOfRawData = v_uint32()
self.PointerToRawData = v_uint32()
self.PointerToRelocations = v_uint32()
self.PointerToLinenumbers = v_uint32()
self.NumberOfRelocations = v_uint16()
self.NumberOfLinenumbers = v_uint16()
self.Characteristics = v_uint32()
class ACL(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows ACL header structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AclRevision = v_uint8()
        self.Sbz1 = v_uint8()       # should-be-zero padding
        self.AclSize = v_uint16()   # total size including trailing ACEs
        self.AceCount = v_uint16()
        self.Sbz2 = v_uint16()      # should-be-zero padding
class _unnamed_10498(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class _unnamed_10880(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FnArea = FNSAVE_FORMAT()
self._pad0208 = v_bytes(size=412)
class VACB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseAddress = v_ptr32()
self.SharedCacheMap = v_ptr32()
self.Overlay = _unnamed_11926()
self.LruList = LIST_ENTRY()
class WAIT_CONTEXT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitQueueEntry = KDEVICE_QUEUE_ENTRY()
self.DeviceRoutine = v_ptr32()
self.DeviceContext = v_ptr32()
self.NumberOfMapRegisters = v_uint32()
self.DeviceObject = v_ptr32()
self.CurrentIrp = v_ptr32()
self.BufferChainingDpc = v_ptr32()
class CM_KEY_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Flags = v_uint16()
self.LastWriteTime = LARGE_INTEGER()
self.Spare = v_uint32()
self.Parent = v_uint32()
self.SubKeyCounts = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.SubKeyLists = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.ValueList = CHILD_LIST()
self.Security = v_uint32()
self.Class = v_uint32()
self.MaxNameLen = v_uint32()
self.MaxClassLen = v_uint32()
self.MaxValueNameLen = v_uint32()
self.MaxValueDataLen = v_uint32()
self.WorkVar = v_uint32()
self.NameLength = v_uint16()
self.ClassLength = v_uint16()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0050 = v_bytes(size=2)
class SE_AUDIT_PROCESS_CREATION_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageFileName = v_ptr32()
class ACTIVATION_CONTEXT_STACK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.NextCookieSequenceNumber = v_uint32()
self.ActiveFrame = v_ptr32()
self.FrameListCache = LIST_ENTRY()
class SECURITY_TOKEN_PROXY_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.ProxyClass = v_uint32()
self.PathInfo = UNICODE_STRING()
self.ContainerMask = v_uint32()
self.ObjectMask = v_uint32()
class _unnamed_16639(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VetoType = v_uint32()
self.DeviceIdVetoNameBuffer = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0008 = v_bytes(size=2)
class _unnamed_16636(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationCode = v_uint32()
self.NotificationData = v_uint32()
class _unnamed_16634(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Notification = v_ptr32()
class EX_RUNDOWN_REF(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows EX_RUNDOWN_REF structure
    (union of count/pointer collapsed to one dword by the generator)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
class _unnamed_16631(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationStructure = v_ptr32()
self.DeviceIds = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0008 = v_bytes(size=2)
class CM_NOTIFY_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HiveList = LIST_ENTRY()
self.PostList = LIST_ENTRY()
self.KeyControlBlock = v_ptr32()
self.KeyBody = v_ptr32()
self.Filter = v_uint32()
self.SubjectContext = SECURITY_SUBJECT_CONTEXT()
class MMPTE_PROTOTYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint32()
class PCI_HEADER_TYPE_DEPENDENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.type0 = _unnamed_14410()
class CM_BIG_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Count = v_uint16()
self.List = v_uint32()
class IMAGE_DOS_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.e_magic = v_uint16()
self.e_cblp = v_uint16()
self.e_cp = v_uint16()
self.e_crlc = v_uint16()
self.e_cparhdr = v_uint16()
self.e_minalloc = v_uint16()
self.e_maxalloc = v_uint16()
self.e_ss = v_uint16()
self.e_sp = v_uint16()
self.e_csum = v_uint16()
self.e_ip = v_uint16()
self.e_cs = v_uint16()
self.e_lfarlc = v_uint16()
self.e_ovno = v_uint16()
self.e_res = vstruct.VArray([ v_uint16() for i in xrange(4) ])
self.e_oemid = v_uint16()
self.e_oeminfo = v_uint16()
self.e_res2 = vstruct.VArray([ v_uint16() for i in xrange(10) ])
self.e_lfanew = v_uint32()
class _unnamed_15795(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceClass = _unnamed_16624()
class DBGKD_FILL_MEMORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_uint64()
self.Length = v_uint32()
self.Flags = v_uint16()
self.PatternLength = v_uint16()
class CM_KEY_SECURITY_CACHE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cell = v_uint32()
self.CachedSecurity = v_ptr32()
class _unnamed_16663(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.CheckSum = v_uint32()
class _unnamed_16255(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Argument1 = v_ptr32()
self.Argument2 = v_ptr32()
self.Argument3 = v_ptr32()
self.Argument4 = v_ptr32()
class _unnamed_12606(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageInformation = v_ptr32()
class _unnamed_12605(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageCommitment = v_uint32()
class _unnamed_16226(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InPath = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.Type = v_uint32()
class ARBITER_ORDERING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
class MMVIEW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = v_uint32()
self.ControlArea = v_ptr32()
class EXCEPTION_RECORD32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_uint32()
self.ExceptionAddress = v_uint32()
self.NumberParameters = v_uint32()
self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])
class DBGKD_READ_MEMORY32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TargetBaseAddress = v_uint32()
self.TransferCount = v_uint32()
self.ActualBytesRead = v_uint32()
class QUAD(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows QUAD alignment type."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DoNotUseThisField = v_uint64()  # name preserved from the native header
class _unnamed_11926(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileOffset = LARGE_INTEGER()
class LPCP_PORT_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConnectionPort = v_ptr32()
self.ConnectedPort = v_ptr32()
self.MsgQueue = LPCP_PORT_QUEUE()
self.Creator = CLIENT_ID()
self.ClientSectionBase = v_ptr32()
self.ServerSectionBase = v_ptr32()
self.PortContext = v_ptr32()
self.ClientThread = v_ptr32()
self.SecurityQos = SECURITY_QUALITY_OF_SERVICE()
self.StaticSecurity = SECURITY_CLIENT_CONTEXT()
self.LpcReplyChainHead = LIST_ENTRY()
self.LpcDataInfoChainHead = LIST_ENTRY()
self.ServerProcess = v_ptr32()
self.MaxMessageLength = v_uint16()
self.MaxConnectionInfoLength = v_uint16()
self.Flags = v_uint32()
self.WaitEvent = KEVENT()
class CALL_PERFORMANCE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpinLock = v_uint32()
self.HashTable = vstruct.VArray([ LIST_ENTRY() for i in xrange(64) ])
class EXCEPTION_POINTERS(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows EXCEPTION_POINTERS structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionRecord = v_ptr32()  # -> EXCEPTION_RECORD
        self.ContextRecord = v_ptr32()    # -> CONTEXT
class CM_KEY_SECURITY_CACHE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cell = v_uint32()
self.ConvKey = v_uint32()
self.List = LIST_ENTRY()
self.DescriptorLength = v_uint32()
self.Descriptor = SECURITY_DESCRIPTOR_RELATIVE()
class POP_TRIGGER_WAIT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Event = KEVENT()
self.Status = v_uint32()
self.Link = LIST_ENTRY()
self.Trigger = v_ptr32()
class DEVICE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.ReferenceCount = v_uint32()
self.DriverObject = v_ptr32()
self.NextDevice = v_ptr32()
self.AttachedDevice = v_ptr32()
self.CurrentIrp = v_ptr32()
self.Timer = v_ptr32()
self.Flags = v_uint32()
self.Characteristics = v_uint32()
self.Vpb = v_ptr32()
self.DeviceExtension = v_ptr32()
self.DeviceType = v_uint32()
self.StackSize = v_uint8()
self._pad0034 = v_bytes(size=3)
self.Queue = _unnamed_11075()
self.AlignmentRequirement = v_uint32()
self.DeviceQueue = KDEVICE_QUEUE()
self.Dpc = KDPC()
self.ActiveThreadCount = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.DeviceLock = KEVENT()
self.SectorSize = v_uint16()
self.Spare1 = v_uint16()
self.DeviceObjectExtension = v_ptr32()
self.Reserved = v_ptr32()
class MMVAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StartingVpn = v_uint32()
self.EndingVpn = v_uint32()
self.Parent = v_ptr32()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
self.u = _unnamed_14102()
self.ControlArea = v_ptr32()
self.FirstPrototypePte = v_ptr32()
self.LastContiguousPte = v_ptr32()
self.u2 = _unnamed_14103()
class _unnamed_13227(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags = v_uint32()
class CM_NAME_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConvKey = v_uint32()
self.NextHash = v_ptr32()
self.NameLength = v_uint16()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class EX_PUSH_LOCK_WAIT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WakeEvent = KEVENT()
self.Next = v_ptr32()
self.ShareCount = v_uint32()
self.Exclusive = v_uint8()
self._pad001c = v_bytes(size=3)
class _unnamed_13174(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ShortFlags = v_uint16()
self.ReferenceCount = v_uint16()
class _unnamed_16299(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Alignment = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class LPCP_MESSAGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = LIST_ENTRY()
self.SenderPort = v_ptr32()
self.RepliedToThread = v_ptr32()
self.PortContext = v_ptr32()
self._pad0018 = v_bytes(size=4)
self.Request = PORT_MESSAGE()
class EX_QUEUE_WORKER_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.QueueDisabled = v_uint32()
class PCI_FDO_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.List = SINGLE_LIST_ENTRY()
self.ExtensionType = v_uint32()
self.IrpDispatchTable = v_ptr32()
self.DeviceState = v_uint8()
self.TentativeNextState = v_uint8()
self._pad0010 = v_bytes(size=2)
self.SecondaryExtLock = KEVENT()
self.PhysicalDeviceObject = v_ptr32()
self.FunctionalDeviceObject = v_ptr32()
self.AttachedDeviceObject = v_ptr32()
self.ChildListLock = KEVENT()
self.ChildPdoList = v_ptr32()
self.BusRootFdoExtension = v_ptr32()
self.ParentFdoExtension = v_ptr32()
self.ChildBridgePdoList = v_ptr32()
self.PciBusInterface = v_ptr32()
self.MaxSubordinateBus = v_uint8()
self._pad0054 = v_bytes(size=3)
self.BusHandler = v_ptr32()
self.BaseBus = v_uint8()
self.Fake = v_uint8()
self.ChildDelete = v_uint8()
self.Scanned = v_uint8()
self.ArbitersInitialized = v_uint8()
self.BrokenVideoHackApplied = v_uint8()
self.Hibernated = v_uint8()
self._pad0060 = v_bytes(size=1)
self.PowerState = PCI_POWER_STATE()
self.SecondaryExtension = SINGLE_LIST_ENTRY()
self.ChildWaitWakeCount = v_uint32()
self.PreservedConfig = v_ptr32()
self.Lock = PCI_LOCK()
self.HotPlugParameters = _unnamed_14395()
self._pad00bc = v_bytes(size=3)
self.BusHackFlags = v_uint32()
class _unnamed_16573(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReserveDevice = v_ptr32()
class PS_IMPERSONATION_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Token = v_ptr32()
self.CopyOnOpen = v_uint8()
self.EffectiveOnly = v_uint8()
self._pad0008 = v_bytes(size=2)
self.ImpersonationLevel = v_uint32()
class DBGKD_WRITE_BREAKPOINT32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakPointAddress = v_uint32()
self.BreakPointHandle = v_uint32()
class MMPFNLIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Total = v_uint32()
self.ListName = v_uint32()
self.Flink = v_uint32()
self.Blink = v_uint32()
class SINGLE_LIST_ENTRY(vstruct.VStruct):
    """Auto-generated vstruct field layout for the Windows SINGLE_LIST_ENTRY structure."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()  # forward link of the singly-linked list
class _unnamed_14410(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Spare = vstruct.VArray([ v_uint8() for i in xrange(4) ])
class _unnamed_14411(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrimaryBus = v_uint8()
self.SecondaryBus = v_uint8()
self.SubordinateBus = v_uint8()
self.SubtractiveDecode = v_uint8()
class KNODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProcessorMask = v_uint32()
self.Color = v_uint32()
self.MmShiftedColor = v_uint32()
self.FreeCount = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self._pad0018 = v_bytes(size=4)
self.DeadStackList = SLIST_HEADER()
self.PfnDereferenceSListHead = SLIST_HEADER()
self.PfnDeferredList = v_ptr32()
self.Seed = v_uint8()
self.Flags = flags()
self._pad0030 = v_bytes(size=2)
class _unnamed_14793(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PhysicalAddress = v_uint32()
class _unnamed_16078(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileInformationClass = v_uint32()
class SYSTEM_POWER_CAPABILITIES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerButtonPresent = v_uint8()
self.SleepButtonPresent = v_uint8()
self.LidPresent = v_uint8()
self.SystemS1 = v_uint8()
self.SystemS2 = v_uint8()
self.SystemS3 = v_uint8()
self.SystemS4 = v_uint8()
self.SystemS5 = v_uint8()
self.HiberFilePresent = v_uint8()
self.FullWake = v_uint8()
self.VideoDimPresent = v_uint8()
self.ApmPresent = v_uint8()
self.UpsPresent = v_uint8()
self.ThermalControl = v_uint8()
self.ProcessorThrottle = v_uint8()
self.ProcessorMinThrottle = v_uint8()
self.ProcessorMaxThrottle = v_uint8()
self.spare2 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
self.DiskSpinDown = v_uint8()
self.spare3 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
self.SystemBatteriesPresent = v_uint8()
self.BatteriesAreShortTerm = v_uint8()
self.BatteryScale = vstruct.VArray([ BATTERY_REPORTING_SCALE() for i in xrange(3) ])
self.AcOnLineWake = v_uint32()
self.SoftLidWake = v_uint32()
self.RtcWake = v_uint32()
self.MinDeviceWakeState = v_uint32()
self.DefaultLowLatencyWake = v_uint32()
class DBGKD_SET_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFlags = v_uint32()
class MMEXTEND_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CommittedSize = v_uint64()
self.ReferenceCount = v_uint32()
self._pad0010 = v_bytes(size=4)
class _unnamed_16075(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.CompletionFilter = v_uint32()
class RTL_USER_PROCESS_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MaximumLength = v_uint32()
self.Length = v_uint32()
self.Flags = v_uint32()
self.DebugFlags = v_uint32()
self.ConsoleHandle = v_ptr32()
self.ConsoleFlags = v_uint32()
self.StandardInput = v_ptr32()
self.StandardOutput = v_ptr32()
self.StandardError = v_ptr32()
self.CurrentDirectory = CURDIR()
self.DllPath = UNICODE_STRING()
self.ImagePathName = UNICODE_STRING()
self.CommandLine = UNICODE_STRING()
self.Environment = v_ptr32()
self.StartingX = v_uint32()
self.StartingY = v_uint32()
self.CountX = v_uint32()
self.CountY = v_uint32()
self.CountCharsX = v_uint32()
self.CountCharsY = v_uint32()
self.FillAttribute = v_uint32()
self.WindowFlags = v_uint32()
self.ShowWindowFlags = v_uint32()
self.WindowTitle = UNICODE_STRING()
self.DesktopInfo = UNICODE_STRING()
self.ShellInfo = UNICODE_STRING()
self.RuntimeData = UNICODE_STRING()
self.CurrentDirectores = vstruct.VArray([ RTL_DRIVE_LETTER_CURDIR() for i in xrange(32) ])
class u(vstruct.VStruct):
    """Auto-generated vstruct layout for an anonymous union; the generator kept
    only its first member (a CM_KEY_NODE view). The odd class name comes from
    the native union field name."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.KeyNode = CM_KEY_NODE()
class IO_RESOURCE_REQUIREMENTS_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListSize = v_uint32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.SlotNumber = v_uint32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
self.AlternativeLists = v_uint32()
self.List = vstruct.VArray([ IO_RESOURCE_LIST() for i in xrange(1) ])
class POWER_CHANNEL_SUMMARY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.TotalCount = v_uint32()
self.D0Count = v_uint32()
self.NotifyList = LIST_ENTRY()
|
from Core.IFactory import IFactory
from Regs.Block_0 import R0002
class R0002Factory(IFactory):
    """Factory producing R0002 register-block objects."""

    def create_block_object(self, line):
        """Build an R0002 from a raw register line, remember it, and return it.

        The created block is also stored on the factory as ``self.r0002``.
        """
        block = R0002()
        block.reg_list = line
        self.r0002 = block
        return block
|
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gapit_test_framework import gapit_test, require, require_equal
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
from struct_offsets import *
from vulkan_constants import *
FENCE_CREATE_INFO = [
("sType", UINT32_T),
("pNext", POINTER),
("flags", UINT32_T)
]
@gapit_test("Fence_test")
class CreateDestroyWaitTest(GapitTest):
    """Validates the vkCreateFence -> vkWaitForFences -> vkResetFences ->
    vkDestroyFence call sequence recorded in the Fence_test trace: argument
    sanity on each call and that all four calls operate on the same fence
    handle and device."""

    def expect(self):
        architecture = self.architecture
        create_fence = require(self.nth_call_of("vkCreateFence", 1))
        wait_for_fences = require(self.next_call_of("vkWaitForFences"))
        reset_fences = require(self.next_call_of("vkResetFences"))
        destroy_fence = require(self.next_call_of("vkDestroyFence"))

        # vkCreateFence arguments: valid device, create-info and out-pointer,
        # no custom allocator.
        require_not_equal(0, create_fence.int_device)
        require_not_equal(0, create_fence.hex_pCreateInfo)
        require_equal(0, create_fence.hex_pAllocator)
        require_not_equal(0, create_fence.hex_pFence)

        create_info = VulkanStruct(
            architecture, FENCE_CREATE_INFO,
            get_read_offset_function(create_fence, create_fence.hex_pCreateInfo))
        require_equal(VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, create_info.sType)
        require_equal(0, create_info.pNext)
        require_equal(0, create_info.flags)

        # The fence handle written back by vkCreateFence.
        returned_fence = little_endian_bytes_to_int(
            require(
                create_fence.get_write_data(
                    create_fence.hex_pFence, NON_DISPATCHABLE_HANDLE_SIZE)))
        require_not_equal(0, returned_fence)

        # vkWaitForFences waits on exactly that fence.
        require_equal(create_fence.int_device, wait_for_fences.int_device)
        require_equal(1, wait_for_fences.int_fenceCount)
        # Fixed: the original asserted hex_pFences != 1, but a non-null
        # pointer check (!= 0) was clearly intended, matching the other
        # pointer checks in this test.
        require_not_equal(0, wait_for_fences.hex_pFences)
        require_equal(0, wait_for_fences.int_waitAll)
        require_equal(1000000, wait_for_fences.int_timeout)
        waited_for_fence = little_endian_bytes_to_int(
            require(
                wait_for_fences.get_read_data(
                    wait_for_fences.hex_pFences, NON_DISPATCHABLE_HANDLE_SIZE)))
        require_equal(waited_for_fence, returned_fence)

        # vkResetFences resets the same fence on the same device.
        require_equal(create_fence.int_device, reset_fences.int_device)
        require_equal(1, reset_fences.int_fenceCount)
        # Fixed: read the fence array through reset_fences' own pFences
        # argument rather than wait_for_fences' (the original mixed the two;
        # they should reference the same fence either way).
        reset_fence = little_endian_bytes_to_int(
            require(
                reset_fences.get_read_data(
                    reset_fences.hex_pFences, NON_DISPATCHABLE_HANDLE_SIZE)))
        require_equal(returned_fence, reset_fence)

        # vkDestroyFence destroys that fence, again with no custom allocator.
        require_equal(create_fence.int_device, destroy_fence.int_device)
        require_equal(returned_fence, destroy_fence.int_fence)
        require_equal(0, destroy_fence.hex_pAllocator)
@gapit_test("Fence_test")
class GetFenceStatus(GapitTest):

    def expect(self):
        """Checks the sequence of vkGetFenceStatus return values captured by
        the trace: NOT_READY, SUCCESS, NOT_READY, then SUCCESS after a second
        vkCreateFence."""
        require(self.nth_call_of("vkCreateFence", 2))
        first_get_status = require(self.next_call_of("vkGetFenceStatus"))
        require_equal(VK_NOT_READY, int(first_get_status.return_val))
        second_get_status = require(self.next_call_of("vkGetFenceStatus"))
        require_equal(VK_SUCCESS, int(second_get_status.return_val))
        third_get_status = require(self.next_call_of("vkGetFenceStatus"))
        require_equal(VK_NOT_READY, int(third_get_status.return_val))
        require(self.next_call_of("vkCreateFence"))
        fourth_get_status = require(self.next_call_of("vkGetFenceStatus"))
        # CONSISTENCY FIX: coerce return_val with int() like the three checks
        # above; the original compared the raw value, which can falsely fail
        # when return_val is not already an int.
        require_equal(VK_SUCCESS, int(fourth_get_status.return_val))
|
# pylint: disable=W0621
import os
from dataclasses import dataclass, field
from pathlib import Path
import pytest
from bentoctl.operator import get_local_operator_registry
from bentoctl.operator.registry import OperatorRegistry
TESTOP_PATH = os.path.join(os.path.dirname(__file__), "test-operator")
@pytest.fixture
def mock_operator_registry(tmp_path):
    # Point bentoctl at an isolated home directory so the registry created
    # for the test cannot touch the real user registry.
    os.environ["BENTOCTL_HOME"] = str(tmp_path)
    op_reg = get_local_operator_registry()
    yield op_reg
    # Teardown: drop the override.  NOTE(review): a pre-existing
    # BENTOCTL_HOME value is not restored -- consider monkeypatch.setenv.
    del os.environ["BENTOCTL_HOME"]
@pytest.fixture
def get_mock_operator_registry(monkeypatch, tmp_path):
    # Registry rooted in a temp dir; patch the module-level accessor so code
    # under test that asks for the local registry receives this instance.
    operator_registry = OperatorRegistry(tmp_path)
    monkeypatch.setattr(
        "bentoctl.operator.get_local_operator_registry", lambda: operator_registry
    )
    return operator_registry
@pytest.fixture
def tmp_bento_path(tmpdir):
    """Temp directory pre-seeded with an empty bento.yaml marker file."""
    marker = Path(tmpdir, "bento.yaml")
    marker.touch()
    return tmpdir
@pytest.fixture()
def mock_operator():
    # Factory fixture producing lightweight stand-ins for operator objects.
    @dataclass
    class MockOperator:
        # Operator name (required).
        name: str
        # Operator schema dict (required).
        schema: dict
        # NOTE(review): defaults are None, so these two are effectively
        # Optional[str]; both are back-filled in __post_init__.
        module_name: str = None
        default_template: str = None
        available_templates: list = field(default_factory=lambda: ["terraform"])

        def __post_init__(self):
            # Fall back to the operator name / first available template.
            if self.module_name is None:
                self.module_name = self.name
            if self.default_template is None:
                self.default_template = self.available_templates[0]

        def generate(self):
            # No-op stand-in for the real generate().
            pass

    def factory(**kwargs):
        return MockOperator(**kwargs)

    return factory
|
import xml.etree.ElementTree as ET
import numpy as np
from PIL import Image
import glob
class MaskDataset:
    """Pascal-VOC style face-mask detection dataset.

    Images (``*.jpg``) and annotations (``*.xml``) live flat under
    ``self.root``.  ``get_example(i)`` parses the i-th annotation file and
    returns ``(image, bbox, label, difficult)``.
    """

    def __init__(self):
        # Dataset root; all splits currently point at the same folder.
        self.root = '/home/team/xiaonan/dataset/mask/'
        self.data_path = {
            'sample': self.root,
            'train': self.root,
            'test': self.root
        }
        # Image and annotation file paths.
        self.image_path = glob.glob(self.data_path['sample'] + '*.jpg')
        self.ann_path = glob.glob(self.data_path['sample'] + '*.xml')
        # Class-name -> integer label.
        self.label_dict = {
            'mask': 0,
            'head': 1,
            'back': 2,
            'mid_mask': 3
        }
        self.label_names = ['mask', 'head', 'back', 'mid_mask']
        # Image file name -> full path, i.e. {'*.jpg': '/**/**/*.jpg'}.
        self.image_path_dict = self.data_dict(self.image_path)
        # Whether objects marked "difficult" in the XML are kept.
        self.use_difficult = True
        # Dataset size is the number of annotation files.
        self.data_size = len(self.ann_path)
        # Order of the coordinate tags inside each <bndbox> element.
        self.bbox_name = ['ymin', 'xmin', 'ymax', 'xmax']

    def get_example(self, index):
        """Return (image, bbox, label, difficult) for one annotation file.

        Args:
            index: position within ``self.ann_path``.

        Returns:
            image: float32 CHW array.
            bbox: (R, 4) float32 array of (ymin, xmin, ymax, xmax), 0-based.
            label: (R,) int32 array of class indices.
            difficult: (R,) uint8 array of the XML "difficult" flags.
        """
        anno = ET.parse(self.ann_path[index])
        bbox_list = list()
        label_list = list()
        difficult_list = list()
        for attr in anno.findall('object'):
            # Skip difficult objects when use_difficult is disabled.
            # (A leftover debug print was removed from this branch.)
            if not self.use_difficult and int(attr.find('difficult').text) == 1:
                continue
            # Label name, lower-cased and stripped of surrounding spaces.
            label_ = attr.find('name').text.lower().strip()
            label_list.append(self.label_dict[label_])
            # Subtract 1 so pixel indices start from 0.
            bbox_ = attr.find('bndbox')
            bbox_list.append([int(bbox_.find(bbox_tag).text) - 1
                              for bbox_tag in self.bbox_name])
            difficult_list.append(int(attr.find('difficult').text))
        label = np.stack(label_list).astype(np.int32)
        bbox = np.stack(bbox_list).astype(np.float32)
        # BUG FIX: the np.bool alias was deprecated in NumPy 1.20 and removed
        # in 1.24; the builtin bool is the documented replacement.
        difficult = np.array(difficult_list, dtype=bool).astype(np.uint8)
        # Load the image the annotation refers to.
        image_path = self.image_path_dict[anno.find('filename').text.lower().strip()]
        image = self.read_image(image_path)
        return image, bbox, label, difficult

    def __len__(self):
        return self.data_size

    # Allow dataset[i] as an alias of get_example(i).
    __getitem__ = get_example

    def read_image(self, image_file):
        """Read an image file into a float32 CHW numpy array.

        Args:
            image_file: path of the image.

        Returns:
            (1, H, W) array for 2-D input, (C, H, W) otherwise.
        """
        image = Image.open(image_file)
        try:
            # Convert to RGB mode and to a numpy array.
            img = image.convert('RGB')
            img = np.asarray(img, dtype=np.float32)
        finally:
            # Release the file handle even if conversion fails.
            if hasattr(image, 'close'):
                image.close()
        if img.ndim == 2:
            # reshape (H, W) -> (1, H, W)
            return img[np.newaxis]
        else:
            # transpose (H, W, C) -> (C, H, W)
            return img.transpose((2, 0, 1))

    def data_dict(self, data):
        """Map base file names (lower-cased) to their full paths.

        Args:
            data: list of path strings.

        Returns:
            dict of {file name: full path}.
        """
        data_dic = dict()
        for idx, path in enumerate(data):
            data_name = str(path.split('/')[-1].lower())
            data_dic[data_name] = path
            print('\r 制作数字字典:【{}|{}】'.format(idx+1, len(data)), end=' ')
        return data_dic
# Smoke test: build the dataset (triggers file discovery and dict building).
if __name__ == '__main__':
    dataset = MaskDataset()
|
from __future__ import absolute_import
from __future__ import print_function
import veriloggen
import dataflow_connect
# Golden Verilog source for dataflow_connect.mkTest().  test() below
# round-trips both this text and the generated code through pyverilog's
# parser and code generator before comparing, so formatting is normalized.
expected_verilog = """
module test
(
);
reg CLK;
reg RST;
main
uut
(
.CLK(CLK),
.RST(RST)
);
initial begin
$dumpfile("uut.vcd");
$dumpvars(0, uut, CLK, RST);
end
initial begin
CLK = 0;
forever begin
#5 CLK = !CLK;
end
end
initial begin
RST = 0;
#100;
RST = 1;
#100;
RST = 0;
#100000;
$finish;
end
endmodule
module main
(
input CLK,
input RST
);
wire [32-1:0] xdata;
wire xvalid;
wire xready;
reg [32-1:0] _dataflow_plus_data_1;
reg _dataflow_plus_valid_1;
wire _dataflow_plus_ready_1;
assign xready = (_dataflow_plus_ready_1 || !_dataflow_plus_valid_1) && xvalid;
reg [32-1:0] _dataflow_plus_data_3;
reg _dataflow_plus_valid_3;
wire _dataflow_plus_ready_3;
reg [32-1:0] _dataflow__delay_data_6;
reg _dataflow__delay_valid_6;
wire _dataflow__delay_ready_6;
assign _dataflow_plus_ready_1 = (_dataflow_plus_ready_3 || !_dataflow_plus_valid_3) && _dataflow_plus_valid_1 && ((_dataflow__delay_ready_6 || !_dataflow__delay_valid_6) && _dataflow_plus_valid_1);
wire [32-1:0] zdata;
wire zvalid;
wire zready;
assign zdata = _dataflow_plus_data_3;
assign zvalid = _dataflow_plus_valid_3;
assign _dataflow_plus_ready_3 = zready;
wire [32-1:0] ydata;
wire yvalid;
wire yready;
assign ydata = _dataflow__delay_data_6;
assign yvalid = _dataflow__delay_valid_6;
assign _dataflow__delay_ready_6 = yready;
reg [32-1:0] xfsm;
localparam xfsm_init = 0;
reg [32-1:0] _tmp_0;
reg [32-1:0] _tmp_1;
assign xdata = _tmp_1;
reg _tmp_2;
assign xvalid = _tmp_2;
reg __dataflow_seq_0_cond_0_1;
localparam xfsm_1 = 1;
always @(posedge CLK) begin
if(RST) begin
xfsm <= xfsm_init;
_tmp_0 <= 0;
end else begin
case(xfsm)
xfsm_init: begin
if(xready || !_tmp_2) begin
_tmp_0 <= _tmp_0 + 1;
end
if((xready || !_tmp_2) && (_tmp_0 == 15)) begin
xfsm <= xfsm_1;
end
end
endcase
end
end
assign yready = 1;
always @(posedge CLK) begin
if(yvalid) begin
$display("ydata=%d", ydata);
end
end
assign zready = 1;
always @(posedge CLK) begin
if(zvalid) begin
$display("zdata=%d", zdata);
end
end
always @(posedge CLK) begin
if(RST) begin
_dataflow_plus_data_1 <= 0;
_dataflow_plus_valid_1 <= 0;
_dataflow_plus_data_3 <= 0;
_dataflow_plus_valid_3 <= 0;
_dataflow__delay_data_6 <= 0;
_dataflow__delay_valid_6 <= 0;
_tmp_1 <= 0;
_tmp_2 <= 0;
__dataflow_seq_0_cond_0_1 <= 0;
end else begin
if(__dataflow_seq_0_cond_0_1) begin
_tmp_2 <= 0;
end
if((_dataflow_plus_ready_1 || !_dataflow_plus_valid_1) && xready && xvalid) begin
_dataflow_plus_data_1 <= xdata + 2'sd1;
end
if(_dataflow_plus_valid_1 && _dataflow_plus_ready_1) begin
_dataflow_plus_valid_1 <= 0;
end
if((_dataflow_plus_ready_1 || !_dataflow_plus_valid_1) && xready) begin
_dataflow_plus_valid_1 <= xvalid;
end
if((_dataflow_plus_ready_3 || !_dataflow_plus_valid_3) && _dataflow_plus_ready_1 && _dataflow_plus_valid_1) begin
_dataflow_plus_data_3 <= _dataflow_plus_data_1 + 2'sd1;
end
if(_dataflow_plus_valid_3 && _dataflow_plus_ready_3) begin
_dataflow_plus_valid_3 <= 0;
end
if((_dataflow_plus_ready_3 || !_dataflow_plus_valid_3) && _dataflow_plus_ready_1) begin
_dataflow_plus_valid_3 <= _dataflow_plus_valid_1;
end
if((_dataflow__delay_ready_6 || !_dataflow__delay_valid_6) && _dataflow_plus_ready_1 && _dataflow_plus_valid_1) begin
_dataflow__delay_data_6 <= _dataflow_plus_data_1;
end
if(_dataflow__delay_valid_6 && _dataflow__delay_ready_6) begin
_dataflow__delay_valid_6 <= 0;
end
if((_dataflow__delay_ready_6 || !_dataflow__delay_valid_6) && _dataflow_plus_ready_1) begin
_dataflow__delay_valid_6 <= _dataflow_plus_valid_1;
end
if((xfsm == 0) && (xready || !_tmp_2)) begin
_tmp_1 <= _tmp_0;
end
if((xfsm == 0) && (xready || !_tmp_2)) begin
_tmp_2 <= 1;
end
__dataflow_seq_0_cond_0_1 <= 1;
if(_tmp_2 && !xready) begin
_tmp_2 <= _tmp_2;
end
end
end
endmodule
"""
def test():
    """Generated Verilog must match the golden text after normalization."""
    from pyverilog.vparser.parser import VerilogParser
    from pyverilog.ast_code_generator.codegen import ASTCodeGenerator

    veriloggen.reset()
    generated_code = dataflow_connect.mkTest().to_verilog()
    # Normalize the expected text through a parse/emit round trip so the
    # comparison is insensitive to whitespace and layout.
    normalized_expected = ASTCodeGenerator().visit(
        VerilogParser().parse(expected_verilog))
    assert normalized_expected == generated_code
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
# Enable cuDNN autotuning for fixed-size inputs.
# BUG FIX: the attribute was misspelled "bencmark", which silently created a
# new attribute on the module and left the real flag unchanged.
torch.backends.cudnn.benchmark = True
import os,sys,cv2,random,datetime
import argparse
import numpy as np
import zipfile
from dataset import ImageDataset
from matlab_cp2tform import get_similarity_transform_for_cv2
import net_sphere
def alignment(src_img, src_pts):
    """Warp a face image so its five landmarks match canonical positions
    for a 96x112 crop, using a similarity transform."""
    # Canonical landmark coordinates (eyes, nose tip, mouth corners).
    ref_pts = [[30.2946, 51.6963], [65.5318, 51.5014],
               [48.0252, 71.7366], [33.5493, 92.3655], [62.7299, 92.2041]]
    crop_size = (96, 112)
    source = np.array(src_pts, dtype=np.float32).reshape(5, 2)
    reference = np.array(ref_pts, dtype=np.float32)
    tfm = get_similarity_transform_for_cv2(source, reference)
    return cv2.warpAffine(src_img, tfm, crop_size)
def KFold(n=6000, n_folds=10, shuffle=False):
    """Split indices 0..n-1 into n_folds (train, test) pairs.

    Args:
        n: total number of samples.
        n_folds: number of folds.
        shuffle: shuffle the indices before slicing into folds.
            BUG FIX: the flag was previously accepted but silently ignored.

    Returns:
        List of [train, test] index lists; each test slice is a contiguous
        chunk of the (possibly shuffled) base ordering.
    """
    import random
    base = list(range(n))
    if shuffle:
        random.shuffle(base)
    folds = []
    for i in range(n_folds):
        test = base[i*n//n_folds:(i+1)*n//n_folds]
        train = list(set(base)-set(test))
        folds.append([train, test])
    return folds
def eval_acc(threshold, diff):
    """Fraction of pairs whose thresholded score matches the ground truth.

    Each entry of ``diff`` is (name1, name2, score, same_flag); a pair is
    predicted "same" when its score strictly exceeds ``threshold``.
    """
    y_true = np.array([int(entry[3]) for entry in diff])
    y_predict = np.array([1 if float(entry[2]) > threshold else 0
                          for entry in diff])
    return 1.0 * np.count_nonzero(y_true == y_predict) / len(y_true)
def find_best_threshold(thresholds, predicts):
    """Return the candidate threshold maximizing eval_acc; later ties win
    because the comparison uses >=."""
    best_threshold, best_acc = 0, 0
    for candidate in thresholds:
        acc = eval_acc(candidate, predicts)
        if acc >= best_acc:
            best_acc, best_threshold = acc, candidate
    return best_threshold
# --- LFW verification evaluation (sphereface) --------------------------------
parser = argparse.ArgumentParser(description='PyTorch sphereface lfw')
parser.add_argument('--net','-n', default='sphere20a', type=str)
parser.add_argument('--lfw', default='../../dataset/face/lfw/lfw.zip', type=str)
parser.add_argument('--model','-m', default='sphere20a.pth', type=str)
args = parser.parse_args()
predicts=[]
# Build the network by name, load trained weights, switch to feature mode.
net = getattr(net_sphere,args.net)()
net.load_state_dict(torch.load(args.model))
net.cuda()
net.eval()
net.feature = True
# LFW images are read directly out of the zip archive.
zfile = zipfile.ZipFile(args.lfw)
# name -> flat list of landmark coordinates used for alignment.
landmark = {}
with open('data/lfw_landmark.txt') as f:
    landmark_lines = f.readlines()
for line in landmark_lines:
    l = line.replace('\n','').split('\t')
    landmark[l[0]] = [int(k) for k in l[1:]]
# pairs.txt: first line is a header; 3-column rows are "same" pairs,
# 4-column rows are "different" pairs.
with open('data/pairs.txt') as f:
    pairs_lines = f.readlines()[1:]
for i in range(6000):
    p = pairs_lines[i].replace('\n','').split('\t')
    if 3==len(p):
        sameflag = 1
        name1 = p[0]+'/'+p[0]+'_'+'{:04}.jpg'.format(int(p[1]))
        name2 = p[0]+'/'+p[0]+'_'+'{:04}.jpg'.format(int(p[2]))
    if 4==len(p):
        sameflag = 0
        name1 = p[0]+'/'+p[0]+'_'+'{:04}.jpg'.format(int(p[1]))
        name2 = p[2]+'/'+p[2]+'_'+'{:04}.jpg'.format(int(p[3]))
    img1 = alignment(cv2.imdecode(np.frombuffer(zfile.read('lfw/' + name1),np.uint8),1),landmark[name1])
    img2 = alignment(cv2.imdecode(np.frombuffer(zfile.read('lfw/' + name2),np.uint8),1),landmark[name2])
    # Batch of 4: both images plus their horizontal flips.
    imglist = [img1,cv2.flip(img1,1),img2,cv2.flip(img2,1)]
    for i in range(len(imglist)):
        imglist[i] = imglist[i].transpose(2, 0, 1).reshape((1,3,112,96))
        imglist[i] = (imglist[i]-127.5)/128.0
    img = np.vstack(imglist)
    # NOTE(review): Variable(..., volatile=True) is the legacy pre-0.4
    # inference API; torch.no_grad() is the modern equivalent.
    img = Variable(torch.from_numpy(img).float(),volatile=True).cuda()
    output = net(img)
    f = output.data
    # Cosine distance between the two un-flipped embeddings.
    f1,f2 = f[0],f[2]
    cosdistance = f1.dot(f2)/(f1.norm()*f2.norm()+1e-5)
    predicts.append('{}\t{}\t{}\t{}\n'.format(name1,name2,cosdistance,sameflag))
# 10-fold cross-validation: pick the best threshold on the train folds and
# evaluate on the held-out fold.
accuracy = []
thd = []
folds = KFold(n=6000, n_folds=10, shuffle=False)
thresholds = np.arange(-1.0, 1.0, 0.005)
# predicts = np.array(map(lambda line:line.strip('\n').split(), predicts))
predicts = list(map(lambda line:line.strip('\n').split(), predicts))
for idx, (train, test) in enumerate(folds):
    best_thresh = find_best_threshold(thresholds, [predicts[i] for i in train])
    accuracy.append(eval_acc(best_thresh, [predicts[i] for i in test]))
    thd.append(best_thresh)
print('LFWACC={:.4f} std={:.4f} thd={:.4f}'.format(np.mean(accuracy), np.std(accuracy), np.mean(thd)))
|
from django.contrib import admin
# Register your models here.
from .models import *
# Register the course-app models with the default admin site, preserving the
# original registration order.
for _model in (User, Course, Quiz, Student, Question, Subject):
    admin.site.register(_model)
|
import flask
from flask_required_args import required_data
# Minimal example app for flask_required_args; presumably the decorator maps
# request data onto matching function arguments -- confirm against the
# library docs.
app = flask.Flask(__name__)
@app.route("/")
@required_data
def home():
    # No arguments required.
    return 'ok'
@app.route("/<name>")
@required_data
def get_name(name):
    # "name" comes from the URL rule.
    return name
@app.route('/hello', methods=['POST'])
@required_data
def hello_name(name="World"):
    # "name" has a default, so requests may omit it.
    return f'Hello {name}'
@app.route('/hello/<user_name>', methods=['POST'])
@required_data
def hello_user(user_name, greeting="hello"):
    # Mixes a URL argument with a defaulted argument.
    return f'{greeting} {user_name}'
|
# This function will take an irregular list composed of lists
# and flatten it
try:
    # Python 2 only: the compiler package was removed in Python 3.
    from compiler.ast import flatten
except ImportError:
    def flatten(items):
        """Recursively flatten nested lists/tuples into a flat list.

        BUG FIX: drop-in replacement (same semantics as compiler.ast.flatten)
        so this filter plugin also works on Python 3, where ``compiler.ast``
        no longer exists.
        """
        flat = []
        for item in items:
            if isinstance(item, (list, tuple)):
                flat.extend(flatten(item))
            else:
                flat.append(item)
        return flat


class FilterModule (object):
    """Ansible filter plugin exposing the ``flatten`` filter."""

    def filters(self):
        return {
            "flatten": flatten
        }
|
# Python module for handling calculations for times and dates
# www.scienceexposure.com
# Copyright 2015 Ismail Uddin
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
import time
import datetime
def mathsForTime(hours, minutes):
    """Return a timedelta spanning the given number of hours and minutes."""
    return datetime.timedelta(hours=hours, minutes=minutes)
def cTDelta():
    """Return the current local time of day as a timedelta, at minute
    resolution (seconds are discarded)."""
    hhmm = time.strftime("%H%M")
    hours, minutes = int(hhmm[:-2]), int(hhmm[-2:])
    return mathsForTime(hours, minutes)
def calculateTimeDifference(time1, time2):
    """Return (absolute difference, negative) for two timedeltas.

    Args:
        time1, time2: datetime.timedelta values.

    Returns:
        Tuple of ``abs(time1 - time2)`` and a bool that is True when
        ``time1 - time2`` is negative, i.e. time1 precedes time2.
    """
    relativeDifference = time1 - time2
    # Compare against a zero delta instead of inspecting the first character
    # of str(...), which was a fragile way to detect the sign.
    negative = relativeDifference < datetime.timedelta(0)
    return abs(relativeDifference), negative
def buildArrivalTimeDeltaList(busArrivalTimes):
    """Convert relative bus arrival times (minutes from now, as returned by
    the TfL Bus Arrivals API) into absolute time-of-day deltas."""
    # Loop variable renamed so it no longer shadows the ``time`` module.
    return [mathsForTime(0, minutes_away) + cTDelta()
            for minutes_away in busArrivalTimes]
def queryCandidateTimes(arrivalTimesDelta, idealTime):
    """Sort bus arrival times by closeness to the ideal time.

    Args:
        arrivalTimesDelta: list of absolute arrival-time deltas.
        idealTime: alarm time plus head start, as a timedelta.

    Returns:
        OrderedDict mapping ``abs(idealTime - arrival)`` -> arrival, ordered
        by smallest difference first.  Arrivals equidistant from the ideal
        time collapse onto one key (the last one wins).
    """
    # Map the relative difference from the ideal time to the absolute
    # arrival time.  (An unused cTDelta() call was removed, which also
    # avoided a needless clock read; the loop variable was renamed so it no
    # longer shadows the ``time`` module.)
    differenceTimes = {}
    for arrival in arrivalTimesDelta:
        differenceTimes[abs(idealTime - arrival)] = arrival
    # Ordered dictionary sorted by ascending distance from the ideal time.
    sortedTimes = OrderedDict()
    for difference in sorted(differenceTimes.keys()):
        sortedTimes[difference] = differenceTimes[difference]
    print("Querying bus arrival times:")
    for k,v in sortedTimes.items():
        print("Difference: %s, Arrival time: %s" % (str(k),str(v)))
    return sortedTimes
|
from feature.feature import EmptyFeature
import tulipy as ti
class Vwap(EmptyFeature):
    """Volume-weighted moving average feature over a lookback window."""

    def __init__(self, lookback, raw_data_manager, history_lengh=None):
        # Window length handed to the tulipy indicator.
        self.per = lookback
        super().__init__(lookback, raw_data_manager, history_lengh=history_lengh)

    def compute(self, data_dict):
        """Compute VWMA from the 'close' and 'volume' series in data_dict."""
        return ti.vwma(data_dict.get('close'), data_dict.get('volume'), self.per)
|
"""Sample download file shell command definition."""
import sys
import click
from gencove.command.common_cli_options import add_options, common_options
from gencove.constants import Credentials, Optionals
from gencove.logger import echo_error
from .main import DownloadFile
@click.command("download-file")
@click.argument("sample_id")
@click.argument("file_type")
@click.argument("destination")
@add_options(common_options)
@click.option(
    "--no-progress",
    is_flag=True,
    help="If specified, no progress bar is shown.",
)
# pylint: disable=too-many-arguments
def download_file(
    sample_id,
    file_type,
    destination,
    host,
    email,
    password,
    api_key,
    no_progress,
): # noqa: D413,D301,D412 # pylint: disable=C0301
    """Download sample file metadata.
    SAMPLE_ID specific sample for which to download the results
    FILE_TYPE specific deliverable to download results for
    DESTINATION path/to/file
    Examples:
    Download sample:
    gencove samples download-file e6b45af7-07c5-4a6d-9f97-6e1efbf3e215 ancestry-json ancestry.json
    Download and print to stdout then compress using gzip:
    gencove samples download-file e6b45af7-07c5-4a6d-9f97-6e1efbf3e215 ancestry-json - | gzip > ancestry.json.gz
    \f
    Args:
        sample_id (str): specific sample for which
        to download the results.
        file_type (str): specific deliverable to download
        results for.
        destination (str): path/to/file.
        no_progress (bool, optional, default False): do not show progress
        bar.
    """ # noqa: E501
    # "-" or /dev/stdout streams the file to standard output in binary mode.
    if destination in ("-", "/dev/stdout"):
        DownloadFile(
            sample_id,
            file_type,
            sys.stdout.buffer,
            Credentials(email=email, password=password, api_key=api_key),
            Optionals(host=host),
            no_progress,
        ).run()
    else:
        # Otherwise write to the given path; a directory path is turned into
        # a friendly error instead of a traceback.
        try:
            with open(destination, "wb") as destination_file:
                DownloadFile(
                    sample_id,
                    file_type,
                    destination_file,
                    Credentials(
                        email=email, password=password, api_key=api_key
                    ),
                    Optionals(host=host),
                    no_progress,
                ).run()
        except IsADirectoryError:
            echo_error(
                "Please specify a file path (not directory path)"
                " for DESTINATION"
            )
            raise click.Abort() # pylint: disable=raise-missing-from
|
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Service Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.text
import typing
from abc import abstractproperty
from .base_index import BaseIndex as BaseIndex_8f0d0a40
if typing.TYPE_CHECKING:
from ..container.x_index_replace import XIndexReplace as XIndexReplace_feed0dd7
class ContentIndex(BaseIndex_8f0d0a40):
    """
    Service Class
    specifies service of content indexes within a document.
    See Also:
        `API ContentIndex <https://api.libreoffice.org/docs/idl/ref/servicecom_1_1sun_1_1star_1_1text_1_1ContentIndex.html>`_
    """
    # UNO type metadata consumed by the surrounding ooouno runtime.
    __ooo_ns__: str = 'com.sun.star.text'
    __ooo_full_ns__: str = 'com.sun.star.text.ContentIndex'
    __ooo_type_name__: str = 'service'

    # NOTE(review): abc.abstractproperty has been deprecated since Python
    # 3.3 in favor of @property + @abstractmethod; left unchanged because
    # this file is auto-generated by Cheetah.
    @abstractproperty
    def CreateFromMarks(self) -> bool:
        """
        contains TRUE if the document index marks are included in this index.
        """
    @abstractproperty
    def CreateFromOutline(self) -> bool:
        """
        determines if the document index is created from outlines.
        """
    @abstractproperty
    def Level(self) -> int:
        """
        determines the depth of outline levels that are included into the content index.
        """
    @abstractproperty
    def LevelParagraphStyles(self) -> 'XIndexReplace_feed0dd7':
        """
        contains the interface to access the paragraph style names that are included in this index.
        """
__all__ = ['ContentIndex']
|
# -*- coding: utf-8 -*-
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helps importing C ops with a clean namespace."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def get_ops(module):
    """Returns a dict of ops defined in a module by blacklisting internals.

    Skips underscore-prefixed names, ``*_eager_fallback`` helpers, and a
    fixed set of module-level support symbols.
    """
    skipped_exact = {"LIB_HANDLE", "OP_LIST", "deprecated_endpoints", "tf_export"}
    return {
        name: getattr(module, name)
        for name in dir(module)
        if not name.startswith("_")
        and not name.endswith("_eager_fallback")
        and name not in skipped_exact
    }
|
# -*- coding: utf-8 -*-
"""
RelatedViewFunction
~~~~~~~~~
:copyright: (c) 2018 by geeksaga.
:license: MIT LICENSE 2.0, see license for more details.
"""
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship, backref
from . import Base
class RelatedConditionScript(Base):
    # Association row linking a version, a condition and a script.
    # NOTE(review): no column is marked primary_key here; unless the shared
    # Base supplies one, SQLAlchemy's declarative mapper will reject this
    # table ("could not assemble any primary key") -- verify against Base.
    __tablename__ = 'gs_related_view_function'
    version_id = Column(Integer, ForeignKey('gs_version.id'))
    condition_id = Column(Integer, ForeignKey('gs_condition.id'))
    script_id = Column(Integer, ForeignKey('gs_script.id'))

    def __init__(self, version_id, condition_id, script_id):
        self.version_id = version_id
        self.condition_id = condition_id
        self.script_id = script_id

    def __repr__(self):
        return '<RelatedConditionScript %r %r %r>' % (self.version_id, self.condition_id, self.script_id)
|
"""
Testing sum_for_list function
"""
# Created by Egor Kostan.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
# ALGORITHMS NUMBERS ARRAYS
import allure
import unittest
from utils.log_func import print_log
from kyu_4.sum_by_factors.sum_for_list import sum_for_list
@allure.epic('4 kyu')
@allure.parent_suite('Competent')
@allure.suite("Algorithms")
@allure.sub_suite("Unit Tests")
@allure.feature("Lists")
@allure.story('Sum by Factors')
@allure.tag('ALGORITHMS', 'NUMBERS', 'ARRAYS')
@allure.link(
    url='https://www.codewars.com/kata/54d496788776e49e6b00052f/train/python',
    name='Source/Kata')
class SumForListTestCase(unittest.TestCase):
    """
    Testing sum_for_list function
    """
    def test_sum_for_list(self):
        """
        Testing sum_for_list function
        :return:
        """
        allure.dynamic.title("Testing sum_for_list function")
        allure.dynamic.severity(allure.severity_level.NORMAL)
        allure.dynamic.description_html(
            '<h3>Codewars badge:</h3>'
            '<img src="https://www.codewars.com/users/myFirstCode'
            '/badges/large">'
            '<h3>Test Description:</h3>'
            "<p>Verify that one given an array of positive "
            "or negative integers I= [i1,..,in] the function "
            "produces a sorted array P of the form "
            "[[p, sum of all ij of I for which p is a prime factor (p positive) of ij]...]"
            "</p>")
        # Each case: (input list, expected [[prime, sum of inputs divisible
        # by that prime], ...] sorted ascending by prime).
        test_data = (
            ([12, 15],
             [[2, 12], [3, 27], [5, 15]]),
            ([15, 21, 24, 30, 45],
             [[2, 54], [3, 135], [5, 90], [7, 21]]),
            ([107, 158, 204, 100, 118, 123, 126, 110, 116, 100],
             [[2, 1032], [3, 453], [5, 310], [7, 126], [11, 110],
              [17, 204], [29, 116], [41, 123], [59, 118], [79, 158],
              [107, 107]]),
            ([-29804, -4209, -28265, -72769, -31744],
             [[2, -61548], [3, -4209], [5, -28265], [23, -4209], [31, -31744],
              [53, -72769], [61, -4209], [1373, -72769], [5653, -28265], [7451, -29804]]),
            ([15, -36, -133, -61, -54, 78, -126, -113, -106, -158],
             [[2, -402], [3, -123], [5, 15], [7, -259], [13, 78], [19, -133],
              [53, -106], [61, -61], [79, -158], [113, -113]]),
            ([154, -150, -196, -164, -57, 133, -110, -126, -191, -171, 131, -55, -85, -37, -113],
             [[2, -592], [3, -504], [5, -400], [7, -35], [11, -11], [17, -85], [19, -95],
              [37, -37], [41, -164], [113, -113], [131, 131], [191, -191]]),
            ([12, -138, -175, -64, -153, 11, -11, -126, -67, -30, 153, -72, -102],
             [[2, -520], [3, -456], [5, -205], [7, -301], [11, 0], [17, -102], [23, -138], [67, -67]])
        )
        for lst, expected in test_data:
            actual_result = sum_for_list(lst)
            # Log expected vs. actual before asserting so the allure report
            # captures both.
            print_log(expected=expected,
                      actual_result=actual_result)
            with allure.step("Enter test list ({}) and "
                             "verify the output ({}) vs "
                             "expected ({})".format(lst,
                                                    actual_result,
                                                    expected)):
                self.assertListEqual(expected, actual_result)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.