hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a10657a3f0b0b5d7635da0ce3ad16bf8a298af4
| 13,543
|
py
|
Python
|
moto/ec2/responses/route_tables.py
|
oakbramble/moto
|
6350d8ec4c59eaf12b83385b6acd386e5c2f5593
|
[
"Apache-2.0"
] | null | null | null |
moto/ec2/responses/route_tables.py
|
oakbramble/moto
|
6350d8ec4c59eaf12b83385b6acd386e5c2f5593
|
[
"Apache-2.0"
] | null | null | null |
moto/ec2/responses/route_tables.py
|
oakbramble/moto
|
6350d8ec4c59eaf12b83385b6acd386e5c2f5593
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from moto.ec2.utils import filters_from_querystring
class RouteTables(BaseResponse):
    """EC2 query-API handlers for route-table actions.

    Each handler reads its parameters from the querystring, delegates the
    work to ``self.ec2_backend``, and renders the matching XML template.
    """

    def associate_route_table(self):
        """Associate a route table with a subnet or an internet gateway."""
        assoc_id = self.ec2_backend.associate_route_table(
            self._get_param("RouteTableId"),
            self._get_param("GatewayId"),
            self._get_param("SubnetId"),
        )
        return self.response_template(ASSOCIATE_ROUTE_TABLE_RESPONSE).render(
            association_id=assoc_id
        )

    def create_route(self):
        """Create a route toward exactly one target resource."""
        destinations = (
            self._get_param("DestinationCidrBlock"),
            self._get_param("DestinationIpv6CidrBlock"),
            self._get_param("DestinationPrefixListId"),
        )
        # Every possible target is forwarded; the backend validates that
        # exactly one of them was actually supplied.
        self.ec2_backend.create_route(
            self._get_param("RouteTableId"),
            *destinations,
            gateway_id=self._get_param("GatewayId"),
            instance_id=self._get_param("InstanceId"),
            nat_gateway_id=self._get_param("NatGatewayId"),
            egress_only_igw_id=self._get_param("EgressOnlyInternetGatewayId"),
            transit_gateway_id=self._get_param("TransitGatewayId"),
            interface_id=self._get_param("NetworkInterfaceId"),
            vpc_peering_connection_id=self._get_param("VpcPeeringConnectionId"),
            carrier_gateway_id=self._get_param("CarrierGatewayId"),
        )
        return self.response_template(CREATE_ROUTE_RESPONSE).render()

    def create_route_table(self):
        """Create a route table in a VPC, optionally tagged at creation."""
        tag_spec = self._get_multi_param("TagSpecification", skip_result_conversion=True)
        # Tags arrive nested inside the first TagSpecification entry.
        tags = (tag_spec[0].get("Tag") or []) if tag_spec else tag_spec
        table = self.ec2_backend.create_route_table(self._get_param("VpcId"), tags)
        return self.response_template(CREATE_ROUTE_TABLE_RESPONSE).render(route_table=table)

    def delete_route(self):
        """Delete the route matching the given destination from a route table."""
        self.ec2_backend.delete_route(
            self._get_param("RouteTableId"),
            self._get_param("DestinationCidrBlock"),
            self._get_param("DestinationIpv6CidrBlock"),
            self._get_param("DestinationPrefixListId"),
        )
        return self.response_template(DELETE_ROUTE_RESPONSE).render()

    def delete_route_table(self):
        """Delete an entire route table."""
        self.ec2_backend.delete_route_table(self._get_param("RouteTableId"))
        return self.response_template(DELETE_ROUTE_TABLE_RESPONSE).render()

    def describe_route_tables(self):
        """List route tables, optionally restricted by ids and filters."""
        tables = self.ec2_backend.describe_route_tables(
            self._get_multi_param("RouteTableId"),
            filters_from_querystring(self.querystring),
        )
        return self.response_template(DESCRIBE_ROUTE_TABLES_RESPONSE).render(
            route_tables=tables
        )

    def disassociate_route_table(self):
        """Remove a route-table association by its association id."""
        self.ec2_backend.disassociate_route_table(self._get_param("AssociationId"))
        return self.response_template(DISASSOCIATE_ROUTE_TABLE_RESPONSE).render()

    def replace_route(self):
        """Replace the target of an existing route within a route table."""
        self.ec2_backend.replace_route(
            self._get_param("RouteTableId"),
            self._get_param("DestinationCidrBlock"),
            self._get_param("DestinationIpv6CidrBlock"),
            self._get_param("DestinationPrefixListId"),
            self._get_param("NatGatewayId"),
            self._get_param("EgressOnlyInternetGatewayId"),
            self._get_param("TransitGatewayId"),
            gateway_id=self._get_param("GatewayId"),
            instance_id=self._get_param("InstanceId"),
            interface_id=self._get_param("NetworkInterfaceId"),
            vpc_peering_connection_id=self._get_param("VpcPeeringConnectionId"),
        )
        return self.response_template(REPLACE_ROUTE_RESPONSE).render()

    def replace_route_table_association(self):
        """Point an existing association at a different route table."""
        new_assoc_id = self.ec2_backend.replace_route_table_association(
            self._get_param("AssociationId"),
            self._get_param("RouteTableId"),
        )
        return self.response_template(REPLACE_ROUTE_TABLE_ASSOCIATION_RESPONSE).render(
            association_id=new_assoc_id
        )
CREATE_ROUTE_RESPONSE = """
<CreateRouteResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</CreateRouteResponse>
"""
REPLACE_ROUTE_RESPONSE = """
<ReplaceRouteResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</ReplaceRouteResponse>
"""
CREATE_ROUTE_TABLE_RESPONSE = """
<CreateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<routeTable>
<routeTableId>{{ route_table.id }}</routeTableId>
<vpcId>{{ route_table.vpc_id }}</vpcId>
<ownerId>{{ route_table.owner_id }}</ownerId>
<routeSet>
{% for route in route_table.routes.values() %}
{% if route.local %}
<item>
{% if route.destination_ipv6_cidr_block %}
<destinationIpv6CidrBlock>{{ route.destination_ipv6_cidr_block }}</destinationIpv6CidrBlock>
{% endif %}
{% if route.destination_cidr_block %}
<destinationCidrBlock>{{ route.destination_cidr_block }}</destinationCidrBlock>
{% endif %}
{% if route.destination_prefix_list_id %}
<destinationPrefixListId>{{ route.destination_prefix_list_id }}</destinationPrefixListId>
{% endif %}
<gatewayId>local</gatewayId>
<state>active</state>
</item>
{% endif %}
{% endfor %}
</routeSet>
<associationSet/>
<tagSet>
{% for tag in route_table.get_tags() %}
<item>
<resourceId>{{ tag.resource_id }}</resourceId>
<resourceType>{{ tag.resource_type }}</resourceType>
<key>{{ tag.key }}</key>
<value>{{ tag.value }}</value>
</item>
{% endfor %}
</tagSet>
</routeTable>
</CreateRouteTableResponse>
"""
DESCRIBE_ROUTE_TABLES_RESPONSE = """
<DescribeRouteTablesResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>6f570b0b-9c18-4b07-bdec-73740dcf861a</requestId>
<routeTableSet>
{% for route_table in route_tables %}
<item>
<routeTableId>{{ route_table.id }}</routeTableId>
<vpcId>{{ route_table.vpc_id }}</vpcId>
<ownerId>{{ route_table.owner_id }}</ownerId>
<routeSet>
{% for route in route_table.routes.values() %}
<item>
{% if route.destination_ipv6_cidr_block %}
<destinationIpv6CidrBlock>{{ route.destination_ipv6_cidr_block }}</destinationIpv6CidrBlock>
{% endif %}
{% if route.destination_cidr_block %}
<destinationCidrBlock>{{ route.destination_cidr_block }}</destinationCidrBlock>
{% endif %}
{% if route.destination_prefix_list %}
<destinationPrefixListId>{{ route.destination_prefix_list.id }}</destinationPrefixListId>
{% endif %}
{% if route.local %}
<gatewayId>local</gatewayId>
<origin>CreateRouteTable</origin>
<state>active</state>
{% endif %}
{% if route.gateway %}
<gatewayId>{{ route.gateway.id }}</gatewayId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.instance %}
<instanceId>{{ route.instance.id }}</instanceId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.vpc_pcx %}
<vpcPeeringConnectionId>{{ route.vpc_pcx.id }}</vpcPeeringConnectionId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.carrier_gateway %}
<carrierGatewayId>{{ route.carrier_gateway.id }}</carrierGatewayId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.nat_gateway %}
<natGatewayId>{{ route.nat_gateway.id }}</natGatewayId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.egress_only_igw %}
<egressOnlyInternetGatewayId>{{ route.egress_only_igw.id }}</egressOnlyInternetGatewayId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.transit_gateway %}
<transitGatewayId>{{ route.transit_gateway.id }}</transitGatewayId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
{% if route.interface %}
<networkInterfaceId>{{ route.interface.id }}</networkInterfaceId>
<origin>CreateRoute</origin>
<state>active</state>
{% endif %}
</item>
{% endfor %}
</routeSet>
<associationSet>
{% for association_id,subnet_id in route_table.associations.items() %}
<item>
<routeTableAssociationId>{{ association_id }}</routeTableAssociationId>
<routeTableId>{{ route_table.id }}</routeTableId>
<main>true</main>
{% if subnet_id.startswith("igw") %}
<gatewayId>{{ subnet_id }}</gatewayId>
{% endif %}
{% if subnet_id.startswith("subnet") %}
<subnetId>{{ subnet_id }}</subnetId>
{% endif %}
<associationState>
<state>associated</state>
</associationState>
</item>
{% endfor %}
</associationSet>
<tagSet>
{% for tag in route_table.get_tags() %}
<item>
<resourceId>{{ tag.resource_id }}</resourceId>
<resourceType>{{ tag.resource_type }}</resourceType>
<key>{{ tag.key }}</key>
<value>{{ tag.value }}</value>
</item>
{% endfor %}
</tagSet>
</item>
{% endfor %}
</routeTableSet>
</DescribeRouteTablesResponse>
"""
DELETE_ROUTE_RESPONSE = """
<DeleteRouteResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteRouteResponse>
"""
DELETE_ROUTE_TABLE_RESPONSE = """
<DeleteRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteRouteTableResponse>
"""
ASSOCIATE_ROUTE_TABLE_RESPONSE = """
<AssociateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<associationId>{{ association_id }}</associationId>
</AssociateRouteTableResponse>
"""
DISASSOCIATE_ROUTE_TABLE_RESPONSE = """
<DisassociateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DisassociateRouteTableResponse>
"""
REPLACE_ROUTE_TABLE_ASSOCIATION_RESPONSE = """
<ReplaceRouteTableAssociationResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<newAssociationId>{{ association_id }}</newAssociationId>
</ReplaceRouteTableAssociationResponse>
"""
| 41.928793
| 108
| 0.631249
|
4a1065c4767e0aa76a4c87f6f21ed6ad6fa5fd1b
| 17,823
|
py
|
Python
|
src/datahandler/denoise_dataset.py
|
wooseoklee4/AP-BSN
|
210013cfe0657e678e4b940fd4d5719ac0ac87c6
|
[
"MIT"
] | 8
|
2022-03-23T08:07:19.000Z
|
2022-03-30T17:08:17.000Z
|
src/datahandler/denoise_dataset.py
|
wooseoklee4/AP-BSN
|
210013cfe0657e678e4b940fd4d5719ac0ac87c6
|
[
"MIT"
] | 1
|
2022-03-25T13:26:58.000Z
|
2022-03-26T10:35:04.000Z
|
src/datahandler/denoise_dataset.py
|
wooseoklee4/AP-BSN
|
210013cfe0657e678e4b940fd4d5719ac0ac87c6
|
[
"MIT"
] | 1
|
2022-03-29T03:34:38.000Z
|
2022-03-29T03:34:38.000Z
|
import random, os
import cv2
import numpy as np
from scipy.io import savemat
import torch
from torch.utils.data import Dataset
from ..util.util import rot_hflip_img, tensor2np, np2tensor, mean_conv2d
class DenoiseDataSet(Dataset):
    '''
    Base denoising dataset class for various datasets.

    To build a custom dataset class, the functions below must be implemented
    in the inherited class (or see other dataset classes already implemented):
        - self._scan()              : scan image data & save its paths (into self.img_paths)
        - self._load_data(data_idx) : load a single paired data item as a dictionary
    '''
    def __init__(self, add_noise:str=None, crop_size:list=None, aug:list=None, n_repeat:int=1, n_data:int=None, ratio_data:float=None) -> None:
        '''
        Args:
            add_noise (str)    : configuration of additive noise to synthesize noisy image. (see _add_noise() for more details.)
            crop_size (list)   : crop size, e.g. [W] or [H, W] and no crop if None
            aug (list)         : list of data augmentations (see _augmentation() for more details.)
            n_repeat (int)     : number of repeat for each data.
            n_data (int)       : number of data to be used. (default: None = all data)
            ratio_data (float) : ratio of data to be used. (activated when n_data=None, default: None = all data)
        '''
        self.dataset_dir = './dataset'
        if not os.path.isdir(self.dataset_dir):
            raise Exception('dataset directory does not exist')

        # parse additive noise argument, e.g. "gau-25-clamp"
        self.add_noise_type, self.add_noise_opt, self.add_noise_clamp = self._parse_add_noise(add_noise)

        # set parameters for dataset.
        self.crop_size = crop_size
        self.aug = aug
        self.n_repeat = n_repeat

        # scan all data and fill in self.img_paths
        self.img_paths = []
        self._scan()
        if len(self.img_paths) > 0:
            # sort only when entries are plain comparable scalars (paths/indices)
            if isinstance(self.img_paths[0], (int, str, float)):
                self.img_paths.sort()

        # set data amount (explicit count wins over ratio; default is all data)
        if n_data is not None:
            self.n_data = n_data
        elif ratio_data is not None:
            self.n_data = int(ratio_data * len(self.img_paths))
        else:
            self.n_data = len(self.img_paths)

    def __len__(self):
        # each data item is served n_repeat times per epoch
        return self.n_data * self.n_repeat

    def __getitem__(self, idx):
        '''
        final dictionary shape of data:
            {'clean', 'syn_noisy', 'real_noisy', 'noisy (any of real[first priority] and syn)', etc}
        '''
        # calculate data index (wrap around for repeated epochs)
        data_idx = idx % self.n_data

        # load data
        data = self._load_data(data_idx)

        # pre-processing (currently only crop)
        data = self._pre_processing(data)

        # synthesize additive noise (on clean if available, else on real noisy)
        if self.add_noise_type is not None:
            if 'clean' in data:
                base_img = data['clean']
            elif 'real_noisy' in data:
                base_img = data['real_noisy']
            else:
                raise RuntimeError('there is no clean or real image to synthesize. (synthetic noise type: %s)'%self.add_noise_type)
            syn_noisy_img, nlf = self._add_noise(base_img, self.add_noise_type, self.add_noise_opt, self.add_noise_clamp)
            data['syn_noisy'] = syn_noisy_img
            data['nlf'] = nlf

        # data augmentation
        if self.aug is not None:
            data = self._augmentation(data, self.aug)

        # add general label 'noisy' to use any of real_noisy or syn_noisy (real first)
        if 'real_noisy' in data or 'syn_noisy' in data:
            data['noisy'] = data['real_noisy'] if 'real_noisy' in data else data['syn_noisy']

        return data

    def _scan(self):
        # TODO fill in self.img_paths (include path from project directory)
        raise NotImplementedError

    def _load_data(self, data_idx):
        # TODO load possible data as dictionary
        # dictionary key list :
        #   'clean' : clean image without noise (gt or anything).
        #   'real_noisy' : real noisy image or already synthesized noisy image.
        #   'instances' : any other information of capturing situation.
        raise NotImplementedError

    #----------------------------#
    #  Image handling functions  #
    #----------------------------#
    def _load_img(self, img_name, as_gray=False):
        '''Load an image from disk (BGR via OpenCV) and return a CHW float tensor.'''
        img = cv2.imread(img_name, 1)
        assert img is not None, "failure on loading image - %s"%img_name
        # cv2 loads BGR, so flip channels to RGB
        return self._load_img_from_np(img, as_gray, RGBflip=True)

    def _load_img_from_np(self, img, as_gray=False, RGBflip=False):
        '''Convert an HWC (or HW) numpy image to a CHW float32 tensor.'''
        # if color
        if len(img.shape) != 2:
            if as_gray:
                # follows definition of sRGB in terms of the CIE 1931 linear luminance.
                # because opencv color conversion and imread grayscale mode calculate a bit differently.
                # https://en.wikipedia.org/wiki/Grayscale
                # weights are in B,G,R order to match cv2's channel layout
                img = np.average(img, axis=2, weights=[0.0722, 0.7152, 0.2126])
                img = np.expand_dims(img, axis=0)
            else:
                if RGBflip:
                    img = np.flip(img, axis=2)
                img = np.transpose(img, (2,0,1))
        # if gray
        else:
            img = np.expand_dims(img, axis=0)
        return torch.from_numpy(np.ascontiguousarray(img).astype(np.float32))

    def _pre_processing(self, data):
        # get a patch from image data
        if self.crop_size is not None:
            data = self._get_patch(self.crop_size, data)
        return data

    def _get_patch(self, crop_size, data, rnd=True):
        '''Crop a (random or top-left) patch of size [W, H] from every image in data.'''
        # check clean and real_noisy image sizes match
        # (bug fix: the original compared each image's shape against itself,
        #  so mismatched pairs were never detected)
        if 'clean' in data and 'real_noisy' in data:
            assert data['clean'].shape[1] == data['real_noisy'].shape[1] and data['clean'].shape[2] == data['real_noisy'].shape[2], \
                'img shape should be same. (%d, %d) != (%d, %d)' % (data['clean'].shape[1], data['clean'].shape[2], data['real_noisy'].shape[1], data['real_noisy'].shape[2])

        # get image shape and select random crop location
        # crop_size is [W, H]; tensors are (C, H, W)
        if 'clean' in data:
            max_x = data['clean'].shape[2] - crop_size[0]
            max_y = data['clean'].shape[1] - crop_size[1]
        else:
            max_x = data['real_noisy'].shape[2] - crop_size[0]
            max_y = data['real_noisy'].shape[1] - crop_size[1]
        assert max_x >= 0
        assert max_y >= 0

        if rnd and max_x > 0 and max_y > 0:
            x = np.random.randint(0, max_x)
            y = np.random.randint(0, max_y)
        else:
            x, y = 0, 0

        # crop both images at the same location so pairs stay aligned
        if 'clean' in data:
            data['clean'] = data['clean'][:, y:y+crop_size[1], x:x+crop_size[0]]
        if 'real_noisy' in data:
            data['real_noisy'] = data['real_noisy'][:, y:y+crop_size[1], x:x+crop_size[0]]
        return data

    def normalize_data(self, data, cuda=False):
        '''Normalize every image tensor in the data dictionary in place.'''
        for key in data:
            if self._is_image_tensor(data[key]):
                data[key] = self.normalize(data[key], cuda)
        return data

    def inverse_normalize_data(self, data, cuda=False):
        '''Undo normalization for every image tensor in the data dictionary.'''
        for key in data:
            if self._is_image_tensor(data[key]):
                data[key] = self.inverse_normalize(data[key], cuda)
        return data

    def normalize(self, img, cuda=False):
        # NOTE(review): gray/color means & stds are not set anywhere in this
        # class — presumably assigned by subclasses; confirm before use.
        if img.shape[0] == 1:
            stds = self.gray_stds
            means = self.gray_means
        elif img.shape[0] == 3:
            stds = self.color_stds
            means = self.color_means
        else:
            raise RuntimeError('undefined image channel length : %d'%img.shape[0])
        if cuda:
            means, stds = means.cuda(), stds.cuda()
        return (img-means) / stds

    def inverse_normalize(self, img, cuda=False):
        # NOTE(review): see normalize() — means/stds come from subclasses.
        if img.shape[0] == 1:
            stds = self.gray_stds
            means = self.gray_means
        elif img.shape[0] == 3:
            stds = self.color_stds
            means = self.color_means
        else:
            raise RuntimeError('undefined image channel length : %d'%img.shape[0])
        if cuda:
            means, stds = means.cuda(), stds.cuda()
        return (img*stds) + means

    def _parse_add_noise(self, add_noise_str:str):
        '''
        Parse a noise spec of the form "noise_type-opt0:opt1:opt2-clamp".

        Returns:
            (noise_type, options list or None, clamp flag or None)
        '''
        if add_noise_str == 'bypass':
            return 'bypass', None, None
        elif add_noise_str is not None:
            parts = add_noise_str.split('-')
            add_noise_type = parts[0]
            add_noise_opt = [float(v) for v in parts[1].split(':')]
            add_noise_clamp = len(parts) > 2 and parts[2] == 'clamp'
            return add_noise_type, add_noise_opt, add_noise_clamp
        else:
            return None, None, None

    def _add_noise(self, clean_img:torch.Tensor, add_noise_type:str, opt:list, clamp:bool=False) -> torch.Tensor:
        '''
        add various noise to clean image.
        Args:
            clean_img (Tensor) : clean image to synthesize on
            add_noise_type     : below types are available
            opt (list)         : args for synthesize noise
            clamp (bool)       : optional, clamp noisy image into [0,255]
        Return:
            (synthesized_img, nlf) — nlf is the noise-level info (None for bypass/uni)
        Noise_types
            - bypass    : bypass clean image
            - uni       : uniform distribution noise from -opt[0] ~ opt[0]
            - gau       : gaussian distribution noise with zero-mean & opt[0] variance
            - gau_blind : blind gaussian distribution with zero-mean, variance is uniformly selected from opt[0] ~ opt[1]
            - struc_gau : structured gaussian noise. gaussian filter is applied to above gaussian noise. opt[0] is variance of gaussian, opt[1] is window size and opt[2] is sigma of gaussian filter.
            - het_gau   : heteroscedastic gaussian noise with indep weight:opt[0], dep weight:opt[1]
        '''
        nlf = None
        if add_noise_type == 'bypass':
            # bypass clean image
            synthesized_img = clean_img
        elif add_noise_type == 'uni':
            # add uniform noise
            synthesized_img = clean_img + 2*opt[0] * torch.rand(clean_img.shape) - opt[0]
        elif add_noise_type == 'gau':
            # add AWGN
            nlf = opt[0]
            synthesized_img = clean_img + torch.normal(mean=0., std=nlf, size=clean_img.shape)
        elif add_noise_type == 'gau_blind':
            # add blind gaussian noise
            nlf = random.uniform(opt[0], opt[1])
            synthesized_img = clean_img + torch.normal(mean=0., std=nlf, size=clean_img.shape)
        elif add_noise_type == 'struc_gau':
            # add structured gaussian noise (used in the paper "Noiser2Noise": https://arxiv.org/pdf/1910.11908.pdf)
            nlf = opt[0]
            gau_noise = torch.normal(mean=0., std=opt[0], size=clean_img.shape)
            struc_gau = mean_conv2d(gau_noise, window_size=int(opt[1]), sigma=opt[2], keep_sigma=True)
            synthesized_img = clean_img + struc_gau
        elif add_noise_type == 'het_gau':
            # add heteroscedastic gaussian noise (std depends on pixel intensity)
            het_gau_std = (clean_img * (opt[0]**2) + torch.ones(clean_img.shape) * (opt[1]**2)).sqrt()
            nlf = het_gau_std
            synthesized_img = clean_img + torch.normal(mean=0., std=nlf)
        else:
            raise RuntimeError('undefined additive noise type : %s'%add_noise_type)

        if clamp:
            synthesized_img = torch.clamp(synthesized_img, 0, 255)

        return synthesized_img, nlf

    def _augmentation(self, data:dict, aug:list):
        '''
        Parsing augmentation list and apply it to the data images.
        '''
        # parsing augmentation
        rot, hflip = 0, 0
        for aug_name in aug:
            # aug : random rotation (0/90/180/270 degrees)
            if aug_name == 'rot':
                rot = random.randint(0,3)
            # aug : random horizontal flip
            elif aug_name == 'hflip':
                hflip = random.randint(0,1)
            else:
                raise RuntimeError('undefined augmentation option : %s'%aug_name)

        # for every data (only images), apply the same rotation/flip so pairs stay aligned
        for key in data:
            if self._is_image_tensor(data[key]):
                if rot != 0 or hflip != 0:
                    data[key] = rot_hflip_img(data[key], rot, hflip)
        return data

    #----------------------------#
    #   Image saving functions   #
    #----------------------------#
    def save_all_image(self, dir, clean=False, syn_noisy=False, real_noisy=False):
        '''Dump every requested image kind of the dataset to dir as PNGs.'''
        for idx in range(len(self.img_paths)):
            data = self.__getitem__(idx)

            if clean and 'clean' in data:
                cv2.imwrite(os.path.join(dir, '%04d_CL.png'%idx), tensor2np(data['clean']))
            if syn_noisy and 'syn_noisy' in data:
                cv2.imwrite(os.path.join(dir, '%04d_SN.png'%idx), tensor2np(data['syn_noisy']))
            if real_noisy and 'real_noisy' in data:
                cv2.imwrite(os.path.join(dir, '%04d_RN.png'%idx), tensor2np(data['real_noisy']))

            print('image %04d saved!'%idx)

    def prep_save(self, img_idx:int, img_size:int, overlap:int, clean:bool=False, syn_noisy:bool=False, real_noisy:bool=False):
        '''
        cropping an image into mini-size patches for efficient training.
        Args:
            img_idx (int)     : index of image
            img_size (int)    : size of image
            overlap (int)     : overlap between patches
            clean (bool)      : save clean image (default: False)
            syn_noisy (bool)  : save synthesized noisy image (default: False)
            real_noisy (bool) : save real noisy image (default: False)
        '''
        d_name = '%s_s%d_o%d'%(self.__class__.__name__, img_size, overlap)
        os.makedirs(os.path.join(self.dataset_dir, 'prep', d_name), exist_ok=True)

        assert overlap < img_size
        stride = img_size - overlap

        if clean:
            clean_dir = os.path.join(self.dataset_dir, 'prep', d_name, 'CL')
            os.makedirs(clean_dir, exist_ok=True)
        if syn_noisy:
            syn_noisy_dir = os.path.join(self.dataset_dir, 'prep', d_name, 'SN')
            os.makedirs(syn_noisy_dir, exist_ok=True)
        if real_noisy:
            real_noisy_dir = os.path.join(self.dataset_dir, 'prep', d_name, 'RN')
            os.makedirs(real_noisy_dir, exist_ok=True)

        data = self.__getitem__(img_idx)
        c,h,w = data['clean'].shape if 'clean' in data else data['real_noisy'].shape

        # NOTE(review): the width loop has an extra "+1" inside the division
        # that the height loop lacks — looks asymmetric; confirm intent.
        for h_idx in range((h-img_size)//stride + 1):
            for w_idx in range((w-img_size+1)//stride + 1):
                hl, hr = h_idx*stride, h_idx*stride+img_size
                wl, wr = w_idx*stride, w_idx*stride+img_size
                if clean: cv2.imwrite(os.path.join(clean_dir, '%d_%d_%d.png'%(img_idx, h_idx, w_idx)), tensor2np(data['clean'][:,hl:hr,wl:wr]))
                if syn_noisy: cv2.imwrite(os.path.join(syn_noisy_dir, '%d_%d_%d.png'%(img_idx, h_idx, w_idx)), tensor2np(data['syn_noisy'][:,hl:hr,wl:wr]))
                if real_noisy: cv2.imwrite(os.path.join(real_noisy_dir, '%d_%d_%d.png'%(img_idx, h_idx, w_idx)), tensor2np(data['real_noisy'][:,hl:hr,wl:wr]))
        print('Cropped image %d / %d'%(img_idx, self.__len__()))

    #----------------------------#
    #            etc             #
    #----------------------------#
    def _is_image_tensor(self, x):
        '''
        return whether input has image tensor shape (includes batched images).
        '''
        if isinstance(x, torch.Tensor):
            if len(x.shape) == 3 or len(x.shape) == 4:
                if x.dtype != torch.bool:
                    return True
        return False
class ReturnMergedDataset():
    """Factory wrapper: pre-binds a list of dataset names and builds a
    MergedDataset from them when called with construction arguments."""

    def __init__(self, d_list):
        # dataset names are resolved lazily, at call time
        self.d_list = d_list

    def __call__(self, *args, **kwargs):
        return MergedDataset(self.d_list, *args, **kwargs)
class MergedDataset(Dataset):
    def __init__(self, d_list, *args, **kwargs):
        '''
        Merged denoising dataset when you use multiple dataset combined.
        see more details of DenoiseDataSet
        '''
        # deferred import to avoid a circular dependency at module load time
        from ..datahandler import get_dataset_object

        self.dataset_list = [get_dataset_object(name)(*args, **kwargs) for name in d_list]

        # a data key is kept only if *every* merged dataset provides it
        self.data_contents_flags = {'clean': True, 'noisy': True, 'real_noisy': True}
        self.dataset_length = []
        for dset in self.dataset_list:
            self.dataset_length.append(dset.__len__())
            sample = dset.__getitem__(0)
            for key in self.data_contents_flags.keys():
                if key not in sample:
                    self.data_contents_flags[key] = False

    def __len__(self):
        return sum(self.dataset_length)

    def __getitem__(self, idx):
        # walk the datasets, subtracting each one's length until idx lands inside
        remaining = idx
        for length, dset in zip(self.dataset_length, self.dataset_list):
            if remaining < length:
                data = dset.__getitem__(remaining)
                return {key: data[key] for key, wanted in self.data_contents_flags.items() if wanted}
            remaining -= length
        raise RuntimeError('index of merged dataset contains some bugs, total length %d, requiring idx %d'%(self.__len__(), idx))
| 42.536993
| 198
| 0.579981
|
4a106896f60218b26cebcc867c868caca5b72ea7
| 6,671
|
py
|
Python
|
tpubar/cli.py
|
trisongz/tpubar
|
adf44909a9a1afb30aeb38664200f372a1d7f34d
|
[
"MIT"
] | 3
|
2020-12-13T00:58:33.000Z
|
2021-02-17T22:30:44.000Z
|
tpubar/cli.py
|
trisongz/tpubar
|
adf44909a9a1afb30aeb38664200f372a1d7f34d
|
[
"MIT"
] | 1
|
2020-12-09T00:01:59.000Z
|
2020-12-09T00:01:59.000Z
|
tpubar/cli.py
|
trisongz/tpubar
|
adf44909a9a1afb30aeb38664200f372a1d7f34d
|
[
"MIT"
] | 1
|
2021-02-17T22:30:47.000Z
|
2021-02-17T22:30:47.000Z
|
import click
import json
import sys
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '0'
import time
from tpubar.utils import run_command
@click.group()
@click.pass_context
def cli(ctx, **kws):
    # Root command group for the tpubar CLI; stash any group-level keyword
    # options on the click context so subcommands can reach them via ctx.obj.
    ctx.obj = kws
@cli.command('monitor')
@click.argument('tpu_name', type=click.STRING, default=os.environ.get('TPU_NAME', None))
@click.option('--project', type=click.STRING, default=None)
@click.option('-v', '--verbose', is_flag=True)
def monitor_tpubar(tpu_name, project, verbose):
    """Run a live TPU utilization monitor until interrupted.

    Prompts for a TPU name and for application-default credentials when
    they are not already available from the environment.
    """
    tpu_name = tpu_name if tpu_name else os.environ.get('TPU_NAME', None)
    from tpubar import TPUMonitor, auths
    if not tpu_name:
        tpu_name = click.prompt('Please enter a TPU Name', type=click.STRING)
        if not tpu_name:
            raise ValueError('Valid TPU Name must be selected')
    elif not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None) and not auths['DEFAULT_ADC']:
        # NOTE(review): credentials are only checked when a TPU name was given
        # up front (elif) — preserved from the original control flow.
        adc = click.prompt('Please enter a path to GOOGLE_APPLICATION_CREDENTIALS', type=click.STRING)
        if adc:
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = adc
    click.echo(f'Monitoring TPU: {tpu_name} until cancelled.')
    # Bug fix: the original if env['colab']/else branches constructed the exact
    # same monitor (profiler='v1' in both), so the dead conditional is removed.
    monitor = TPUMonitor(tpu_name=tpu_name, project=project, profiler='v1', refresh_secs=3, verbose=verbose)
    monitor.start()
    # Block until Ctrl-C, then shut the monitor down cleanly.
    while True:
        try:
            time.sleep(10)
        except KeyboardInterrupt:
            click.echo('\nShutting Down Monitor')
            monitor.close()
            sys.exit()
@cli.command('test')
@click.argument('tpu_name', type=click.STRING, default=os.environ.get('TPU_NAME', None))
@click.option('--project', type=click.STRING, default=None)
def test_tpubar(tpu_name, project):
    """Run a roughly one-minute smoke test of the TPU monitor."""
    tpu_name = tpu_name or os.environ.get('TPU_NAME', None)
    from tpubar import TPUMonitor, env, auths
    if not tpu_name:
        tpu_name = click.prompt('Please enter a TPU Name', type=click.STRING)
        if not tpu_name:
            raise ValueError('Valid TPU Name must be selected')
    if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None) and not auths['DEFAULT_ADC']:
        adc = click.prompt('Please enter a path to GOOGLE_APPLICATION_CREDENTIALS', type=click.STRING)
        if adc:
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = adc
    click.echo(f'Running Test for TPUBar on TPU {tpu_name}')
    # colab uses the v2 profiler, everything else the v1 profiler
    profiler = 'v2' if env['colab'] else 'v1'
    monitor = TPUMonitor(tpu_name=tpu_name, project=project, profiler=profiler, refresh_secs=3, verbose=True)
    monitor.start()
    # observe six refresh windows (~1 minute) before finishing
    for _ in range(6):
        time.sleep(10)
    click.echo('\nCompleted Testing')
@cli.command('trace')
@click.argument('tpu_name', type=click.STRING, default=os.environ.get('TPU_NAME', None))
@click.option('-v', '--verbose', is_flag=True)
def trace_tpubar(tpu_name, verbose):
    """Stream TPU trace output until interrupted with Ctrl-C."""
    tpu_name = tpu_name or os.environ.get('TPU_NAME', None)
    if not tpu_name:
        tpu_name = click.prompt('Please enter a TPU Name', type=click.STRING)
        if not tpu_name:
            raise ValueError('Valid TPU Name must be selected')
    if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None):
        adc = click.prompt('Please enter a path to GOOGLE_APPLICATION_CREDENTIALS', type=click.STRING)
        if adc:
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = adc
    click.echo(f'Tracing TPU: {tpu_name} until cancelled.')
    from tpubar import TPUMonitor, env
    tracer = TPUMonitor(tpu_name=tpu_name, profiler='trace', refresh_secs=10, verbose=verbose)
    tracer.trace()
    # idle loop: the tracer runs in the background until the user interrupts
    while True:
        try:
            time.sleep(10)
        except KeyboardInterrupt:
            click.echo('\nShutting Down Tracer')
            sys.exit()
@cli.command('auth')
@click.argument('auth_name', type=click.STRING, default=os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None))
@click.option('-l', '--list_auths', is_flag=True)
def set_auth(auth_name, list_auths):
    # Select (or interactively register) a Google application-default
    # credentials entry and make it the default ADC for tpubar commands.
    from tpubar import env, auths, update_auth
    click.echo('\n')
    if list_auths:
        # Print every known auth entry before switching anything.
        click.echo('Listing Auths')
        for name, adc_path in auths.items():
            click.echo(f'- {name}: {adc_path}')
        click.echo('\n')
    click.echo(f'Current ADC is set to {os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "None")}')
    if auth_name in auths.keys():
        # NOTE(review): `auths[auth_name]` is by definition one of
        # `auths.values()`, so this condition can never be True and
        # BACKUP_ADC_PATH is never written. Intent unclear — possibly meant
        # to compare against the current DEFAULT_ADC; confirm before fixing.
        if auths[auth_name] not in auths.values():
            click.echo(f'Setting {auth_name} to BACKUP_ADC_PATH')
            auths['BACKUP_ADC_PATH'] = auths[auth_name]
        click.echo(f'- {auth_name} is now the Default ADC: {auths[auth_name]}')
        auths['DEFAULT_ADC'] = auths[auth_name]
    else:
        # Unknown name: prompt for a new entry and require the file to exist.
        click.echo(f'{auth_name} was not found in {list(auths.keys())} - Creating New Auth')
        adc_name = click.prompt('Please enter a name for your ADC', type=click.STRING)
        adc_path = click.prompt('Please enter a path to GOOGLE_APPLICATION_CREDENTIALS', type=click.STRING)
        assert os.path.exists(adc_path), 'Path to GOOGLE_APPLICATION_CREDENTIALS was not found. Exiting'
        auths.update({adc_name: adc_path})
        auths['DEFAULT_ADC'] = adc_path
        click.echo(f'- {adc_name} is now the Default ADC: {adc_path}')
    # Persist the registry and export the chosen ADC for this process.
    update_auth(auths)
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = auths['DEFAULT_ADC']
@cli.command('sess')
@click.argument('session_name', default='train')
def create_sess(session_name):
    """Create a detached tmux session, re-activate the current conda env
    inside it (when one is active), then attach.

    Args:
        session_name: name of the tmux session to create (default 'train').
    """
    conda_exe = os.getenv('CONDA_EXE')
    # BUGFIX: os.getenv('CONDA_EXE') is None outside a conda installation;
    # calling .replace() on None raised AttributeError. Only derive the
    # activation script path when the variable is actually set.
    _conda_exe = conda_exe.replace('bin/conda', 'etc/profile.d/conda.sh') if conda_exe else None
    _conda_env = os.getenv('CONDA_DEFAULT_ENV', None)
    os.system(f'tmux new -d -s {session_name}')
    if _conda_env and _conda_exe:
        # Source conda's shell hook in pane 0, re-activate the env, and cd
        # back to the invoking directory so the session starts where we are.
        command = f'tmux send-keys -t {session_name}.0 "source {_conda_exe} && conda deactivate && conda activate {_conda_env} && clear && cd {os.getcwd()}" ENTER'
        os.system(command)
    os.system(f'tmux a -t {session_name}')
@cli.command('attach')
@click.argument('session_name', default='train')
def attach_sess(session_name):
    """Attach to an existing tmux session."""
    os.system(f'tmux a -t {session_name}')
@cli.command('killsess')
@click.argument('session_name', default='train')
def kill_sess(session_name):
    """Kill the named tmux session."""
    click.echo(f'Killing {session_name}')
    os.system(f'tmux kill-session -t {session_name}')
def main(*args, prog_name='tpubar', auto_envvar_prefix='TPUBAR', **kws):
    """Console entry point: delegate to the click command group."""
    kws.update(prog_name=prog_name, auto_envvar_prefix=auto_envvar_prefix)
    cli.main(*args, **kws)


if __name__ == "__main__":
    main()
| 40.92638
| 163
| 0.678759
|
4a10689fdc09a9153cb6101952a3b446246734e1
| 1,905
|
py
|
Python
|
video_transformation_numpy/test/video_io_test.py
|
skeselj/video-transformation-numpy
|
6b27e11ea1051e0c63be9de4f441bfd6dbcbd571
|
[
"MIT"
] | null | null | null |
video_transformation_numpy/test/video_io_test.py
|
skeselj/video-transformation-numpy
|
6b27e11ea1051e0c63be9de4f441bfd6dbcbd571
|
[
"MIT"
] | null | null | null |
video_transformation_numpy/test/video_io_test.py
|
skeselj/video-transformation-numpy
|
6b27e11ea1051e0c63be9de4f441bfd6dbcbd571
|
[
"MIT"
] | null | null | null |
"""Tests for the video_io.py."""
import numpy as np
import os
from unittest import main
from unittest import TestCase
from video_transformation_numpy import video_io
TEMP_DIR = './tmp'
class TestVideoIo(TestCase):
    """Round-trip and error-path tests for video_io.read_video/write_video."""

    def setUp(self):
        # exist_ok guards against a directory left behind by a crashed run,
        # which previously made os.mkdir (and thus every test) fail.
        os.makedirs(TEMP_DIR, exist_ok=True)

    def tearDown(self):
        # A failed test can leave files inside TEMP_DIR; plain os.rmdir would
        # then raise and mask the real failure, so remove recursively.
        import shutil
        shutil.rmtree(TEMP_DIR, ignore_errors=True)

    def testNonExistent(self):
        '''Given a path that doesn't contain a video, read_video outputs should be Nones.'''
        input_path = os.path.join(TEMP_DIR, 'nonexistent.mp4')
        video_np, codec, fps = video_io.read_video(input_path)
        self.assertIsNone(video_np)
        self.assertIsNone(codec)
        self.assertIsNone(fps)

    def testAllZeroReadThenWrite(self):
        '''Write a NumPy array of all zeros to file, then read it.'''
        path = os.path.join(TEMP_DIR, 'all_zero.mp4')
        # Write video bytes to file.
        num_frames_in, frame_height_in, frame_width_in, num_channels_in = 1800, 360, 640, 3
        fps_in, codec_in = 60, 'mp4v'
        video_np_in = np.zeros((num_frames_in, frame_height_in, frame_width_in, num_channels_in),
                               dtype='uint8')
        self.assertTrue(video_io.write_video(path, video_np_in, codec_in, fps_in))
        # Read video bytes, and associated metadata, from the same file.
        video_np_out, codec_out, fps_out = video_io.read_video(path)
        num_frames_out, frame_height_out, frame_width_out, num_channels_out = video_np_out.shape
        self.assertEqual(num_frames_in, num_frames_out)
        self.assertEqual(frame_height_in, frame_height_out)
        self.assertEqual(frame_width_in, frame_width_out)
        self.assertEqual(num_channels_in, num_channels_out)
        self.assertEqual(num_channels_in, 3)
        self.assertEqual(codec_in, codec_out)
        self.assertEqual(fps_in, fps_out)
        os.remove(path)
# Allow running this test module directly (unittest.main discovers tests).
if __name__ == '__main__':
    main()
| 31.75
| 97
| 0.685039
|
4a106982850027ae5df76b5b2d006fd18ea907ca
| 3,338
|
py
|
Python
|
tensorflow/contrib/distributions/python/kernel_tests/bijectors/inline_test.py
|
CupOfNormal/tensorflow
|
ac5bde122fff9cc12efbc0a38dd7098c9ff3b1c6
|
[
"Apache-2.0"
] | 1
|
2017-02-24T05:09:40.000Z
|
2017-02-24T05:09:40.000Z
|
tensorflow/contrib/distributions/python/kernel_tests/bijectors/inline_test.py
|
snowluliang/tensorflow
|
ac5bde122fff9cc12efbc0a38dd7098c9ff3b1c6
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/contrib/distributions/python/kernel_tests/bijectors/inline_test.py
|
snowluliang/tensorflow
|
ac5bde122fff9cc12efbc0a38dd7098c9ff3b1c6
|
[
"Apache-2.0"
] | 1
|
2021-02-16T15:38:50.000Z
|
2021-02-16T15:38:50.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops.bijectors import exp as exp_lib
from tensorflow.contrib.distributions.python.ops.bijectors import inline as inline_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class InlineBijectorTest(test.TestCase):
  """Tests correctness of the inline constructed bijector."""

  def testBijector(self):
    """An Inline built from raw callables must match the canonical Exp."""
    with self.test_session():
      exp = exp_lib.Exp(event_ndims=1)
      # Rebuild Exp out of plain callables via Inline; it should behave
      # identically to the library implementation above.
      inline = inline_lib.Inline(
          forward_fn=math_ops.exp,
          inverse_fn=math_ops.log,
          inverse_log_det_jacobian_fn=(
              lambda y: -math_ops.reduce_sum(  # pylint: disable=g-long-lambda
                  math_ops.log(y), reduction_indices=-1)),
          forward_log_det_jacobian_fn=(
              lambda x: math_ops.reduce_sum(x, reduction_indices=-1)),
          name="exp")
      self.assertEqual(exp.name, inline.name)
      x = [[[1., 2.], [3., 4.], [5., 6.]]]
      y = np.exp(x)
      self.assertAllClose(y, inline.forward(x).eval())
      self.assertAllClose(x, inline.inverse(y).eval())
      # ildj of exp is -sum(log(y)) over the event dimension.
      self.assertAllClose(
          -np.sum(np.log(y), axis=-1),
          inline.inverse_log_det_jacobian(y).eval())
      # fldj and ildj must be negatives of each other at matching points.
      self.assertAllClose(-inline.inverse_log_det_jacobian(y).eval(),
                          inline.forward_log_det_jacobian(x).eval())
      rev, jac = inline.inverse_and_inverse_log_det_jacobian(y)
      self.assertAllClose(x, rev.eval())
      self.assertAllClose(-np.sum(np.log(y), axis=-1), jac.eval())

  def testShapeGetters(self):
    """Shape-getter callables are forwarded through the Inline bijector."""
    with self.test_session():
      # Bijector that only knows how to transform event shapes: append a
      # trailing dimension of size 1 (forward) / strip it (inverse).
      bijector = inline_lib.Inline(
          forward_event_shape_tensor_fn=lambda x: array_ops.concat((x, [1]), 0),
          forward_event_shape_fn=lambda x: x.as_list() + [1],
          inverse_event_shape_tensor_fn=lambda x: x[:-1],
          inverse_event_shape_fn=lambda x: x[:-1],
          name="shape_only")
      x = tensor_shape.TensorShape([1, 2, 3])
      y = tensor_shape.TensorShape([1, 2, 3, 1])
      self.assertAllEqual(y, bijector.forward_event_shape(x))
      self.assertAllEqual(
          y.as_list(),
          bijector.forward_event_shape_tensor(x.as_list()).eval())
      self.assertAllEqual(x, bijector.inverse_event_shape(y))
      self.assertAllEqual(
          x.as_list(),
          bijector.inverse_event_shape_tensor(y.as_list()).eval())
# Allow running this test file directly.
if __name__ == "__main__":
  test.main()
| 40.216867
| 86
| 0.673158
|
4a1069f737ce564924b5060004b809624f83b228
| 12,338
|
py
|
Python
|
src/transformers/tokenization_camembert.py
|
12190143/transformers
|
6faca88ee0c472de8207e648b0999a1ee01ff127
|
[
"Apache-2.0"
] | 480
|
2019-10-14T02:22:34.000Z
|
2022-03-29T18:07:00.000Z
|
src/transformers/tokenization_camembert.py
|
hmason/transformers
|
ab90353f1abfd15f8d21f99395658d060679a08c
|
[
"Apache-2.0"
] | 20
|
2019-10-15T16:18:05.000Z
|
2022-02-27T21:23:55.000Z
|
src/transformers/tokenization_camembert.py
|
hmason/transformers
|
ab90353f1abfd15f8d21f99395658d060679a08c
|
[
"Apache-2.0"
] | 42
|
2019-10-14T21:29:59.000Z
|
2021-12-28T15:25:58.000Z
|
# coding=utf-8
# Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
""" Tokenization classes for Camembert model."""
import logging
import os
from shutil import copyfile
from typing import List, Optional
import sentencepiece as spm
from .tokenization_utils import PreTrainedTokenizer
from .tokenization_xlnet import SPIECE_UNDERLINE
logger = logging.getLogger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "sentencepiece.bpe.model"}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"camembert-base": "https://s3.amazonaws.com/models.huggingface.co/bert/camembert-base-sentencepiece.bpe.model",
}
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"camembert-base": None,
}
SHARED_MODEL_IDENTIFIERS = [
# Load with
# `tokenizer = AutoTokenizer.from_pretrained("username/pretrained_model")`
"Musixmatch/umberto-commoncrawl-cased-v1",
"Musixmatch/umberto-wikipedia-uncased-v1",
]
class CamembertTokenizer(PreTrainedTokenizer):
    """
    Adapted from RobertaTokenizer and XLNetTokenizer
    SentencePiece based tokenizer. Peculiarities:

    - requires `SentencePiece <https://github.com/google/sentencepiece>`_

    This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the methods. Users
    should refer to the superclass for more information regarding methods.

    Args:
        vocab_file (:obj:`str`):
            Path to the vocabulary file.
        bos_token (:obj:`string`, `optional`, defaults to "<s>"):
            The beginning of sequence token that was used during pre-training. Can be used a sequence classifier token.

            .. note::

                When building a sequence using special tokens, this is not the token that is used for the beginning
                of sequence. The token used is the :obj:`cls_token`.
        eos_token (:obj:`string`, `optional`, defaults to "</s>"):
            The end of sequence token.

            .. note::

                When building a sequence using special tokens, this is not the token that is used for the end
                of sequence. The token used is the :obj:`sep_token`.
        sep_token (:obj:`string`, `optional`, defaults to "</s>"):
            The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences
            for sequence classification or for a text and a question for question answering.
            It is also used as the last token of a sequence built with special tokens.
        cls_token (:obj:`string`, `optional`, defaults to "<s>"):
            The classifier token which is used when doing sequence classification (classification of the whole
            sequence instead of per-token classification). It is the first token of the sequence when built with
            special tokens.
        unk_token (:obj:`string`, `optional`, defaults to "<unk>"):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
            token instead.
        pad_token (:obj:`string`, `optional`, defaults to "<pad>"):
            The token used for padding, for example when batching sequences of different lengths.
        mask_token (:obj:`string`, `optional`, defaults to "<mask>"):
            The token used for masking values. This is the token used when training this model with masked language
            modeling. This is the token which the model will try to predict.
        additional_special_tokens (:obj:`List[str]`, `optional`, defaults to :obj:`["<s>NOTUSED", "</s>NOTUSED"]`):
            Additional special tokens used by the tokenizer.

    Attributes:
        sp_model (:obj:`SentencePieceProcessor`):
            The `SentencePiece` processor that is used for every conversion (string, tokens and IDs).
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES

    def __init__(
        self,
        vocab_file,
        bos_token="<s>",
        eos_token="</s>",
        sep_token="</s>",
        cls_token="<s>",
        unk_token="<unk>",
        pad_token="<pad>",
        mask_token="<mask>",
        additional_special_tokens=None,
        **kwargs
    ):
        # BUGFIX: the default used to be a mutable list literal, shared across
        # every instantiation (classic Python mutable-default pitfall). Use a
        # None sentinel and build a fresh list per call; observable defaults
        # are unchanged.
        if additional_special_tokens is None:
            additional_special_tokens = ["<s>NOTUSED", "</s>NOTUSED"]
        super().__init__(
            max_len=512,
            bos_token=bos_token,
            eos_token=eos_token,
            unk_token=unk_token,
            sep_token=sep_token,
            cls_token=cls_token,
            pad_token=pad_token,
            mask_token=mask_token,
            additional_special_tokens=additional_special_tokens,
            **kwargs,
        )
        self.sp_model = spm.SentencePieceProcessor()
        self.sp_model.Load(str(vocab_file))
        self.vocab_file = vocab_file
        # HACK: These tokens were added by fairseq but don't seem to be actually used when duplicated in the actual
        # sentencepiece vocabulary (this is the case for <s> and </s>
        self.fairseq_tokens_to_ids = {"<s>NOTUSED": 0, "<pad>": 1, "</s>NOTUSED": 2, "<unk>": 3}
        self.fairseq_offset = len(self.fairseq_tokens_to_ids)
        self.fairseq_tokens_to_ids["<mask>"] = len(self.sp_model) + len(self.fairseq_tokens_to_ids)
        self.fairseq_ids_to_tokens = {v: k for k, v in self.fairseq_tokens_to_ids.items()}

    def build_inputs_with_special_tokens(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks
        by concatenating and adding special tokens.
        A CamemBERT sequence has the following format:

        - single sequence: ``<s> X </s>``
        - pair of sequences: ``<s> A </s></s> B </s>``

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs to which the special tokens will be added
            token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: list of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
        """
        if token_ids_1 is None:
            return [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
        cls = [self.cls_token_id]
        sep = [self.sep_token_id]
        return cls + token_ids_0 + sep + sep + token_ids_1 + sep

    def get_special_tokens_mask(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
    ) -> List[int]:
        """
        Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding
        special tokens using the tokenizer ``prepare_for_model`` or ``encode_plus`` methods.

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of ids.
            token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`):
                Optional second list of IDs for sequence pairs.
            already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Set to True if the token list is already formatted with special tokens for the model

        Returns:
            :obj:`List[int]`: A list of integers in the range [0, 1]: 0 for a special token, 1 for a sequence token.
        """
        if already_has_special_tokens:
            if token_ids_1 is not None:
                raise ValueError(
                    "You should not supply a second sequence if the provided sequence of "
                    "ids is already formated with special tokens for the model."
                )
            return list(map(lambda x: 1 if x in [self.sep_token_id, self.cls_token_id] else 0, token_ids_0))
        if token_ids_1 is None:
            return [1] + ([0] * len(token_ids_0)) + [1]
        return [1] + ([0] * len(token_ids_0)) + [1, 1] + ([0] * len(token_ids_1)) + [1]

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Creates a mask from the two sequences passed to be used in a sequence-pair classification task.
        A CamemBERT sequence pair mask has the following format:

        ::

            0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
            | first sequence    | | second sequence |

        if token_ids_1 is None, only returns the first portion of the mask (0s).

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of ids.
            token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: List of `token type IDs <../glossary.html#token-type-ids>`_ according to the given
            sequence(s).
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]
        if token_ids_1 is None:
            return len(cls + token_ids_0 + sep) * [0]
        return len(cls + token_ids_0 + sep + sep) * [0] + len(token_ids_1 + sep) * [1]

    @property
    def vocab_size(self):
        # Total size includes the fairseq-specific special tokens.
        return len(self.fairseq_tokens_to_ids) + len(self.sp_model)

    def _tokenize(self, text):
        return self.sp_model.EncodeAsPieces(text)

    def _convert_token_to_id(self, token):
        """ Converts a token (str) in an id using the vocab. """
        if token in self.fairseq_tokens_to_ids:
            return self.fairseq_tokens_to_ids[token]
        elif self.sp_model.PieceToId(token) == 0:
            # Convert sentence piece unk token to fairseq unk token index
            return self.unk_token_id
        return self.fairseq_offset + self.sp_model.PieceToId(token)

    def _convert_id_to_token(self, index):
        """Converts an index (integer) in a token (str) using the vocab."""
        if index in self.fairseq_ids_to_tokens:
            return self.fairseq_ids_to_tokens[index]
        return self.sp_model.IdToPiece(index - self.fairseq_offset)

    def __getstate__(self):
        # The SentencePieceProcessor is a C++ object and cannot be pickled;
        # drop it here and reload it in __setstate__.
        state = self.__dict__.copy()
        state["sp_model"] = None
        return state

    def __setstate__(self, d):
        self.__dict__ = d
        try:
            import sentencepiece as spm
        except ImportError:
            # BUGFIX: the message previously named AlbertTokenizer (copy-paste)
            # and ran the URL straight into "pip install" without a separator.
            logger.warning(
                "You need to install SentencePiece to use CamembertTokenizer: "
                "https://github.com/google/sentencepiece "
                "pip install sentencepiece"
            )
            raise
        self.sp_model = spm.SentencePieceProcessor()
        self.sp_model.Load(self.vocab_file)

    def convert_tokens_to_string(self, tokens):
        """Converts a sequence of tokens (strings for sub-words) in a single string."""
        out_string = "".join(tokens).replace(SPIECE_UNDERLINE, " ").strip()
        return out_string

    def save_vocabulary(self, save_directory):
        """
        Save the sentencepiece vocabulary (copy original file) and special tokens file to a directory.

        Args:
            save_directory (:obj:`str`):
                The directory in which to save the vocabulary.

        Returns:
            :obj:`Tuple(str)`: Paths to the files saved.
        """
        if not os.path.isdir(save_directory):
            logger.error("Vocabulary path ({}) should be a directory".format(save_directory))
            return
        out_vocab_file = os.path.join(save_directory, VOCAB_FILES_NAMES["vocab_file"])
        # Only copy when the destination differs from the source file.
        if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file):
            copyfile(self.vocab_file, out_vocab_file)
        return (out_vocab_file,)
| 41.965986
| 119
| 0.639731
|
4a106c870868fabf71427f1614a9691985104475
| 12,973
|
py
|
Python
|
gector/gec_model.py
|
nymwa/gector
|
9ce717570e81fabfc437d7ca45d27fd011c8fc5a
|
[
"Apache-2.0"
] | 1
|
2021-04-20T14:06:58.000Z
|
2021-04-20T14:06:58.000Z
|
gector/gec_model.py
|
nymwa/gector
|
9ce717570e81fabfc437d7ca45d27fd011c8fc5a
|
[
"Apache-2.0"
] | null | null | null |
gector/gec_model.py
|
nymwa/gector
|
9ce717570e81fabfc437d7ca45d27fd011c8fc5a
|
[
"Apache-2.0"
] | null | null | null |
"""Wrapper of AllenNLP model. Fixes errors based on model predictions"""
import logging
import os
import sys
from time import time
import torch
from allennlp.data.dataset import Batch
from allennlp.data.fields import TextField
from allennlp.data.instance import Instance
from allennlp.data.tokenizers import Token
from allennlp.data.vocabulary import Vocabulary
from allennlp.modules.text_field_embedders import BasicTextFieldEmbedder
from allennlp.nn import util
from gector.bert_token_embedder import PretrainedBertEmbedder
from gector.seq2labels_model import Seq2Labels
from gector.wordpiece_indexer import PretrainedBertIndexer
from utils.helpers import PAD, UNK, get_target_sent_by_edits, START_TOKEN
logging.getLogger("werkzeug").setLevel(logging.ERROR)
logger = logging.getLogger(__file__)
def get_weights_name(transformer_name, lowercase):
    """Map a transformer family name plus casing flag to a pretrained-weights id.

    Prints a warning when the requested casing does not match the casing the
    pretrained model was trained with. Returns None for unknown family names
    (matching the original fall-through behaviour).
    """
    if transformer_name == 'bert':
        return 'bert-base-uncased' if lowercase else 'bert-base-cased'
    if transformer_name == 'distilbert':
        if not lowercase:
            print('Warning! This model was trained only on uncased sentences.')
        return 'distilbert-base-uncased'
    if transformer_name == 'albert':
        if not lowercase:
            print('Warning! This model was trained only on uncased sentences.')
        return 'albert-base-v1'
    # Every remaining family is cased-only.
    if lowercase:
        print('Warning! This model was trained only on cased sentences.')
    cased_weights = {
        'roberta': 'roberta-base',
        'gpt2': 'gpt2',
        'transformerxl': 'transfo-xl-wt103',
        'xlnet': 'xlnet-base-cased',
    }
    return cased_weights.get(transformer_name)
class GecBERTModel(object):
    """Wrapper around one or more AllenNLP Seq2Labels models that iteratively
    corrects batches of tokenized sentences.

    Several model checkpoints may be supplied; their per-label probability
    distributions are ensembled by a weighted average (see ``_convert``).
    ``handle_batch`` is the main entry point.
    """

    def __init__(self, vocab_path=None, model_paths=None,
                 weigths=None,
                 max_len=50,
                 min_len=3,
                 lowercase_tokens=False,
                 log=False,
                 iterations=3,
                 model_name='roberta',
                 special_tokens_fix=1,
                 is_ensemble=True,
                 min_error_probability=0.0,
                 confidence=0,
                 resolve_cycles=False,
                 ):
        # NOTE: 'weigths' (sic) is part of the public signature and therefore
        # kept as-is. Defaults to equal weight per model.
        self.model_weights = list(map(float, weigths)) if weigths else [1] * len(model_paths)
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        self.max_len = max_len
        self.min_len = min_len
        self.lowercase_tokens = lowercase_tokens
        self.min_error_probability = min_error_probability
        self.vocab = Vocabulary.from_files(vocab_path)
        self.log = log
        self.iterations = iterations
        self.confidence = confidence
        self.resolve_cycles = resolve_cycles
        # set training parameters and operations
        self.indexers = []
        self.models = []
        for model_path in model_paths:
            if is_ensemble:
                # In ensemble mode the transformer family and the
                # special-tokens-fix flag are parsed from each file name.
                model_name, special_tokens_fix = self._get_model_data(model_path)
            weights_name = get_weights_name(model_name, lowercase_tokens)
            self.indexers.append(self._get_indexer(weights_name, special_tokens_fix))
            model = Seq2Labels(vocab=self.vocab,
                               text_field_embedder=self._get_embbeder(weights_name, special_tokens_fix),
                               confidence=self.confidence
                               ).to(self.device)
            if torch.cuda.is_available():
                model.load_state_dict(torch.load(model_path))
            else:
                model.load_state_dict(torch.load(model_path,
                                                 map_location=torch.device('cpu')))
            model.eval()
            self.models.append(model)

    @staticmethod
    def _get_model_data(model_path):
        """Parse '<transformer>_<special_tokens_fix>_...' from a checkpoint file name."""
        model_name = model_path.split('/')[-1]
        tr_model, stf = model_name.split('_')[:2]
        return tr_model, int(stf)

    def _restore_model(self, input_path):
        # NOTE(review): this legacy loader references self.model (singular),
        # which __init__ never assigns (it builds self.models); calling this
        # method would raise AttributeError — confirm before relying on it.
        if os.path.isdir(input_path):
            print("Model could not be restored from directory", file=sys.stderr)
            filenames = []
        else:
            filenames = [input_path]
        for model_path in filenames:
            try:
                if torch.cuda.is_available():
                    loaded_model = torch.load(model_path)
                else:
                    loaded_model = torch.load(model_path,
                                              map_location=lambda storage,
                                                                  loc: storage)
            # NOTE(review): bare except swallows the real error and then falls
            # through with loaded_model possibly unbound.
            except:
                print(f"{model_path} is not valid model", file=sys.stderr)
            own_state = self.model.state_dict()
            for name, weights in loaded_model.items():
                if name not in own_state:
                    continue
                try:
                    if len(filenames) == 1:
                        own_state[name].copy_(weights)
                    else:
                        # Multiple checkpoints: accumulate weights in place.
                        own_state[name] += weights
                except RuntimeError:
                    continue
        print("Model is restored", file=sys.stderr)

    def predict(self, batches):
        """Run each model on its own batch; return ensembled (probs, idxs, error_probs)."""
        t11 = time()
        predictions = []
        for batch, model in zip(batches, self.models):
            # Device index 0 = first GPU, -1 = CPU (AllenNLP convention).
            batch = util.move_to_device(batch.as_tensor_dict(), 0 if torch.cuda.is_available() else -1)
            with torch.no_grad():
                prediction = model.forward(**batch)
            predictions.append(prediction)
        preds, idx, error_probs = self._convert(predictions)
        t55 = time()
        if self.log:
            print(f"Inference time {t55 - t11}")
        return preds, idx, error_probs

    def get_token_action(self, token, index, prob, sugg_token):
        """Get list of suggested actions for token.

        Returns (start, end, replacement, prob) in 0-based coordinates (the
        $START offset is subtracted), or None when no edit should be applied.
        """
        # cases when we don't need to do anything
        if prob < self.min_error_probability or sugg_token in [UNK, PAD, '$KEEP']:
            return None
        # NOTE(review): a label matching none of the prefixes below would
        # leave start_pos/end_pos unbound (UnboundLocalError); assumed
        # unreachable given the label vocabulary — confirm.
        if sugg_token.startswith('$REPLACE_') or sugg_token.startswith('$TRANSFORM_') or sugg_token == '$DELETE':
            start_pos = index
            end_pos = index + 1
        elif sugg_token.startswith("$APPEND_") or sugg_token.startswith("$MERGE_"):
            start_pos = index + 1
            end_pos = index + 1
        if sugg_token == "$DELETE":
            sugg_token_clear = ""
        elif sugg_token.startswith('$TRANSFORM_') or sugg_token.startswith("$MERGE_"):
            sugg_token_clear = sugg_token[:]
        else:
            # Strip the '$REPLACE_'/'$APPEND_' prefix to get the literal token.
            sugg_token_clear = sugg_token[sugg_token.index('_') + 1:]
        return start_pos - 1, end_pos - 1, sugg_token_clear, prob

    def _get_embbeder(self, weigths_name, special_tokens_fix):
        """Build the BERT-based text field embedder for one model."""
        embedders = {'bert': PretrainedBertEmbedder(
            pretrained_model=weigths_name,
            requires_grad=False,
            top_layer_only=True,
            special_tokens_fix=special_tokens_fix)
        }
        text_field_embedder = BasicTextFieldEmbedder(
            token_embedders=embedders,
            embedder_to_indexer_map={"bert": ["bert", "bert-offsets"]},
            allow_unmatched_keys=True)
        return text_field_embedder

    def _get_indexer(self, weights_name, special_tokens_fix):
        """Build the wordpiece token indexer matching the embedder above."""
        bert_token_indexer = PretrainedBertIndexer(
            pretrained_model=weights_name,
            do_lowercase=self.lowercase_tokens,
            max_pieces_per_token=5,
            use_starting_offsets=True,
            truncate_long_sequences=True,
            special_tokens_fix=special_tokens_fix,
            is_test=True
        )
        return {'bert': bert_token_indexer}

    def preprocess(self, token_batch):
        """Turn a batch of token lists into one indexed AllenNLP Batch per model.

        Sequences are truncated to self.max_len and prefixed with '$START'.
        Returns [] when the batch contains no non-empty sequences.
        """
        seq_lens = [len(sequence) for sequence in token_batch if sequence]
        if not seq_lens:
            return []
        max_len = min(max(seq_lens), self.max_len)
        batches = []
        for indexer in self.indexers:
            batch = []
            for sequence in token_batch:
                tokens = sequence[:max_len]
                tokens = [Token(token) for token in ['$START'] + tokens]
                batch.append(Instance({'tokens': TextField(tokens, indexer)}))
            batch = Batch(batch)
            batch.index_instances(self.vocab)
            batches.append(batch)
        return batches

    def _convert(self, data):
        """Weighted-average the per-model outputs; return (probs, label idxs, error probs)."""
        all_class_probs = torch.zeros_like(data[0]['class_probabilities_labels'])
        error_probs = torch.zeros_like(data[0]['max_error_probability'])
        for output, weight in zip(data, self.model_weights):
            all_class_probs += weight * output['class_probabilities_labels'] / sum(self.model_weights)
            error_probs += weight * output['max_error_probability'] / sum(self.model_weights)
        # Argmax over the label dimension: values are confidences, indices labels.
        max_vals = torch.max(all_class_probs, dim=-1)
        probs = max_vals[0].tolist()
        idx = max_vals[1].tolist()
        return probs, idx, error_probs.tolist()

    def update_final_batch(self, final_batch, pred_ids, pred_batch,
                           prev_preds_dict):
        """Merge this iteration's predictions into final_batch.

        A sentence keeps iterating only while its new prediction is unseen;
        a previously-seen prediction is accepted but stops further iterations
        (cycle protection).
        """
        new_pred_ids = []
        total_updated = 0
        for i, orig_id in enumerate(pred_ids):
            orig = final_batch[orig_id]
            pred = pred_batch[i]
            prev_preds = prev_preds_dict[orig_id]
            if orig != pred and pred not in prev_preds:
                final_batch[orig_id] = pred
                new_pred_ids.append(orig_id)
                prev_preds_dict[orig_id].append(pred)
                total_updated += 1
            elif orig != pred and pred in prev_preds:
                # update final batch, but stop iterations
                final_batch[orig_id] = pred
                total_updated += 1
            else:
                continue
        return final_batch, new_pred_ids, total_updated

    def postprocess_batch(self, batch, all_probabilities, all_idxs,
                          error_probs,
                          max_len=50):
        """Apply predicted edit labels to each sentence and return new sentences."""
        all_results = []
        noop_index = self.vocab.get_token_index("$KEEP", "labels")
        for tokens, probabilities, idxs, error_prob in zip(batch,
                                                           all_probabilities,
                                                           all_idxs,
                                                           error_probs):
            length = min(len(tokens), max_len)
            edits = []
            # skip whole sentences if there no errors
            if max(idxs) == 0:
                all_results.append(tokens)
                continue
            # skip whole sentence if probability of correctness is not high
            if error_prob < self.min_error_probability:
                all_results.append(tokens)
                continue
            for i in range(length + 1):
                # because of START token
                if i == 0:
                    token = START_TOKEN
                else:
                    token = tokens[i - 1]
                # skip if there is no error
                if idxs[i] == noop_index:
                    continue
                sugg_token = self.vocab.get_token_from_index(idxs[i],
                                                             namespace='labels')
                action = self.get_token_action(token, i, probabilities[i],
                                               sugg_token)
                if not action:
                    continue
                edits.append(action)
            all_results.append(get_target_sent_by_edits(tokens, edits))
        return all_results

    def handle_batch(self, full_batch):
        """
        Handle batch of requests.
        """
        final_batch = full_batch[:]
        batch_size = len(full_batch)
        # Track every prediction already produced per sentence (cycle guard).
        prev_preds_dict = {i: [final_batch[i]] for i in range(len(final_batch))}
        # Sentences shorter than min_len are never corrected.
        short_ids = [i for i in range(len(full_batch))
                     if len(full_batch[i]) < self.min_len]
        pred_ids = [i for i in range(len(full_batch)) if i not in short_ids]
        total_updates = 0
        for n_iter in range(self.iterations):
            orig_batch = [final_batch[i] for i in pred_ids]
            sequences = self.preprocess(orig_batch)
            if not sequences:
                break
            probabilities, idxs, error_probs = self.predict(sequences)
            pred_batch = self.postprocess_batch(orig_batch, probabilities,
                                                idxs, error_probs)
            if self.log:
                print(f"Iteration {n_iter + 1}. Predicted {round(100*len(pred_ids)/batch_size, 1)}% of sentences.")
            final_batch, pred_ids, cnt = \
                self.update_final_batch(final_batch, pred_ids, pred_batch,
                                        prev_preds_dict)
            total_updates += cnt
            if not pred_ids:
                break
        return final_batch, total_updates
| 40.164087
| 115
| 0.574501
|
4a106e56eb8acc9879d5e2e84ed289dd909ff74d
| 6,124
|
py
|
Python
|
app/scrapers/codechef.py
|
Starscream-11813/CpZen
|
211f7322dbd02d484f1782e941cb202dd0c20443
|
[
"MIT"
] | 5
|
2021-10-19T16:01:27.000Z
|
2022-02-14T01:05:39.000Z
|
app/scrapers/codechef.py
|
Starscream-11813/CpZen
|
211f7322dbd02d484f1782e941cb202dd0c20443
|
[
"MIT"
] | null | null | null |
app/scrapers/codechef.py
|
Starscream-11813/CpZen
|
211f7322dbd02d484f1782e941cb202dd0c20443
|
[
"MIT"
] | null | null | null |
import requests
from bs4 import BeautifulSoup
import urllib3
from datetime import datetime, timedelta
class CodeChef:
contests = {}
url = ""
def __init__(self):
#self.url = 'https://www.codechef.com/contests'
#self.url = 'https://www.stopstalk.com/contests'
self.url = 'https://clist.by:443/api/v1/contest/?end__gt='
self.contests = {
"Code":[],
"Name":[],
"Start":[],
"End":[]
}
self.contestData=[]
def __scrape(self):
stTime = datetime.today() - timedelta(hours=6, minutes=0)
enTime = datetime.today() - timedelta(hours=6, minutes=0) + timedelta(hours=480, minutes=0)
begin = str(stTime.year) + "-" + str(stTime.month) + "-" + str(stTime.day) + "T" + str(stTime.hour) + "%3A" + str(stTime.minute) + "%3A" + str(stTime.second)
endd = str(enTime.year) + "-" + str(enTime.month) + "-" + str(enTime.day) + "T" + str(enTime.hour) + "%3A" + str(enTime.minute) + "%3A" + str(enTime.second)
# print(begin)
# print(endd)
url3 = "https://clist.by:443/api/v1/contest/?end__gt=" + begin + "&" + "end__lt=" + endd
# print(url3)
# print(url)
res = requests.get(url3,headers={'Authorization': 'ApiKey Ahb_arif:e746f33d1dca698bf9e578774d86dafb916fe288'})
# print(res.text)
jsonData = res.json()
objects = jsonData["objects"]
#contestData = []
for x in objects:
siteName = x["resource"]["name"]
contestName = x["event"]
startTime = str(x["start"])
startTime.replace("T", " , ")
endTime = str(x["end"])
endTime.replace("T", " , ")
sortKey = str(x["end"])
sortKey = sortKey.replace("T", " ")
link = x["href"]
duration = int(float(x["duration"]) * 0.000277778)
if duration >=24:
d = int(duration/24)
h = duration % 24
duration = str(d) + " days "
if h >0:
duration+= str(h) + " hours "
else:
duration = str(duration) + " hours"
if siteName == "kaggle.com" or siteName == "toph.co" or siteName == "codingcompetitions.withgoogle.com" or siteName == "codeforces.com" or siteName == "csacademy.com" or siteName == "hackerrank.com" or siteName=="codechef.com" or siteName=="spoj.com" or siteName=="hackerearth.com" or siteName=="lightoj.com" or siteName=="atcoder.jp" or siteName=="projecteuler.net" or siteName=="e-olymp.com":
temp = {}
temp["sitename"] = siteName
temp["contest_name"] = contestName
temp["startTime"] = startTime.replace("T",", ") +" (GMT)"
temp["endTime"] = endTime.replace("T",", ") +" (GMT)"
temp["sortKey"] = sortKey
temp["link"] = link
temp["duration"] = duration
# print(temp)
self.contestData.append(temp)
self.contestData = sorted(self.contestData, key=lambda k: datetime.strptime(str(k["sortKey"]), "%Y-%m-%d %H:%M:%S"),
reverse=False)
#print(self.contestData)
# page = requests.get(self.url)
# soup = BeautifulSoup(page.content,'html.parser')#page.text(?)
#print(soup)
# quotes1=[]
# tables = soup.find_all('table',{'class': 'dataTable'})
#imgs = soup.find_all('img',{'title': 'CodeChef'})
#print(tables)
# contests1 = tables[1].findAll('td')
#rows = tables[2].findAll('tr')
#print(tables[1].find("tbody").find_all("tr"))
# activeContests = tables[0]
# upcomingContests = tables[1]
# ignoredContests = ['INOIPRAC', 'ZCOPRAC', 'IARCSJUD']
# requiredContests = []
# requiredContests.extend(activeContests.find("tbody").find_all("tr"))
# requiredContests.extend(upcomingContests.find("tbody").find_all("tr"))
# i=0
# quote1={}
# for row in contests1:
# if(i%4 == 1):
# quote1['name'] = row.text.strip().replace('#', "")
# if(i%4 == 2):
# quote1['start'] = row.text.strip().replace('#', "")
# if(i%4 == 3):
# quote1['end'] = row.text.strip().replace('#', "")
# if(i%4 == 3):
# quotes1.append(quote1)
# quote1 = {}
# i += 1
# print(str(quotes1))
# for i in range(1,len(rows)):
# td = rows[i].findAll('td')
# self.contests["Code"].append(td[0].text)
# self.contests["Name"].append(td[1].text.replace("\n",""))
# self.contests["Start"].append(td[2].text)
# self.contests["End"].append(td[3].text)
#return contest_list
#events = []
# for contest in requiredContests:
# tds = contest.find_all("td")
# currentContest = {
# 'contestCode': tds[0].text,
# 'contestLink': 'https://www.codechef.com{0}'.format(tds[1].next.next.attrs['href'].split('?')[0]),
# 'contestTitle': tds[1].text.split('\n')[1],
# 'contestStartDate': tds[2].attrs['data-starttime'],
# 'contestEndDate': tds[3].attrs['data-endtime']
# }
# if currentContest['contestCode'] in ignoredContests:
# continue
# if int(currentContest['contestEndDate'][:4]) - int(currentContest['contestStartDate'][:4]) > 0:
# continue
# self.contests.append(currentContest)
# print(self.contests)
    def getFutureContests(self):
        """Scrape upcoming contests and return them as a list of dicts.

        Each dict carries: sitename, contest_name, startTime, endTime,
        sortKey, link and duration (populated by the private __scrape,
        which also sorts the list by end time, earliest first).
        """
        self.__scrape()
        return self.contestData
if __name__ == "__main__":
    # Manual smoke test: hit the clist.by API and print the contest list.
    cc = CodeChef()
    print(cc.getFutureContests())
| 40.026144
| 407
| 0.502776
|
4a106e997f7017cac41c0a3a964a73a155e48baa
| 752
|
py
|
Python
|
resources/carservice.py
|
Ayobami-00/Car-Service-Api
|
2d17798b8e3d0cbd2764ef101e3c5225b1b2e825
|
[
"MIT"
] | null | null | null |
resources/carservice.py
|
Ayobami-00/Car-Service-Api
|
2d17798b8e3d0cbd2764ef101e3c5225b1b2e825
|
[
"MIT"
] | null | null | null |
resources/carservice.py
|
Ayobami-00/Car-Service-Api
|
2d17798b8e3d0cbd2764ef101e3c5225b1b2e825
|
[
"MIT"
] | null | null | null |
from flask import Response, request
from database.models import CarServiceDataModel, User
from flask_jwt_extended import jwt_required, get_jwt_identity
from flask_restful import Resource
from resources.errors import InternalServerError,NoAuthorizationError
import random
class CarService(Resource):
    """REST resource that predicts a car's service price.

    POST expects a JSON body matching CarServiceDataModel and returns a
    JSON object with a single ``price`` field.
    """

    @jwt_required
    def post(self):
        """Handle an authenticated price-prediction request.

        Returns:
            tuple: ({'price': int}, 200) on success.

        Raises:
            NoAuthorizationError: when the JWT is missing or invalid.
            InternalServerError: for any other failure.
        """
        try:
            user_id = get_jwt_identity()
            body = request.get_json()
            # Validate the payload shape against the data model
            # (the document is not persisted here).
            car_service_data_model = CarServiceDataModel(**body)
            # MODEL PREDICTION placeholder: a random price until the real
            # model is wired in.
            return {'price': int(random.randrange(500000, 50000000))}, 200
        except NoAuthorizationError:
            # Bare raise keeps the original exception instance and traceback
            # (the old `raise NoAuthorizationError` re-raised the bare class).
            raise
        except Exception as e:
            # Chain the cause so the root failure is not lost in logs.
            raise InternalServerError from e
| 31.333333
| 74
| 0.704787
|
4a106eb02042ed9143826646faeea92b520d8a98
| 13,890
|
py
|
Python
|
BERT/Multi-column/train.py
|
amurto/nlp-scripts
|
f1158221a87838589ccff4eaa4db5349e6ec1c8d
|
[
"MIT"
] | 1
|
2020-06-15T05:46:42.000Z
|
2020-06-15T05:46:42.000Z
|
BERT/Multi-column/train.py
|
amurto/nlp-scripts
|
f1158221a87838589ccff4eaa4db5349e6ec1c8d
|
[
"MIT"
] | null | null | null |
BERT/Multi-column/train.py
|
amurto/nlp-scripts
|
f1158221a87838589ccff4eaa4db5349e6ec1c8d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""BERT_train.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1UbYB9XAAcumx3wz2FFirnvt3hM_6dV5Y
"""
# install mxnet-cu100 because google colab supports version 100, if gpu not available run "!pip install mxnet" instead
!pip install mxnet-cu100
!pip install gluonnlp
# unzip sentence_embedding from gluonnlp which contains our bert model
!unzip sentence_embedding.zip
# These training examples give error, because they contain inconsistencies, so remove them from dataset
to_ignore = to_ignore = [232,
275,
957,
1129,
1322,
1722,
1865,
1891,
1900,
2398,
2447,
3180,
3263,
3270,
3465,
3520,
3656,
4079,
4419,
4439,
4498,
4754,
5074,
5172,
5236,
5672,
6140,
6258,
6777,
6784,
6830,
7046,
7380,
7493,
7516,
7596,
7621,
7883,
8135,
8762,
8867,
9296,
9386,
9581,
9757,
9766,
10121,
10319,
10463,
10895,
10917,
10961,
11296,
11524,
11745,
12149,
12560,
12705,
12788,
13160,
13311,
13784,
13943,
14059,
14540,
14651,
14720,
15015,
15551,
15618,
15837,
15866,
16034,
16179,
16260,
16403,
16500,
16532,
16668,
16884,
17050,
17257,
17423,
17795,
17802,
17817,
18311,
18409,
18461,
18978,
19087,
19494,
19602,
20258,
20557,
21672,
21713,
21832,
22045,
22184,
22259,
22299,
22426,
22568,
23028,
23198,
23944,
24212,
24648,
25447,
25663,
25740,
25790,
25874,
25951,
26231,
26839,
27006,
27364,
27935,
28245,
28248,
28676,
29120,
29260,
29408,
30026,
30105,
30227,
30433,
30695,
30702,
30864,
30933,
31223,
31299,
31831,
31854,
32013,
32610,
33187,
33282,
33738,
33878,
34406,
34464,
34496,
35239,
35578,
35726,
35783,
35917,
36021,
36123,
36400,
36577,
36809,
37061,
37125,
37186,
37563,
37578,
37608,
37823,
38022,
38197,
38492,
38684,
38725,
38798,
39288,
39314,
39681,
39925,
40029,
40135,
40383,
40499,
40828,
40971,
41389,
41510,
41768,
41991,
42166,
42183,
42779,
43736,
44485,
44593,
44711,
45434,
45562,
45645,
45980,
46099,
46308,
46731,
46801,
46829,
46892,
47491,
47582,
48036,
48177,
48310,
48355,
48637,
49375]
import csv
import pandas as pd
train_bodies = pd.read_csv('train_bodies.csv')
train_topics = pd.read_csv('train_stances.csv')
train_labels = train_topics['Stance'].values
train_dataset = pd.merge(train_topics, train_bodies, how='left', on='Body ID')
train_dataset.drop(train_dataset.index[to_ignore], inplace=True)
train_dataset = train_dataset[['Headline', 'articleBody', 'Stance']]
ibm_dataset = pd.read_csv('ibm_dataset.csv')
train_dataset = pd.concat([train_dataset, ibm_dataset])
train_dataset.to_csv('train_bert.csv', index=False, header=False)
csv.writer(open('train_bert.tsv', 'w+'), delimiter='\t').writerows(csv.reader(open("train_bert.csv")))
# tsv format is required for gluonnlp
!mv train_bert.tsv sentence_embedding/
cd sentence_embedding/
import warnings
warnings.filterwarnings('ignore')
import io
import random
import numpy as np
import mxnet as mx
import gluonnlp as nlp
from bert import data, model
np.random.seed(100)
random.seed(100)
mx.random.seed(10000)
# change `ctx` to `mx.cpu()` if no GPU is available.
ctx = mx.gpu(0)
# Automatically downloads and loads bert uncased model
bert_base, vocabulary = nlp.model.get_model('bert_12_768_12',
dataset_name='book_corpus_wiki_en_uncased',
pretrained=True, ctx=ctx, use_pooler=True,
use_decoder=False, use_classifier=False)
print(bert_base)
# Attach a single classifier layer on top of language model
bert_classifier = model.classification.BERTClassifier(bert_base, num_classes=4, dropout=0.1)
# only need to initialize the classifier layer.
bert_classifier.classifier.initialize(init=mx.init.Normal(0.02), ctx=ctx)
bert_classifier.hybridize(static_alloc=True)
# softmax cross entropy loss for classification
loss_function = mx.gluon.loss.SoftmaxCELoss()
loss_function.hybridize(static_alloc=True)
metric = mx.metric.Accuracy()
tsv_file = io.open('train_bert.tsv', encoding='utf-8', newline='\r\n')
for i in range(5):
print(tsv_file.readline())
# Modify newline parameter to support news articles loading which contains windows type newlines
class modifyread(nlp.data.TSVDataset):
    """TSVDataset variant that reads files with Windows ('\\r\\n') newlines.

    Only _read is overridden; everything else (splitters, selectors,
    discard logic) comes from gluonnlp's TSVDataset.
    """
    def _read(self):
        # Accumulate samples from every file this dataset was built from.
        all_samples = []
        for filename in self._filenames:
            # newline='\r\n' keeps Windows line endings intact so the
            # sample splitter can split on them explicitly.
            with io.open(filename, 'r', encoding=self._encoding, newline='\r\n') as fin:
                content = fin.read()
            samples = (s for s in self._sample_splitter(content) if not self._should_discard())
            if self._field_separator:
                if not self._allow_missing:
                    samples = [self._field_selector(self._field_separator(s)) for s in samples]
                else:
                    # Tolerate rows with missing fields: skip them but
                    # count and report how many were dropped.
                    selected_samples = []
                    num_missing = 0
                    for s in samples:
                        try:
                            fields = self._field_separator(s)
                            selected_samples.append(self._field_selector(fields))
                        except IndexError:
                            num_missing += 1
                    if num_missing > 0:
                        warnings.warn('%d incomplete samples in %s'%(num_missing, filename))
                    samples = selected_samples
            all_samples += samples
        return all_samples
# TO skip the first line, in case of headers, change the value to 1 below
num_discard_samples = 0
# Split fields by tabs
field_separator = nlp.data.Splitter('\t')
data_train_raw = modifyread(filename='train_bert.tsv', sample_splitter=nlp.data.utils.Splitter('\r\n'),
field_separator=field_separator,
num_discard_samples=num_discard_samples,
field_indices=None)
sample_id = 231
# Headline
print(data_train_raw[sample_id][0])
# Articles
print(data_train_raw[sample_id][1])
# Stance
print(data_train_raw[sample_id][2])
################ Used to find inconsistent examples, no longer needed, just for reference #####################################
'''indices = []
for item in data_train_raw:
try:
indices.append(item[2])
except IndexError:
pass
to_ignore = []
for i,item in enumerate(indices):
if len(item) > 9:
to_ignore.append(i)
'''
# Use the vocabulary from pre-trained model for tokenization
bert_tokenizer = nlp.data.BERTTokenizer(vocabulary, lower=True)
# The maximum length of an input sequence
max_len = 200
# The labels for the four classes
all_labels = ["agree", "disagree", "discuss", "unrelated"]
# Transform the data as sentence pairs.
pair = True
transform = data.transform.BERTDatasetTransform(bert_tokenizer, max_len,
class_labels=all_labels,
has_label=True,
pad=True,
pair=pair)
data_train = data_train_raw.transform(transform)
print('vocabulary used for tokenization = \n%s'%vocabulary)
print('%s token id = %s'%(vocabulary.padding_token, vocabulary[vocabulary.padding_token]))
print('%s token id = %s'%(vocabulary.cls_token, vocabulary[vocabulary.cls_token]))
print('%s token id = %s'%(vocabulary.sep_token, vocabulary[vocabulary.sep_token]))
print('token ids = \n%s'%data_train[sample_id][0])
print('valid length = \n%s'%data_train[sample_id][1])
print('segment ids = \n%s'%data_train[sample_id][2])
print('label = \n%s'%data_train[sample_id][3])
# The hyperparameters
batch_size = 20
lr = 5e-6
# The FixedBucketSampler and the DataLoader for making the mini-batches
train_sampler = nlp.data.FixedBucketSampler(lengths=[int(item[1]) for item in data_train],
batch_size=batch_size,
shuffle=True)
bert_dataloader = mx.gluon.data.DataLoader(data_train, batch_sampler=train_sampler)
trainer = mx.gluon.Trainer(bert_classifier.collect_params(), 'adam',
{'learning_rate': lr, 'epsilon': 1e-9})
# Collect all differentiable parameters
# `grad_req == 'null'` indicates no gradients are calculated (e.g. constant parameters)
# The gradients for these params are clipped later
params = [p for p in bert_classifier.collect_params().values() if p.grad_req != 'null']
grad_clip = 1
# Training the model with only two epochs to avoid overfitting
log_interval = 4
num_epochs = 2
for epoch_id in range(num_epochs):
metric.reset()
step_loss = 0
for batch_id, (token_ids, valid_length, segment_ids, label) in enumerate(bert_dataloader):
with mx.autograd.record():
# Load the data to the GPU
token_ids = token_ids.as_in_context(ctx)
valid_length = valid_length.as_in_context(ctx)
segment_ids = segment_ids.as_in_context(ctx)
label = label.as_in_context(ctx)
# Forward computation
out = bert_classifier(token_ids, segment_ids, valid_length.astype('float32'))
ls = loss_function(out, label).mean()
# And backwards computation
ls.backward()
# Gradient clipping
trainer.allreduce_grads()
nlp.utils.clip_grad_global_norm(params, 1)
trainer.update(1)
step_loss += ls.asscalar()
metric.update([label], [out])
# Printing vital information
if (batch_id + 1) % (log_interval) == 0:
print('[Epoch {} Batch {}/{}] loss={:.4f}, lr={:.7f}, acc={:.3f}'
.format(epoch_id, batch_id + 1, len(bert_dataloader),
step_loss / log_interval,
trainer.learning_rate, metric.get()[1]))
step_loss = 0
cd ..
# similar to training examples, some test examples are not properly loaded so remove them
to_remove = [776,
1898,
5433,
10487,
10924,
11796,
13534,
14205,
17744,
18082,
19655,
20866,
21043,
21395]
##################################################### Run on test data and generate predictions ################################################################
test_bodies = pd.read_csv('test_bodies.csv')
test_topics = pd.read_csv('test_stances.csv')
test_dataset = pd.merge(test_topics, test_bodies, how='left', on='Body ID')
test_dataset.drop(test_dataset.index[to_remove], inplace=True)
test_labels = test_dataset['Stance'].values
# testing for only 1000 examples currently
test_dataset = test_dataset[['Headline', 'articleBody']][:5000]
test_labels = test_labels[:5000]
test_dataset.to_csv('test_bert.csv', index=False, header=False)
csv.writer(open('test_bert.tsv', 'w+'), delimiter='\t').writerows(csv.reader(open("test_bert.csv")))
!mv test_bert.tsv sentence_embedding/
cd sentence_embedding
# TO skip the first line, in case of headers, change the value to 1 below
num_discard_samples = 0
# Split fields by tabs
field_separator = nlp.data.Splitter('\t')
data_test_raw = modifyread(filename='test_bert.tsv', sample_splitter=nlp.data.utils.Splitter('\r\n'),
field_separator=field_separator,
num_discard_samples=num_discard_samples,
field_indices=None)
sample_id = 231
# Headline
print(data_test_raw[sample_id][0])
# Articles
print(data_test_raw[sample_id][1])
################### Used to find inconsistent examples, no longer needed, just for reference ###############################
'''
to_remove = []
for i,item in enumerate(data_test_raw):
if len(item) != 2:
to_remove.append(i)
'''
test_transform = data.transform.BERTDatasetTransform(bert_tokenizer, max_len,
class_labels=False,
has_label=False,
pad=True,
pair=pair)
data_test = data_test_raw.transform(test_transform)
print('vocabulary used for tokenization = \n%s'%vocabulary)
print('%s token id = %s'%(vocabulary.padding_token, vocabulary[vocabulary.padding_token]))
print('%s token id = %s'%(vocabulary.cls_token, vocabulary[vocabulary.cls_token]))
print('%s token id = %s'%(vocabulary.sep_token, vocabulary[vocabulary.sep_token]))
print('token ids = \n%s'%data_test[sample_id][0])
print('valid length = \n%s'%data_test[sample_id][1])
print('segment ids = \n%s'%data_test[sample_id][2])
test_dataloader = mx.gluon.data.DataLoader(data_test,batch_size=batch_size, num_workers=4)
# This gives weird assertion error at the last but the result is not affected, so we wrap this in try and except
try:
predictions = []
for batch_id, (token_ids, valid_length, segment_ids) in enumerate(test_dataloader):
# Load the data to the GPU
token_ids = token_ids.as_in_context(ctx)
valid_length = valid_length.as_in_context(ctx)
segment_ids = segment_ids.as_in_context(ctx)
# Forward computation
out = bert_classifier(token_ids, segment_ids, valid_length.astype('float32'))
indices = mx.nd.topk(out, k=1, ret_typ='indices', dtype='int32').asnumpy()
for index in indices:
predictions.append(index)
except:
pass
from sklearn import metrics
from sklearn.preprocessing import LabelEncoder
encoder = LabelEncoder()
encoder.fit(test_labels)
encoded_test_labels = encoder.transform(test_labels)
acc = metrics.accuracy_score(encoded_test_labels, predictions)
print(f"Accuracy obtained on test set: {acc}")
print(metrics.confusion_matrix(encoded_test_labels, predictions))
bert_classifier.save_parameters('parameters_bert')
| 25.769944
| 160
| 0.649316
|
4a106f620e0595a6f0bf5460f00bc550ed8006d9
| 404
|
py
|
Python
|
day8/a.py
|
bujiie/adventofcode2021
|
0e42c01f536bdfe66fd679cb05233ae2293a72c0
|
[
"MIT"
] | 1
|
2021-12-12T03:44:35.000Z
|
2021-12-12T03:44:35.000Z
|
day8/a.py
|
bujiie/adventofcode2021
|
0e42c01f536bdfe66fd679cb05233ae2293a72c0
|
[
"MIT"
] | null | null | null |
day8/a.py
|
bujiie/adventofcode2021
|
0e42c01f536bdfe66fd679cb05233ae2293a72c0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Advent of Code 2021, day 8 part A: count the easy digits (1, 4, 7, 8)."""
from sys import argv

entries = []
with open(argv[1]) as handle:
    for raw in handle:
        patterns, outputs = raw.strip().split(' | ')
        entries.append((patterns.split(' '), outputs.split(' ')))

# Digits 1, 7, 4 and 8 light a unique number of segments (2, 3, 4, 7).
unique_lengths = {2, 3, 4, 7}
total = sum(
    1
    for _, outputs in entries
    for digit in outputs
    if len(digit) in unique_lengths
)
print(total)
| 13.931034
| 33
| 0.440594
|
4a10701e6b25efbdbbb0cd02ca18b669c934ff42
| 1,163
|
py
|
Python
|
187_repeated_dna_sequences.py
|
gengwg/leetcode
|
0af5256ec98149ef5863f3bba78ed1e749650f6e
|
[
"Apache-2.0"
] | 2
|
2018-04-24T19:17:40.000Z
|
2018-04-24T19:33:52.000Z
|
187_repeated_dna_sequences.py
|
gengwg/leetcode
|
0af5256ec98149ef5863f3bba78ed1e749650f6e
|
[
"Apache-2.0"
] | null | null | null |
187_repeated_dna_sequences.py
|
gengwg/leetcode
|
0af5256ec98149ef5863f3bba78ed1e749650f6e
|
[
"Apache-2.0"
] | 3
|
2020-06-17T05:48:52.000Z
|
2021-01-02T06:08:25.000Z
|
# 187. Repeated DNA Sequences
#
# All DNA is composed of a series of nucleotides abbreviated as A, C, G, and T, for example: "ACGAATTCCG".
# When studying DNA, it is sometimes useful to identify repeated sequences within the DNA.
#
# Write a function to find all the 10-letter-long sequences (substrings)
# that occur more than once in a DNA molecule.
#
# For example,
#
# Given s = "AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT",
#
# Return:
# ["AAAAACCCCC", "CCCCCAAAAA"].
#
# this Problem can be solve with O(n) time and O(n) space with hashing
# http://blog.csdn.net/hyperbolechi/article/details/44302991
class Solution(object):
    def findRepeatedDnaSequences(self, s):
        """Return all 10-letter substrings of *s* that occur more than once.

        Results appear in first-occurrence order (dict insertion order).

        :type s: str
        :rtype: List[str]
        """
        # Count every 10-character window. A plain dict with .get avoids
        # the membership-test-then-index double lookup, and the name no
        # longer shadows the builtin `dict`.
        counts = {}
        for i in range(len(s) - 9):
            window = s[i:i + 10]
            counts[window] = counts.get(window, 0) + 1
        # Keep only the sequences seen at least twice.
        return [seq for seq, n in counts.items() if n > 1]
if __name__ == '__main__':
    # print as a function keeps the script runnable on both Python 2 and 3
    # (the original Python-2 print statement is a SyntaxError under 3.x).
    print(Solution().findRepeatedDnaSequences("AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT"))
| 28.365854
| 106
| 0.60877
|
4a10706b52e2e5fd896378c64e8aef5b1d2c9d00
| 7,374
|
py
|
Python
|
slutils/Pano.py
|
0b3d/Image-Map-Embeddings
|
a9fc65ac92094bcfcd0f19a3604f0b9d8bd3174f
|
[
"MIT"
] | 2
|
2022-02-11T06:05:35.000Z
|
2022-03-14T02:10:31.000Z
|
slutils/Pano.py
|
0b3d/Image-Map-Embeddings
|
a9fc65ac92094bcfcd0f19a3604f0b9d8bd3174f
|
[
"MIT"
] | null | null | null |
slutils/Pano.py
|
0b3d/Image-Map-Embeddings
|
a9fc65ac92094bcfcd0f19a3604f0b9d8bd3174f
|
[
"MIT"
] | null | null | null |
import os, sys
import numpy as np
import cv2
import pandas as pd
from slutils.equirec2perspec import Equirec2Perspec as E2P
class Pano():
    """A StreetLearn panorama: loads the equirectangular image and crops views."""
    def __init__(self, panoDir, ID):
        """Load a panorama from *panoDir*.

        Parameters:
            panoDir (str) -- directory holding the panorama jpegs and a
                nodes.txt index (columns: pano_id, yaw, lat, lon).
            ID (str | int) -- either the panorama file name
                (e.g. 'PreXwwylmG23hnheZ__zGw.jpg') or an integer row
                index into nodes.txt.
        """
        self.panoDir = panoDir
        if type(ID) is str:
            self.panoName = ID
        elif type(ID) is int:
            # Resolve the integer index to a file name via nodes.txt.
            filename = os.path.join( self.panoDir, 'nodes.txt' )
            names = ["pano_id", "yaw", "lat", "lon"]
            frame = pd.read_csv(filename, names=names)
            pano_id = frame.loc[ID, 'pano_id']
            self.panoName = pano_id + '.jpg'
        else:
            raise Exception("Pano ID not found")
        self.path = self.getPath()
        self.pano = self.getPano()
        self.shape = self.pano.shape
    def getPath(self):
        """Full path of the panorama file."""
        path = os.path.join( self.panoDir, self.panoName)
        return path
    def getPano(self, size=None, flip=False):
        """Read the panorama image (BGR), optionally resized and/or mirrored."""
        assert os.path.isfile(self.path), "Pano {} was not found".format(self.path)
        pano = cv2.imread(self.path)
        if size is not None:
            pano = cv2.resize(pano, size)
        if flip:
            pano = cv2.flip(pano, 1)
        return pano
    def showPano(self):
        """Display the panorama in an OpenCV window (blocks until keypress)."""
        cv2.imshow(self.panoName, self.pano)
        cv2.waitKey(0)
    def getZoom(self):
        """Return the pano's zoom level (height in multiples of 512 px)."""
        return int(np.ceil(self.pano.shape[0] / 512))
    def getSnaps(self, size=224, mode='list', rotation=0.0, flip=False, noise=False):
        """Return perspective snaps of the panorama.

        Parameters:
            size (int or (H, W)) -- tile size of each snap.
            mode (str) -- 'list' for [F, L, R, B] snaps, 'grid' for one
                image with the four snaps concatenated.
            rotation (float) -- extra yaw rotation in degrees.
            flip (bool) -- mirror the views horizontally.
            noise (bool) -- jitter fov/pitch/yaw with Gaussian noise.
        """
        snaps = []
        equ = E2P.Equirectangular(self.pano)
        # Yaw offsets for Front, Left, Right, Back views.
        views = [0,-90,90,180]
        if noise:
            fov_shift = np.random.normal(loc=0, scale=10)
            pitch_shift = np.random.normal(loc=0,scale=10)
            tetha_shift = np.random.normal(loc=0,scale=10)
        else:
            fov_shift = 0
            pitch_shift = 0
            tetha_shift = 0
        H, W = size if hasattr(size,'__iter__') else (size,size)
        tetha_shift = tetha_shift + rotation
        snaps = [equ.GetPerspective(100+fov_shift, t+tetha_shift, pitch_shift, H, W) for t in views]
        if mode == 'list' and not flip:
            return snaps
        elif mode == 'list' and flip:
            # Mirroring swaps left and right, hence the [0,2,1,3] reorder.
            new_list = [cv2.flip(snaps[i], 1) for i in [0,2,1,3]] # flip snaps
            return new_list
        elif mode == 'grid' and not flip:
            row1 = np.concatenate([snaps[0], snaps[3]], axis=1) # Concatenate F and B
            row2 = np.concatenate([snaps[1], snaps[2]], axis=1) # Concatenate L and R
            img = np.concatenate([row1, row2], axis=0) # [F,R;L,R]
            return img
        elif mode == 'grid' and flip:
            snaps = [cv2.flip(snap, 1) for snap in snaps]
            row1 = np.concatenate([snaps[0], snaps[3]], axis=1) # Concatenate F and B
            row2 = np.concatenate([snaps[2], snaps[1]], axis=1) # Concatenate L and R
            img = np.concatenate([row1, row2], axis=0) # [F,R;L,R]
            return img
    def getSnapswithInfo(self, size=224, colour = (255,255,255), text=None):
        """Return [F, L, R, B] snaps with a coloured border and optional label."""
        H, W = size if hasattr(size,'__iter__') else (size,size)
        thick = int(0.05 * H) # Thickness is 5 %
        snaps = self.getSnaps(size)
        snaps = [cv2.copyMakeBorder(snap, thick,thick,thick,thick, cv2.BORDER_CONSTANT, None, colour) for snap in snaps]
        directions = ['F', 'L', 'R', 'B']
        if text is not None:
            for i, direction in enumerate(directions):
                txt = 'ID: ' + text + ' ' + direction
                # NOTE(review): (10,size) as a text origin assumes an int
                # `size`; a tuple size would make this call fail — confirm.
                cv2.putText(snaps[i], txt, (10,size), cv2. FONT_HERSHEY_PLAIN, 1, (255, 0, 0), 1, cv2.LINE_AA)
        return snaps
    def cropPano(self, fov, theta, pitch, h, w):
        """Crop one perspective view with explicit fov/yaw/pitch and size."""
        equ = E2P.Equirectangular(self.pano)
        snap = equ.GetPerspective(fov, theta, pitch, h, w)
        return snap
    def saveSnaps(self, size=224, directory=None, option='group'):
        """Write snaps to disk: 'group' = one 2x2 jpg, 'individual' = 4 jpgs."""
        savedir = os.getcwd() if directory == None else directory
        basename = os.path.join(savedir, self.panoName.split('.')[0])
        if option == 'group':
            snaps = self.getSnapswithInfo(size=size, text=None)
            row1 = np.concatenate([snaps[0], snaps[2]], axis=1) # FR
            row2 = np.concatenate([snaps[3], snaps[1]], axis=1) # BL
            image = np.concatenate([row1, row2], axis=0)
            filename = basename + '.jpg'
            cv2.imwrite(filename, image)
        elif option == 'individual':
            snaps = self.getSnapswithInfo(size=size, text=None)
            directions = ['F', 'L', 'R', 'B']
            for i, snap in enumerate(snaps):
                direction = directions[i]
                filename = basename + '_' + direction + '.jpg'
                cv2.imwrite(filename, snap)
        else:
            print("Option not found, image not saved")
    def getCoordinates(self):
        """Look up this pano in nodes.txt; return (index, lat, lon, yaw)."""
        filename = os.path.join( self.panoDir, 'nodes.txt' )
        names = ["pano_id", "yaw", "lat", "lon"]
        frame = pd.read_csv(filename, names=names)
        row = frame.loc[frame['pano_id'] == self.panoName.split('.')[0]]
        index = row.index[0]
        yaw, lat, lon = row['yaw'].values[0], row['lat'].values[0], row['lon'].values[0]
        return (index, lat, lon, yaw)
    def __str__(self):
        index, lat, lon, yaw = self.getCoordinates()
        return "Pano name: {}, index: {}, shape: {}, coordinates: ({},{},{})".format(self.panoName, index, self.pano.shape, lat, lon, yaw)
if __name__ == "__main__":
    # Manual smoke test: needs the streetlearn dataset under $DATASETS
    # and a display for the cv2.imshow window.
    panoDir = os.path.join( os.environ['DATASETS'],'streetlearn', 'jpegs_manhattan_2019')
    pano = Pano(panoDir, 'PreXwwylmG23hnheZ__zGw.jpg')
    snaps = pano.getSnapswithInfo(size=(256,340))
    pano.saveSnaps(size=(256,340), directory=None, option='individual')
    img = np.concatenate(snaps, axis=1)
    cv2.imshow("pano", img)
    cv2.waitKey(0)
| 41.661017
| 164
| 0.556821
|
4a10715ccf80b2eed8317a749ac2c2e60f83ace0
| 285
|
py
|
Python
|
prototype/KnowledgeBase/migrations/0017_merge_20200417_1524.py
|
Woods-Taylor2/Cybersecurity-based-Knowledge-Management-System
|
53aa5bc3878051392a7cebb605e3333efceb4791
|
[
"PostgreSQL"
] | null | null | null |
prototype/KnowledgeBase/migrations/0017_merge_20200417_1524.py
|
Woods-Taylor2/Cybersecurity-based-Knowledge-Management-System
|
53aa5bc3878051392a7cebb605e3333efceb4791
|
[
"PostgreSQL"
] | null | null | null |
prototype/KnowledgeBase/migrations/0017_merge_20200417_1524.py
|
Woods-Taylor2/Cybersecurity-based-Knowledge-Management-System
|
53aa5bc3878051392a7cebb605e3333efceb4791
|
[
"PostgreSQL"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-04-17 15:24
from django.db import migrations
class Migration(migrations.Migration):
    # Merge migration: unifies the 0003 and 0016 migration branches.
    # It intentionally performs no schema operations.
    dependencies = [
        ('KnowledgeBase', '0003_auto_20200416_1652'),
        ('KnowledgeBase', '0016_auto_20200405_1726'),
    ]
    operations = [
    ]
| 19
| 53
| 0.663158
|
4a1072f7fe37176cb17a193d926074b950a6eb81
| 38
|
py
|
Python
|
venv/Lib/site-packages/IPython/kernel/threaded.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
venv/Lib/site-packages/IPython/kernel/threaded.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
venv/Lib/site-packages/IPython/kernel/threaded.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
from jupyter_client.threaded import *
| 19
| 37
| 0.842105
|
4a107389b4629200e3996efe55036315452598ea
| 5,787
|
py
|
Python
|
pyParallelSingleCellNet/utils.py
|
gavehan/pyParallelSingleCellNet
|
0d59eb445a71219504171cc7fd76094446b7f3fc
|
[
"MIT"
] | null | null | null |
pyParallelSingleCellNet/utils.py
|
gavehan/pyParallelSingleCellNet
|
0d59eb445a71219504171cc7fd76094446b7f3fc
|
[
"MIT"
] | null | null | null |
pyParallelSingleCellNet/utils.py
|
gavehan/pyParallelSingleCellNet
|
0d59eb445a71219504171cc7fd76094446b7f3fc
|
[
"MIT"
] | null | null | null |
import datetime
import numpy as np
import pandas as pd
import scipy.stats as stats
from anndata import AnnData
import scanpy as sc
def ctMerge(sampTab, annCol, ctVect, newName):
    """Collapse the cell-type labels in *ctVect* into the single label *newName*.

    Returns a modified copy of *sampTab*; the input table is untouched.
    """
    merged = sampTab.copy()
    mask = np.isin(merged[annCol], ctVect)
    merged.loc[mask, annCol] = newName
    return merged
def ctRename(sampTab, annCol, oldName, newName):
    """Rename one cell-type label; returns a modified copy of *sampTab*."""
    renamed = sampTab.copy()
    renamed.loc[renamed[annCol] == oldName, annCol] = newName
    return renamed
# adapted from Sam's code
# modified to use cell index for improved speed
def splitCommonAnnData(adata, ncells, dLevel="cell_ontology_class", cells_reserved=3):
    """Split an AnnData object into train/validation sets per cell type.

    For every cell type: up to *ncells* cells go to training (always
    leaving *cells_reserved* out), and up to 2*ncells of the remaining
    cells go to validation. Sampling uses np.random. Returns [aTrain, aTest].
    """
    cts = sorted(list(set(adata.obs[dLevel])))
    train_ids = np.empty(0)
    val_ids = np.empty(0)
    for i, ct in enumerate(cts):
        aX = adata[adata.obs[dLevel] == ct, :]
        # Training draw: cap at ncells, keep cells_reserved cells aside.
        ccount = min([aX.n_obs-cells_reserved, ncells])
        new_train_ids = np.random.choice(aX.obs.index, ccount, replace=False)
        train_ids = np.append(train_ids, new_train_ids)
        # Validation draw from the cells not used for training.
        ccount = min([aX.n_obs-ccount, ncells*2])
        val_temp = list(set(aX.obs.index)-set(new_train_ids))
        new_val_ids = np.random.choice(val_temp, ccount, replace=False)
        val_ids = np.append(val_ids, new_val_ids)
        print(f"{i+1}/{len(cts)} : {ct} > {aX.n_obs}")
    aTrain = adata[train_ids, :]
    aTest = adata[val_ids, :]
    return([aTrain, aTest])
def splitCommon(expData, ncells, sampTab, dLevel="cell_ontology_class", cells_reserved=3):
    """Split an expression DataFrame into train/validation sets per cell type.

    At most *ncells* cells per type are sampled for training (keeping
    *cells_reserved* back); every remaining cell goes to validation.
    Returns [aTrain, aTest].
    """
    cts = set(sampTab[dLevel])
    train_ids = np.empty(0)
    for ct in cts:
        aX = expData.loc[sampTab[dLevel] == ct, :]
        print(ct, ": ")
        ccount = len(aX.index) - cells_reserved
        ccount = min([ccount, ncells])
        print(ccount)
        train_ids = np.append(train_ids, np.random.choice(aX.index.values, ccount, replace=False))
    # Everything not sampled for training becomes validation.
    val_ids = np.setdiff1d(sampTab.index, train_ids, assume_unique=True)
    aTrain = expData.loc[np.isin(sampTab.index.values, train_ids, assume_unique=True), :]
    aTest = expData.loc[np.isin(sampTab.index.values, val_ids, assume_unique=True), :]
    return([aTrain, aTest])
def annSetUp(species="mmusculus"):
    """Fetch gene-name/GO-id annotations for *species* from BioMart.

    Requires network access. Returns a DataFrame with columns
    external_gene_name and go_id (used by getGenesFromGO).
    """
    annot = sc.queries.biomart_annotations(species, ["external_gene_name", "go_id"],)
    return annot
def getGenesFromGO(GOID, annList):
    """Return the sorted gene symbols annotated with the GO id(s) in *GOID*.

    Parameters:
        GOID -- a single GO id string, or an iterable of GO ids.
        annList -- annotation table from annSetUp (columns
            external_gene_name and go_id).

    Returns a numpy array of gene symbols sorted alphabetically.
    """
    # isinstance replaces the fragile str(type(...)) == "<class 'str'>" check.
    if isinstance(GOID, str):
        hits = annList.loc[annList.go_id == GOID, :]
    else:
        hits = annList.loc[annList.go_id.isin(GOID), :]
    return hits.external_gene_name.sort_values().to_numpy()
def dumbfunc(aNamedList):
    """Return the index of a pandas object as a numpy array."""
    return np.asarray(aNamedList.index)
def GEP_makeMean(expDat, groupings, type='mean'):
    """Aggregate expression per group; *type* selects 'mean' or 'median'.

    Returns None for any other *type* value (mirrors the original behaviour).
    """
    grouped = expDat.groupby(groupings)
    if type == "mean":
        return grouped.mean()
    if type == "median":
        return grouped.median()
def utils_myDist(expData):
    """Pairwise correlation distance (1 - Pearson r) between rows of expData."""
    numSamps = len(expData.index)
    # corr() on the transpose yields sample-by-sample correlations.
    result = np.subtract(np.ones([numSamps, numSamps]), expData.T.corr())
    # NOTE(review): `del <Index>.name` relies on older pandas behaviour
    # (resetting the name to None); confirm on the pinned pandas version.
    del result.index.name
    del result.columns.name
    return result
def utils_stripwhite(string):
    """Remove leading and trailing whitespace from *string*."""
    stripped = string.strip()
    return stripped
def utils_myDate():
    """Today's date formatted like 'Jan_01_2024'."""
    return datetime.datetime.today().strftime("%b_%d_%Y")
def utils_strip_fname(string):
    """Return the part of *string* after the last '/' (the file name)."""
    return string.split("/")[-1]
def utils_stderr(x):
    """Standard error of the mean of *x*."""
    sem_value = stats.sem(x)
    return sem_value
def zscore(x, meanVal, sdVal):
    """Standardise *x*: (x - meanVal) / sdVal (works element-wise on arrays)."""
    return np.divide(np.subtract(x, meanVal), sdVal)
def zscoreVect(genes, expDat, tVals, ctt, cttVec):
    """Z-score the expression of *genes* for the cells of type *ctt*.

    Parameters:
        genes -- iterable of gene names (columns of expDat).
        expDat -- cells x genes expression DataFrame.
        tVals -- mapping cell type -> {'mean': {gene: m}, 'sd': {gene: s}}.
        ctt -- the cell type to select.
        cttVec -- per-cell type annotations aligned with expDat's rows.

    Returns {gene: pd.Series of z-scores indexed like the selected cells}.
    """
    res = {}
    # Restrict to the cells annotated with the requested cell type.
    x = expDat.loc[cttVec == ctt, :]
    for gene in genes:
        xvals = x[gene]
        # pd.Series (capital S): the original `pd.series` does not exist
        # and raised AttributeError at runtime.
        res[gene] = pd.Series(
            data=zscore(xvals, tVals[ctt]['mean'][gene],
                        tVals[ctt]['sd'][gene]),
            index=xvals.index.values
        )
    return res
def downSampleW(vector, total=1e5, dThresh=0):
    """Scale *vector* so it sums to *total*; zero out entries below *dThresh*."""
    scale = total / np.sum(vector)
    scaled = scale * vector
    scaled[scaled < dThresh] = 0
    return scaled
def weighted_down(expDat, total, dThresh=0):
    """Rescale each row of *expDat* to sum to *total*; clip values < dThresh to 0."""
    factors = np.divide(total, expDat.sum(axis=1))
    res = expDat.mul(factors, axis=0)
    res[res < dThresh] = 0
    return res
def trans_prop(expDat, total, dThresh=0):
    """Proportion-normalise each row to *total*, threshold, then log(x + 1)."""
    rowFactors = np.divide(total, expDat.sum(axis=1))
    scaled = expDat.mul(rowFactors, axis=0)
    scaled[scaled < dThresh] = 0
    return np.log(scaled + 1)
def trans_zscore_col(expDat):
    """Z-score-normalise each column (gene) of expDat."""
    return expDat.apply(stats.zscore, axis=0)
def trans_zscore_row(expDat):
    """Z-score-normalise each row (cell) of expDat via a double transpose."""
    return expDat.T.apply(stats.zscore, axis=0).T
def trans_binarize(expData, threshold=1):
    """Binarise *expData* in place: values < threshold -> 0, the rest -> 1."""
    below = expData < threshold
    expData[below] = 0
    expData[expData > 0] = 1
    return expData
def getUniqueGenes(genes, transID='id', geneID='symbol'):
    """Drop duplicate gene symbols, keeping the first transcript of each.

    The returned copy is re-indexed by the transcript-id column *transID*.
    """
    genes2 = genes.copy()
    genes2.index = genes2[transID]
    genes2.drop_duplicates(subset=geneID, inplace=True, keep="first")
    # NOTE(review): `del <Index>.name` assumes older pandas semantics
    # (clears the name); verify against the installed pandas version.
    del genes2.index.name
    return genes2
def removeRed(expData, genes, transID="id", geneID="symbol"):
    """Drop redundant transcript columns from *expData*.

    Keeps only the columns whose transcript ids survive the
    duplicate-symbol filtering done by getUniqueGenes.
    """
    genes2 = getUniqueGenes(genes, transID, geneID)
    return expData.loc[:, genes2.index.values]
def cn_correctZmat_col(zmat):
    """Clamp +/-inf in every column of *zmat* to that column's finite extrema."""
    def clampFinite(col):
        # Extrema over the finite values only.
        finiteMax = np.max(col[col < np.inf])
        finiteMin = np.min(col[col > -np.inf])
        return col.clip(lower=finiteMin, upper=finiteMax)
    return zmat.apply(clampFinite, axis=0)
def cn_correctZmat_row(zmat):
    """Clamp +/-inf in every row of *zmat* to that row's finite extrema."""
    def clampFinite(row):
        # Extrema over the finite values only.
        finiteMax = np.max(row[row < np.inf])
        finiteMin = np.min(row[row > -np.inf])
        return row.clip(lower=finiteMin, upper=finiteMax)
    return zmat.apply(clampFinite, axis=1)
def makeExpMat(adata):
    """Convert an AnnData expression matrix to a cells x genes DataFrame."""
    return pd.DataFrame(adata.X, index=adata.obs_names, columns=adata.var_names)
def makeSampTab(adata):
    """Return the per-cell annotation table (adata.obs) of an AnnData object."""
    return adata.obs
| 31.622951
| 99
| 0.656817
|
4a1073e1b186116a2f84119609d420f9118482db
| 7,848
|
py
|
Python
|
setup.py
|
JamesLinus/pyopencl
|
e1456850465c5904baee4e5e0047e098c1269c1c
|
[
"Apache-2.0"
] | 1
|
2019-04-22T09:09:18.000Z
|
2019-04-22T09:09:18.000Z
|
setup.py
|
JamesLinus/pyopencl
|
e1456850465c5904baee4e5e0047e098c1269c1c
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
JamesLinus/pyopencl
|
e1456850465c5904baee4e5e0047e098c1269c1c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
__copyright__ = """
Copyright (C) 2009-15 Andreas Kloeckner
Copyright (C) 2013 Marko Bencun
Copyright (C) 2013 Marko Bencun
"""
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
def get_config_schema():
from aksetup_helper import ConfigSchema, Option, \
IncludeDir, LibraryDir, Libraries, \
Switch, StringListOption
default_cxxflags = ['-std=c++0x']
if 'darwin' in sys.platform:
import platform
osx_ver, _, _ = platform.mac_ver()
osx_ver = '.'.join(osx_ver.split('.')[:2])
sysroot_paths = [
"/Applications/Xcode.app/Contents/Developer/Platforms/"
"MacOSX.platform/Developer/SDKs/MacOSX%s.sdk" % osx_ver,
"/Developer/SDKs/MacOSX%s.sdk" % osx_ver
]
default_libs = []
default_cxxflags = default_cxxflags + [
'-stdlib=libc++', '-mmacosx-version-min=10.7',
'-arch', 'i386', '-arch', 'x86_64'
]
from os.path import isdir
for srp in sysroot_paths:
if isdir(srp):
default_cxxflags.extend(['-isysroot', srp])
break
default_ldflags = default_cxxflags[:] + ["-Wl,-framework,OpenCL"]
else:
default_libs = ["OpenCL"]
default_ldflags = []
return ConfigSchema([
Switch("CL_TRACE", False, "Enable OpenCL API tracing"),
Switch("CL_ENABLE_GL", False, "Enable OpenCL<->OpenGL interoperability"),
Option("CL_PRETEND_VERSION", None,
"Dotted CL version (e.g. 1.2) which you'd like to use."),
IncludeDir("CL", []),
LibraryDir("CL", []),
Libraries("CL", default_libs),
StringListOption("CXXFLAGS", default_cxxflags,
help="Any extra C++ compiler options to include"),
StringListOption("LDFLAGS", default_ldflags,
help="Any extra linker options to include"),
])
def main():
from setuptools import find_packages
from aksetup_helper import (hack_distutils, get_config, setup,
check_git_submodules)
check_git_submodules()
hack_distutils()
conf = get_config(get_config_schema(),
warn_about_no_config=False)
extra_defines = {}
extra_defines["PYGPU_PACKAGE"] = "pyopencl"
extra_defines["PYGPU_PYOPENCL"] = "1"
if conf["CL_TRACE"]:
extra_defines["PYOPENCL_TRACE"] = 1
if conf["CL_ENABLE_GL"]:
extra_defines["HAVE_GL"] = 1
if conf["CL_PRETEND_VERSION"]:
try:
major, minor = [int(x) for x in conf["CL_PRETEND_VERSION"].split(".")]
extra_defines["PYOPENCL_PRETEND_CL_VERSION"] = \
0x1000*major + 0x10 * minor
except:
print("CL_PRETEND_VERSION must be of the form M.N, "
"with two integers M and N")
raise
conf["EXTRA_DEFINES"] = extra_defines
ver_dic = {}
version_file = open("pyopencl/version.py")
try:
version_file_contents = version_file.read()
finally:
version_file.close()
exec(compile(version_file_contents, "pyopencl/version.py", 'exec'), ver_dic)
separator = "-"*75
try:
import mako # noqa
except ImportError:
print(separator)
print("Mako is not installed.")
print(separator)
print("That is not a problem, as most of PyOpenCL will be just fine ")
print("without it.Some higher-level parts of pyopencl (such as ")
print("pyopencl.reduction) will not function without the templating engine ")
print("Mako [1] being installed. If you would like this functionality to ")
print("work, you might want to install Mako after you finish ")
print("installing PyOpenCL.")
print("")
print("Simply type")
print("python -m pip install mako")
print("either now or after the installation completes to fix this.")
print("")
print("[1] http://www.makotemplates.org/")
print(separator)
print("Hit Ctrl-C now if you'd like to think about the situation.")
print(separator)
from aksetup_helper import count_down_delay
count_down_delay(delay=5)
# {{{ write cffi build script
with open("cffi_build.py.in", "rt") as f:
build_script_template = f.read()
format_args = {}
for k, v in conf.items():
format_args[k] = repr(v)
build_script = build_script_template.format(**format_args)
with open("cffi_build.py", "wt") as f:
f.write(build_script)
# }}}
setup(name="pyopencl",
# metadata
version=ver_dic["VERSION_TEXT"],
description="Python wrapper for OpenCL",
long_description=open("README.rst", "rt").read(),
author="Andreas Kloeckner",
author_email="inform@tiker.net",
license="MIT",
url="http://mathema.tician.de/software/pyopencl",
classifiers=[
'Environment :: Console',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: C++',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
],
# build info
packages=find_packages(),
setup_requires=[
"numpy",
"cffi>=1.1.0",
],
install_requires=[
"numpy",
"pytools>=2015.1.2",
"pytest>=2",
"decorator>=3.2.0",
"cffi>=1.1.0",
"appdirs>=1.4.0",
"six>=1.9.0",
# "Mako>=0.3.6",
],
cffi_modules=["cffi_build.py:ffi"],
include_package_data=True,
package_data={
"pyopencl": ["cl/*.cl", "cl/*.h"]
},
zip_safe=False)
if __name__ == '__main__':
main()
| 33.395745
| 85
| 0.587156
|
4a1075637a5ef95c542f4e6c4bcd93e38f56d9ce
| 1,588
|
py
|
Python
|
40. Combination Sum II/solution2.py
|
sunshot/LeetCode
|
8f6503201831055f1d49ed3abb25be44a13ec317
|
[
"MIT"
] | null | null | null |
40. Combination Sum II/solution2.py
|
sunshot/LeetCode
|
8f6503201831055f1d49ed3abb25be44a13ec317
|
[
"MIT"
] | null | null | null |
40. Combination Sum II/solution2.py
|
sunshot/LeetCode
|
8f6503201831055f1d49ed3abb25be44a13ec317
|
[
"MIT"
] | null | null | null |
from typing import List
import collections
class Solution:
def combinationSum2(self, candidates: List[int], target: int) -> List[List[int]]:
ans = []
if not candidates or len(candidates) == 0:
return ans
if len(candidates) == 1:
if target == candidates[0]:
result = [candidates[0]]
ans.append(result)
return ans
if sum(candidates) < target:
return ans
counter = collections.Counter(candidates)
# convert the counter dicts to a list of (num, count) tuples
counter = [(num, counter[num]) for num in counter]
# backtrack
def backtrack(path: List[int], index: int, target: int) -> None:
if target == 0:
ans.append(path)
return
elif target < 0:
return
if index >= len(counter):
return
for i in range(index, len(counter)):
num, freq = counter[i]
if freq <= 0:
continue
# backtrack for i
counter[i] = (num, freq - 1)
backtrack(path + [num], i, target - num)
# restore
counter[i] = (num, freq)
backtrack([], 0, target)
return ans
if __name__== '__main__':
solution = Solution()
candidates = [10,1,2,7,6,1,5]
target = 8
ans = solution.combinationSum2(candidates, target)
print(ans)
| 28.872727
| 85
| 0.479219
|
4a107623ccbbbfb4b769121365873eec39f3f37a
| 236
|
py
|
Python
|
Python-3/basic_examples/python_object_examples.py
|
ghiloufibelgacem/jornaldev
|
b9b27f9f7da595892520314b4ed1d2675556310a
|
[
"MIT"
] | 1,139
|
2018-05-09T11:54:36.000Z
|
2022-03-31T06:52:50.000Z
|
Python-3/basic_examples/python_object_examples.py
|
iamharshverma/journaldev
|
af24242a1ac1b7dc3e8e2404ec916b77ccf5044a
|
[
"MIT"
] | 56
|
2018-06-20T03:52:53.000Z
|
2022-02-09T22:57:41.000Z
|
Python-3/basic_examples/python_object_examples.py
|
iamharshverma/journaldev
|
af24242a1ac1b7dc3e8e2404ec916b77ccf5044a
|
[
"MIT"
] | 2,058
|
2018-05-09T09:32:17.000Z
|
2022-03-29T13:19:42.000Z
|
obj = object()
print(type(obj))
print(dir(obj))
print(obj.__hash__())
# obj.i = 10 # AttributeError: 'object' object has no attribute 'i'
# setattr(obj, 'name', 'pankaj') # AttributeError: 'object' object has no attribute 'name'
| 19.666667
| 91
| 0.673729
|
4a1076ad4284b0ca966e6f42c2127c7fb6d20987
| 1,622
|
py
|
Python
|
docs/test_doc_production_use.py
|
yanhuixie/iommi
|
b6ae39c270b10b553a7ef2ab52e8c1c96489db0b
|
[
"BSD-3-Clause"
] | 192
|
2020-01-30T14:29:56.000Z
|
2022-03-28T19:55:30.000Z
|
docs/test_doc_production_use.py
|
yanhuixie/iommi
|
b6ae39c270b10b553a7ef2ab52e8c1c96489db0b
|
[
"BSD-3-Clause"
] | 105
|
2020-03-29T21:59:01.000Z
|
2022-03-24T12:29:09.000Z
|
docs/test_doc_production_use.py
|
yanhuixie/iommi
|
b6ae39c270b10b553a7ef2ab52e8c1c96489db0b
|
[
"BSD-3-Clause"
] | 28
|
2020-02-02T20:51:09.000Z
|
2022-03-08T16:23:42.000Z
|
from tests.helpers import req
request = req('get')
def test_production_use():
# language=rst
"""
Production use
--------------
You probably want to define your own `Style` in a production scenario. See
:doc:`How to create a Style <styles>`, and especially the section on how to integrate into
an existing code base.
Just like you have your own custom base class for Django's `Model` to have a
central place to put customization you will want to do the same for the base
classes of iommi. In iommi this is even more important since you will almost
certainly want to add more shortcuts that are specific to your product.
Copy this boilerplate to some place in your code and import these classes
instead of the corresponding ones from iommi:
"""
import iommi
class Page(iommi.Page):
pass
class Action(iommi.Action):
pass
class Field(iommi.Field):
pass
class Form(iommi.Form):
class Meta:
member_class = Field
page_class = Page
action_class = Action
class Filter(iommi.Filter):
pass
class Query(iommi.Query):
class Meta:
member_class = Filter
form_class = Form
class Column(iommi.Column):
pass
class Table(iommi.Table):
class Meta:
member_class = Column
form_class = Form
query_class = Query
page_class = Page
action_class = Action
class Menu(iommi.Menu):
pass
class MenuItem(iommi.MenuItem):
pass
| 21.064935
| 94
| 0.618989
|
4a1076f6700c2f7d12ad9125ca91b9ac8c57d9e1
| 3,499
|
py
|
Python
|
test/functional/p2p_timeouts.py
|
tradecraftio/tradecraft
|
a014fea4d4656df67aef19e379f10322386cf6f8
|
[
"MIT"
] | 10
|
2019-03-08T04:10:37.000Z
|
2021-08-20T11:55:14.000Z
|
test/functional/p2p_timeouts.py
|
tradecraftio/tradecraft
|
a014fea4d4656df67aef19e379f10322386cf6f8
|
[
"MIT"
] | 69
|
2018-11-09T20:29:29.000Z
|
2021-10-05T00:08:36.000Z
|
test/functional/p2p_timeouts.py
|
tradecraftio/tradecraft
|
a014fea4d4656df67aef19e379f10322386cf6f8
|
[
"MIT"
] | 7
|
2019-01-21T06:00:18.000Z
|
2021-12-19T16:18:00.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Copyright (c) 2010-2021 The Freicoin Developers
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of version 3 of the GNU Affero General Public License as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""Test various net timeouts.
- Create three freicoind nodes:
no_verack_node - we never send a verack in response to their version
no_version_node - we never send a version (only a ping)
no_send_node - we never send any P2P message.
- Start all three nodes
- Wait 1 second
- Assert that we're connected
- Send a ping to no_verack_node and no_version_node
- Wait 1 second
- Assert that we're still connected
- Send a ping to no_verack_node and no_version_node
- Wait 2 seconds
- Assert that we're no longer connected (timeout to receive version/verack is 3 seconds)
"""
from time import sleep
from test_framework.messages import msg_ping
from test_framework.mininode import P2PInterface
from test_framework.test_framework import FreicoinTestFramework
class TestP2PConn(P2PInterface):
def on_version(self, message):
# Don't send a verack in response
pass
class TimeoutsTest(FreicoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
# set timeout to receive version/verack to 3 seconds
self.extra_args = [["-peertimeout=3"]]
def run_test(self):
# Setup the p2p connections
no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn())
no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
sleep(1)
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
sleep(1)
assert "version" in no_verack_node.last_message
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
expected_timeout_logs = [
"version handshake timeout from 0",
"socket no message in first 3 seconds, 1 0 from 1",
"socket no message in first 3 seconds, 0 0 from 2",
]
with self.nodes[0].assert_debug_log(expected_msgs=expected_timeout_logs):
sleep(3)
# By now, we waited a total of 5 seconds. Off-by-two for two
# reasons:
# * The internal precision is one second
# * Account for network delay
assert not no_verack_node.is_connected
assert not no_version_node.is_connected
assert not no_send_node.is_connected
if __name__ == '__main__':
TimeoutsTest().main()
| 36.072165
| 116
| 0.712203
|
4a107701a10aad2391de741cba9392bf26aa21c1
| 187
|
py
|
Python
|
posts/urls.py
|
kuter/django-saml2-auth-example
|
dc920020b5529f0e8bd692b43a3af3707aee1770
|
[
"MIT"
] | null | null | null |
posts/urls.py
|
kuter/django-saml2-auth-example
|
dc920020b5529f0e8bd692b43a3af3707aee1770
|
[
"MIT"
] | null | null | null |
posts/urls.py
|
kuter/django-saml2-auth-example
|
dc920020b5529f0e8bd692b43a3af3707aee1770
|
[
"MIT"
] | null | null | null |
from django.urls import path
from django.contrib.auth.decorators import login_required
from . import views
urlpatterns = [
path('', login_required(views.PostListView.as_view())),
]
| 20.777778
| 59
| 0.764706
|
4a1077c43e6fc42e1448c2259282745030f7f8d1
| 7,543
|
py
|
Python
|
packetselector.py
|
androdev4u/XFLTReaT
|
17d4ec857dd1b98d8f57fec2edc540fce7087b21
|
[
"MIT"
] | 315
|
2017-08-25T04:17:56.000Z
|
2022-03-03T14:28:46.000Z
|
packetselector.py
|
FDlucifer/XFLTReaT
|
17d4ec857dd1b98d8f57fec2edc540fce7087b21
|
[
"MIT"
] | 8
|
2017-09-10T21:03:06.000Z
|
2020-01-24T15:48:58.000Z
|
packetselector.py
|
FDlucifer/XFLTReaT
|
17d4ec857dd1b98d8f57fec2edc540fce7087b21
|
[
"MIT"
] | 71
|
2017-08-25T06:01:43.000Z
|
2022-03-24T15:35:53.000Z
|
# MIT License
# Copyright (c) 2017 Balazs Bucsay
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
if "packetselector.py" in sys.argv[0]:
print("[-] Instead of poking around just try: python xfltreat.py --help")
sys.exit(-1)
# This is the engine of the whole communication. Every packet that arrives to
# the tunnel will be carefully selected. In case the destination IP matches, it
# will be redirected (written) to the appropriate client pipe.
import threading
import os
import select
import struct
import socket
#local files
import common
from client import Client
class PacketSelector(threading.Thread):
clients = None
def __init__(self, tunnel):
threading.Thread.__init__(self)
self.timeout = 1.0 # seems to be a good value for timeout
self.clients = []
self.tunnel = tunnel
self._stop = False
self.os_type = common.get_os_type()
if self.os_type == common.OS_WINDOWS:
self.run_ps_mainloop = self.run_windows
else:
self.run_ps_mainloop = self.run_unix
# return client list
def get_clients(self):
return self.clients
# add new client to the client list
def add_client(self, client):
self.clients.append(client)
return
# This function is called when a client object has to be replaced.
# That could happen when the client connection was reset, or there is a
# duplicated config with the same private IP.
def replace_client(self, old_client, new_client):
if old_client in self.clients:
self.clients.remove(old_client)
self.clients.append(new_client)
try:
old_client.get_pipe_w_fd().close()
except:
pass
try:
old_client.get_pipe_r_fd().close()
except:
pass
try:
socket.close(old_client.get_socket())
except:
pass
# removing client from the client list
def delete_client(self, client):
if client in self.clients:
if self.os_type == common.OS_WINDOWS:
import win32file
try:
win32file.CloseHandle(client.get_pipe_r())
win32file.CloseHandle(client.get_pipe_w())
except Exception as e:
common.internal_print("Remove authenticated client: CloseHandle exception: {0}".format(e), -1)
else:
try:
client.get_pipe_r_fd().close()
client.get_pipe_w_fd().close()
except Exception as e:
common.internal_print("Remove authenticated client: os.close exception: {0}".format(e), -1)
client.call_stopfp()
self.clients.remove(client)
return
# This function should run from the point when the framework was started.
# It runs as an infinite loop to read the packets off the tunnel.
# When an IPv4 packet was found that will be selected and checked whether
# it addresses a client in the client list. If a client was found, then the
# packet will be written on that pipe.
def run(self):
return self.run_ps_mainloop()
def run_unix(self):
rlist = [self.tunnel]
wlist = []
xlist = []
while not self._stop:
try:
readable, writable, exceptional = select.select(rlist, wlist, xlist, self.timeout)
except select.error as e:
print(e)
break
for s in readable:
# is there anything on the tunnel interface?
if s is self.tunnel:
# yes there is, read the packet or packets off the tunnel
message = os.read(self.tunnel, 4096)
if self.os_type == common.OS_MACOSX:
message = message[4:]
while True:
# dumb check, but seems to be working. The packet has
# to be longer than 4 and it must be IPv4
if (len(message) < 4) or (message[0:1] != "\x45"): #Only care about IPv4
break
packetlen = struct.unpack(">H", message[2:4])[0]
if packetlen == 0:
break
# is the rest less than the packet length?
if packetlen > len(message):
# in case it is less, we need to read more
message += os.read(self.tunnel, 4096)
readytogo = message[0:packetlen]
message = message[packetlen:]
# looking for client
for c in self.clients:
if c.get_private_ip_addr() == readytogo[16:20]:
# client found, writing packet on client's pipe
try:
os.write(c.get_pipe_w(), readytogo)
# flushing, no buffering please
c.get_pipe_w_fd().flush()
except:
# it can break if there is a race condition
# the client was found above but in the
# same time the client left and the pipe
# got closed. Broken pipe would be raised
pass
return
# some ideas were taken from: https://github.com/boytm/minivtun-win/
def run_windows(self):
import win32file
import win32event
import pywintypes
import winerror
import win32api
# creating events, overlapped structures and a buffer for reading and writing
hEvent_read = win32event.CreateEvent(None, 0, 0, None)
overlapped_read = pywintypes.OVERLAPPED()
overlapped_read.hEvent = hEvent_read
overlapped_write = pywintypes.OVERLAPPED()
message = win32file.AllocateReadBuffer(4096)
while not self._stop:
try:
# Overlapped/async read, it either blocks or returns pending
hr, _ = win32file.ReadFile(self.tunnel, message, overlapped_read)
if (hr == winerror.ERROR_IO_PENDING):
# when the event gets signalled or timeout happens it will return
rc = win32event.WaitForSingleObject(hEvent_read, int(self.timeout*1000))
if rc == winerror.WAIT_TIMEOUT:
# timed out, just rerun read
continue
if rc == win32event.WAIT_OBJECT_0:
# read happened, packet is in "message"
if (overlapped_read.InternalHigh < 4) or (message[0:1] != "\x45"): #Only care about IPv4
# too small which should not happen or not IPv4, so we just drop it.
continue
# reading out the packet from the buffer and discarding the rest
readytogo = message[0:overlapped_read.InternalHigh]
# looking for client
for c in self.clients:
if c.get_private_ip_addr() == readytogo[16:20]:
# client found, writing packet on client's pipe
# ignoring outcome, it is async so it will happen when it will happen ;)
win32file.WriteFile(c.get_pipe_w(), readytogo, overlapped_write)
except win32api.error as e:
if e.args[0] == 995:
common.internal_print("Interface disappered, exiting PS thread: {0}".format(e), -1)
self.stop()
continue
if e.args[0] == 1453:
common.internal_print("OS Internal error: {0}".format(e), -1)
self.stop()
continue
common.internal_print("PS Exception: {0}".format(e), -1)
return
# stop the so called infinite loop
def stop(self):
self._stop = True
return
| 32.097872
| 99
| 0.702108
|
4a107873b1cd837477d4c6949b4ea931bad64944
| 2,867
|
py
|
Python
|
occurrences/migrations/0022_study_level.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | 2
|
2020-11-10T16:31:26.000Z
|
2021-02-19T10:33:27.000Z
|
occurrences/migrations/0022_study_level.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | 239
|
2020-04-23T07:36:17.000Z
|
2022-03-15T08:38:38.000Z
|
occurrences/migrations/0022_study_level.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.13 on 2021-01-14 14:22
from django.db import migrations, models
import django.db.models.deletion
import parler.models
class Migration(migrations.Migration):
dependencies = [
("occurrences", "0021_palvelutarjotinevent_mandatory_additional_information"),
]
operations = [
migrations.CreateModel(
name="StudyLevel",
fields=[
(
"id",
models.CharField(max_length=255, primary_key=True, serialize=False),
),
(
"level",
models.PositiveIntegerField(
help_text="Used to make a hierarchy between study levels.",
verbose_name="level",
),
),
],
options={
"ordering": ["level"],
"verbose_name": "study level",
"verbose_name_plural": "study levels",
},
bases=(parler.models.TranslatableModelMixin, models.Model),
),
migrations.AddField(
model_name="studygroup",
name="study_levels",
field=models.ManyToManyField(
blank=True,
related_name="study_groups",
to="occurrences.StudyLevel",
verbose_name="study levels",
),
),
migrations.CreateModel(
name="StudyLevelTranslation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"language_code",
models.CharField(
db_index=True, max_length=15, verbose_name="Language"
),
),
("label", models.CharField(max_length=255, verbose_name="label")),
(
"master",
models.ForeignKey(
editable=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="translations",
to="occurrences.StudyLevel",
),
),
],
options={
"verbose_name": "study level Translation",
"db_table": "occurrences_studylevel_translation",
"db_tablespace": "",
"managed": True,
"default_permissions": (),
"unique_together": {("language_code", "master")},
},
),
]
| 32.954023
| 88
| 0.436693
|
4a1078c525eb687a6f9956b905dbea29483c4d41
| 12,998
|
py
|
Python
|
cirq-google/cirq_google/api/v1/programs_test.py
|
mmcewen-g/Cirq
|
fe80da777401defd177aab38ce2b844912eca6e9
|
[
"Apache-2.0"
] | 1
|
2022-01-21T01:55:48.000Z
|
2022-01-21T01:55:48.000Z
|
cirq-google/cirq_google/api/v1/programs_test.py
|
mmcewen-g/Cirq
|
fe80da777401defd177aab38ce2b844912eca6e9
|
[
"Apache-2.0"
] | null | null | null |
cirq-google/cirq_google/api/v1/programs_test.py
|
mmcewen-g/Cirq
|
fe80da777401defd177aab38ce2b844912eca6e9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import sympy
import cirq
import cirq_google as cg
import cirq_google.api.v1.programs as programs
from cirq_google.api.v1 import operations_pb2
def assert_proto_dict_convert(gate: cirq.Gate, proto: operations_pb2.Operation, *qubits: cirq.Qid):
assert programs.gate_to_proto(gate, qubits, delay=0) == proto
assert programs.xmon_op_from_proto(proto) == gate(*qubits)
def test_protobuf_round_trip():
device = cg.Foxtail
circuit = cirq.Circuit(
[cirq.X(q) ** 0.5 for q in device.qubits],
[cirq.CZ(q, q2) for q in [cirq.GridQubit(0, 0)] for q2 in device.neighbors_of(q)],
)
protos = list(programs.circuit_as_schedule_to_protos(circuit))
s2 = programs.circuit_from_schedule_from_protos(protos)
assert s2 == circuit
def test_protobuf_round_trip_device_deprecated():
device = cg.Foxtail
circuit = cirq.Circuit(
[cirq.X(q) ** 0.5 for q in device.qubits],
[cirq.CZ(q, q2) for q in [cirq.GridQubit(0, 0)] for q2 in device.neighbors_of(q)],
)
circuit._device = device
protos = list(programs.circuit_as_schedule_to_protos(circuit))
with cirq.testing.assert_deprecated(
cirq.circuits.circuit._DEVICE_DEP_MESSAGE, deadline='v0.15'
):
s2 = programs.circuit_from_schedule_from_protos(device, protos)
assert s2 == circuit
def make_bytes(s: str) -> bytes:
"""Helper function to convert a string of digits into packed bytes.
Ignores any characters other than 0 and 1, in particular whitespace. The
bits are packed in little-endian order within each byte.
"""
buf = []
byte = 0
idx = 0
for c in s:
if c == '0':
pass
elif c == '1':
byte |= 1 << idx
else:
# coverage: ignore
continue
idx += 1
if idx == 8:
buf.append(byte)
byte = 0
idx = 0
if idx:
buf.append(byte)
return bytearray(buf)
def test_pack_results():
measurements = [
(
'a',
np.array(
[
[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 1],
[1, 0, 0],
[1, 0, 1],
[1, 1, 0],
]
),
),
(
'b',
np.array(
[
[0, 0],
[0, 1],
[1, 0],
[1, 1],
[0, 0],
[0, 1],
[1, 0],
]
),
),
]
data = programs.pack_results(measurements)
expected = make_bytes(
"""
000 00
001 01
010 10
011 11
100 00
101 01
110 10
000 00 -- padding
"""
)
assert data == expected
def test_pack_results_no_measurements():
assert programs.pack_results([]) == b''
def test_pack_results_incompatible_shapes():
def bools(*shape):
return np.zeros(shape, dtype=bool)
with pytest.raises(ValueError):
programs.pack_results([('a', bools(10))])
with pytest.raises(ValueError):
programs.pack_results([('a', bools(7, 3)), ('b', bools(8, 2))])
def test_unpack_results():
data = make_bytes(
"""
000 00
001 01
010 10
011 11
100 00
101 01
110 10
"""
)
assert len(data) == 5 # 35 data bits + 5 padding bits
results = programs.unpack_results(data, 7, [('a', 3), ('b', 2)])
assert 'a' in results
assert results['a'].shape == (7, 3)
assert results['a'].dtype == bool
np.testing.assert_array_equal(
results['a'],
[
[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 1],
[1, 0, 0],
[1, 0, 1],
[1, 1, 0],
],
)
assert 'b' in results
assert results['b'].shape == (7, 2)
assert results['b'].dtype == bool
np.testing.assert_array_equal(
results['b'],
[
[0, 0],
[0, 1],
[1, 0],
[1, 1],
[0, 0],
[0, 1],
[1, 0],
],
)
def test_single_qubit_measurement_proto_convert():
gate = cirq.MeasurementGate(1, 'test')
proto = operations_pb2.Operation(
measurement=operations_pb2.Measurement(
targets=[operations_pb2.Qubit(row=2, col=3)], key='test'
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3))
def test_single_qubit_measurement_to_proto_convert_invert_mask():
gate = cirq.MeasurementGate(1, 'test', invert_mask=(True,))
proto = operations_pb2.Operation(
measurement=operations_pb2.Measurement(
targets=[operations_pb2.Qubit(row=2, col=3)], key='test', invert_mask=[True]
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3))
def test_single_qubit_measurement_to_proto_pad_invert_mask():
gate = cirq.MeasurementGate(2, 'test', invert_mask=(True,))
proto = operations_pb2.Operation(
measurement=operations_pb2.Measurement(
targets=[operations_pb2.Qubit(row=2, col=3), operations_pb2.Qubit(row=2, col=4)],
key='test',
invert_mask=[True, False],
)
)
assert (
programs.gate_to_proto(gate, (cirq.GridQubit(2, 3), cirq.GridQubit(2, 4)), delay=0) == proto
)
def test_multi_qubit_measurement_to_proto():
gate = cirq.MeasurementGate(2, 'test')
proto = operations_pb2.Operation(
measurement=operations_pb2.Measurement(
targets=[operations_pb2.Qubit(row=2, col=3), operations_pb2.Qubit(row=3, col=4)],
key='test',
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3), cirq.GridQubit(3, 4))
def test_z_proto_convert():
gate = cirq.Z ** sympy.Symbol('k')
proto = operations_pb2.Operation(
exp_z=operations_pb2.ExpZ(
target=operations_pb2.Qubit(row=2, col=3),
half_turns=operations_pb2.ParameterizedFloat(parameter_key='k'),
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3))
gate = cirq.Z ** 0.5
proto = operations_pb2.Operation(
exp_z=operations_pb2.ExpZ(
target=operations_pb2.Qubit(row=2, col=3),
half_turns=operations_pb2.ParameterizedFloat(raw=0.5),
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3))
def test_cz_proto_convert():
gate = cirq.CZ ** sympy.Symbol('k')
proto = operations_pb2.Operation(
exp_11=operations_pb2.Exp11(
target1=operations_pb2.Qubit(row=2, col=3),
target2=operations_pb2.Qubit(row=3, col=4),
half_turns=operations_pb2.ParameterizedFloat(parameter_key='k'),
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3), cirq.GridQubit(3, 4))
gate = cirq.CZ ** 0.5
proto = operations_pb2.Operation(
exp_11=operations_pb2.Exp11(
target1=operations_pb2.Qubit(row=2, col=3),
target2=operations_pb2.Qubit(row=3, col=4),
half_turns=operations_pb2.ParameterizedFloat(raw=0.5),
)
)
assert_proto_dict_convert(gate, proto, cirq.GridQubit(2, 3), cirq.GridQubit(3, 4))
def test_w_to_proto():
    """ExpW serialization covers symbolic/concrete exponents and axis phases.

    Fix: the original repeated the
    PhasedXPowGate(exponent=0.5, phase_exponent=Symbol('j')) case twice
    verbatim; the redundant duplicate has been removed.
    """
    qubit = cirq.GridQubit(2, 3)

    def expected_proto(axis_half_turns, half_turns):
        # Helper: ExpW protos differ only in the two parameter payloads.
        return operations_pb2.Operation(
            exp_w=operations_pb2.ExpW(
                target=operations_pb2.Qubit(row=2, col=3),
                axis_half_turns=axis_half_turns,
                half_turns=half_turns,
            )
        )

    # Symbolic exponent with a fixed axis phase.
    assert_proto_dict_convert(
        cirq.PhasedXPowGate(exponent=sympy.Symbol('k'), phase_exponent=1),
        expected_proto(
            operations_pb2.ParameterizedFloat(raw=1),
            operations_pb2.ParameterizedFloat(parameter_key='k'),
        ),
        qubit,
    )
    # Symbolic axis phase with a fixed exponent.
    assert_proto_dict_convert(
        cirq.PhasedXPowGate(exponent=0.5, phase_exponent=sympy.Symbol('j')),
        expected_proto(
            operations_pb2.ParameterizedFloat(parameter_key='j'),
            operations_pb2.ParameterizedFloat(raw=0.5),
        ),
        qubit,
    )
    # X**t serializes as ExpW with axis phase 0.
    assert_proto_dict_convert(
        cirq.X ** 0.25,
        expected_proto(
            operations_pb2.ParameterizedFloat(raw=0.0),
            operations_pb2.ParameterizedFloat(raw=0.25),
        ),
        qubit,
    )
    # Y**t serializes as ExpW with axis phase 0.5.
    assert_proto_dict_convert(
        cirq.Y ** 0.25,
        expected_proto(
            operations_pb2.ParameterizedFloat(raw=0.5),
            operations_pb2.ParameterizedFloat(raw=0.25),
        ),
        qubit,
    )
def test_unsupported_op():
    """Deserializing an empty proto and serializing a non-native gate both fail."""
    with pytest.raises(ValueError, match='invalid operation'):
        programs.xmon_op_from_proto(operations_pb2.Operation())
    ccz_qubits = (cirq.GridQubit(0, 0), cirq.GridQubit(0, 1), cirq.GridQubit(0, 2))
    with pytest.raises(ValueError, match='know how to serialize'):
        programs.gate_to_proto(cirq.CCZ, ccz_qubits, delay=0)
def test_invalid_to_proto_dict_qubit_number():
    """Serialization rejects gates applied to the wrong number of qubits."""
    q23, q34 = cirq.GridQubit(2, 3), cirq.GridQubit(3, 4)
    with pytest.raises(ValueError, match='Wrong number of qubits'):
        _ = programs.gate_to_proto(cirq.CZ ** 0.5, (q23,), delay=0)
    with pytest.raises(ValueError, match='Wrong number of qubits'):
        programs.gate_to_proto(cirq.Z ** 0.5, (q23, q34), delay=0)
    with pytest.raises(ValueError, match='Wrong number of qubits'):
        programs.gate_to_proto(
            cirq.PhasedXPowGate(exponent=0.5, phase_exponent=0), (q23, q34), delay=0
        )
def test_parameterized_value_from_proto():
    """A ParameterizedFloat decodes to a raw number or a sympy Symbol."""
    from_proto = programs._parameterized_value_from_proto
    assert from_proto(operations_pb2.ParameterizedFloat(raw=5)) == 5
    # A message with neither field populated is rejected.
    with pytest.raises(ValueError):
        from_proto(operations_pb2.ParameterizedFloat())
    assert from_proto(
        operations_pb2.ParameterizedFloat(parameter_key='rr')
    ) == sympy.Symbol('rr')
def test_invalid_measurement_gate():
    """Measurement serialization validates mask length and qubit count."""
    mismatched_mask_gate = cirq.MeasurementGate(3, 'test', invert_mask=(True,))
    with pytest.raises(ValueError, match='length'):
        _ = programs.gate_to_proto(
            mismatched_mask_gate, (cirq.GridQubit(2, 3), cirq.GridQubit(3, 4)), delay=0
        )
    with pytest.raises(ValueError, match='no qubits'):
        _ = programs.gate_to_proto(cirq.MeasurementGate(1, 'test'), (), delay=0)
def test_is_supported():
    """is_native_xmon_op accepts xmon-native operations and rejects others."""
    q00, q01, q10 = cirq.GridQubit(0, 0), cirq.GridQubit(0, 1), cirq.GridQubit(1, 0)
    native_ops = [
        cirq.CZ(q00, q01),
        cirq.X(q00) ** 0.5,
        cirq.Y(q00) ** 0.5,
        cirq.Z(q00) ** 0.5,
        cirq.PhasedXPowGate(phase_exponent=0.2).on(q00) ** 0.5,
        cirq.Z(q00) ** 1,
    ]
    assert all(programs.is_native_xmon_op(op) for op in native_ops)
    assert not programs.is_native_xmon_op(cirq.CCZ(q00, q01, q10))
    assert not programs.is_native_xmon_op(cirq.SWAP(q00, q01))
def test_is_native_xmon_gate():
    """is_native_xmon_gate accepts xmon-native gates and rejects others."""
    native_gates = [
        cirq.CZ,
        cirq.X ** 0.5,
        cirq.Y ** 0.5,
        cirq.Z ** 0.5,
        cirq.PhasedXPowGate(phase_exponent=0.2) ** 0.5,
        cirq.Z ** 1,
    ]
    assert all(programs.is_native_xmon_gate(gate) for gate in native_gates)
    assert not programs.is_native_xmon_gate(cirq.CCZ)
    assert not programs.is_native_xmon_gate(cirq.SWAP)
| 32.333333
| 100
| 0.616556
|
4a1078e945fe56927eeaf08f57033ba8df4583f4
| 35,967
|
py
|
Python
|
venv/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_virtualmachine_scaleset.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_virtualmachine_scaleset.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_virtualmachine_scaleset.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2016 Sertac Ozercan, <seozerca@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachine_scaleset
version_added: "2.4"
short_description: Manage Azure virtual machine scale sets.
description:
- Create and update a virtual machine scale set.
options:
resource_group:
description:
- Name of the resource group containing the virtual machine scale set.
required: true
name:
description:
- Name of the virtual machine.
required: true
state:
description:
- Assert the state of the virtual machine scale set.
            - State 'present' will check that the machine exists with the requested configuration. If the configuration
              of the existing machine does not match, the machine will be updated.
            - State 'absent' will remove the virtual machine scale set.
default: present
choices:
- absent
- present
location:
description:
- Valid Azure location. Defaults to location of the resource group.
short_hostname:
description:
- Short host name
version_added: 2.6
vm_size:
description:
- A valid Azure VM size value. For example, 'Standard_D4'. The list of choices varies depending on the
subscription and location. Check your subscription for available choices.
required: true
capacity:
description:
- Capacity of VMSS.
required: true
default: 1
tier:
description:
- SKU Tier.
choices:
- Basic
- Standard
upgrade_policy:
description:
- Upgrade policy.
choices:
- Manual
- Automatic
admin_username:
description:
- Admin username used to access the host after it is created. Required when creating a VM.
admin_password:
description:
- Password for the admin username. Not required if the os_type is Linux and SSH password authentication
is disabled by setting ssh_password_enabled to false.
ssh_password_enabled:
description:
- When the os_type is Linux, setting ssh_password_enabled to false will disable SSH password authentication
and require use of SSH keys.
type: bool
default: true
ssh_public_keys:
description:
- "For os_type Linux provide a list of SSH keys. Each item in the list should be a dictionary where the
dictionary contains two keys: path and key_data. Set the path to the default location of the
authorized_keys files. On an Enterprise Linux host, for example, the path will be
/home/<admin username>/.ssh/authorized_keys. Set key_data to the actual value of the public key."
image:
description:
- Specifies the image used to build the VM.
- If a string, the image is sourced from a custom image based on the
name.
- 'If a dict with the keys C(publisher), C(offer), C(sku), and
C(version), the image is sourced from a Marketplace image. NOTE:
set image.version to C(latest) to get the most recent version of a
given image.'
- 'If a dict with the keys C(name) and C(resource_group), the image
is sourced from a custom image based on the C(name) and
C(resource_group) set. NOTE: the key C(resource_group) is optional
and if omitted, all images in the subscription will be searched for
by C(name).'
- Custom image support was added in Ansible 2.5
required: true
os_disk_caching:
description:
- Type of OS disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
aliases:
- disk_caching
os_type:
description:
- Base type of operating system.
choices:
- Windows
- Linux
default: Linux
managed_disk_type:
description:
- Managed disk type.
choices:
- Standard_LRS
- Premium_LRS
data_disks:
description:
- Describes list of data disks.
version_added: "2.4"
suboptions:
lun:
description:
- The logical unit number for data disk.
default: 0
version_added: "2.4"
disk_size_gb:
description:
- The initial disk size in GB for blank data disks.
version_added: "2.4"
managed_disk_type:
description:
- Managed data disk type.
choices:
- Standard_LRS
- Premium_LRS
version_added: "2.4"
caching:
description:
- Type of data disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
version_added: "2.4"
virtual_network_resource_group:
description:
- When creating a virtual machine, if a specific virtual network from another resource group should be
used, use this parameter to specify the resource group to use.
version_added: "2.5"
virtual_network_name:
description:
- Virtual Network name.
aliases:
- virtual_network
subnet_name:
description:
- Subnet name.
aliases:
- subnet
load_balancer:
description:
- Load balancer name.
version_added: "2.5"
remove_on_absent:
description:
- When removing a VM using state 'absent', also remove associated resources.
- "It can be 'all' or a list with any of the following: ['network_interfaces', 'virtual_storage', 'public_ips']."
- Any other input will be ignored.
default: ['all']
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Sertac Ozercan (@sozercan)"
'''
EXAMPLES = '''
- name: Create VMSS
azure_rm_virtualmachine_scaleset:
resource_group: Testing
name: testvmss
vm_size: Standard_DS1_v2
capacity: 2
virtual_network_name: testvnet
subnet_name: testsubnet
admin_username: adminUser
ssh_password_enabled: false
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
        key_data: < insert your ssh public key here... >
managed_disk_type: Standard_LRS
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
data_disks:
- lun: 0
disk_size_gb: 64
caching: ReadWrite
managed_disk_type: Standard_LRS
- name: Create a VMSS with a custom image
azure_rm_virtualmachine_scaleset:
resource_group: Testing
name: testvmss
vm_size: Standard_DS1_v2
capacity: 2
virtual_network_name: testvnet
subnet_name: testsubnet
admin_username: adminUser
admin_password: password01
managed_disk_type: Standard_LRS
image: customimage001
- name: Create a VMSS with a custom image from a particular resource group
azure_rm_virtualmachine_scaleset:
resource_group: Testing
name: testvmss
vm_size: Standard_DS1_v2
capacity: 2
virtual_network_name: testvnet
subnet_name: testsubnet
admin_username: adminUser
admin_password: password01
managed_disk_type: Standard_LRS
image:
name: customimage001
resource_group: Testing
'''
RETURN = '''
azure_vmss:
description: Facts about the current state of the object. Note that facts are not part of the registered output but available directly.
returned: always
type: complex
contains: {
"properties": {
"overprovision": true,
"singlePlacementGroup": true,
"upgradePolicy": {
"mode": "Manual"
},
"virtualMachineProfile": {
"networkProfile": {
"networkInterfaceConfigurations": [
{
"name": "testvmss",
"properties": {
"dnsSettings": {
"dnsServers": []
},
"enableAcceleratedNetworking": false,
"ipConfigurations": [
{
"name": "default",
"properties": {
"privateIPAddressVersion": "IPv4",
"subnet": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/virtualNetworks/testvnet/subnets/testsubnet"
}
}
}
],
"primary": true
}
}
]
},
"osProfile": {
"adminUsername": "testuser",
"computerNamePrefix": "testvmss",
"linuxConfiguration": {
"disablePasswordAuthentication": true,
"ssh": {
"publicKeys": [
{
"keyData": "",
"path": "/home/testuser/.ssh/authorized_keys"
}
]
}
},
"secrets": []
},
"storageProfile": {
"dataDisks": [
{
"caching": "ReadWrite",
"createOption": "empty",
"diskSizeGB": 64,
"lun": 0,
"managedDisk": {
"storageAccountType": "Standard_LRS"
}
}
],
"imageReference": {
"offer": "CoreOS",
"publisher": "CoreOS",
"sku": "Stable",
"version": "899.17.0"
},
"osDisk": {
"caching": "ReadWrite",
"createOption": "fromImage",
"managedDisk": {
"storageAccountType": "Standard_LRS"
}
}
}
}
},
"sku": {
"capacity": 2,
"name": "Standard_DS1_v2",
"tier": "Standard"
},
"tags": null,
"type": "Microsoft.Compute/virtualMachineScaleSets"
}
''' # NOQA
import random
import re
try:
from msrestazure.azure_exceptions import CloudError
from msrestazure.tools import parse_resource_id
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, azure_id_to_dict
AZURE_OBJECT_CLASS = 'VirtualMachineScaleSet'
AZURE_ENUM_MODULES = ['azure.mgmt.compute.models']
class AzureRMVirtualMachineScaleSet(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(choices=['present', 'absent'], default='present', type='str'),
location=dict(type='str'),
short_hostname=dict(type='str'),
vm_size=dict(type='str', required=True),
tier=dict(type='str', choices=['Basic', 'Standard']),
capacity=dict(type='int', default=1),
upgrade_policy=dict(type='str', choices=['Automatic', 'Manual']),
admin_username=dict(type='str'),
admin_password=dict(type='str', no_log=True),
ssh_password_enabled=dict(type='bool', default=True),
ssh_public_keys=dict(type='list'),
image=dict(type='raw'),
os_disk_caching=dict(type='str', aliases=['disk_caching'], choices=['ReadOnly', 'ReadWrite'],
default='ReadOnly'),
os_type=dict(type='str', choices=['Linux', 'Windows'], default='Linux'),
managed_disk_type=dict(type='str', choices=['Standard_LRS', 'Premium_LRS']),
data_disks=dict(type='list'),
subnet_name=dict(type='str', aliases=['subnet']),
load_balancer=dict(type='str'),
virtual_network_resource_group=dict(type='str'),
virtual_network_name=dict(type='str', aliases=['virtual_network']),
remove_on_absent=dict(type='list', default=['all']),
)
self.resource_group = None
self.name = None
self.state = None
self.location = None
self.short_hostname = None
self.vm_size = None
self.capacity = None
self.tier = None
self.upgrade_policy = None
self.admin_username = None
self.admin_password = None
self.ssh_password_enabled = None
self.ssh_public_keys = None
self.image = None
self.os_disk_caching = None
self.managed_disk_type = None
self.data_disks = None
self.os_type = None
self.subnet_name = None
self.virtual_network_resource_group = None
self.virtual_network_name = None
self.tags = None
self.differences = None
self.load_balancer = None
self.results = dict(
changed=False,
actions=[],
ansible_facts=dict(azure_vmss=None)
)
super(AzureRMVirtualMachineScaleSet, self).__init__(
derived_arg_spec=self.module_arg_spec,
supports_check_mode=True
)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
# make sure options are lower case
self.remove_on_absent = set([resource.lower() for resource in self.remove_on_absent])
# default virtual_network_resource_group to resource_group
if not self.virtual_network_resource_group:
self.virtual_network_resource_group = self.resource_group
changed = False
results = dict()
vmss = None
disable_ssh_password = None
vmss_dict = None
virtual_network = None
subnet = None
image_reference = None
custom_image = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
# Set default location
self.location = resource_group.location
if self.state == 'present':
# Verify parameters and resolve any defaults
if self.vm_size and not self.vm_size_is_valid():
self.fail("Parameter error: vm_size {0} is not valid for your subscription and location.".format(
self.vm_size
))
# if self.virtual_network_name:
# virtual_network = self.get_virtual_network(self.virtual_network_name)
if self.ssh_public_keys:
msg = "Parameter error: expecting ssh_public_keys to be a list of type dict where " \
"each dict contains keys: path, key_data."
for key in self.ssh_public_keys:
if not isinstance(key, dict):
self.fail(msg)
if not key.get('path') or not key.get('key_data'):
self.fail(msg)
if self.image and isinstance(self.image, dict):
if all(key in self.image for key in ('publisher', 'offer', 'sku', 'version')):
marketplace_image = self.get_marketplace_image_version()
if self.image['version'] == 'latest':
self.image['version'] = marketplace_image.name
self.log("Using image version {0}".format(self.image['version']))
image_reference = self.compute_models.ImageReference(
publisher=self.image['publisher'],
offer=self.image['offer'],
sku=self.image['sku'],
version=self.image['version']
)
elif self.image.get('name'):
custom_image = True
image_reference = self.get_custom_image_reference(
self.image.get('name'),
self.image.get('resource_group'))
else:
self.fail("parameter error: expecting image to contain [publisher, offer, sku, version] or [name, resource_group]")
elif self.image and isinstance(self.image, str):
custom_image = True
image_reference = self.get_custom_image_reference(self.image)
elif self.image:
self.fail("parameter error: expecting image to be a string or dict not {0}".format(type(self.image).__name__))
disable_ssh_password = not self.ssh_password_enabled
try:
self.log("Fetching virtual machine scale set {0}".format(self.name))
vmss = self.compute_client.virtual_machine_scale_sets.get(self.resource_group, self.name)
self.check_provisioning_state(vmss, self.state)
vmss_dict = self.serialize_vmss(vmss)
if self.state == 'present':
differences = []
results = vmss_dict
if self.os_disk_caching and \
self.os_disk_caching != vmss_dict['properties']['virtualMachineProfile']['storageProfile']['osDisk']['caching']:
self.log('CHANGED: virtual machine scale set {0} - OS disk caching'.format(self.name))
differences.append('OS Disk caching')
changed = True
vmss_dict['properties']['virtualMachineProfile']['storageProfile']['osDisk']['caching'] = self.os_disk_caching
if self.capacity and \
self.capacity != vmss_dict['sku']['capacity']:
self.log('CHANGED: virtual machine scale set {0} - Capacity'.format(self.name))
differences.append('Capacity')
changed = True
vmss_dict['sku']['capacity'] = self.capacity
if self.data_disks and \
len(self.data_disks) != len(vmss_dict['properties']['virtualMachineProfile']['storageProfile'].get('dataDisks', [])):
self.log('CHANGED: virtual machine scale set {0} - Data Disks'.format(self.name))
differences.append('Data Disks')
changed = True
update_tags, vmss_dict['tags'] = self.update_tags(vmss_dict.get('tags', dict()))
if update_tags:
differences.append('Tags')
changed = True
self.differences = differences
elif self.state == 'absent':
self.log("CHANGED: virtual machine scale set {0} exists and requested state is 'absent'".format(self.name))
results = dict()
changed = True
except CloudError:
self.log('Virtual machine scale set {0} does not exist'.format(self.name))
if self.state == 'present':
self.log("CHANGED: virtual machine scale set {0} does not exist but state is 'present'.".format(self.name))
changed = True
self.results['changed'] = changed
self.results['ansible_facts']['azure_vmss'] = results
if self.check_mode:
return self.results
if changed:
if self.state == 'present':
if not vmss:
# Create the VMSS
self.log("Create virtual machine scale set {0}".format(self.name))
self.results['actions'].append('Created VMSS {0}'.format(self.name))
# Validate parameters
if not self.admin_username:
self.fail("Parameter error: admin_username required when creating a virtual machine scale set.")
if self.os_type == 'Linux':
if disable_ssh_password and not self.ssh_public_keys:
self.fail("Parameter error: ssh_public_keys required when disabling SSH password.")
if not self.virtual_network_name:
default_vnet = self.create_default_vnet()
virtual_network = default_vnet.id
self.virtual_network_name = default_vnet.name
if self.subnet_name:
subnet = self.get_subnet(self.virtual_network_name, self.subnet_name)
load_balancer_backend_address_pools = None
load_balancer_inbound_nat_pools = None
if self.load_balancer:
load_balancer = self.get_load_balancer(self.load_balancer)
load_balancer_backend_address_pools = ([self.compute_models.SubResource(resource.id)
for resource in load_balancer.backend_address_pools]
if load_balancer.backend_address_pools else None)
load_balancer_inbound_nat_pools = ([self.compute_models.SubResource(resource.id)
for resource in load_balancer.inbound_nat_pools]
if load_balancer.inbound_nat_pools else None)
if not self.short_hostname:
self.short_hostname = self.name
if not image_reference:
self.fail("Parameter error: an image is required when creating a virtual machine.")
managed_disk = self.compute_models.VirtualMachineScaleSetManagedDiskParameters(storage_account_type=self.managed_disk_type)
vmss_resource = self.compute_models.VirtualMachineScaleSet(
self.location,
tags=self.tags,
upgrade_policy=self.compute_models.UpgradePolicy(
mode=self.upgrade_policy
),
sku=self.compute_models.Sku(
name=self.vm_size,
capacity=self.capacity,
tier=self.tier,
),
virtual_machine_profile=self.compute_models.VirtualMachineScaleSetVMProfile(
os_profile=self.compute_models.VirtualMachineScaleSetOSProfile(
admin_username=self.admin_username,
computer_name_prefix=self.short_hostname,
),
storage_profile=self.compute_models.VirtualMachineScaleSetStorageProfile(
os_disk=self.compute_models.VirtualMachineScaleSetOSDisk(
managed_disk=managed_disk,
create_option=self.compute_models.DiskCreateOptionTypes.from_image,
caching=self.os_disk_caching,
),
image_reference=image_reference,
),
network_profile=self.compute_models.VirtualMachineScaleSetNetworkProfile(
network_interface_configurations=[
self.compute_models.VirtualMachineScaleSetNetworkConfiguration(
name=self.name,
primary=True,
ip_configurations=[
self.compute_models.VirtualMachineScaleSetIPConfiguration(
name='default',
subnet=self.compute_models.ApiEntityReference(
id=subnet.id
),
primary=True,
load_balancer_backend_address_pools=load_balancer_backend_address_pools,
load_balancer_inbound_nat_pools=load_balancer_inbound_nat_pools
)
]
)
]
)
)
)
if self.admin_password:
vmss_resource.virtual_machine_profile.os_profile.admin_password = self.admin_password
if self.os_type == 'Linux':
vmss_resource.virtual_machine_profile.os_profile.linux_configuration = self.compute_models.LinuxConfiguration(
disable_password_authentication=disable_ssh_password
)
if self.ssh_public_keys:
ssh_config = self.compute_models.SshConfiguration()
ssh_config.public_keys = \
[self.compute_models.SshPublicKey(path=key['path'], key_data=key['key_data']) for key in self.ssh_public_keys]
vmss_resource.virtual_machine_profile.os_profile.linux_configuration.ssh = ssh_config
if self.data_disks:
data_disks = []
for data_disk in self.data_disks:
data_disk_managed_disk = self.compute_models.VirtualMachineScaleSetManagedDiskParameters(
storage_account_type=data_disk['managed_disk_type']
)
data_disk['caching'] = data_disk.get(
'caching',
self.compute_models.CachingTypes.read_only
)
data_disks.append(self.compute_models.VirtualMachineScaleSetDataDisk(
lun=data_disk['lun'],
caching=data_disk['caching'],
create_option=self.compute_models.DiskCreateOptionTypes.empty,
disk_size_gb=data_disk['disk_size_gb'],
managed_disk=data_disk_managed_disk,
))
vmss_resource.virtual_machine_profile.storage_profile.data_disks = data_disks
self.log("Create virtual machine with parameters:")
self.create_or_update_vmss(vmss_resource)
elif self.differences and len(self.differences) > 0:
self.log("Update virtual machine scale set {0}".format(self.name))
self.results['actions'].append('Updated VMSS {0}'.format(self.name))
vmss_resource = self.get_vmss()
vmss_resource.virtual_machine_profile.storage_profile.os_disk.caching = self.os_disk_caching
vmss_resource.sku.capacity = self.capacity
data_disks = []
for data_disk in self.data_disks:
data_disks.append(self.compute_models.VirtualMachineScaleSetDataDisk(
lun=data_disk['lun'],
caching=data_disk['caching'],
create_option=self.compute_models.DiskCreateOptionTypes.empty,
disk_size_gb=data_disk['disk_size_gb'],
managed_disk=self.compute_models.VirtualMachineScaleSetManagedDiskParameters(
storage_account_type=data_disk['managed_disk_type']
),
))
vmss_resource.virtual_machine_profile.storage_profile.data_disks = data_disks
self.log("Update virtual machine with parameters:")
self.create_or_update_vmss(vmss_resource)
self.results['ansible_facts']['azure_vmss'] = self.serialize_vmss(self.get_vmss())
elif self.state == 'absent':
# delete the VM
self.log("Delete virtual machine scale set {0}".format(self.name))
self.results['ansible_facts']['azure_vmss'] = None
self.delete_vmss(vmss)
# until we sort out how we want to do this globally
del self.results['actions']
return self.results
def get_vmss(self):
'''
Get the VMSS
:return: VirtualMachineScaleSet object
'''
try:
vmss = self.compute_client.virtual_machine_scale_sets.get(self.resource_group, self.name)
return vmss
except CloudError as exc:
self.fail("Error getting virtual machine scale set {0} - {1}".format(self.name, str(exc)))
def get_virtual_network(self, name):
try:
vnet = self.network_client.virtual_networks.get(self.virtual_network_resource_group, name)
return vnet
except CloudError as exc:
self.fail("Error fetching virtual network {0} - {1}".format(name, str(exc)))
def get_subnet(self, vnet_name, subnet_name):
self.log("Fetching subnet {0} in virtual network {1}".format(subnet_name, vnet_name))
try:
subnet = self.network_client.subnets.get(self.virtual_network_resource_group, vnet_name, subnet_name)
except CloudError as exc:
self.fail("Error: fetching subnet {0} in virtual network {1} - {2}".format(
subnet_name,
vnet_name,
str(exc)))
return subnet
    def get_load_balancer(self, id):
        # NOTE(review): parameter name `id` shadows the builtin, but it is part of
        # the method's interface and is kept for backward compatibility.
        # `id` may be a full Azure resource id or a bare name; parse_resource_id
        # supplies the resource group when present, else self.resource_group is used.
        id_dict = parse_resource_id(id)
        try:
            return self.network_client.load_balancers.get(id_dict.get('resource_group', self.resource_group), id_dict.get('name'))
        except CloudError as exc:
            self.fail("Error fetching load balancer {0} - {1}".format(id, str(exc)))
def serialize_vmss(self, vmss):
'''
Convert a VirtualMachineScaleSet object to dict.
:param vm: VirtualMachineScaleSet object
:return: dict
'''
result = self.serialize_obj(vmss, AZURE_OBJECT_CLASS, enum_modules=AZURE_ENUM_MODULES)
result['id'] = vmss.id
result['name'] = vmss.name
result['type'] = vmss.type
result['location'] = vmss.location
result['tags'] = vmss.tags
return result
def delete_vmss(self, vmss):
self.log("Deleting virtual machine scale set {0}".format(self.name))
self.results['actions'].append("Deleted virtual machine scale set {0}".format(self.name))
try:
poller = self.compute_client.virtual_machine_scale_sets.delete(self.resource_group, self.name)
# wait for the poller to finish
self.get_poller_result(poller)
except CloudError as exc:
self.fail("Error deleting virtual machine scale set {0} - {1}".format(self.name, str(exc)))
return True
def get_marketplace_image_version(self):
try:
versions = self.compute_client.virtual_machine_images.list(self.location,
self.image['publisher'],
self.image['offer'],
self.image['sku'])
except CloudError as exc:
self.fail("Error fetching image {0} {1} {2} - {3}".format(self.image['publisher'],
self.image['offer'],
self.image['sku'],
str(exc)))
if versions and len(versions) > 0:
if self.image['version'] == 'latest':
return versions[len(versions) - 1]
for version in versions:
if version.name == self.image['version']:
return version
self.fail("Error could not find image {0} {1} {2} {3}".format(self.image['publisher'],
self.image['offer'],
self.image['sku'],
self.image['version']))
def get_custom_image_reference(self, name, resource_group=None):
try:
if resource_group:
vm_images = self.compute_client.images.list_by_resource_group(resource_group)
else:
vm_images = self.compute_client.images.list()
except Exception as exc:
self.fail("Error fetching custom images from subscription - {0}".format(str(exc)))
for vm_image in vm_images:
if vm_image.name == name:
self.log("Using custom image id {0}".format(vm_image.id))
return self.compute_models.ImageReference(id=vm_image.id)
self.fail("Error could not find image with name {0}".format(name))
def create_or_update_vmss(self, params):
try:
poller = self.compute_client.virtual_machine_scale_sets.create_or_update(self.resource_group, self.name, params)
self.get_poller_result(poller)
except CloudError as exc:
self.fail("Error creating or updating virtual machine {0} - {1}".format(self.name, str(exc)))
def vm_size_is_valid(self):
'''
Validate self.vm_size against the list of virtual machine sizes available for the account and location.
:return: boolean
'''
try:
sizes = self.compute_client.virtual_machine_sizes.list(self.location)
except CloudError as exc:
self.fail("Error retrieving available machine sizes - {0}".format(str(exc)))
for size in sizes:
if size.name == self.vm_size:
return True
return False
def main():
    # Instantiating the module class runs the whole lifecycle (argument parsing,
    # exec_module, exit/fail) via AzureRMModuleBase.__init__.
    AzureRMVirtualMachineScaleSet()
if __name__ == '__main__':
    main()
| 41.968495
| 199
| 0.533044
|
4a10795dbb40b3862eceed11241cc46cbc74f1c9
| 1,316
|
py
|
Python
|
Advance_Python/searching_and_sorting/Quick_Sort3.py
|
siddharth-143/Python
|
293f4643a3a13e3b82d23fd8922db54dbb0f12bc
|
[
"MIT"
] | null | null | null |
Advance_Python/searching_and_sorting/Quick_Sort3.py
|
siddharth-143/Python
|
293f4643a3a13e3b82d23fd8922db54dbb0f12bc
|
[
"MIT"
] | null | null | null |
Advance_Python/searching_and_sorting/Quick_Sort3.py
|
siddharth-143/Python
|
293f4643a3a13e3b82d23fd8922db54dbb0f12bc
|
[
"MIT"
] | null | null | null |
# Python program to implement quick sort
# using median of 3 values
import statistics
def pivot_place(list1, first, last):
    """Partition list1[first..last] in place around a median-of-three pivot.

    The pivot is the median of the first, middle, and last elements; it is
    parked at the end, the range is partitioned, and the pivot's final index
    is returned.
    """
    low = list1[first]
    high = list1[last]
    mid = (first + last) // 2
    pivot_val = statistics.median([low, list1[mid], high])
    # Locate which of the three sampled positions holds the median value.
    if pivot_val == low:
        pindex = first
    elif pivot_val == high:
        pindex = last
    else:
        pindex = mid
    # Park the pivot at the end while partitioning.
    list1[last], list1[pindex] = list1[pindex], list1[last]
    pivot = list1[last]
    left = first
    right = last - 1
    while True:
        # Advance left past elements <= pivot; retreat right past elements >= pivot.
        while left <= right and list1[left] <= pivot:
            left += 1
        while left <= right and list1[right] >= pivot:
            right -= 1
        if right < left:
            break
        list1[left], list1[right] = list1[right], list1[left]
    # Restore the pivot to its sorted position and report it.
    list1[last], list1[left] = list1[left], list1[last]
    return left


def Quick_Sort(list1, first, last):
    """Sort list1[first..last] in place with median-of-three quicksort.

    Bug fix: the original recursed on the right half with
    Quick_Sort(list1, last, p + 1) — swapped bounds — so elements after the
    pivot were never sorted. The correct call is Quick_Sort(list1, p + 1, last).
    """
    if first < last:
        p = pivot_place(list1, first, last)
        Quick_Sort(list1, first, p - 1)
        Quick_Sort(list1, p + 1, last)
# Demo: sort a fixed sample list. The commented lines below are the original
# interactive-input variant, kept for reference.
list1 = [56, 26, 93, 17, 31, 44, 17]
# num = int(input("Enter a range : "))
# list1 = [int(input("Enter an elements : ")) for i in range(num)]
print("Unsorted list : ", list1)
Quick_Sort(list1, 0, len(list1) - 1)
print("Sorted List : ", list1)
| 24.830189
| 66
| 0.575988
|
4a107af80a9021ebbd9a5ed47fc719ec2a89e026
| 574
|
py
|
Python
|
mysite/polls/admin.py
|
meli-lewis/django_tutorial
|
c1f04a9d281ebb150d32d766174380bc0e7defb2
|
[
"MIT"
] | 4
|
2018-04-11T17:54:27.000Z
|
2018-04-12T06:14:29.000Z
|
mysite/polls/admin.py
|
meli-lewis/django_tutorial
|
c1f04a9d281ebb150d32d766174380bc0e7defb2
|
[
"MIT"
] | null | null | null |
mysite/polls/admin.py
|
meli-lewis/django_tutorial
|
c1f04a9d281ebb150d32d766174380bc0e7defb2
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Choice, Question
class ChoiceInline(admin.TabularInline):
    # Edit Choice rows inline (tabular layout) on the Question admin page.
    model = Choice
    # Number of blank extra Choice forms shown by default.
    extra = 3
class QuestionAdmin(admin.ModelAdmin):
list_display = ('question_text', 'pub_date', 'was_published_recently')
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
]
inlines = [ChoiceInline]
list_filter = ['pub_date']
search_fields = ['question_text']
admin.site.register(Question, QuestionAdmin)
| 24.956522
| 80
| 0.655052
|
4a107c704866ef6bc179a673b69a5a04802d02a0
| 1,887
|
py
|
Python
|
tests/py_grpc_prometheus/test_grpc_server_started.py
|
RyanSiu1995/py-grpc-prometheus
|
eb9dee1f0a4e57cef220193ee48021dc9a9f3d82
|
[
"Apache-2.0"
] | 40
|
2018-12-05T15:35:52.000Z
|
2022-01-31T09:02:12.000Z
|
tests/py_grpc_prometheus/test_grpc_server_started.py
|
RyanSiu1995/py-grpc-prometheus
|
eb9dee1f0a4e57cef220193ee48021dc9a9f3d82
|
[
"Apache-2.0"
] | 12
|
2019-08-06T12:14:20.000Z
|
2021-08-09T14:53:37.000Z
|
tests/py_grpc_prometheus/test_grpc_server_started.py
|
RyanSiu1995/py-grpc-prometheus
|
eb9dee1f0a4e57cef220193ee48021dc9a9f3d82
|
[
"Apache-2.0"
] | 9
|
2019-12-18T08:49:37.000Z
|
2022-03-30T17:08:15.000Z
|
import pytest
from tests.py_grpc_prometheus.utils import get_server_metric
from tests.integration.hello_world import hello_world_pb2
@pytest.mark.parametrize("target_count", [1, 10, 100])
def test_grpc_server_started_with_normal(
target_count, grpc_server, grpc_stub
): # pylint: disable=unused-argument
for i in range(target_count):
grpc_stub.SayHello(hello_world_pb2.HelloRequest(name=str(i)))
target_metric = get_server_metric("grpc_server_started")
assert target_metric.samples[0].value == target_count
@pytest.mark.parametrize("number_of_res", [1, 10, 100])
def test_grpc_server_started_with_unary_stream(
number_of_res, grpc_server, grpc_stub
): # pylint: disable=unused-argument
list(
grpc_stub.SayHelloUnaryStream(
hello_world_pb2.MultipleHelloResRequest(
name="unary stream", res=number_of_res
)
)
)
target_metric = get_server_metric("grpc_server_started")
# Only one request sent
assert target_metric.samples[0].value == 1
@pytest.mark.parametrize("number_of_names", [1, 10, 100])
def test_grpc_server_started_with_stream_unary(
number_of_names, grpc_server, grpc_stub, stream_request_generator
): # pylint: disable=unused-argument
grpc_stub.SayHelloStreamUnary(stream_request_generator(number_of_names))
target_metric = get_server_metric("grpc_server_started")
assert target_metric.samples == []
@pytest.mark.parametrize(
"number_of_names, number_of_res", [(1, 10), (10, 100), (100, 100)]
)
def test_grpc_server_started_with_bidi_stream(
number_of_names, number_of_res, grpc_server, grpc_stub, bidi_request_generator
): # pylint: disable=unused-argument
list(
grpc_stub.SayHelloBidiStream(
bidi_request_generator(number_of_names, number_of_res)
)
)
target_metric = get_server_metric("grpc_server_started")
assert target_metric.samples == []
| 34.309091
| 82
| 0.765766
|
4a107c763a8297f156ecdd78eb02113beb9fb2ac
| 2,661
|
py
|
Python
|
movies/views.py
|
shershen08/django-rest-framework-crud
|
f83edcd746c71b5ee717932c48173214d3efc131
|
[
"MIT"
] | null | null | null |
movies/views.py
|
shershen08/django-rest-framework-crud
|
f83edcd746c71b5ee717932c48173214d3efc131
|
[
"MIT"
] | null | null | null |
movies/views.py
|
shershen08/django-rest-framework-crud
|
f83edcd746c71b5ee717932c48173214d3efc131
|
[
"MIT"
] | null | null | null |
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework import status
from rest_framework.parsers import JSONParser
from .models import Movie
from .serializers import MovieSerializer
from .permissions import IsOwnerOrReadOnly
@api_view(['GET', 'DELETE', 'PUT']) # Methods Allowed
@permission_classes((IsAuthenticated, IsOwnerOrReadOnly,)) # Pemissions, Only Authenticated user
def get_delete_update_movie(request, pk): #pk es PrimaryKey(Id)
try:
movie = Movie.objects.get(pk=pk)
except Movie.DoesNotExist:
content = {
'status': 'Not Found'
}
return Response(content, status=status.HTTP_404_NOT_FOUND)
# details a sinlge movie
if request.method == 'GET':
serializer = MovieSerializer(movie)
return Response(serializer.data)
# delete a movie
elif request.method == 'DELETE':
if(request.user == movie.creator): # If creator is who makes request
movie.delete()
content = {
'status': 'NO CONTENT'
}
return Response(content, status=status.HTTP_204_NO_CONTENT)
else:
content = {
'status': 'UNAUTHORIZED'
}
return Response(content, status=status.HTTP_401_UNAUTHORIZED)
# update a movie
elif request.method == 'PUT':
if(request.user == movie.creator): # If creator is who makes request
serializer = MovieSerializer(movie, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
else:
content = {
'status': 'UNAUTHORIZED'
}
return Response(content, status=status.HTTP_401_UNAUTHORIZED)
@api_view(['GET', 'POST'])
@permission_classes((IsAuthenticated, ))
def get_post_movies(request):
# get all movies
if request.method == 'GET':
puppies = Movie.objects.all()
serializer = MovieSerializer(puppies, many=True)
return Response(serializer.data)
# create a new movie
elif request.method == 'POST':
serializer = MovieSerializer(data=request.data)
if serializer.is_valid():
serializer.save(creator=request.user)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| 37.478873
| 96
| 0.659526
|
4a107ca1f19ec60ef21d3c3f2c805d46aff92d02
| 36
|
py
|
Python
|
__init__.py
|
sickless/flask_private_area
|
09b2f9382c0426f5ed63488f9fd8ca6d4b3f751c
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
sickless/flask_private_area
|
09b2f9382c0426f5ed63488f9fd8ca6d4b3f751c
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
sickless/flask_private_area
|
09b2f9382c0426f5ed63488f9fd8ca6d4b3f751c
|
[
"BSD-3-Clause"
] | null | null | null |
from .flask_private_area import app
| 18
| 35
| 0.861111
|
4a107d35aef936b52825fffa019946bb204ef3d4
| 5,768
|
py
|
Python
|
azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py
|
AlexanderYukhanov/azure-sdk-for-python
|
41e37c8a10876db40697a63e828ed7cafc19c7d6
|
[
"MIT"
] | 1
|
2018-07-23T08:59:24.000Z
|
2018-07-23T08:59:24.000Z
|
azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py
|
AlexanderYukhanov/azure-sdk-for-python
|
41e37c8a10876db40697a63e828ed7cafc19c7d6
|
[
"MIT"
] | null | null | null |
azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py
|
AlexanderYukhanov/azure-sdk-for-python
|
41e37c8a10876db40697a63e828ed7cafc19c7d6
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .service_event_py3 import ServiceEvent
class ServiceCreatedEvent(ServiceEvent):
"""Service Created event.
All required parameters must be populated in order to send to Azure.
:param event_instance_id: Required. The identifier for the FabricEvent
instance.
:type event_instance_id: str
:param time_stamp: Required. The time event was logged.
:type time_stamp: datetime
:param has_correlated_events: Shows there is existing related events
available.
:type has_correlated_events: bool
:param kind: Required. Constant filled by server.
:type kind: str
:param service_id: Required. The identity of the service. This is an
encoded representation of the service name. This is used in the REST APIs
to identify the service resource.
Starting in version 6.0, hierarchical names are delimited with the "\\~"
character. For example, if the service name is "fabric:/myapp/app1/svc1",
the service identity would be "myapp~app1\\~svc1" in 6.0+ and
"myapp/app1/svc1" in previous versions.
:type service_id: str
:param service_type_name: Required. Service type name.
:type service_type_name: str
:param application_name: Required. Application name.
:type application_name: str
:param application_type_name: Required. Application type name.
:type application_type_name: str
:param service_instance: Required. Id of Service instance.
:type service_instance: long
:param is_stateful: Required. Indicates if Service is stateful.
:type is_stateful: bool
:param partition_count: Required. Number of partitions.
:type partition_count: int
:param target_replica_set_size: Required. Size of target replicas set.
:type target_replica_set_size: int
:param min_replica_set_size: Required. Minimum size of replicas set.
:type min_replica_set_size: int
:param service_package_version: Required. Version of Service package.
:type service_package_version: str
:param partition_id: Required. An internal ID used by Service Fabric to
uniquely identify a partition. This is a randomly generated GUID when the
service was created. The partition ID is unique and does not change for
the lifetime of the service. If the same service was deleted and recreated
the IDs of its partitions would be different.
:type partition_id: str
"""
_validation = {
'event_instance_id': {'required': True},
'time_stamp': {'required': True},
'kind': {'required': True},
'service_id': {'required': True},
'service_type_name': {'required': True},
'application_name': {'required': True},
'application_type_name': {'required': True},
'service_instance': {'required': True},
'is_stateful': {'required': True},
'partition_count': {'required': True},
'target_replica_set_size': {'required': True},
'min_replica_set_size': {'required': True},
'service_package_version': {'required': True},
'partition_id': {'required': True},
}
_attribute_map = {
'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'},
'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'},
'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'},
'kind': {'key': 'Kind', 'type': 'str'},
'service_id': {'key': 'ServiceId', 'type': 'str'},
'service_type_name': {'key': 'ServiceTypeName', 'type': 'str'},
'application_name': {'key': 'ApplicationName', 'type': 'str'},
'application_type_name': {'key': 'ApplicationTypeName', 'type': 'str'},
'service_instance': {'key': 'ServiceInstance', 'type': 'long'},
'is_stateful': {'key': 'IsStateful', 'type': 'bool'},
'partition_count': {'key': 'PartitionCount', 'type': 'int'},
'target_replica_set_size': {'key': 'TargetReplicaSetSize', 'type': 'int'},
'min_replica_set_size': {'key': 'MinReplicaSetSize', 'type': 'int'},
'service_package_version': {'key': 'ServicePackageVersion', 'type': 'str'},
'partition_id': {'key': 'PartitionId', 'type': 'str'},
}
def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, service_type_name: str, application_name: str, application_type_name: str, service_instance: int, is_stateful: bool, partition_count: int, target_replica_set_size: int, min_replica_set_size: int, service_package_version: str, partition_id: str, has_correlated_events: bool=None, **kwargs) -> None:
super(ServiceCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs)
self.service_type_name = service_type_name
self.application_name = application_name
self.application_type_name = application_type_name
self.service_instance = service_instance
self.is_stateful = is_stateful
self.partition_count = partition_count
self.target_replica_set_size = target_replica_set_size
self.min_replica_set_size = min_replica_set_size
self.service_package_version = service_package_version
self.partition_id = partition_id
self.kind = 'ServiceCreated'
| 51.5
| 376
| 0.678745
|
4a107e505a3b8ee2e5ae08ba6aa77e3b55857b04
| 5,181
|
py
|
Python
|
bertmap/extend/onto_extend.py
|
KRR-Oxford/BERTMap
|
26eb78288885b8749c5bd970d44fa0ec18c6f417
|
[
"Apache-2.0"
] | 10
|
2021-12-07T09:09:18.000Z
|
2022-03-28T22:54:25.000Z
|
bertmap/extend/onto_extend.py
|
KRR-Oxford/BERTMap
|
26eb78288885b8749c5bd970d44fa0ec18c6f417
|
[
"Apache-2.0"
] | null | null | null |
bertmap/extend/onto_extend.py
|
KRR-Oxford/BERTMap
|
26eb78288885b8749c5bd970d44fa0ec18c6f417
|
[
"Apache-2.0"
] | 1
|
2022-03-24T09:39:36.000Z
|
2022-03-24T09:39:36.000Z
|
"""
Mapping Extension class
"""
from typing import List, Tuple, Dict, Union
import bertmap
from bertmap.onto import OntoBox
from owlready2.entity import ThingClass
from pandas.core.frame import DataFrame
import pandas as pd
from itertools import product
from copy import deepcopy
import time
class OntoExtend:
na_vals = bertmap.na_vals
def __init__(
self,
src_ob: OntoBox,
tgt_ob: OntoBox,
mapping_file: str,
extend_threshold: float,
):
self.src_ob = src_ob
self.tgt_ob = tgt_ob
self.threshold = extend_threshold
self.raw_mappings = self.read_mappings_to_dict(mapping_file, extend_threshold)
self.frontier = deepcopy(self.raw_mappings) # the frontier of mapping expansion
self.expansion = dict()
def extend_mappings(self, max_iter: int = 1):
start_time = time.time()
num_iter = 0
while self.frontier and num_iter < max_iter:
count = 0
new_expansion = dict()
for mapping in self.frontier.keys():
src_iri, tgt_iri = mapping.split("\t")
print(f"[Time: {round(time.time() - start_time)}][Map {count}]: {src_iri} -> {tgt_iri}")
sup_maps, sub_maps = self.one_hob_extend(src_iri, tgt_iri)
# merging dictionary is possible because the duplicates have been removed
new_expansion = {**new_expansion, **sup_maps, **sub_maps}
print(f"\t[Iteration {num_iter}]: Extend {len(new_expansion)} new mappings")
count += 1
num_iter += 1
self.frontier = deepcopy(new_expansion)
self.expansion = {**self.expansion, **new_expansion}
print(f"[Expansion]: total={len(self.expansion)}")
def one_hob_extend(self, src_iri: str, tgt_iri: str) -> Tuple[Dict, Dict]:
"""1-hop mapping extension, the assumption is given a highly confident mapping,
the corresponding classes' parents and children are likely to be matched.
Args:
src_iri (str): source class iri
tgt_iri (str): target class iri
"""
src_class = self.iri2class(src_iri, flag="SRC")
tgt_class = self.iri2class(tgt_iri, flag="TGT")
# detect parents
src_parents = OntoBox.super_classes(src_class)
tgt_parents = OntoBox.super_classes(tgt_class)
sup_maps = self.batch_compute_mapping(src_parents, tgt_parents, "Parents")
# detect children
src_children = list(src_class.subclasses())
tgt_children = list(tgt_class.subclasses())
sub_maps = self.batch_compute_mapping(src_children, tgt_children, "Children")
return sup_maps, sub_maps
def batch_compute_mapping(
self, src_classes: List[ThingClass], tgt_classes: List[ThingClass], flag: str
) -> Dict:
mappings = dict()
discarded_mappings = dict()
seen_mappings = dict()
for src, tgt in list(product(src_classes, tgt_classes)):
mapping_str, value = self.compute_mapping(src, tgt) # ("src_iri\ttgt_iri", value)
if value >= self.threshold:
# ensure the mapping is not previously predicted
mappings[mapping_str] = value
elif value < self.threshold and value >= 0.0:
discarded_mappings[mapping_str] = value
if value == -1.0:
seen_mappings[mapping_str] = value
print(
f"\t[{flag}] found mappings: valid={len(mappings)}, seen={len(seen_mappings)}, discarded={len(discarded_mappings)}"
)
# print(discarded_mappings)
return mappings
def compute_mapping(
self, src_class: ThingClass, tgt_class: ThingClass
) -> Tuple[str, str, float]:
"""compute the mapping score between src-tgt classes
IMPORTANT: return a invalid mapping when existed in previously predicted set
"""
raise NotImplementedError
def iri2class(self, iri: str, flag: str = "SRC") -> ThingClass:
"""search for the ThingClass object of corresponding iri"""
assert flag == "SRC" or flag == "TGT"
ob = self.src_ob if flag == "SRC" else self.tgt_ob
full_iri = ob.onto_text.expand_entity_iri(iri)
return ob.onto.search(iri=full_iri)[0]
@classmethod
def read_mappings_to_dict(
cls, mapping_file: Union[str, DataFrame], threshold: float = 0.0
) -> Dict:
"""read unique mappings from tsv file or pandas.DataFrame, notice that for duplicated
mappings, the mapping value is assumed to be consistent.
"""
if type(mapping_file) is DataFrame:
_df = mapping_file
else:
_df = pd.read_csv(mapping_file, sep="\t", na_values=cls.na_vals, keep_default_na=False)
mapping_dict = dict()
for i in range(len(_df)):
if _df.iloc[i][-1] >= threshold:
mapping_string = "\t".join(_df.iloc[i][:-1])
mapping_dict[mapping_string] = _df.iloc[i][-1]
print(f"read {len(mapping_dict)} mappings with threshold >= {threshold}.")
return mapping_dict
| 39.25
| 127
| 0.625748
|
4a108003076393db6e7f6d4f284e5787cc4dfef2
| 843
|
py
|
Python
|
inventory/migrations/0005_auto_20171111_1552.py
|
ohing504/django-inventory
|
1a262b826e8e904a7196fe0f0c0645dcd428f3f9
|
[
"MIT"
] | null | null | null |
inventory/migrations/0005_auto_20171111_1552.py
|
ohing504/django-inventory
|
1a262b826e8e904a7196fe0f0c0645dcd428f3f9
|
[
"MIT"
] | 2
|
2020-06-05T17:12:32.000Z
|
2021-06-10T18:12:45.000Z
|
inventory/migrations/0005_auto_20171111_1552.py
|
ohing504/django-inventory
|
1a262b826e8e904a7196fe0f0c0645dcd428f3f9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 06:52
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('administrator', '0001_initial'),
('inventory', '0004_transaction_created_by'),
]
operations = [
migrations.AlterModelOptions(
name='merchandise',
options={'ordering': ['category', 'code'], 'verbose_name': 'Merchandise', 'verbose_name_plural': 'Merchandises'},
),
migrations.AddField(
model_name='transaction',
name='transaction_data',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='administrator.TransactionData'),
),
]
| 31.222222
| 140
| 0.648873
|
4a10800a6c9560814d8904de25a9430048654169
| 41
|
py
|
Python
|
tests/__init__.py
|
johnsmith400/bunny-storm
|
2bec24bc475dc84ff83f8415de4a836a9356c949
|
[
"MIT"
] | 16
|
2021-09-15T07:08:14.000Z
|
2022-01-24T15:25:29.000Z
|
tests/__init__.py
|
johnsmith400/bunny-storm
|
2bec24bc475dc84ff83f8415de4a836a9356c949
|
[
"MIT"
] | 1
|
2021-08-07T17:45:22.000Z
|
2021-08-07T17:45:22.000Z
|
tests/__init__.py
|
johnsmith400/bunny-storm
|
2bec24bc475dc84ff83f8415de4a836a9356c949
|
[
"MIT"
] | 4
|
2021-09-14T13:47:34.000Z
|
2022-03-23T19:36:32.000Z
|
"""Unit test package for bunny_storm."""
| 20.5
| 40
| 0.707317
|
4a108033785ba0c02f9f9d467dc0f8b6760aba7f
| 3,342
|
py
|
Python
|
4_simple_models/interactive_scripts/recall3.py
|
ReyhaneAskari/SLA_violation_classification
|
258a3c415cebcd04601e4d794d42d664471df668
|
[
"MIT"
] | 2
|
2019-03-25T18:07:10.000Z
|
2022-03-06T08:49:49.000Z
|
4_simple_models/interactive_scripts/recall3.py
|
ReyhaneAskari/SLA_violation_classification
|
258a3c415cebcd04601e4d794d42d664471df668
|
[
"MIT"
] | null | null | null |
4_simple_models/interactive_scripts/recall3.py
|
ReyhaneAskari/SLA_violation_classification
|
258a3c415cebcd04601e4d794d42d664471df668
|
[
"MIT"
] | 2
|
2018-10-10T01:18:10.000Z
|
2018-10-10T03:05:53.000Z
|
# -*- coding: utf-8 -*-
import csv
import numpy as np
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import train_test_split
import matplotlib.pyplot as plt
from sklearn import metrics
import pandas as pd
from os import chdir, listdir
from pandas import read_csv
from os import path
from random import randint, sample, seed
from collections import OrderedDict
from pandas import DataFrame, Series
import numpy as np
import csv
import codecs
%matplotlib inline
colnames = ['old_index','job_id', 'task_idx','sched_cls', 'priority', 'cpu_requested',
'mem_requested', 'disk', 'violation']
tain_path = r'/home/askrey/Dropbox/Project_step_by_step/3_create_database/csvs/frull_db_2.csv'
X = pd.read_csv(tain_path, header = None, index_col = False ,names = colnames, skiprows = [0], usecols = [3,4,5,6,7])
y = pd.read_csv(tain_path, header = None, index_col = False ,names = colnames, skiprows = [0], usecols = [8])
y = y['violation'].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.333, random_state=0)
import matplotlib.pyplot as plt
import numpy as np
from sklearn import svm, datasets
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.333, random_state=0)
random_state = np.random.RandomState(0)
# Run classifier
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision = dict()
recall = dict()
average_precision = dict()
for i in range(n_classes):
precision[i], recall[i], _ = precision_recall_curve(y_test[:, i],
y_score[:, i])
average_precision[i] = average_precision_score(y_test[:, i], y_score[:, i])
# Compute micro-average ROC curve and ROC area
precision["micro"], recall["micro"], _ = precision_recall_curve(y_test.ravel(),
y_score.ravel())
average_precision["micro"] = average_precision_score(y_test, y_score,
average="micro")
# Plot Precision-Recall curve
plt.clf()
plt.plot(recall[0], precision[0], label='Precision-Recall curve')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall example: AUC={0:0.2f}'.format(average_precision[0]))
plt.legend(loc="lower left")
plt.show()
# Plot Precision-Recall curve for each class
plt.clf()
plt.plot(recall["micro"], precision["micro"],
label='micro-average Precision-recall curve (area = {0:0.2f})'
''.format(average_precision["micro"]))
for i in range(n_classes):
plt.plot(recall[i], precision[i],
label='Precision-recall curve of class {0} (area = {1:0.2f})'
''.format(i, average_precision[i]))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Extension of Precision-Recall curve to multi-class')
plt.legend(loc="lower right")
plt.show()
| 37.133333
| 118
| 0.708259
|
4a10836167fce0f11dcbca07368e7818114edeb8
| 818
|
py
|
Python
|
python/code.py
|
arnoldxiao/BuildPackage-iOS
|
ff9a7715af40a302eac02df95112680506555ac9
|
[
"MIT"
] | null | null | null |
python/code.py
|
arnoldxiao/BuildPackage-iOS
|
ff9a7715af40a302eac02df95112680506555ac9
|
[
"MIT"
] | null | null | null |
python/code.py
|
arnoldxiao/BuildPackage-iOS
|
ff9a7715af40a302eac02df95112680506555ac9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# @Author: xiaochenghua
# @Date: 2019/08/01
import coloredlogs
import logging
from git import Repo
from utils import Utils
repo = 'https://github.com/arnoldxiao/BuildPackage-Example.git'
class Code(object):
logger = logging.getLogger(__name__)
coloredlogs.install(level='DEBUG', logger=logger)
def __init__(self, version=None):
self.__version = version
# self.__branch = 'BE/' + version if version is not None else 'master'
def clone(self):
if self.__version is None:
logging.error('version is None!')
exit()
__branch = 'BE/' + self.__version
Repo.clone_from(url=repo, to_path=Utils.code_path(), branch=__branch)
if __name__ == '__main__':
Code(version='1.1.0-0801').clone()
| 20.974359
| 78
| 0.650367
|
4a108377cf0f5dbdfd8a79087fc2255b8874224a
| 2,599
|
py
|
Python
|
SafeCracker.py
|
epollinger/python
|
26b500ace6e9ec743c6046be3e191262fb5eb1e9
|
[
"MIT"
] | null | null | null |
SafeCracker.py
|
epollinger/python
|
26b500ace6e9ec743c6046be3e191262fb5eb1e9
|
[
"MIT"
] | null | null | null |
SafeCracker.py
|
epollinger/python
|
26b500ace6e9ec743c6046be3e191262fb5eb1e9
|
[
"MIT"
] | null | null | null |
# Solution for the SafeCracker 50 Puzzle from Creative Crafthouse
# By: Eric Pollinger
# 9/11/2016
#
# Function to handle the addition of a given slice
def add(slice):
if row1Outer[(index1 + slice) % 16] != -1:
valRow1 = row1Outer[(index1 + slice) % 16]
else:
valRow1 = row0Inner[slice]
if row2Outer[(index2 + slice) % 16] != -1:
valRow2 = row2Outer[(index2 + slice) % 16]
else:
valRow2 = row1Inner[(index1 + slice) % 16]
if row3Outer[(index3 + slice) % 16] != -1:
valRow3 = row3Outer[(index3 + slice) % 16]
else:
valRow3 = row2Inner[(index2 + slice) % 16]
if row4[(index4 + slice) % 16] != -1:
valRow4 = row4[(index4 + slice) % 16]
else:
valRow4 = row3Inner[(index3 + slice) % 16]
return row0Outer[slice] + valRow1 + valRow2 + valRow3 + valRow4
if __name__ == "__main__":
# Raw data (Row0 = base of puzzle)
row0Outer = [10,1,10,4,5,3,15,16,4,7,0,16,8,4,15,7]
row0Inner = [10,10,10,15,7,19,18,2,9,27,13,11,13,10,18,10]
row1Outer = [-1,10,-1,8,-1,10,-1,9,-1,8,-1,8,-1,9,-1,6]
row1Inner = [1,24,8,10,20,7,20,12,1,10,12,22,0,5,8,5]
row2Outer = [0,-1,11,-1,8,-1,8,-1,8,-1,10,-1,11,-1,10,-1]
row2Inner = [20,8,19,10,15,20,12,20,13,13,0,22,19,10,0,5]
row3Outer = [10,-1,14,-1,11,-1,8,-1,12,-1,11,-1,3,-1,8,-1]
row3Inner = [6,18,8,17,4,20,4,14,4,5,1,14,10,17,10,5]
row4 = [8,-1,8,-1,16,-1,19,-1,8,-1,17,-1,6,-1,6,-1]
count = 0
for index1 in range(0,16):
for index2 in range(0,16):
for index3 in range(0,16):
for index4 in range(0,16):
if add(0) == 50:
solution = True
for sl in range(1,16):
if add(sl) != 50:
solution = False
if solution == True:
count = count + 1
# Print Solution
print('Solution with index values: ' + str(index1) + ' ' + str(index2) + ' ' + str(index3)
+ ' ' + str(index4) + ' for a total number of solutions: ' + str(count))
for i in range(0, 5):
print('Solution with Slice ' + str(i) + ' values:\t ' + str(row1Outer[(index1 + i) % 16]) + '\t\t' + str(
row2Outer[(index2 + i) % 16]) + '\t\t' + str(row3Outer[(index3 + i) % 16]) + '\t\t' + str(row4[(index4 + i) % 16]))
if count == 0:
print("No Solution Found")
| 41.253968
| 147
| 0.485571
|
4a1084b5ed81c2f5bb975bf891573a267174cea3
| 3,454
|
py
|
Python
|
day3.py
|
nichochar/advent-of-code-2021
|
3abe2f30afee2b0bfcdbecdf414b48a3d953d7ed
|
[
"MIT"
] | null | null | null |
day3.py
|
nichochar/advent-of-code-2021
|
3abe2f30afee2b0bfcdbecdf414b48a3d953d7ed
|
[
"MIT"
] | null | null | null |
day3.py
|
nichochar/advent-of-code-2021
|
3abe2f30afee2b0bfcdbecdf414b48a3d953d7ed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import os
LENGTH = 12
def read(relative_filepath):
"""For day 3, returns...
"""
with open(relative_filepath, 'r+') as f:
data = f.read()
clean_data = data.strip()
lines = clean_data.split('\n')
return lines
def bitsToInt(bitstring):
"""Takes a string like 10110 and returns its decimal value (22)
"""
return int(bitstring, 2)
def solve(data):
"""
Expects data in the form of a dict of strings:
[
'110101',
'001010',
...
]
Returns the most common bit (gamma) and the least
common bit (epsilon) as a tuple
"""
num_samples = len(data)
half = num_samples // 2 # This could give an off by one error
print(f"Half way = {half}")
bit_len = len(data[0])
ones_counter = {i: 0 for i in range(bit_len)}
for line in data:
for i in range(bit_len):
elt = line[i]
if elt == '1':
ones_counter[i] += 1
gamma, epsilon = '', ''
for key, val in ones_counter.items():
if val > half:
gamma += '1'
epsilon += '0'
else:
gamma += '0'
epsilon += '1'
print("epsilon", epsilon)
print("gamma", gamma)
return bitsToInt(gamma), bitsToInt(epsilon)
def solve2(data: [str], candidates_fn) -> (int, int):
"""
Naive solve:
1) For each bit
2) Check if previous candidate list length is 1
3) If yes return, else move to the next bit
4) Create a new list of candidates
"""
previous_candidates = data
for i in range(LENGTH):
new_candidates = candidates_fn(previous_candidates, i)
if len(new_candidates) == 1:
return new_candidates[0]
previous_candidates = new_candidates
assert len(new_candidates) == 1, f"len not 1... found {len(new_candidates)}"
def get_candidates_high(sublist, idx_pos):
"""Get candidates takes a sublist of the initial list of
binary numbers, and an index position.
Then, given that index position, it considers the most common
bit in that position, in this sublist.
Finally, it dismisses all examples that do not hold this bit
in that position and returns the new list of candidates
"""
ones = [elt for elt in sublist if elt[idx_pos] == '1']
zeroes = [elt for elt in sublist if elt[idx_pos] == '0']
if len(ones) >= len(zeroes):
keep_ones = True
else:
keep_ones = False
if keep_ones is True:
return ones
else:
return zeroes
def get_candidates_low(sublist, idx_pos):
ones = [elt for elt in sublist if elt[idx_pos] == '1']
zeroes = [elt for elt in sublist if elt[idx_pos] == '0']
if len(zeroes) <= len(ones):
keep_zeroes = True
else:
keep_zeroes = False
if keep_zeroes is True:
return zeroes
else:
return ones
if __name__ == '__main__':
name = os.path.basename(__file__).split('.py')[0]
print("Testing...")
assert 22 == bitsToInt('10110')
print(f"Solving {name} for advent of code")
data = read('inputs/day3.txt')
gamma, epsilon = solve(data)
print(f"Result: gamma ({gamma}) * epsilon ({epsilon}) = {gamma * epsilon}")
o2 = solve2(data, get_candidates_high)
co2 = solve2(data, get_candidates_low)
print(f"Result: o2 ({o2}) * co2 ({co2}) = {int(o2, 2) * int(co2, 2)}")
| 25.585185
| 80
| 0.588882
|
4a10868bdc1b6992642482100cdfc0365c832f7a
| 2,601
|
py
|
Python
|
base/views.py
|
kunsaeedan01/To-Do-App
|
a81a4ab0ce84e42e6097a549ccbf2d41c5a1d3af
|
[
"MIT"
] | null | null | null |
base/views.py
|
kunsaeedan01/To-Do-App
|
a81a4ab0ce84e42e6097a549ccbf2d41c5a1d3af
|
[
"MIT"
] | null | null | null |
base/views.py
|
kunsaeedan01/To-Do-App
|
a81a4ab0ce84e42e6097a549ccbf2d41c5a1d3af
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView
from django.urls import reverse_lazy
from django.contrib.auth.views import LoginView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import login
from .models import Task
# Create your views here.
class CustomLoginView(LoginView):
template_name = 'base/login.html'
fields = '__all__'
redirect_authenticated_user = True
def get_success_url(self):
return reverse_lazy('tasks')
class RegisterPage(FormView):
template_name = 'base/register.html'
form_class = UserCreationForm
redirect_authenticated_user = True
success_url = reverse_lazy('tasks')
def form_valid(self, form):
user = form.save()
if user is not None:
login(self.request, user)
return super(RegisterPage, self).form_valid(form)
def get(self, *args, **kwargs):
if self.request.user.is_authenticated:
return redirect('tasks')
return super(RegisterPage, self).get(*args, **kwargs)
class TaskList(LoginRequiredMixin, ListView):
model = Task
context_object_name = 'tasks'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['tasks'] = context['tasks'].filter(user=self.request.user)
context['count'] = context['tasks'].filter(complete=False).count()
search_input = self.request.GET.get('search-area') or ''
if search_input:
context['tasks'] = context['tasks'].filter(title__icontains=search_input)
context['search-input'] = search_input
return context
class TaskDetail(LoginRequiredMixin, DetailView):
model = Task
context_object_name = 'task'
template_name = 'base/task.html'
class TaskCreate(LoginRequiredMixin, CreateView):
model = Task
fields = ['title', 'description', 'complete']
success_url = reverse_lazy('tasks')
def form_valid(self, form):
form.instance.user = self.request.user
return super(TaskCreate, self).form_valid(form)
class TaskUpdate(LoginRequiredMixin, UpdateView):
    """Edit an existing Task; returns to the task list on success."""
    model = Task
    fields = ['title', 'description', 'complete']
    success_url = reverse_lazy('tasks')
class DeleteView(LoginRequiredMixin, DeleteView):
    """Confirmation page that deletes a Task.

    NOTE(review): this class shadows the imported
    ``django.views.generic.edit.DeleteView`` it inherits from.  A clearer
    name (e.g. TaskDelete) would avoid the shadowing, but renaming would
    break existing urls.py references, so the name is left unchanged here.
    """
    model = Task
    context_object_name = 'task'
    success_url = reverse_lazy('tasks')
| 30.964286
| 85
| 0.708189
|
4a108792fd6537bfa82f461f964bff9d42e74138
| 351
|
py
|
Python
|
25-django/ProyectoWP/blog/processor.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
25-django/ProyectoWP/blog/processor.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
25-django/ProyectoWP/blog/processor.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
from blog.models import Category, Article
def get_categories(request):
    """Context processor: expose the categories that are used by at least one
    public article, plus the raw list of category ids in use.

    Returns a dict with:
      * ``categories`` -- (id, name) pairs for the categories to display
      * ``ids`` -- the category ids referenced by public articles
    """
    used_category_ids = Article.objects.filter(public=True).values_list('categories', flat=True)
    visible_categories = Category.objects.filter(id__in=used_category_ids).values_list('id', 'name')
    return {'categories': visible_categories, 'ids': used_category_ids}
| 29.25
| 97
| 0.717949
|
4a1087ac3aeb508f51824ec27c4345e7f9547140
| 1,982
|
py
|
Python
|
conftest.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 298
|
2020-02-23T03:00:51.000Z
|
2022-03-30T02:11:00.000Z
|
conftest.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 521
|
2020-02-21T18:21:17.000Z
|
2022-03-31T16:40:34.000Z
|
conftest.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 499
|
2020-03-07T05:43:52.000Z
|
2022-03-28T12:24:54.000Z
|
"""
Configuration file for pytest
See also https://docs.pytest.org/en/6.2.x/writing_plugins.html#writing-hook-functions
"""
import os
import sys
import importlib
class UnitTestPathFinder(importlib.machinery.PathFinder):
    """
    Makes the knausj_talon repo root directory available under
    knausj_talon_pkg and tests/stubs/talon/ available
    under talon. Activated by the code in pytest_sessionstart()

    A loader is needed since the 'code' folder in knausj conflicts
    with the built in python 'code' module. Renaming the folder
    could cause merge conflicts.
    """

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):
        repo_dir = os.path.dirname(__file__)
        pkg_name = "knausj_talon_pkg"

        if fullname == "talon" or fullname.startswith("talon."):
            # Resolve `talon` (and submodules) against the stub package.
            stub_dir = os.path.join(repo_dir, "tests", "stubs")
            return super().find_spec(fullname, [stub_dir])

        if fullname == pkg_name:
            # Synthesize a package whose __init__ is tests/repo_root_init.py.
            init_path = os.path.join(repo_dir, "tests", "repo_root_init.py")
            loader = importlib.machinery.SourceFileLoader(fullname, init_path)
            return importlib.machinery.ModuleSpec(
                name=fullname, loader=loader, is_package=True
            )

        if fullname.startswith(pkg_name + "."):
            # Submodules of the synthesized package live in the repo root.
            return super().find_spec(fullname, [repo_dir])

        # Everything else falls through to the normal sys.path machinery.
        return None
def pytest_sessionstart():
    """
    Set up test environment. Only invoked when we're in the pytest
    environment so as not to mess with the Talon runtime.
    """
    # Install the custom finder so `talon` resolves to the stubs and the
    # repo root imports as `knausj_talon_pkg` for the whole test session.
    sys.meta_path.append(UnitTestPathFinder)
| 30.492308
| 85
| 0.603431
|
4a1087efc58a2128e17398a0c907b1b065e7e302
| 2,435
|
py
|
Python
|
setup.py
|
Schenng/color-wave-ml-training
|
614b6de11810c4c143434568edf8a4d4204eac8d
|
[
"MIT"
] | 2,990
|
2017-01-31T04:36:11.000Z
|
2022-03-23T08:54:30.000Z
|
setup.py
|
laihub/PaintsChainer
|
aa6679444b75b550de17b2cfe78a47e3c98364ff
|
[
"MIT"
] | 103
|
2017-01-31T04:42:44.000Z
|
2021-11-22T04:42:37.000Z
|
setup.py
|
laihub/PaintsChainer
|
aa6679444b75b550de17b2cfe78a47e3c98364ff
|
[
"MIT"
] | 516
|
2017-01-31T04:58:35.000Z
|
2022-02-14T09:18:50.000Z
|
'''
This is the setup script for cxfreeze
cxfreeze does not support pkg_resource, to compile,
please modify chainer to hardcode version name and remove pkg_resources
track cxfreeze branch https://github.com/grahamrjuk/cx_Freeze/tree/process_import
for process support
Process.pyc is to be renamed to process.pyc
cupy testing has to be modified to prevent pkg_resources require function
'''
#cx_freeze Dependencies
import sys
#Post-process
import os
import shutil
from cx_Freeze import setup, Executable
# Make the project's helper packages importable during the freeze.
sys.path.append('./cgi-bin/paint_x2_unet')
sys.path.append('./cgi-bin/helpers')
# Dependencies fine tuning: modules cx_Freeze's scanner misses on its own.
MODS = [
    'cupy',
    'chainer',
    'numpy.core._methods', 'numpy.lib.format',
    'cgi_exe',
    'platformAdapter',
    "appdirs",
    "packaging.requirements"
]
BUILD_OPTIONS = {"packages": ["os"], "excludes": ["tkinter"], 'includes': MODS}
# GUI applications require a different base on Windows (the default is for a console application).
BASE = None
#if sys.platform == "win32":
#    base = "Win32GUI"
# Running this script triggers the cx_Freeze build immediately.
setup(
    name="PaintsChainer",
    version="0.1",
    description="PaintsChainer Executable Version!",
    options={"build_exe": BUILD_OPTIONS},
    executables=[Executable("server.py", base=BASE)]
)
#POST building patching
def copytree(src, dst, symlinks=False, ignore=None):
    """Copy every entry of directory *src* into the existing directory *dst*.

    Files are copied with metadata (shutil.copy2); sub-directories are copied
    recursively via shutil.copytree.  Unlike shutil.copytree itself, *dst*
    is allowed (and expected) to already exist.
    """
    for entry in os.listdir(src):
        src_path = os.path.join(src, entry)
        dst_path = os.path.join(dst, entry)
        if os.path.isdir(src_path):
            shutil.copytree(src_path, dst_path, symlinks, ignore)
        else:
            shutil.copy2(src_path, dst_path)
# ---- Post-build patching ----
# Runs right after cx_Freeze's setup() call: copies the web assets that the
# dependency scanner cannot detect into the freshly built output directory.
v = sys.version_info
# cx_Freeze output directory, e.g. build/exe.win-amd64-3.6/
os.chdir("./build/exe.win-amd64-" + str(v[0]) + "." + str(v[1]) + "/")
#todo: other platform path
undetected_dirs = ["wPaint", "models", "images", "fonts", "css"]
for directory in undetected_dirs:
    try:
        shutil.copytree(os.getcwd() + "/../../" + directory, os.getcwd() + "/" + directory)
    except FileExistsError:
        # Bug fix: the original bare `except:` swallowed every error
        # (missing sources, permission problems, even KeyboardInterrupt)
        # while always printing this message.  Only an already-existing
        # destination directory is expected and safe to ignore here.
        print("Folder is created before.")
undetected_files = [
    "index.html",
    "index_ja.html",
    "index_zh.html",
    "index_zh-TW.html",
    "howto.html",
    "howto_ja.html",
    "howto_zh.html",
    "howto_zh-TW.html",
    "paints_chainer.js",
    "manifest.json",
    "main.css",
    "LICENSE",
    "browserconfig.xml",
    "run_dependency_test.bat",
    "run_exe.bat"
]
for file in undetected_files:
    shutil.copyfile(os.getcwd() + "/../../" + file, os.getcwd() + "/" + file)
# cx_Freeze (patched fork) emits Process.pyc; multiprocessing expects lowercase.
os.rename("multiprocessing/Process.pyc", "multiprocessing/process.pyc")
print("All Done.")
| 25.631579
| 98
| 0.669815
|
4a1088d4314afed33eb512a1f190a0ab65316dc2
| 8,244
|
py
|
Python
|
pandaharvester/harvesterworkermaker/simple_worker_maker.py
|
tsulaiav/harvester
|
ca3f78348019dd616738f2da7d50e81700a8e6b9
|
[
"Apache-2.0"
] | null | null | null |
pandaharvester/harvesterworkermaker/simple_worker_maker.py
|
tsulaiav/harvester
|
ca3f78348019dd616738f2da7d50e81700a8e6b9
|
[
"Apache-2.0"
] | null | null | null |
pandaharvester/harvesterworkermaker/simple_worker_maker.py
|
tsulaiav/harvester
|
ca3f78348019dd616738f2da7d50e81700a8e6b9
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import division
import math
import random
from pandaharvester.harvestercore.work_spec import WorkSpec
from pandaharvester.harvestercore.job_spec import JobSpec
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestermisc.info_utils import PandaQueuesDict
from pandaharvester.harvestercore.resource_type_mapper import ResourceTypeMapper
from .base_worker_maker import BaseWorkerMaker
import datetime
# logger
_logger = core_utils.setup_logger('simple_worker_maker')
# simple maker
class SimpleWorkerMaker(BaseWorkerMaker):
# constructor
def __init__(self, **kwarg):
self.jobAttributesToUse = ['nCore', 'minRamCount', 'maxDiskCount', 'maxWalltime', 'ioIntensity']
BaseWorkerMaker.__init__(self, **kwarg)
self.rt_mapper = ResourceTypeMapper()
def get_job_core_and_memory(self, queue_dict, job_spec):
job_memory = job_spec.jobParams.get('minRamCount', 0) or 0
job_corecount = job_spec.jobParams.get('coreCount', 1) or 1
unified_queue = queue_dict.get('capability', '') == 'ucore'
if not job_memory and unified_queue:
site_maxrss = queue_dict.get('maxrss', 0) or 0
site_corecount = queue_dict.get('corecount', 1) or 1
if job_corecount == 1:
job_memory = int(math.ceil(site_maxrss / site_corecount))
else:
job_memory = site_maxrss
return job_corecount, job_memory
def get_job_type(self, job_spec, job_type, queue_dict, tmp_prodsourcelabel=None):
queue_type = queue_dict.get('type', None)
# 1. get prodSourceLabel from job (PUSH)
if job_spec and 'prodSourceLabel' in job_spec.jobParams:
job_type_final = job_spec.jobParams['prodSourceLabel']
# 2. get prodSourceLabel from the specified job_type (PULL UPS)
elif job_type:
job_type_final = job_type
if tmp_prodsourcelabel:
if queue_type != 'analysis' and tmp_prodsourcelabel not in ('user', 'panda', 'managed'):
# for production, unified or other types of queues we need to run neutral prodsourcelabels
# with production proxy since they can't be distinguished and can fail
job_type_final = 'managed'
# 3. convert the prodSourcelabel from the queue configuration or leave it empty (PULL)
else:
# map AGIS types to PanDA types
if queue_type == 'analysis':
job_type_final = 'user'
elif queue_type == 'production':
job_type_final = 'managed'
else:
job_type_final = None
return job_type_final
# make a worker from jobs
def make_worker(self, jobspec_list, queue_config, job_type, resource_type):
tmpLog = self.make_logger(_logger, 'queue={0}:{1}:{2}'.format(queue_config.queueName, job_type, resource_type),
method_name='make_worker')
tmpLog.debug('jobspec_list: {0}'.format(jobspec_list))
workSpec = WorkSpec()
workSpec.creationTime = datetime.datetime.utcnow()
# get the queue configuration from the DB
panda_queues_dict = PandaQueuesDict()
queue_dict = panda_queues_dict.get(queue_config.queueName, {})
unified_queue = queue_dict.get('capability', '') == 'ucore'
# case of traditional (non-unified) queue: look at the queue configuration
if not unified_queue:
workSpec.nCore = queue_dict.get('corecount', 1) or 1
workSpec.minRamCount = queue_dict.get('maxrss', 1) or 1
# case of unified queue: look at the job & resource type and queue configuration
else:
catchall = queue_dict.get('catchall', '')
if 'useMaxRam' in catchall or queue_config.queueName in ('Taiwan-LCG2-HPC2_Unified',
'Taiwan-LCG2-HPC_Unified', 'DESY-ZN_UCORE'):
# temporary hack to debug killed workers in Taiwan queues
site_corecount = queue_dict.get('corecount', 1) or 1
site_maxrss = queue_dict.get('maxrss', 1) or 1
# some cases need to overwrite those values
if 'SCORE' in resource_type:
# the usual pilot streaming use case
workSpec.nCore = 1
workSpec.minRamCount = int(math.ceil(site_maxrss / site_corecount))
else:
# default values
workSpec.nCore = site_corecount
workSpec.minRamCount = site_maxrss
else:
workSpec.nCore, workSpec.minRamCount = self.rt_mapper.calculate_worker_requirements(resource_type,
queue_dict)
# parameters that are independent on traditional vs unified
workSpec.maxWalltime = queue_dict.get('maxtime', 1)
workSpec.maxDiskCount = queue_dict.get('maxwdir', 1)
walltimeLimit_default = getattr(queue_config, 'walltimeLimit', 0)
if len(jobspec_list) > 0:
# get info from jobs
nCore = 0
minRamCount = 0
maxDiskCount = 0
maxWalltime = 0
ioIntensity = 0
for jobSpec in jobspec_list:
job_corecount, job_memory = self.get_job_core_and_memory(queue_dict, jobSpec)
nCore += job_corecount
minRamCount += job_memory
try:
maxDiskCount += jobSpec.jobParams['maxDiskCount']
except Exception:
pass
try:
ioIntensity += jobSpec.jobParams['ioIntensity']
except Exception:
pass
try:
# maxWallTime from AGIS or qconf, not trusting job currently
maxWalltime = queue_dict.get('maxtime', walltimeLimit_default)
except Exception:
pass
if (nCore > 0 and 'nCore' in self.jobAttributesToUse) or unified_queue:
workSpec.nCore = nCore
if (minRamCount > 0 and 'minRamCount' in self.jobAttributesToUse) or unified_queue:
workSpec.minRamCount = minRamCount
if maxDiskCount > 0 and 'maxDiskCount' in self.jobAttributesToUse:
workSpec.maxDiskCount = maxDiskCount
if maxWalltime > 0 and 'maxWalltime' in self.jobAttributesToUse:
workSpec.maxWalltime = maxWalltime
if ioIntensity > 0 and 'ioIntensity' in self.jobAttributesToUse:
workSpec.ioIntensity = ioIntensity
workSpec.pilotType = jobspec_list[0].get_pilot_type()
workSpec.jobType = self.get_job_type(jobspec_list[0], job_type, queue_dict)
else:
# when no job
# randomize pilot type with weighting
pdpm = getattr(queue_config, 'prodSourceLabelRandomWeightsPermille', {})
choice_list = core_utils.make_choice_list(pdpm=pdpm, default='managed')
tmp_prodsourcelabel = random.choice(choice_list)
fake_job = JobSpec()
fake_job.jobParams = {}
fake_job.jobParams['prodSourceLabel'] = tmp_prodsourcelabel
workSpec.pilotType = fake_job.get_pilot_type()
del fake_job
if workSpec.pilotType in ['RC', 'ALRB', 'PT']:
tmpLog.info('a worker has pilotType={0}'.format(workSpec.pilotType))
workSpec.jobType = self.get_job_type(None, job_type, queue_dict, tmp_prodsourcelabel)
tmpLog.debug('get_job_type decided for job_type: {0} (input job_type: {1}, queue_type: {2}, tmp_prodsourcelabel: {3})'
.format(workSpec.jobType, job_type, queue_dict.get('type', None), tmp_prodsourcelabel))
if resource_type and resource_type != 'ANY':
workSpec.resourceType = resource_type
elif workSpec.nCore == 1:
workSpec.resourceType = 'SCORE'
else:
workSpec.resourceType = 'MCORE'
return workSpec
| 44.562162
| 130
| 0.613052
|
4a10890d27ef341668def68e370b59fceeefc435
| 370
|
py
|
Python
|
Lambda_cs_week1/day_one/sorted_example.py
|
toneiobufon/Python_Practice
|
6f97cc9d7ee6f919f76aecff547fa5e4aa66588f
|
[
"MIT"
] | null | null | null |
Lambda_cs_week1/day_one/sorted_example.py
|
toneiobufon/Python_Practice
|
6f97cc9d7ee6f919f76aecff547fa5e4aa66588f
|
[
"MIT"
] | null | null | null |
Lambda_cs_week1/day_one/sorted_example.py
|
toneiobufon/Python_Practice
|
6f97cc9d7ee6f919f76aecff547fa5e4aa66588f
|
[
"MIT"
] | null | null | null |
# create a function that sorts a list of strings by length in ascending order
def sort_by_length(lst):
    """Return a new list with the strings of *lst* ordered by length,
    shortest first.  Equal-length strings keep their original relative
    order because sorted() is stable.

    Bug fix: the original returned plain sorted(lst), which sorts
    alphabetically -- not by length as the function name and the comment
    above promise.  Passing key=len sorts by string length.
    """
    return sorted(lst, key=len)
# Quick demo calls (executed when the script is run/imported).
print(sort_by_length(['name', 'nam', 'named']))
print(sort_by_length(['jun', 'may', 'jul']))
| 28.461538
| 76
| 0.683784
|
4a1089862d6a2d4e5fd779677dac51c86303c9fb
| 4,150
|
py
|
Python
|
src/AnimationEditor.py
|
hmoraldo/AnimationEditor
|
56df0d6a9813abc5258072b7cef6e907c0a15dd3
|
[
"MIT"
] | 6
|
2016-09-05T19:28:50.000Z
|
2022-02-11T10:03:41.000Z
|
src/AnimationEditor.py
|
hmoraldo/AnimationEditor
|
56df0d6a9813abc5258072b7cef6e907c0a15dd3
|
[
"MIT"
] | null | null | null |
src/AnimationEditor.py
|
hmoraldo/AnimationEditor
|
56df0d6a9813abc5258072b7cef6e907c0a15dd3
|
[
"MIT"
] | 4
|
2016-12-12T18:50:28.000Z
|
2019-12-15T03:07:29.000Z
|
import json, glob
from PIL import Image, ImageTk
import Tkinter as tk
import Utils
# --- Module-level editor state (this is a Python 2 / Tkinter script) ---
canvas = None         # tk.Canvas the current frame is drawn on
lblImage = None       # label widget showing the current image index/name
lblVertex = None      # label widget showing the current vertex index/name
FileName = None       # path of the loaded project json (set by OpenFromFile)
CurrentFrame = None   # current frame number
CurrentVertex = None  # index of the vertex currently being edited
FrameName = None      # tk.StringVar bound to the frame-name entry box
Images = None         # the image names (sorted result of glob(ImageGlob))
ImageGlob = None      # glob pattern the images were loaded from
Vertices = None       # vertex template list from the project file
Lines = None          # skeleton line definitions from the project file
Frames = None         # the actual frame information
def fillEditorWindow(window):
    """Build the editor UI inside *window* and enter the Tk main loop.

    Reads and writes the module-level editor state; assumes OpenFromFile()
    has already populated it.
    """
    global canvas, lblImage, lblVertex, FrameName
    window.title("Animation Editor")
    # Arrow keys step through the vertex list.
    window.bind("<Left>", btnPrevVertexClick)
    window.bind("<Right>", btnNextVertexClick)
    canvas = tk.Canvas(window)
    canvas.grid(row=0, column=0, columnspan=7)
    canvas.bind("<ButtonPress-1>", canvasDown)
    canvas.config(width=700, height=700)
    tk.Entry(window, textvariable=FrameName).grid(row=1, column=1, columnspan=3)
    btnSetFrameName = tk.Button(window, text="Set name")
    btnSetFrameName.grid(row=1, column=4, columnspan=2)
    btnSetFrameName.bind("<ButtonRelease-1>", btnSetFrameNameClick)
    tk.Label(window, text="Image:").grid(row=2, column=0)
    lblImage = Utils.MakeArrowButtons(window, 2, 1, btnPrevImageClick, btnNextImageClick)
    tk.Label(window, text="Vertex:").grid(row=3, column=0)
    lblVertex = Utils.MakeArrowButtons(window, 3, 1, btnPrevVertexClick, btnNextVertexClick)
    btnSave = tk.Button(window, text="Save")
    btnSave.grid(row=4, column=4)
    btnSave.bind("<ButtonRelease-1>", btnSaveClick)
    # Initial render of labels and canvas before handing control to Tk.
    updateData()
    updateImage()
    window.mainloop()
def completeFrameData(index):
    """Pad frame *index*'s vertex list with the trailing entries of the
    global Vertices template so every frame has one entry per vertex.

    Appends references to the shared template dicts, matching the original
    behavior.
    """
    global Frames, Vertices
    frame_vertices = Frames[index]["vertices"]
    for template_vertex in Vertices[len(frame_vertices):]:
        frame_vertices.append(template_vertex)
def btnSetFrameNameClick(event):
    """Persist the name typed in the entry box onto the current frame."""
    global FrameName, CurrentFrame, Frames
    Frames[CurrentFrame]["name"] = FrameName.get()
def btnPrevImageClick(event):
    """Step to the previous frame (no-op at frame 0) and refresh the UI."""
    global CurrentFrame
    if CurrentFrame > 0:
        CurrentFrame -= 1
    # Make sure the frame has a full vertex list, then redraw.
    completeFrameData(CurrentFrame)
    updateData()
    updateImage()
def btnNextImageClick(event):
    """Advance to the next frame, creating its frame record on first visit."""
    global CurrentFrame, Images, Frames
    if CurrentFrame < len(Images) - 1:
        CurrentFrame += 1
    if CurrentFrame >= len(Frames):
        if CurrentFrame == len(Frames):
            # First time this frame is visited: create its record.
            Frames.append(Utils.NewFrame(Frames))
        else:
            # Frames can only grow one at a time; anything else is corrupt.
            print "Error, can't edit this frame"
            quit()
    completeFrameData(CurrentFrame)
    updateData()
    updateImage()
def btnPrevVertexClick(event):
    """Select the previous vertex (no-op at index 0) and refresh the UI."""
    global CurrentVertex
    if CurrentVertex > 0:
        CurrentVertex -= 1
    updateData()
    updateImage()
def btnNextVertexClick(event):
    """Select the next vertex (clamped to the last one) and refresh the UI."""
    global CurrentVertex, Vertices
    if CurrentVertex < len(Vertices) - 1:
        CurrentVertex += 1
    updateData()
    updateImage()
def canvasDown(event):
    """Mouse click on the canvas: move the selected vertex to the click point."""
    global CurrentVertex, Vertices, CurrentFrame, Frames
    Frames[CurrentFrame]["vertices"][CurrentVertex]["x"] = event.x
    Frames[CurrentFrame]["vertices"][CurrentVertex]["y"] = event.y
    updateImage()
def btnSaveClick(event):
    """Write the whole project (glob, vertices, lines, frames) back to disk."""
    global ImageGlob, Vertices, FileName, Lines, Frames
    Utils.Save(FileName, ImageGlob, Vertices, Lines, Frames)
def updateData():
    """Refresh the image/vertex labels and the frame-name entry from state."""
    global lblImage, lblVertex, CurrentFrame, CurrentVertex, FrameName
    lblImage["text"] = str(CurrentFrame) + " (" + str(Images[CurrentFrame]) + ")"
    lblVertex["text"] = str(CurrentVertex) + " (" + str(Vertices[CurrentVertex]["name"]) + ")"
    FrameName.set(Frames[CurrentFrame]["name"])
def updateImage():
    """Redraw the canvas with the current frame's image and vertices."""
    global canvas, Vertices, Lines, Frames, Images, CurrentFrame, CurrentVertex
    # -1 acts as "no secondary selection" for Utils.UpdateImage.
    noSelection = -1
    currentImage = Images[CurrentFrame]
    Utils.UpdateImage(canvas, 0, 0, Frames[CurrentFrame]["vertices"], Lines, currentImage, CurrentVertex, noSelection)
def OpenFromFile(window, filename):
    """Load the project json at *filename* into module state and open the UI.

    The json is expected to contain "imageGlob", "vertices", "lines" and
    "frames" keys (see btnSaveClick for the matching writer).
    """
    global FrameName, FileName, CurrentFrame, CurrentVertex, Images, ImageGlob, Vertices, Lines, Frames
    FileName = filename
    CurrentVertex = 0
    CurrentFrame = 0
    f = open(filename)
    data = json.load(f)
    f.close()
    FrameName = tk.StringVar()
    ImageGlob = data["imageGlob"]
    Vertices = data["vertices"]
    Lines = data["lines"]
    Frames = data["frames"]
    # The animation frames are the sorted matches of the stored glob.
    Images = glob.glob(ImageGlob)
    Images.sort()
    if len(Images) == 0:
        print "Error, no images found with the given glob"
        quit()
    if len(Frames) == 0:
        # Fresh project: start with one empty frame.
        Frames = [Utils.NewFrame(Frames)]
    completeFrameData(CurrentFrame)
    fillEditorWindow(window)
if __name__ == "__main__":
    # Launch the editor on the bundled sample project when run as a script.
    OpenFromFile(tk.Tk(), "data/testfile.json")
| 25.776398
| 115
| 0.740482
|
4a108a0c5e4a7250a8c299ff8e94675edc1547b3
| 9,895
|
py
|
Python
|
framework/SupervisedLearning/ScikitLearn/Tree/DecisionTreeClassifier.py
|
FlanFlanagan/raven
|
bd7fca18af94376a28e2144ba1da72c01c8d343c
|
[
"Apache-2.0"
] | 1
|
2022-03-10T18:54:09.000Z
|
2022-03-10T18:54:09.000Z
|
framework/SupervisedLearning/ScikitLearn/Tree/DecisionTreeClassifier.py
|
FlanFlanagan/raven
|
bd7fca18af94376a28e2144ba1da72c01c8d343c
|
[
"Apache-2.0"
] | null | null | null |
framework/SupervisedLearning/ScikitLearn/Tree/DecisionTreeClassifier.py
|
FlanFlanagan/raven
|
bd7fca18af94376a28e2144ba1da72c01c8d343c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Jan 21, 2020
@author: alfoa, wangc
DecisionTreeClassifier
A decision tree classifier.
"""
#Internal Modules (Lazy Importer)--------------------------------------------------------------------
#Internal Modules (Lazy Importer) End----------------------------------------------------------------
#External Modules------------------------------------------------------------------------------------
#External Modules End--------------------------------------------------------------------------------
#Internal Modules------------------------------------------------------------------------------------
from SupervisedLearning.ScikitLearn import ScikitLearnBase
from utils import InputData, InputTypes
#Internal Modules End--------------------------------------------------------------------------------
class DecisionTreeClassifier(ScikitLearnBase):
  """
    DecisionTreeClassifier

    Thin RAVEN wrapper around sklearn.tree.DecisionTreeClassifier: declares
    the RAVEN input specification and forwards the collected settings to the
    underlying scikit-learn estimator.
  """
  # problemtype/normalize steer RAVEN's handling of this surrogate model.
  info = {'problemtype':'classification', 'normalize':True}

  def __init__(self):
    """
      Constructor that will appropriately initialize a supervised learning object
      @ In, None
      @ Out, None
    """
    super().__init__()
    # sklearn is imported lazily so this module can be parsed without it.
    import sklearn
    import sklearn.tree
    # Store the estimator class; ScikitLearnBase instantiates it later.
    self.model = sklearn.tree.DecisionTreeClassifier

  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for
      class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for
        specifying input of cls.
    """
    specs = super(DecisionTreeClassifier, cls).getInputSpecification()
    specs.description = r"""The \xmlNode{DecisionTreeClassifier} is a classifier that is based on the
                        decision tree logic.
                        \zNormalizationPerformed{DecisionTreeClassifier}
                        """
    specs.addSub(InputData.parameterInputFactory("criterion", contentType=InputTypes.makeEnumType("criterion", "criterionType",['gini','entropy']),
                                                 descr=r"""The function to measure the quality of a split. Supported criteria are ``gini'' for the
                                                 Gini impurity and ``entropy'' for the information gain.""", default='gini'))
    specs.addSub(InputData.parameterInputFactory("splitter", contentType=InputTypes.makeEnumType("splitter", "splitterType",['best','random']),
                                                 descr=r"""The strategy used to choose the split at each node. Supported strategies are ``best''
                                                 to choose the best split and ``random'' to choose the best random split.""", default='best'))
    specs.addSub(InputData.parameterInputFactory("max_depth", contentType=InputTypes.IntegerType,
                                                 descr=r"""The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure
                                                 or until all leaves contain less than min_samples_split samples.""", default=None))
    specs.addSub(InputData.parameterInputFactory("min_samples_split", contentType=InputTypes.IntegerType,
                                                 descr=r"""The minimum number of samples required to split an internal node""", default=2))
    specs.addSub(InputData.parameterInputFactory("min_samples_leaf", contentType=InputTypes.IntegerType,
                                                 descr=r"""The minimum number of samples required to be at a leaf node. A split point at any
                                                 depth will only be considered if it leaves at least min\_samples\_leaf training samples in each
                                                 of the left and right branches. This may have the effect of smoothing the model, especially
                                                 in regression.""", default=1))
    specs.addSub(InputData.parameterInputFactory("min_weight_fraction_leaf", contentType=InputTypes.FloatType,
                                                 descr=r"""The minimum weighted fraction of the sum total of weights (of all the input samples)
                                                 required to be at a leaf node. Samples have equal weight when sample_weight is not provided.""", default=0.0))
    specs.addSub(InputData.parameterInputFactory("max_features", contentType=InputTypes.makeEnumType("maxFeatures", "maxFeaturesType",['auto','sqrt','log2']),
                                                 descr=r"""The strategy to compute the number of features to consider when looking for the best split:
                                                 \begin{itemize}
                                                   \item sqrt: $max\_features=sqrt(n\_features)$
                                                   \item log2: $max\_features=log2(n\_features)$
                                                   \item auto: automatic selection
                                                 \end{itemize}
                                                 \nb the search for a split does not stop until at least one valid partition of the node
                                                 samples is found, even if it requires to effectively inspect more than max_features features.""", default=None))
    specs.addSub(InputData.parameterInputFactory("max_leaf_nodes", contentType=InputTypes.IntegerType,
                                                 descr=r"""Grow a tree with max\_leaf\_nodes in best-first fashion. Best nodes are defined as relative reduction
                                                 in impurity. If None then unlimited number of leaf nodes.""", default=None))
    specs.addSub(InputData.parameterInputFactory("min_impurity_decrease", contentType=InputTypes.FloatType,
                                                 descr=r"""A node will be split if this split induces a decrease of the impurity greater than or equal to this value.
                                                 The weighted impurity decrease equation is the following:
                                                 $N\_t / N * (impurity - N\_t\_R / N\_t * right_impurity - N\_t\_L / N\_t * left\_impurity)$
                                                 where $N$ is the total number of samples, $N\_t$ is the number of samples at the current node, $N\_t\_L$ is the number
                                                 of samples in the left child, and $N\_t\_R$ is the number of samples in the right child.
                                                 $N$, $N\_t$, $N\_t]\_R$ and $N\_t\_L$ all refer to the weighted sum, if sample_weight is passed.""", default=0.0))
    # new in sklearn 0.22
    # specs.addSub(InputData.parameterInputFactory("ccp_alpha", contentType=InputTypes.FloatType,
    #                                              descr=r"""Complexity parameter used for Minimal Cost-Complexity Pruning. The subtree with the largest cost
    #                                              complexity that is smaller than ccp_alpha will be chosen. By default, no pruning is performed. """, default=0.0))
    specs.addSub(InputData.parameterInputFactory("random_state", contentType=InputTypes.IntegerType,
                                                 descr=r"""Controls the randomness of the estimator. The features are
                                                 always randomly permuted at each split, even if splitter is set to
                                                 "best". When max\_features < n\_features, the algorithm will select
                                                 max_features at random at each split before finding the best split
                                                 among them. But the best found split may vary across different runs,
                                                 even if max\_features=n\_features. That is the case, if the improvement
                                                 of the criterion is identical for several splits and one split has to
                                                 be selected at random. To obtain a deterministic behaviour during
                                                 fitting, random\_state has to be fixed to an integer.""",
                                                 default=None))
    return specs

  def _handleInput(self, paramInput):
    """
      Function to handle the common parts of the distribution parameter input.
      @ In, paramInput, ParameterInput, the already parsed input.
      @ Out, None
    """
    super()._handleInput(paramInput)
    settings, notFound = paramInput.findNodesAndExtractValues(['criterion', 'splitter', 'max_depth','min_samples_split',
                                                              'min_samples_leaf','min_weight_fraction_leaf','max_features',
                                                              'max_leaf_nodes','min_impurity_decrease',
                                                              'random_state'])
    # notFound must be empty
    assert(not notFound)
    self.initializeModel(settings)
| 73.843284
| 167
| 0.551187
|
4a108a52ad60a04840910a4b5ac5560213498e89
| 10,083
|
py
|
Python
|
networks.py
|
TOMeoww/STGAN
|
090a4024999e68f017140312ecfdd0d4dc3dc425
|
[
"MIT"
] | null | null | null |
networks.py
|
TOMeoww/STGAN
|
090a4024999e68f017140312ecfdd0d4dc3dc425
|
[
"MIT"
] | 1
|
2020-12-06T08:06:56.000Z
|
2020-12-06T08:26:23.000Z
|
networks.py
|
TOMeoww/STGAN
|
090a4024999e68f017140312ecfdd0d4dc3dc425
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.nn.init import kaiming_normal_,xavier_normal_
from torchsummary import summary
class Conv2dBlock(nn.Module):
def __init__(self,in_channels,out_channels,
kernel_size = 4, stride = 2,use_bias = False,
gain = 2 ** (0.5), norm = 'BN', pad = (1,1,1,1),
activation = 'LR'):
super(Conv2dBlock,self).__init__()
self.pad = nn.ReflectionPad2d(pad)
#initialization he_std
#self.he_std = in_channels * out_channels * kernel_size ** (-0.5) * gain
#self.weight = nn.Parameter(torch.randn(out_channels,in_channels,kernel_size,kernel_size) * self.he_std)
#conv and initialization
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, bias = use_bias)
kaiming_normal_(self.conv.weight.data)
if use_bias:
self.conv.bias.data.zero_()
else:
pass
#norm
if norm == 'BN':
self.norm = nn.BatchNorm2d(out_channels)
elif norm == 'IN':
self.norm = nn.InstanceNorm2d(out_channels)
else:
assert 0,"STGAN's conv block requires IN or BN, not {}".format(norm)
#activation
if activation == 'LR':
self.activation = nn.LeakyReLU(0.2,inplace = True)
else:
assert 0,"STGAN's conv block requires LR, not {}".format(activation)
self.models = nn.Sequential(self.pad,self.conv,self.norm,self.activation)
def forward(self,x):
out = self.models(x)
return out
class FC(nn.Module):
    """Fully connected layer with He-style weight initialization and an
    optional LeakyReLU / Sigmoid activation.

    Parameters mirror the original: `use_bias` adds a zero-initialized bias,
    `activation` is 'LR', 'Sigmoid' or None, `gain` scales the init std.
    """

    def __init__(self, in_channels, out_channels, use_bias=False,
                 activation='LR', gain=2 ** (0.5)):
        super(FC, self).__init__()
        # He initialization std: gain / sqrt(fan_in).
        # Bug fix: the original computed `in_channels * (-0.5) * gain`
        # (multiplication instead of exponentiation), which scaled the
        # weights by a large *negative* factor instead of fan_in ** -0.5.
        self.he_std = in_channels ** (-0.5) * gain
        self.weight = torch.nn.Parameter(torch.randn(out_channels, in_channels) * self.he_std)
        if use_bias:
            self.bias = torch.nn.Parameter(torch.zeros(out_channels))
        else:
            self.bias = None
        if activation == 'LR':
            self.activation = nn.LeakyReLU(0.2, inplace=True)
        elif activation == 'Sigmoid':
            self.activation = nn.Sigmoid()
        elif activation is None:
            self.activation = None
        else:
            assert 0, " STGAN's FC reruires LR or Sigmoid, not{}".format(activation)

    def forward(self, x):
        # Linear transform, with or without bias, then optional activation.
        if self.bias is not None:
            out = F.linear(x, self.weight, self.bias)
        else:
            out = F.linear(x, self.weight)
        if self.activation:
            out = self.activation(out)
        return out
class DeconvBlock(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size = 4,
stride = 2, padding = 1, use_bias = False, norm = 'BN',
activation = 'ReLU'):
super(DeconvBlock,self).__init__()
self.deconv = nn.ConvTranspose2d(in_channels, out_channels,
kernel_size, stride, bias = use_bias, padding = padding)
if use_bias:
self.deconv.bias.data.zero_()
else:
pass
if activation == 'ReLU':
self.activation = nn.ReLU(inplace = True)
kaiming_normal_(self.deconv.weight.data)
elif activation == 'Tanh':
self.activation = nn.Tanh()
xavier_normal_(self.deconv.weight.data)
else:
assert 0," STGAN's FC reruires LR or Tanh, not{}".format(activation)
#norm
if norm == 'BN':
self.norm = nn.BatchNorm2d(out_channels)
self.models = nn.Sequential(self.deconv,self.norm,self.activation)
elif norm == 'IN':
self.norm = nn.InstanceNorm2d(out_channels)
self.models = nn.Sequential(self.deconv,self.norm,self.activation)
elif norm == None:
self.norm = None
self.models = nn.Sequential(self.deconv,self.activation)
else:
assert 0,"STGAN's conv block requires IN or BN, not {}".format(norm)
def forward(self, x):
out = self.models(x)
return out
class STU(nn.Module):
    """Selective Transfer Unit (STGAN): a GRU-like gate that fuses an encoder
    feature map with the upsampled hidden state from the coarser layer.

    NOTE(review): the `kernel_size` parameter is never used -- the gate convs
    hard-code kernel 3 and the deconv kernel 4.  Also, `state_new = r.mul(z)`
    differs from a textbook GRU update; verify against the STGAN paper before
    "fixing" it.
    """
    def __init__(self, attr_num, in_channels, out_channels, kernel_size=3):
        super(STU,self).__init__()
        self.old_channels = in_channels
        self.new_channels = out_channels
        self.att_num = attr_num
        # Upsamples the coarser state (concatenated with the attribute-diff
        # planes, hence the `* 2 + att_num` input width) to this resolution.
        self.deconv = nn.ConvTranspose2d(self.old_channels * 2 + self.att_num , self.new_channels,
                                         kernel_size = 4, stride = 2, output_padding = 0, padding = 1)
        self.reset_gate = nn.Sequential(nn.Conv2d(self.old_channels + self.old_channels, self.new_channels, kernel_size = 3,
                                                  stride = 1, padding = 1, bias = False),
                                        nn.Sigmoid())
        self.update_gate = nn.Sequential(nn.Conv2d( self.old_channels* 2, self.new_channels, kernel_size = 3,
                                                   stride = 1, padding = 1, bias = False),
                                         nn.Sigmoid())
        self.hidden_gate = nn.Sequential(nn.Conv2d(self.old_channels*2, self.new_channels,
                                                   kernel_size = 3, stride = 1, padding = 1),
                                         nn.Tanh())

    def forward(self, f_enc, state_old, att_target, att_source, encoder_layer_num = 1):
        # state_old: hidden state from the coarser layer; f_enc: encoder
        # feature at this resolution.  Returns (fused feature, new state).
        self.batch, _,self.h_old, self.w_old = state_old.size()
        if encoder_layer_num != 5:
            # Append broadcast attribute-difference planes before upsampling.
            att_diff = (att_target - att_source).view(-1,self.att_num,1,1).expand(self.batch, self.att_num, self.h_old, self.w_old)
            state_hat = self.deconv(torch.cat([state_old, att_diff], dim = 1))
        else:
            # Layer 5: the caller already concatenated the attribute planes.
            state_hat = self.deconv(state_old)
        r = self.reset_gate(torch.cat([f_enc, state_hat], dim = 1))
        z = self.update_gate(torch.cat([f_enc, state_hat], dim = 1))
        state_new = r.mul(z)
        f_new_hat = self.hidden_gate(torch.cat([f_enc, state_new], dim = 1))
        # Convex-style mix of upsampled state and candidate feature.
        f_new = (1 - z) * state_hat + z.mul(f_new_hat)
        return f_new, state_new
class Generator(nn.Module):
    """STGAN generator: conv encoder, STU skip connections, deconv decoder.

    Assumes the standard configuration n_layers_conv=5, num_STU=4 -- the
    hard-coded `dim = 512`, `encode_5` and `biaoji_list` below depend on it.
    """
    def __init__(self, n_layers_conv, n_layers_deconv, num_STU, num_att, dim = 64):
        super(Generator,self).__init__()
        self.n_layers_deconv = n_layers_deconv
        self.n_layers_conv = n_layers_conv
        self.num_STU = num_STU
        self.conv_encode = []
        self.attr_num = num_att
        # Encoder: 3 -> dim, then channel-doubling blocks (dim doubles only
        # after the non-first layers, giving 64,128,256,512,1024 for dim=64).
        for x in range(n_layers_conv):
            if x == 0:
                self.conv_encode.append(Conv2dBlock(3, dim))
            else:
                self.conv_encode.append(Conv2dBlock(dim, dim * 2))
                dim *= 2
        self.module_list_conv = torch.nn.ModuleList(self.conv_encode)
        self.deconv = []
        # Decoder: first block takes the bottleneck plus attribute planes,
        # middle blocks take (previous output + STU skip) = dim * 3 channels,
        # the last block emits the 3-channel Tanh image.  `dim` halves every
        # iteration here.
        for x in range(n_layers_deconv):
            if x == (n_layers_deconv - 1):
                self.deconv.append(DeconvBlock(dim * 3, 3, norm = None, activation = 'Tanh'))
            elif x == 0:
                self.deconv.append(DeconvBlock(dim + num_att, dim ))
            else:
                self.deconv.append(DeconvBlock(dim * 3, dim ))
            dim //= 2
        # Reset for the STU chain: first STU works at 512 channels.
        dim = 512
        self.module_list_deconv = torch.nn.ModuleList(self.deconv)
        self.STU = []
        for x in range(num_STU):
            self.STU.append(STU(13, dim, dim ))
            dim //= 2
        self.module_list_stu = torch.nn.ModuleList(self.STU)

    def forward(self, x, att_target, att_source):
        # Encode, stashing each scale's feature map as self.encode_1..5.
        for num in range(self.n_layers_conv):
            x = self.module_list_conv[num](x)
            setattr(self, "encode_{}".format(num + 1), x)
        # Attribute difference broadcast over the bottleneck's spatial size.
        att_diff = (att_target - att_source).view(att_target.size(0),\
            self.attr_num,1,1).expand(x.size(0), self.attr_num, self.encode_5.size(2), self.encode_5.size(3))
        state_list = []
        stu_out_list = []
        f_5 = getattr(self, "encode_{}".format(5))
        state_old = torch.cat([f_5, att_diff], dim = 1)
        state_list.append(state_old)
        # "biaoji" (Chinese for "marker"): 5 tells the first STU that the
        # attribute planes are already concatenated onto its input state.
        biaoji_list = [5,1,1,1]
        # Run the STU chain down the skip connections (encode_4 .. encode_1).
        for num in range(self.num_STU):
            f_enc = getattr(self, "encode_{}".format(4 - num))
            f_new, state_new = self.module_list_stu[num](f_enc, state_list[num ], att_target, att_source,biaoji_list[num])
            state_list.append(state_new)
            stu_out_list.append(f_new)
        # Decode, concatenating the matching STU output at every scale.
        out = self.module_list_deconv[0](state_old)
        for num in range(1, self.n_layers_deconv ):
            out = torch.cat([out, stu_out_list[num - 1]], dim = 1)
            out = self.module_list_deconv[num](out)
        return out
class Discriminator(nn.Module):
    """Discriminator with a shared conv trunk and two heads: an
    attribute classifier (sigmoid) and an adversarial score (linear).
    """
    def __init__(self, conv_num_block,dim,num_att):
        super(Discriminator, self).__init__()
        self.conv_num_block = conv_num_block
        # Build the trunk: RGB -> dim, then double channels each block.
        blocks = []
        in_ch, out_ch = 3, dim
        for _ in range(conv_num_block):
            blocks.append(Conv2dBlock(in_ch, out_ch, norm = 'IN'))
            in_ch, out_ch = out_ch, out_ch * 2
        self.conv_block = blocks
        # Flattened feature size; assumes the trunk reduces the input
        # to a 4x4 spatial map.
        self.num_features = in_ch * 4 * 4
        self.models_pre = nn.Sequential(*self.conv_block)
        self.dis_att = nn.Sequential(FC(self.num_features, 1024),
                                     FC(1024, num_att,activation = 'Sigmoid'))
        self.dis_adv = nn.Sequential(FC(self.num_features, 1024),
                                     FC(1024, 1, activation = None))
    def forward(self, x):
        """Return (attribute predictions, adversarial score) for a batch."""
        features = self.models_pre(x)
        flat = features.view(features.size(0), -1)
        return self.dis_att(flat), self.dis_adv(flat)
| 36.139785
| 131
| 0.553605
|
4a108b671cb3231466f03677b176ce941f8bd768
| 22,244
|
py
|
Python
|
VESIcal/models/allison.py
|
kaylai/VESIcal
|
3ea18b0ce30b30fb55786346c37ef8f428ee5034
|
[
"MIT"
] | 16
|
2020-06-22T09:07:32.000Z
|
2022-01-12T13:42:12.000Z
|
build/lib/VESIcal/models/allison.py
|
kaylai/VESIcal
|
3ea18b0ce30b30fb55786346c37ef8f428ee5034
|
[
"MIT"
] | 136
|
2020-05-22T21:43:23.000Z
|
2022-03-07T22:06:33.000Z
|
VESIcal/models/allison.py
|
kaylai/VESIcal
|
3ea18b0ce30b30fb55786346c37ef8f428ee5034
|
[
"MIT"
] | 3
|
2021-05-18T08:21:02.000Z
|
2022-03-25T01:08:10.000Z
|
from VESIcal import activity_models
from VESIcal import calibration_checks
from VESIcal import core
from VESIcal import fugacity_models
from VESIcal import model_classes
from VESIcal import sample_class
import numpy as np
import warnings as w
from scipy.optimize import root_scalar
class carbon(model_classes.Model):
    """
    Implementation of the Allison et al. (2019) CO2 solubility model. Which type of fit, and
    which composition must be selected when the Model is initialized. The fit may be either
    thermodynamic or power-law. The composition may be chosen from sunset, sfvf, erebus, vesuvius,
    etna, or stromboli. Default is the thermodynamic fit to sunset.
    """
    def __init__(self, model_loc='sunset', model_fit='thermodynamic'):
        """
        Initialize the model.

        Parameters
        ----------
        model_fit     str
            Either 'power' for the power-law fits, or 'thermodynamic' for the
            thermodynamic fits.
        model_loc     str
            One of 'sunset', 'sfvf', 'erebus', 'vesuvius', 'etna', 'stromboli'.
        """
        self.set_volatile_species(['CO2'])
        self.set_fugacity_model(fugacity_models.fugacity_HB_co2())
        self.set_activity_model(activity_models.activity_idealsolution())
        # Calibration checks: calculations are pinned at 1200 oC, with a
        # softer 1000-1400 oC range check, and H2O must be < 0.5 wt%
        # because the model has no water term.
        # NOTE(review): the Between range reuses crmsg_EqualTo_pass /
        # crmsg_EqualTo_description — looks like copy-paste; confirm.
        self.set_calibration_ranges([
            calibration_checks.CalibrationRange(
                'temperature', 1200, calibration_checks.crf_EqualTo, 'oC',
                'Allison et al. (2019) carbon', fail_msg=crmsg_Temp,
                pass_msg=calibration_checks.crmsg_EqualTo_pass,
                description_msg=calibration_checks.crmsg_EqualTo_description),
            calibration_checks.CalibrationRange(
                'temperature', [1000, 1400], calibration_checks.crf_Between, 'oC',
                'Allison et al. (2019) carbon', fail_msg=crmsg_Between_Temp,
                pass_msg=calibration_checks.crmsg_EqualTo_pass,
                description_msg=calibration_checks.crmsg_EqualTo_description),
            calibration_checks.CalibrationRange(
                'H2O', 0.5, calibration_checks.crf_LessThan, 'wt%', 'Allison et al. (2019) carbon',
                fail_msg=crmsg_H2O, pass_msg=calibration_checks.crmsg_LessThan_pass)])
        self.set_solubility_dependence(False)
        self.model_loc = model_loc
        self.model_fit = model_fit
    def calculate_dissolved_volatiles(self, pressure, temperature=1200, sample=None, X_fluid=1.0,
                                      **kwargs):
        """
        Calculates the dissolved CO2 concentration using (Eqns) 2-7 or 10-11 from Allison et al.
        (2019).

        Parameters
        ----------
        pressure     float
            Pressure in bars.
        temperature     float
            Temperature in C.
        sample         NoneType or Sample class
            Magma major element composition. Not required for this model, therefore None may be
            passed.
        X_fluid     float
            The mole fraction of CO2 in the fluid. Default is 1.0.

        Returns
        -------
        float
            Dissolved CO2 concentration in wt%.
        """
        # temperature = 1200 #temp in degrees C
        temperature = temperature + 273.15 # translate T from C to K
        if pressure < 0.0:
            raise core.InputError("Pressure must be positive.")
        if X_fluid < 0 or X_fluid > 1:
            raise core.InputError("X_fluid must have a value between 0 and 1.")
        if self.model_fit not in ['power', 'thermodynamic']:
            raise core.InputError("model_fit must be one of 'power', or 'thermodynamic'.")
        if self.model_loc not in ['sunset', 'sfvf', 'erebus', 'vesuvius', 'etna', 'stromboli']:
            # NOTE(review): the message is passed as TWO arguments here —
            # probably intended as one implicitly-concatenated string.
            raise core.InputError("model_loc must be one of 'sunset', 'sfvf', 'erebus', ",
                                  "'vesuvius', 'etna', or 'stromboli'.")
        if pressure == 0:
            return 0
        if self.model_fit == 'thermodynamic':
            P0 = 1000 # bar
            # [DV, lnK0] per location (thermodynamic fit parameters).
            params = dict({'sunset': [16.4, -14.67],
                           'sfvf': [15.02, -14.87],
                           'erebus': [15.83, -14.65],
                           'vesuvius': [24.42, -14.04],
                           'etna': [21.59, -14.28],
                           'stromboli': [14.93, -14.68]})
            DV = params[self.model_loc][0]
            lnK0 = params[self.model_loc][1]
            # NOTE(review): the factor of 10 presumably converts
            # DV (cm3/mol) * bar into J/mol against R = 8.3141 J/(mol K)
            # — confirm against the paper.
            lnK = lnK0 - (pressure-P0)*DV/(10*8.3141*temperature)
            # Fugacity model expects temperature back in C.
            fCO2 = self.fugacity_model.fugacity(pressure=pressure, temperature=temperature-273.15,
                                                X_fluid=X_fluid, **kwargs)
            Kf = np.exp(lnK)*fCO2
            XCO3 = Kf/(1-Kf)
            # FWone: melt formula weight on a one-oxygen basis (g/mol) —
            # presumably; confirm against Allison et al. (2019).
            FWone = 36.594
            wtCO2 = (44.01*XCO3)/((44.01*XCO3)+(1-XCO3)*FWone)*100
            return wtCO2
        if self.model_fit == 'power':
            # [a, b] per location: CO2 = a * fCO2**b, presumably in ppm
            # (hence the /1e4 conversion to wt% below) — confirm.
            params = dict({'stromboli': [1.05, 0.883],
                           'etna': [2.831, 0.797],
                           'vesuvius': [4.796, 0.754],
                           'sfvf': [3.273, 0.74],
                           'sunset': [4.32, 0.728],
                           'erebus': [5.145, 0.713]})
            fCO2 = self.fugacity_model.fugacity(pressure=pressure, temperature=temperature-273.15,
                                                X_fluid=X_fluid, **kwargs)
            return params[self.model_loc][0]*fCO2**params[self.model_loc][1]/1e4
    def calculate_equilibrium_fluid_comp(self, pressure, sample, temperature=1200, **kwargs):
        """ Returns 1.0 if a pure CO2 fluid is saturated. Returns 0.0 if a pure CO2 fluid is
        undersaturated.

        Parameters
        ----------
        pressure     float
            The total pressure of the system in bars.
        temperature     float
            The temperature of the system in C.
        sample:    Sample class
            Magma major element composition (including H2O).

        Returns
        -------
        float
            1.0 if CO2-fluid saturated, 0.0 otherwise.
        """
        satP = self.calculate_saturation_pressure(temperature=temperature, sample=sample,
                                                  X_fluid=1.0, **kwargs)
        if pressure < satP:
            return 1.0
        else:
            return 0.0
    def calculate_saturation_pressure(self, sample, temperature=1200, X_fluid=1.0, **kwargs):
        """
        Calculates the pressure at which a pure CO2 fluid is saturated, for the given sample
        composition and CO2 concentration. Calls the scipy.root_scalar routine, which makes
        repeated calls to the calculate_dissolved_volatiles method.

        Parameters
        ----------
        temperature     float
            The temperature of the system in C.
        sample:    Sample class
            Magma major element composition (including CO2).
        X_fluid     float
            The mole fraction of H2O in the fluid. Default is 1.0.

        Returns
        -------
        float
            Calculated saturation pressure in bars.
        """
        if X_fluid < 0 or X_fluid > 1:
            raise core.InputError("X_fluid must have a value between 0 and 1.")
        if isinstance(sample, sample_class.Sample) is False:
            raise core.InputError("Sample must be an instance of the Sample class.")
        if sample.check_oxide('CO2') is False:
            raise core.InputError("sample must contain CO2.")
        if sample.get_composition('CO2') < 0.0:
            raise core.InputError("Dissolved CO2 concentration must be greater than 0 wt%.")
        try:
            # Secant iteration seeded at 1000/2000 bar; root is where
            # modelled dissolved CO2 equals the sample's CO2.
            satP = root_scalar(self.root_saturation_pressure,
                               args=(temperature, sample, X_fluid, kwargs),
                               x0=1000.0, x1=2000.0).root
        except Exception:
            # Best-effort: an unconverged/failed solve yields NaN rather
            # than raising, so batch calculations can continue.
            w.warn("Saturation pressure not found.", RuntimeWarning, stacklevel=2)
            satP = np.nan
        return satP
    def root_saturation_pressure(self, pressure, temperature, sample, X_fluid, kwargs):
        """ Function called by scipy.root_scalar when finding the saturation pressure using
        calculate_saturation_pressure.

        Parameters
        ----------
        pressure     float
            Pressure guess in bars
        temperature     float
            The temperature of the system in C.
        sample:    Sample class
            Magma major element composition, including CO2.
        kwargs         dictionary
            Additional keyword arguments supplied to calculate_saturation_pressure. Might be
            required for the fugacity or activity models.

        Returns
        -------
        float
            The difference between the dissolved CO2 at the pressure guessed, and the CO2
            concentration passed in the sample variable.
        """
        return (sample.get_composition('CO2') -
                self.calculate_dissolved_volatiles(pressure=pressure, temperature=temperature,
                                                   sample=sample, X_fluid=X_fluid, **kwargs))
# ALLISON COMPOSITIONAL LIMITS -DEFINED AS MIN AND MAX OF CALIBRATION DATASET (-5% AND +5%
# RESPECTIVELY)
def crf_generic(calibval=None, sample=None, bounds=None):
    """Check that a sample's composition lies inside a set of oxide bounds.

    Parameters
    ----------
    calibval
        Unused; present to match the CalibrationRange crf call signature.
    sample : Sample class
        Sample whose wt%-oxide composition is checked.
    bounds : dict
        Maps oxide name (e.g. 'SiO2') to a [lower, upper] bound in wt%.

    Returns
    -------
    bool
        True if every bounded oxide is within its range (vacuously True
        for empty bounds).
    """
    # Defaults changed from `sample_class.Sample({})` / `{}` to None:
    # the old defaults were shared mutable objects built at import time
    # and unusable anyway (an empty sample has no oxides to look up).
    # All real callers pass both arguments.
    comp = sample.get_composition(units='wtpt_oxides')
    testresults = []
    # Bug fix: iterate the bounds that were passed in, not the
    # module-level sfvfCompRange, so this helper works for any
    # composition range handed to it.
    for ox in bounds:
        testresults.append(comp[ox] >= bounds[ox][0])
        testresults.append(comp[ox] <= bounds[ox][1])
    return all(testresults)
# Oxide bounds (wt%) for each location's calibration dataset: min and max
# of the calibration experiments, -5% and +5% respectively.
sfvfCompRange = {'SiO2': [50.0, 55.97],
                 'TiO2': [1.08, 1.25],
                 'Al2O3': [15.76, 18.15],
                 'FeO': [7.07, 8.06],
                 'MgO': [5.89, 7.09],
                 'CaO': [8.80, 9.91],
                 'Na2O': [3.05, 3.53],
                 'K2O': [1.25, 1.49]
                 }
# NOTE(review): unlike its siblings below (which default sample={}),
# crf_sfvf defaults to a Sample instance created once at import time —
# a shared mutable default. Defaults are never used in practice because
# CalibrationRange always supplies the sample.
def crf_sfvf(calibval=None, sample=sample_class.Sample({})):
    return crf_generic(calibval, sample, sfvfCompRange)
sunsetCompRange = {'SiO2': [45.72, 50.62],
                   'TiO2': [1.75, 1.96],
                   'Al2O3': [15.62, 17.45],
                   'FeO': [9.13, 10.42],
                   'MgO': [8.13, 9.19],
                   'CaO': [9.56, 10.59],
                   'Na2O': [3.29, 3.64],
                   'K2O': [0.77, 0.86]
                   }
def crf_sunset(calibval=None, sample={}):
    return crf_generic(calibval, sample, sunsetCompRange)
erebusCompRange = {'SiO2': [45.96, 51.04],
                   'TiO2': [2.67, 3.00],
                   'Al2O3': [18.31, 20.63],
                   'FeO': [7.46, 9.37],
                   'MgO': [3.03, 3.43],
                   'CaO': [6.58, 7.42],
                   'Na2O': [5.8, 6.49],
                   'K2O': [2.75, 3.13]
                   }
def crf_erebus(calibval=None, sample={}):
    return crf_generic(calibval, sample, erebusCompRange)
vesuviusCompRange = {'SiO2': [45.65, 53.29],
                     'TiO2': [0.93, 1.13],
                     'Al2O3': [13.75, 16.28],
                     'FeO': [4.98, 7.48],
                     'MgO': [6.41, 7.76],
                     'CaO': [11.12, 14.16],
                     'Na2O': [1.74, 2.07],
                     'K2O': [5.48, 6.35]
                     }
def crf_vesuvius(calibval=None, sample={}):
    return crf_generic(calibval, sample, vesuviusCompRange)
etnaCompRange = {'SiO2': [46.03, 52.97],
                 'TiO2': [1.61, 1.89],
                 'Al2O3': [15.87, 18.24],
                 'FeO': [6.75, 10.21],
                 'MgO': [5.9, 7.0],
                 'CaO': [9.38, 11.99],
                 'Na2O': [3.4, 3.94],
                 'K2O': [1.69, 2.25]
                 }
def crf_etna(calibval=None, sample={}):
    return crf_generic(calibval, sample, etnaCompRange)
stromboliCompRange = {'SiO2': [47.23, 55.15],
                      'TiO2': [0.74, 0.94],
                      'Al2O3': [14.87, 17.73],
                      'FeO': [5.08, 7.51],
                      'MgO': [7.52, 9.26],
                      'CaO': [11.99, 13.46],
                      'Na2O': [2.29, 2.67],
                      'K2O': [1.79, 2.17]
                      }
def crf_stromboli(calibval=None, sample={}):
    return crf_generic(calibval, sample, stromboliCompRange)
# Calibration-range message templates formatted by the CalibrationRange
# machinery (keys supplied: param_name, param_val, units, calib_val,
# model_name, depending on the check type).
crmsg_Comp_pass = ("The sample appears to be similar in composition to the compositional dataset "
                   "for the selected Carbon model of Allison et al. (2019).")
crmsg_Comp_fail = (" These calibration limits were selected based on the minimum and maximum "
                   "values of these oxides (+-5%) in the calibration dataset. As the Allison et "
                   "al. model incorperates no term for compositional dependence, users must take "
                   "extreme care when extrapolating this model to compositions which differ "
                   "significantly from the calibration dataset. These warnings are simply a "
                   "guide; we suggest that users carefully compare their major element data to"
                   " the calibration dataset to check for suitability ")
crmsg_Comp_description = ("The Allison et al. (2019) Carbon model is defined for 6 different "
                          "alkali compositions.")
crmsg_Temp = ("All calculations for {model_name} are performed at 1200 C "
              "(inputted Temp={param_val:.1f} {units}). Allison et al. (2019) suggest the "
              "results are likely applicable between 1000-1400°C). ")
crmsg_PressureGreater = ("{param_name} ({param_val:.1f} {units}) is less than the lowest P "
                         "experiment in the calibration dataset ({calib_val:.1f} {units}). ")
crmsg_PressureLess = ("{param_name} ({param_val:.1f} {units}) exceeds the upper limit of "
                      "{calib_val:.1f} {units} suggested by Allison et al. (2019) - Their "
                      "spreadsheet would return 7000 bar for this input. ")
# Bug fix: the threshold shown was {param_val} (the sample's own value)
# twice; it now reports the calibration limit ({calib_val}), as the other
# LessThan message above does.
crmsg_H2O = ("{param_name} ({param_val:.1f} {units}) is > {calib_val:.1f} {units}: this model "
             "does not account for the effect of H$_2$O on volatile solubility. VESIcal allows "
             "you to combine Allison Carbon with a variety of H$_2$O models. ")
# Bug fix: a stray trailing comma made this a 2-tuple of strings instead
# of one implicitly-concatenated string, which would break .format() when
# the calibration check fires. Also fixed the missing ")" and a typo.
crmsg_Between_Temp = ("{param_name} ({param_val:.1f} {units}) is outside the recommended "
                      "temperature range for {model_name} (1000-1400°C). ")
# Create objects for each model location in order to set their calibration ranges
# (each uses the default model_fit='thermodynamic'; the per-location
# calibration ranges are attached below).
sunset = carbon(model_loc='sunset')
sfvf = carbon(model_loc='sfvf')
erebus = carbon(model_loc='erebus')
vesuvius = carbon(model_loc='vesuvius')
etna = carbon(model_loc='etna')
stromboli = carbon(model_loc='stromboli')
# Attach per-location calibration ranges. This replaces six copy-pasted
# stanzas (one per location, identical apart from the location name, its
# oxide bounds, its compositional crf, and the lowest calibration
# pressure) with one data-driven loop — the original's misplaced
# "# Etna" comment showed the copy-paste was already drifting.
#
# Each tuple: (model instance, location name, oxide bounds dict,
#              compositional check function, lowest calibration P in bar).
_location_settings = [
    (sunset, 'sunset', sunsetCompRange, crf_sunset, 4071),
    (sfvf, 'sfvf', sfvfCompRange, crf_sfvf, 4133),
    (erebus, 'erebus', erebusCompRange, crf_erebus, 4078),
    (etna, 'etna', etnaCompRange, crf_etna, 485),
    (vesuvius, 'vesuvius', vesuviusCompRange, crf_vesuvius, 269),
    (stromboli, 'stromboli', stromboliCompRange, crf_stromboli, 524),
]
for _model, _loc, _comp_range, _crf, _min_p in _location_settings:
    _name = 'Allison et al. (2019) {} carbon'.format(_loc)
    # One Between-range check per bounded oxide (wt%).
    cr_oxide_list = []
    for ox in _comp_range.keys():
        cr_oxide_list.append(
            calibration_checks.CalibrationRange(
                ox, _comp_range[ox], calibration_checks.crf_Between, 'wt%',
                _name,
                fail_msg=calibration_checks.crmsg_BC_fail,
                pass_msg=calibration_checks.crmsg_BC_pass,
                description_msg=calibration_checks.crmsg_Between_description))
    # Extend the base ranges set in carbon.__init__ with: an upper
    # pressure cap of 7000 bar, the location-specific lower pressure
    # limit, a whole-composition check, and the per-oxide checks.
    _crs_to_update = _model.calibration_ranges
    _model.set_calibration_ranges(_crs_to_update + [
        calibration_checks.CalibrationRange(
            'pressure', 7000, calibration_checks.crf_LessThan, 'bar',
            _name,
            fail_msg=crmsg_PressureLess,
            pass_msg=calibration_checks.crmsg_LessThan_pass),
        calibration_checks.CalibrationRange(
            'pressure', _min_p, calibration_checks.crf_GreaterThan, 'bar',
            _name,
            fail_msg=crmsg_PressureGreater,
            pass_msg=calibration_checks.crmsg_GreaterThan_pass),
        calibration_checks.CalibrationRange(
            'sample', None, _crf, None, None,
            fail_msg=crmsg_Comp_fail,
            pass_msg=crmsg_Comp_pass)] + cr_oxide_list)
| 41.890772
| 99
| 0.603983
|
4a108c198ed6bdf236c8b188c9d0e0f792650520
| 3,996
|
py
|
Python
|
src/inline_requests/generator.py
|
starrify/scrapy-inline-requests
|
2cbbb66e6e97260b7e126aa9d8ecde1393a554c9
|
[
"MIT"
] | 58
|
2017-07-31T06:17:56.000Z
|
2021-12-13T07:10:18.000Z
|
src/inline_requests/generator.py
|
darkrho/scrapy-inline-requests
|
2cbbb66e6e97260b7e126aa9d8ecde1393a554c9
|
[
"MIT"
] | 11
|
2016-04-25T11:17:14.000Z
|
2017-02-20T15:50:59.000Z
|
src/inline_requests/generator.py
|
darkrho/scrapy-inline-requests
|
2cbbb66e6e97260b7e126aa9d8ecde1393a554c9
|
[
"MIT"
] | 8
|
2017-09-01T19:32:51.000Z
|
2021-02-05T14:35:49.000Z
|
import logging
import warnings
from functools import partial
from types import GeneratorType
from scrapy.http import Request
from scrapy.utils.spider import iterate_spider_output
logger = logging.getLogger(__name__)
class RequestGenerator(object):
    """This is the core class that wraps the callback and outputs the requests
    one by one.
    """
    def __init__(self, callback, **kwargs):
        """Initialize RequestGenerator.

        Parameters
        ----------
        callback : callable
          Callable callback (spider method).
        **kwargs :
          Extra callback keyword arguments.
        """
        self.callback = callback
        self.kwargs = kwargs
    def __call__(self, response):
        """Main response entry point.

        This method calls the callback and wraps the returned generator.
        """
        output = iterate_spider_output(self.callback(response=response, **self.kwargs))
        if not isinstance(output, GeneratorType):
            raise ValueError("Callback must return a generator type")
        return self._unwindGenerator(output)
    def _unwindGenerator(self, generator, _prev=None):
        """Unwind (resume) generator."""
        while True:
            # _prev carries a value already pulled from the generator by
            # a resuming call (see _handleSuccess/_handleFailure).
            if _prev:
                ret, _prev = _prev, None
            else:
                try:
                    ret = next(generator)
                except StopIteration:
                    break
            if isinstance(ret, Request):
                # Requests that already carry a callback/errback are
                # yielded as-is (fall through below) with a warning —
                # the generator cannot be resumed through them.
                if ret.callback:
                    warnings.warn("Got a request with callback set, bypassing "
                                  "the generator wrapper. Generator may not "
                                  "be able to resume. %s" % ret)
                elif ret.errback:
                    # By Scrapy defaults, a request without callback defaults to
                    # self.parse spider method.
                    warnings.warn("Got a request with errback set, bypassing "
                                  "the generator wrapper. Generator may not "
                                  "be able to resume. %s" % ret)
                else:
                    # Plain request: wrap it so its response resumes the
                    # generator, and stop unwinding here — the wrapped
                    # callback continues from this point later.
                    yield self._wrapRequest(ret, generator)
                    return
            # A request with callbacks, item or None object.
            yield ret
    def _wrapRequest(self, request, generator):
        # Allowing existing callback or errbacks could lead to undesired
        # results. To ensure the generator is **always** properly exhausted we
        # must handle both callback and errback in order to send back the
        # result to the generator.
        if request.callback is not None:
            raise ValueError("Request with existing callback is not supported")
        if request.errback is not None:
            raise ValueError("Request with existing errback is not supported")
        request.callback = partial(self._handleSuccess, generator=generator)
        request.errback = partial(self._handleFailure, generator=generator)
        return request
    def _cleanRequest(self, request):
        # Drop our wrapper callbacks so the request object can be reused
        # or serialized without dangling generator references.
        request.callback = None
        request.errback = None
    def _handleSuccess(self, response, generator):
        if response.request:
            self._cleanRequest(response.request)
        try:
            # Resume the spider's generator at the `yield request` point,
            # handing it the response.
            ret = generator.send(response)
        except StopIteration:
            return
        # Returns a fresh generator that keeps unwinding from `ret`.
        return self._unwindGenerator(generator, ret)
    def _handleFailure(self, failure, generator):
        # Look for the request instance in the exception value.
        if hasattr(failure.value, 'request'):
            self._cleanRequest(failure.value.request)
        elif hasattr(failure.value, 'response'):
            if hasattr(failure.value.response, 'request'):
                self._cleanRequest(failure.value.response.request)
        try:
            # Raise the failure inside the generator so the spider's own
            # try/except around the yield can handle it.
            ret = failure.throwExceptionIntoGenerator(generator)
        except StopIteration:
            return
        return self._unwindGenerator(generator, ret)
| 36
| 87
| 0.59985
|
4a108d28a9a992adff12abfb55b8e2a4cd439f01
| 39,841
|
py
|
Python
|
FTIR_to_electrolyte_composition/management/commands/Constant_run.py
|
Samuel-Buteau/Electrolyte_Analysis_FTIR
|
26c844547718b33fe317c3efa392712d8d119c6b
|
[
"MIT"
] | 9
|
2019-11-15T06:39:50.000Z
|
2022-01-19T14:18:56.000Z
|
FTIR_to_electrolyte_composition/management/commands/Constant_run.py
|
Samuel-Buteau/Electrolyte_Analysis_FTIR
|
26c844547718b33fe317c3efa392712d8d119c6b
|
[
"MIT"
] | 12
|
2020-02-12T01:07:23.000Z
|
2022-02-10T00:14:24.000Z
|
FTIR_to_electrolyte_composition/management/commands/Constant_run.py
|
Samuel-Buteau/Electrolyte_Analysis_FTIR
|
26c844547718b33fe317c3efa392712d8d119c6b
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand
import numpy
from FTIR_to_electrolyte_composition.models import FTIRSpectrum, FTIRSample, HUMAN,ROBOT,CELL
import matplotlib.pyplot as plt
import os
import tensorflow as tf
import random
import contextlib
import pickle
import math
wanted_wavenumbers = []
from mpl_toolkits.mplot3d import Axes3D
class LooseBeer(object):
    """Loose Beer-Lambert model (TF1 graph): spectra are explained as
    reconstructed ≈ exp(A_0) · relu(exp(x) · X_0 · s), with mass ratios
    read off the relu'd components. Builds placeholders/variables at
    construction and the training graph in `optimize`.
    """
    def __init__(self, trainable, num_concentrations, num_samples):
        # num_concentrations: number of electrolyte components;
        # num_samples: number of wavenumbers per spectrum.
        self.num_concentrations = num_concentrations
        self.num_samples = num_samples
        self.trainable = trainable
        # Hyperparameter placeholders fed at run time.
        self.dropout = tf.placeholder(dtype=tf.float32)
        self.prediction_coeff = tf.placeholder(dtype=tf.float32)
        self.positivity_coeff = tf.placeholder(dtype=tf.float32)
        self.normalization_coeff = tf.placeholder(dtype=tf.float32)
        self.small_x_coeff = tf.placeholder(dtype=tf.float32)
        # the log-magnitude of X
        self.x = tf.get_variable(
            name='x',
            shape=[1],
            dtype=tf.float32,
            initializer=tf.initializers.constant(value=[1], dtype=tf.float32),
            trainable=trainable,
        )
        # Linear map spectrum -> component activations.
        self.X_0 = tf.get_variable(
            name='X_0',
            shape=[num_concentrations, num_samples],
            dtype= tf.float32,
            initializer=tf.initializers.orthogonal(),
            trainable=trainable,
        )
        # Log-basis-spectra: exp(A_0) maps activations back to a spectrum.
        self.A_0 = tf.get_variable(
            name='A_0',
            shape=[num_samples, num_concentrations],
            dtype=tf.float32,
            initializer=tf.initializers.orthogonal(),
            trainable=trainable,
        )
        self.drop = tf.layers.Dropout(name='dropout_layer', rate=self.dropout)
    def build_forward(self, input_spectra):
        """Return F (raw activations), reconstructed spectra, and
        normalized predicted mass ratios for a batch of spectra."""
        epsilon = 1e-10  # NOTE(review): unused
        dropped_input_spectra = self.drop(input_spectra)
        F = tf.exp(self.x)*tf.einsum('ij,bj->bi',self.X_0, dropped_input_spectra)
        F_relu = tf.nn.relu(F)
        reconstructed_spectra = tf.einsum( 'ji,bi->bj',
                                          tf.exp(self.A_0), F_relu)
        # Mass ratios: relu'd activations normalized to sum to 1.
        predicted_mass_ratios = F_relu/ tf.reduce_sum(F_relu, axis=1, keepdims=True)
        return {'F':F, 'reconstructed_spectra':reconstructed_spectra, 'predicted_mass_ratios':predicted_mass_ratios}
    def optimize(self, input_spectra, input_mass_ratios, input_z_supervised,
                 learning_rate, global_norm_clip,
                 logdir):
        """Build the loss (and, when trainable, summaries plus
        gradient-accumulation training ops). Returns (loss, ops dict)."""
        res = self.build_forward(input_spectra)
        reconstruction_loss = tf.losses.mean_squared_error(labels=input_spectra, predictions=res['reconstructed_spectra'])
        # Prediction loss only counts supervised rows (z == 1 weights).
        prediction_loss = tf.losses.mean_squared_error(labels=input_mass_ratios, predictions=res['predicted_mass_ratios'], weights=tf.expand_dims(input_z_supervised, axis=1))
        positivity_loss = tf.reduce_mean(tf.nn.relu(-res['F']))
        normalization_loss = (tf.square(tf.reduce_mean(tf.exp(2.*self.A_0)) - 1.) +
                              tf.square(tf.reduce_mean(tf.square(self.X_0)) - 1.))
        # We try to make x small while keeping the output big. This should force x to focus on large signals in S.
        small_x_loss = tf.reduce_mean(tf.exp(self.x)/(1e-8 + tf.reduce_sum(res['F'], axis=1)))
        loss = (reconstruction_loss +
                self.prediction_coeff * prediction_loss +
                self.positivity_coeff * positivity_loss +
                self.normalization_coeff * normalization_loss +
                self.small_x_coeff * small_x_loss
                )
        if self.trainable:
            with tf.name_scope('summaries'):
                tf.summary.scalar('loss', loss)
                tf.summary.scalar('sqrt prediction_loss',tf.sqrt(prediction_loss))
                tf.summary.scalar('positivity_loss', positivity_loss)
                tf.summary.scalar('normalization_loss', normalization_loss)
                tf.summary.scalar('small x loss', small_x_loss)
                tf.summary.scalar('sqrt reconstruction_loss', tf.sqrt(reconstruction_loss))
                self.merger = tf.summary.merge_all()
            self.train_writer = tf.summary.FileWriter(os.path.join(logdir, 'train'))
            self.test_writer = tf.summary.FileWriter(os.path.join(logdir, 'test'))
            """
            we clip the gradient by global norm, currently the default is 10.
            -- Samuel B., 2018-09-14
            """
            optimizer = tf.train.AdamOptimizer(learning_rate)
            tvs = tf.trainable_variables()
            # Gradient accumulation buffers: caller runs zero_ops, then
            # accum_ops some number of times, then train_step.
            accum_vars = [tf.Variable(tf.zeros_like(tv.initialized_value()), trainable=False) for tv in tvs]
            zero_ops = [tv.assign(tf.zeros_like(tv)) for tv in accum_vars]
            update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
            with tf.control_dependencies(update_ops):
                gvs = optimizer.compute_gradients(loss, tvs)
                # test_ops is True when ANY gradient contains a NaN; in
                # that case accum_ops adds zeros (i.e. skips the batch).
                test_ops = tf.reduce_any(tf.concat([[tf.reduce_any(tf.is_nan(gv[0]), keepdims=False)] for i, gv in enumerate(gvs)],axis=0))
                accum_ops = tf.cond(test_ops, false_fn=lambda:[accum_vars[i].assign_add(gv[0]) for i, gv in enumerate(gvs)], true_fn=lambda:[accum_vars[i].assign_add(tf.zeros_like(gv[0])) for i, gv in enumerate(gvs)])
            with tf.control_dependencies(accum_ops):
                # Clip the ACCUMULATED gradients by global norm, then apply.
                gradients, _ = tf.clip_by_global_norm(accum_vars, global_norm_clip)
                train_step = optimizer.apply_gradients([(gradients[i], gv[1]) for i, gv in enumerate(gvs)])
            return loss, {'zero_ops':zero_ops, 'accum_ops':accum_ops,
                          'train_step':train_step, 'test_ops':test_ops,
                          'reconstructed_spectra':res['reconstructed_spectra'],
                          'predicted_mass_ratios': res['predicted_mass_ratios'],
                          'input_spectra':input_spectra,
                          'input_mass_ratios':input_mass_ratios
                          }
        else:
            return loss, {}
@contextlib.contextmanager
def initialize_session(logdir, seed=None):
    """Yield a (session, saver) pair, restored from `logdir` when a
    checkpoint exists, freshly initialized otherwise.

    A seed of 0 means "do not seed"; any other value (including None)
    is forwarded to numpy's RNG.
    """
    if seed != 0:
        numpy.random.seed(seed=seed)
    session_config = tf.ConfigProto()
    # config.gpu_options.allow_growth = True
    expanded_logdir = os.path.expanduser(logdir)
    latest = tf.train.latest_checkpoint(expanded_logdir)
    saver = tf.train.Saver()
    with tf.Session(config=session_config) as sess:
        if latest:
            print('Load checkpoint {}.'.format(latest))
            saver.restore(sess, latest)
        else:
            print('Initialize new model.')
            os.makedirs(expanded_logdir, exist_ok=True)
            sess.run(tf.global_variables_initializer())
        yield sess, saver
import copy
class GetFresh:
    """
    Get fresh numbers, either
     - from 0 to n_samples-1 or
     - from list_of_indecies
    in a random order without repetition within a pass; once all numbers
    have been handed out, the pool is reshuffled and the cycle restarts.

    - Samuel Buteau, October 2018
    """
    def __init__(self, n_samples=None, list_of_indecies=None):
        """Build the shuffled pool from either a count or an explicit
        list of indices (exactly one must be given)."""
        if n_samples is not None:
            self.GetFresh_list = numpy.arange(n_samples, dtype=numpy.int32)
            self.get_fresh_count = n_samples
        elif list_of_indecies is not None:
            self.GetFresh_list = numpy.array(copy.deepcopy(list_of_indecies))
            self.get_fresh_count = len(self.GetFresh_list)
        else:
            raise Exception('Invalid Input')
        numpy.random.shuffle(self.GetFresh_list)
        self.get_fresh_pos = 0

    def get(self, n):
        """
        Return an array of `n` indices drawn from the pool; indices do
        not repeat within a single pass through the pool (requests larger
        than the pool wrap around, reshuffling between passes).
        - Samuel Buteau, October 2018
        """
        # Bug fix: this used to be `n >= self.get_fresh_count`, which
        # recursed even when n exactly equals the pool size and recursed
        # FOREVER for a pool of size 1 (get(1) -> get(0) + get(1) ...).
        # Only split requests strictly larger than one full pass.
        if n > self.get_fresh_count:
            half = n // 2
            return numpy.concatenate((self.get(half), self.get(n - half)))
        reshuffle_flag = False
        # Take as much as possible from the current position...
        n_immediate_fulfill = min(n, self.get_fresh_count - self.get_fresh_pos)
        batch_of_indecies = numpy.empty([n], dtype=numpy.int32)
        batch_of_indecies[:n_immediate_fulfill] = \
            self.GetFresh_list[self.get_fresh_pos:self.get_fresh_pos + n_immediate_fulfill]
        self.get_fresh_pos += n_immediate_fulfill
        if self.get_fresh_pos >= self.get_fresh_count:
            self.get_fresh_pos -= self.get_fresh_count
            reshuffle_flag = True
        # ...then, if the pool was exhausted, reshuffle and take the rest
        # from the start of the new pass.
        n_delayed_fulfill = max(0, n - n_immediate_fulfill)
        if reshuffle_flag:
            numpy.random.shuffle(self.GetFresh_list)
        if n_delayed_fulfill > 0:
            batch_of_indecies[n_immediate_fulfill:] = self.GetFresh_list[:n_delayed_fulfill]
            self.get_fresh_pos = n_delayed_fulfill
        return batch_of_indecies
def train_on_all_data(args):
    """Train the LooseBeer model on every FTIR spectrum in the database.

    Builds a supervised dataset (spectra with known component mass ratios)
    and an unsupervised one (spectra without labels) from the Django ORM,
    pickles both into args['datasets_file'], then runs a TF1 training loop
    that accumulates gradients over args['virtual_batches'] mini-batches
    before each optimizer step.  Checkpoints go to args['logdir'].

    args: dict of CLI options (see Command.add_arguments).
    """
    # Create supervised dataset.
    # 's' = absorbance spectrum, 'm' = the five component mass ratios
    # (LiPF6, EC, EMC, DMC, DEC), 'z' = 1. flag marking a supervised sample.
    supervised_dataset = {'s':[],'m':[], 'z':[]}
    num_supervised = 0
    # NOTE(review): ec_ratios / LIPF6_ratios are filled but never used below.
    ec_ratios =[]
    LIPF6_ratios = []
    for spec in FTIRSpectrum.objects.filter(supervised=True):
        supervised_dataset['z'].append(1.)
        supervised_dataset['m'].append([spec.LIPF6_mass_ratio, spec.EC_mass_ratio, spec.EMC_mass_ratio,
                                        spec.DMC_mass_ratio, spec.DEC_mass_ratio])
        ec_ratios.append(spec.EC_mass_ratio/(spec.EC_mass_ratio+spec.EMC_mass_ratio+spec.DMC_mass_ratio+spec.DEC_mass_ratio))
        LIPF6_ratios.append(spec.LIPF6_mass_ratio / (spec.EC_mass_ratio + spec.EMC_mass_ratio + spec.DMC_mass_ratio + spec.DEC_mass_ratio))
        supervised_dataset['s'].append(
            [samp.absorbance for samp in FTIRSample.objects.filter(spectrum=spec).order_by('index')])
        num_supervised += 1

    supervised_dataset['s']=numpy.array(supervised_dataset['s'])
    supervised_dataset['m']=numpy.array(supervised_dataset['m'])
    supervised_dataset['z']=numpy.array(supervised_dataset['z'])

    # Unsupervised samples carry zero mass ratios and z = 0.
    unsupervised_dataset = {'s': [], 'm': [], 'z': []}
    num_unsupervised = 0
    for spec in FTIRSpectrum.objects.filter(supervised=False):
        unsupervised_dataset['z'].append(0.)
        unsupervised_dataset['m'].append(5*[0.])
        unsupervised_dataset['s'].append(
            [samp.absorbance for samp in FTIRSample.objects.filter(spectrum=spec).order_by('index')])
        num_unsupervised += 1

    unsupervised_dataset['s'] = numpy.array(unsupervised_dataset['s'])
    unsupervised_dataset['m'] = numpy.array(unsupervised_dataset['m'])
    unsupervised_dataset['z'] = numpy.array(unsupervised_dataset['z'])

    # Persist both datasets so later runs / tools can reuse them.
    with open(os.path.join('.',args['datasets_file']), 'wb') as f:
        pickle.dump({'supervised_dataset':supervised_dataset,
                     'unsupervised_dataset':unsupervised_dataset,
                     'num_supervised':num_supervised,
                     'num_unsupervised': num_unsupervised,
                     }, f, protocol=pickle.HIGHEST_PROTOCOL)

    # Samplers that cycle through each dataset without repetition.
    supervised_fresh = GetFresh(n_samples=num_supervised)
    unsupervised_fresh = GetFresh(n_samples=num_unsupervised)

    if not args['seed'] ==0:
        random.seed(a=args['seed'])

    num_concentrations= 5
    num_samples = 1536

    # --- TF1 graph construction ---------------------------------------
    batch_size = tf.placeholder(dtype=tf.int32)
    learning_rate = tf.placeholder(dtype=tf.float32)
    pristine_spectra = tf.placeholder(tf.float32, [None, num_samples])

    # Data augmentation: clamp to non-negative, add Gaussian noise scaled
    # by the mean absorbance, then smooth with a random-width softmax filter.
    pos_spectra = tf.nn.relu(tf.expand_dims(pristine_spectra, axis=2))
    average_absorbance = tf.reduce_mean(pos_spectra, axis=[0,1,2])

    noised_spectra = tf.nn.relu(
        pos_spectra +
        tf.random.normal(
            shape=[batch_size, num_samples, 1],
            mean=0.,
            stddev=average_absorbance * args['noise_level']))

    # Random smoothing kernel: half-width in [2, 5), temperature controls
    # how sharply the softmax weights concentrate at the center.
    num_filter_d = tf.random.uniform(shape=[1], minval=2, maxval=5, dtype=tf.int32)[0]
    temperature = 1e-8 + tf.exp(tf.random.uniform(shape=[1], minval=-2., maxval=args['largest_temp_exp'], dtype=tf.float32))
    filter1 = tf.reshape(tf.nn.softmax(
        -tf.abs(tf.to_float(tf.range(
            start=-num_filter_d,
            limit=num_filter_d + 1,
            dtype=tf.int32))) / temperature),
        [2 * num_filter_d + 1, 1, 1])

    augmented_spectra = tf.nn.conv1d(noised_spectra, filter1, stride=1, padding="SAME")[:,:,0]

    mass_ratios = tf.placeholder(tf.float32, [None, num_concentrations])
    z_supervised = tf.placeholder(tf.float32, [None])

    model = LooseBeer(trainable=True, num_concentrations=num_concentrations, num_samples=num_samples)
    loss, extra = \
        model.optimize(
            input_spectra=augmented_spectra,
            input_mass_ratios=mass_ratios,
            input_z_supervised= z_supervised,
            learning_rate=learning_rate,
            global_norm_clip=args['global_norm_clip'],
            logdir=args['logdir']
        )

    step = tf.train.get_or_create_global_step()
    increment_step = step.assign_add(1)

    # --- training loop with gradient accumulation ---------------------
    with initialize_session(args['logdir'], seed=args['seed']) as (sess, saver):

        while True:
            current_step = sess.run(step)
            if current_step >= args['total_steps']:
                print('Training complete.')
                break

            # Reset accumulated gradients for this virtual batch.
            sess.run(extra['zero_ops'])
            summaries = []
            total_loss = 0.0
            for count in range(args['virtual_batches']):
                # Randomly pick supervised vs unsupervised data for this
                # mini-batch, weighted by args['prob_supervised'].
                prob_supervised = args['prob_supervised']
                choose_supervised = random.choices([True, False], weights=[prob_supervised, 1.-prob_supervised])[0]

                if choose_supervised:
                    # supervised
                    indecies = supervised_fresh.get(args['batch_size'])
                    s = supervised_dataset['s'][indecies]
                    m = supervised_dataset['m'][indecies]
                    z = supervised_dataset['z'][indecies]
                else:
                    # unsupervised
                    indecies = unsupervised_fresh.get(args['batch_size'])
                    s = unsupervised_dataset['s'][indecies]
                    m = unsupervised_dataset['m'][indecies]
                    z = unsupervised_dataset['z'][indecies]

                if count < args['virtual_batches'] - 1:
                    # Accumulate gradients only; the optimizer runs on the
                    # final mini-batch of the virtual batch below.
                    summary, loss_value, _, test = \
                        sess.run([model.merger, loss, extra['accum_ops'], extra['test_ops']],
                                 feed_dict={batch_size: args['batch_size'],
                                            model.dropout: args['dropout'],
                                            pristine_spectra: s,
                                            mass_ratios: m,
                                            z_supervised: z,
                                            learning_rate: args['learning_rate'],
                                            model.prediction_coeff: args['prediction_coeff'],
                                            model.positivity_coeff: args['positivity_coeff'],
                                            model.normalization_coeff: args['normalization_coeff'],
                                            model.small_x_coeff: args['small_x_coeff'],
                                            })
                else:
                    # Last mini-batch: apply the accumulated update and
                    # advance the global step.
                    summary, loss_value, _, test, step_value, s_out, m_out = \
                        sess.run([model.merger, loss, extra['train_step'], extra['test_ops'], increment_step,
                                  extra['reconstructed_spectra'], extra['predicted_mass_ratios']],
                                 feed_dict={batch_size: args['batch_size'],
                                            model.dropout: args['dropout'],
                                            pristine_spectra: s,
                                            mass_ratios: m,
                                            z_supervised: z,
                                            learning_rate: args['learning_rate'],
                                            model.prediction_coeff: args['prediction_coeff'],
                                            model.positivity_coeff: args['positivity_coeff'],
                                            model.normalization_coeff: args['normalization_coeff'],
                                            model.small_x_coeff: args['small_x_coeff'],
                                            })
                    # Optional interactive debugging plots.
                    if args['visuals']:
                        for i in range(args['batch_size']):
                            plt.scatter(range(num_samples), s[i,:])
                            plt.plot(range(num_samples), s_out[i,:])
                            plt.show()

                            plt.scatter(range(num_concentrations), m[i,:], c='r')
                            plt.scatter(range(num_concentrations), m_out[i,:], c='b')
                            plt.show()

                summaries.append(summary)
                total_loss += loss_value

            total_loss /= float(args['virtual_batches'])
            if not math.isfinite(total_loss):
                print('was not finite')
                # sess.run(tf.global_variables_initializer())
                # sess.run(zero_ops)
                # print('restarted')
                # continue

            if step_value % args['log_every'] == 0:
                print(
                    'Step {} loss {}.'.format(step_value, total_loss))
                for summary in summaries:
                    model.train_writer.add_summary(summary, step_value)

            if step_value % args['checkpoint_every'] == 0:
                print('Saving checkpoint.')
                saver.save(sess, os.path.join(args['logdir'], 'model.ckpt'), step_value)
def cross_validation(args):
    """Train the LooseBeer model with a held-out split and log test error.

    Supervised spectra are first grouped into clusters of (near-)identical
    mass-ratio vectors (mean abs difference < 0.001) so the train/test
    split happens per mixture rather than per spectrum; args['test_ratios']
    of the clusters (and of the unsupervised spectra) are held out.
    Every args['log_every'] steps the model is evaluated on the held-out
    supervised set, a 3D error figure is saved, and the raw predictions
    are pickled into args['cross_validation_dir'] for paper_figures().

    args: dict of CLI options (see Command.add_arguments).
    """
    # Random run identifier used in output file names.
    # NOTE(review): 'id' shadows the builtin of the same name.
    id = random.randint(a=0, b=100000)
    # Create supervised dataset ('s' spectra, 'm' five mass ratios, 'z' flag).
    supervised_dataset = {'s': [], 'm': [], 'z': []}
    num_supervised = 0
    # NOTE(review): ec_ratios / LIPF6_ratios are filled but never used below.
    ec_ratios = []
    LIPF6_ratios = []
    for spec in FTIRSpectrum.objects.filter(supervised=True):
        supervised_dataset['z'].append(1.)
        supervised_dataset['m'].append([spec.LIPF6_mass_ratio, spec.EC_mass_ratio, spec.EMC_mass_ratio,
                                        spec.DMC_mass_ratio, spec.DEC_mass_ratio])
        ec_ratios.append(spec.EC_mass_ratio / (
                    spec.EC_mass_ratio + spec.EMC_mass_ratio + spec.DMC_mass_ratio + spec.DEC_mass_ratio))
        LIPF6_ratios.append(spec.LIPF6_mass_ratio / (
                    spec.EC_mass_ratio + spec.EMC_mass_ratio + spec.DMC_mass_ratio + spec.DEC_mass_ratio))
        supervised_dataset['s'].append(
            [samp.absorbance for samp in FTIRSample.objects.filter(spectrum=spec).order_by('index')])
        num_supervised += 1

    supervised_dataset['s'] = numpy.array(supervised_dataset['s'])
    supervised_dataset['m'] = numpy.array(supervised_dataset['m'])
    supervised_dataset['z'] = numpy.array(supervised_dataset['z'])

    unsupervised_dataset = {'s': [], 'm': [], 'z': []}
    num_unsupervised = 0
    for spec in FTIRSpectrum.objects.filter(supervised=False):
        unsupervised_dataset['z'].append(0.)
        unsupervised_dataset['m'].append(5 * [0.])
        unsupervised_dataset['s'].append(
            [samp.absorbance for samp in FTIRSample.objects.filter(spectrum=spec).order_by('index')])
        num_unsupervised += 1

    unsupervised_dataset['s'] = numpy.array(unsupervised_dataset['s'])
    unsupervised_dataset['m'] = numpy.array(unsupervised_dataset['m'])
    unsupervised_dataset['z'] = numpy.array(unsupervised_dataset['z'])

    # Cluster supervised spectra by (near-)identical mass-ratio vectors so
    # repeated measurements of one mixture never straddle the train/test split.
    clusters = []
    for i in range(num_supervised):
        ratio = supervised_dataset['m'][i, :]
        found = False
        for j in range(len(clusters)):
            reference = clusters[j][0]
            if numpy.mean(numpy.abs(reference - ratio)) < 0.001:
                clusters[j][1].append(i)
                found = True
                break
        if not found:
            clusters.append((ratio, [i]))

    # From here on, "supervised" counts refer to clusters, not spectra.
    num_supervised = len(clusters)
    supervised_list = list(range(num_supervised))
    random.shuffle(supervised_list)
    unsupervised_list = list(range(num_unsupervised))
    random.shuffle(unsupervised_list)

    # Hold out the first test_ratios fraction of the shuffled lists.
    test_supervised_n = int(num_supervised * args['test_ratios'])
    test_unsupervised_n = int(num_unsupervised * args['test_ratios'])

    supervised_train_list = []
    for i in supervised_list[test_supervised_n:]:
        supervised_train_list += clusters[i][1]

    supervised_test_list = []
    for i in supervised_list[:test_supervised_n]:
        supervised_test_list += clusters[i][1]

    supervised_fresh_train = GetFresh(list_of_indecies=numpy.array(supervised_train_list))
    supervised_fresh_test = GetFresh(list_of_indecies=numpy.array(supervised_test_list))
    unsupervised_fresh_train = GetFresh(list_of_indecies=unsupervised_list[test_unsupervised_n:])
    unsupervised_fresh_test = GetFresh(list_of_indecies=unsupervised_list[:test_unsupervised_n])

    if not args['seed'] ==0:
        random.seed(a=args['seed'])

    num_concentrations = 5
    num_samples = 1536

    # --- TF1 graph construction (same augmentation as train_on_all_data) ---
    batch_size = tf.placeholder(dtype=tf.int32)
    learning_rate = tf.placeholder(dtype=tf.float32)
    pristine_spectra = tf.placeholder(tf.float32, [None, num_samples])

    pos_spectra = tf.nn.relu(tf.expand_dims(pristine_spectra, axis=2))
    average_absorbance = tf.reduce_mean(pos_spectra, axis=[0, 1, 2])

    noised_spectra = tf.nn.relu(
        pos_spectra +
        tf.random.normal(
            shape=[batch_size, num_samples, 1],
            mean=0.,
            stddev=average_absorbance * args['noise_level']))

    # Random-width softmax smoothing kernel.
    num_filter_d = tf.random.uniform(shape=[1], minval=2, maxval=5, dtype=tf.int32)[0]
    temperature = 1e-8 + tf.exp(
        tf.random.uniform(shape=[1], minval=-2., maxval=args['largest_temp_exp'], dtype=tf.float32))
    filter1 = tf.reshape(tf.nn.softmax(
        -tf.abs(tf.to_float(tf.range(
            start=-num_filter_d,
            limit=num_filter_d + 1,
            dtype=tf.int32))) / temperature),
        [2 * num_filter_d + 1, 1, 1])

    augmented_spectra = tf.nn.conv1d(noised_spectra, filter1, stride=1, padding="SAME")[:, :, 0]

    mass_ratios = tf.placeholder(tf.float32, [None, num_concentrations])
    z_supervised = tf.placeholder(tf.float32, [None])

    model = LooseBeer(trainable=True, num_concentrations=num_concentrations, num_samples=num_samples)
    loss, extra = \
        model.optimize(
            input_spectra=augmented_spectra,
            input_mass_ratios=mass_ratios,
            input_z_supervised=z_supervised,
            learning_rate=learning_rate,
            global_norm_clip=args['global_norm_clip'],
            logdir=args['logdir']
        )

    # Clean (noise-free) forward pass used for held-out evaluation.
    res = model.build_forward(
        input_spectra=tf.nn.relu(pristine_spectra)
    )
    step = tf.train.get_or_create_global_step()
    increment_step = step.assign_add(1)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        while True:
            current_step = sess.run(step)
            # Periodic evaluation on the held-out supervised set, plus one
            # final evaluation when training completes.
            if current_step >= args['total_steps'] or current_step % args['log_every'] == 0:
                if current_step >= args['total_steps']:
                    print('Training complete.')

                indecies = supervised_fresh_test.GetFresh_list
                s = supervised_dataset['s'][indecies]
                m = supervised_dataset['m'][indecies]
                z = supervised_dataset['z'][indecies]

                s_out, m_out = \
                    sess.run([res['reconstructed_spectra'], res['predicted_mass_ratios']],
                             feed_dict={batch_size: len(indecies),
                                        model.dropout: 0.0,
                                        pristine_spectra: s,
                                        mass_ratios: m,
                                        z_supervised: z,
                                        })

                # 3D plot of per-sample RMS errors: red = mass-ratio
                # prediction, blue = spectrum reconstruction.
                fig = plt.figure()
                ax = fig.add_subplot(111, projection='3d')
                ax.view_init(elev=0.3, azim=0)
                ax.set_zscale('log')
                ax.set_zlim(0.01, 0.25)
                ax.scatter(m[:,0], m[:,1], numpy.sqrt(numpy.mean((m_out - m)**2, axis=1)),c='r')
                ax.scatter(m[:,0], m[:,1], numpy.sqrt(numpy.mean((s_out - s)**2, axis=1)),c='b')
                fig.savefig('Test_perf_test_percent_{}_id_{}_step_{}.png'.format(int(100*args['test_ratios']),id, current_step))  # save the figure to file
                plt.close(fig)

                # Raw predictions are pickled for paper_figures() to consume.
                with open(os.path.join(args['cross_validation_dir'],'Test_data_test_percent_{}_id_{}_step_{}.file'.format(int(100 * args['test_ratios']), id, current_step)), 'wb') as f:
                    pickle.dump({'m':m, 'm_out':m_out,'s':s, 's_out':s_out}, f,pickle.HIGHEST_PROTOCOL)

                # Disabled interactive per-sample plots.
                if False:#current_step >= args['total_steps']:
                    for i in range(len(indecies)):
                        plt.scatter(range(num_samples), s[i, :])
                        plt.plot(range(num_samples), s_out[i, :])
                        plt.show()

                        plt.scatter(range(num_concentrations), m[i, :], c='r')
                        plt.scatter(range(num_concentrations), m_out[i, :], c='b')
                        plt.show()

            # Dead code kept as a string literal (evaluates unsupervised
            # held-out data); left untouched.
            '''
            indecies = unsupervised_fresh_test.GetFresh_list
            s = unsupervised_dataset['s'][indecies]
            m = unsupervised_dataset['m'][indecies]
            z = unsupervised_dataset['z'][indecies]

            s_out, m_out = \
                sess.run([res['reconstructed_spectra'], res['predicted_mass_ratios']],
                         feed_dict={batch_size: len(indecies),
                                    model.dropout: 0.0,
                                    pristine_spectra: s,
                                    mass_ratios: m,
                                    z_supervised: z,
                                    })

            fig = plt.figure()
            ax = fig.add_subplot(111, projection='3d')
            ax.scatter(m_out[:,0], m_out[:,1], numpy.sqrt(numpy.mean((s_out - s)**2, axis=1)),c='k')
            plt.show()
            '''
            if current_step >= args['total_steps']:
                break

            # Reset accumulated gradients for this virtual batch.
            sess.run(extra['zero_ops'])
            # NOTE(review): 'summaries' is never appended to in this loop
            # (unlike train_on_all_data).
            summaries = []
            total_loss = 0.0
            for count in range(args['virtual_batches']):
                prob_supervised = args['prob_supervised']
                choose_supervised = random.choices([True, False], weights=[prob_supervised, 1. - prob_supervised])[0]

                if choose_supervised:
                    # supervised
                    indecies = supervised_fresh_train.get(args['batch_size'])
                    s = supervised_dataset['s'][indecies]
                    m = supervised_dataset['m'][indecies]
                    z = supervised_dataset['z'][indecies]
                else:
                    # unsupervised
                    indecies = unsupervised_fresh_train.get(args['batch_size'])
                    s = unsupervised_dataset['s'][indecies]
                    m = unsupervised_dataset['m'][indecies]
                    z = unsupervised_dataset['z'][indecies]

                if count < args['virtual_batches'] - 1:
                    # Accumulate gradients only.
                    loss_value, _, test = \
                        sess.run([ loss, extra['accum_ops'], extra['test_ops']],
                                 feed_dict={batch_size: args['batch_size'],
                                            model.dropout: args['dropout'],
                                            pristine_spectra: s,
                                            mass_ratios: m,
                                            z_supervised: z,
                                            learning_rate: args['learning_rate'],
                                            model.prediction_coeff: args['prediction_coeff'],
                                            model.positivity_coeff: args['positivity_coeff'],
                                            model.normalization_coeff: args['normalization_coeff'],
                                            model.small_x_coeff: args['small_x_coeff'],
                                            })
                else:
                    # Last mini-batch: apply the accumulated update and
                    # advance the global step.
                    loss_value, _, test, step_value, s_out, m_out = \
                        sess.run([ loss, extra['train_step'], extra['test_ops'], increment_step,
                                  extra['reconstructed_spectra'], extra['predicted_mass_ratios']],
                                 feed_dict={batch_size: args['batch_size'],
                                            model.dropout: args['dropout'],
                                            pristine_spectra: s,
                                            mass_ratios: m,
                                            z_supervised: z,
                                            learning_rate: args['learning_rate'],
                                            model.prediction_coeff: args['prediction_coeff'],
                                            model.positivity_coeff: args['positivity_coeff'],
                                            model.normalization_coeff: args['normalization_coeff'],
                                            model.small_x_coeff: args['small_x_coeff'],
                                            })
                    # Optional interactive debugging plots.
                    if args['visuals']:
                        for i in range(args['batch_size']):
                            plt.scatter(range(num_samples), s[i, :])
                            plt.plot(range(num_samples), s_out[i, :])
                            plt.show()

                            plt.scatter(range(num_concentrations), m[i, :], c='r')
                            plt.scatter(range(num_concentrations), m_out[i, :], c='b')
                            plt.show()

                total_loss += loss_value

            total_loss /= float(args['virtual_batches'])
            if not math.isfinite(total_loss):
                print('was not finite')
                # sess.run(tf.global_variables_initializer())
                # sess.run(zero_ops)
                # print('restarted')
                # continue

            if step_value % args['log_every'] == 0:
                print(
                    'Step {} loss {}.'.format(step_value, total_loss))
import re
def paper_figures(args):
    """Build the publication figures from pickled cross-validation results.

    Scans args['cross_validation_dir'] for files named
    'Test_data_test_percent_<P>_id_<I>_step_<S>.file' (written by
    cross_validation), groups their contents by (held-out percent,
    training step), and produces: a 3D plot of mean prediction /
    reconstruction error, percentile curves at a fixed step,
    predicted-vs-actual scatter plots per component, and sample
    spectrum reconstructions.
    """
    # Collect every pickled result file under the cross-validation dir.
    all_path_filenames = []
    for root, dirs, filenames in os.walk(os.path.join('.', args['cross_validation_dir'])):
        for file in filenames:
            if file.endswith('.file'):
                all_path_filenames.append({'root':root, 'file':file})

    # Parse the percent/step encoded in each filename and group results
    # by (percent, step); step-0 files (untrained model) are skipped.
    data_dict = {}
    for file in all_path_filenames:
        matchObj = re.match(r'Test_data_test_percent_(\d{1,})_id_(\d{1,})_step_(\d{1,})\.file', file['file'])
        if matchObj:
            percent = int(matchObj.group(1))
            step = int(matchObj.group(3))
            with open(os.path.join(file['root'], file['file']),
                      'rb') as f:
                dat = pickle.load(f)
            k = (percent, step)
            if step == 0:
                continue
            if not k in data_dict.keys():
                data_dict[k] = []

            data_dict[k].append(dat)

    # NOTE(review): despite the names, these slices select percent == 10
    # and step == 20000 — confirm the intended values before publishing.
    data_40_percent = {}
    data_12000_steps = {}
    data_40_percent_12000_steps = []
    for k in data_dict.keys():
        percent, step = k
        if percent == 10:
            if not step in data_40_percent.keys():
                data_40_percent[step] = []
            data_40_percent[step] += data_dict[k]

        if step == 20000:
            if not percent in data_12000_steps.keys():
                data_12000_steps[percent] = []
            data_12000_steps[percent] += data_dict[k]

        if step == 20000 and percent == 10:
            data_40_percent_12000_steps += data_dict[k]

    # first, we compute the mean prediction error (for each component)
    # and mean reconstruction error, and plot them in 2D for all steps and percent.
    data_mean = []
    for k in data_dict.keys():
        dat = data_dict[k]
        percent, step = k
        mean_pred_error = numpy.mean(numpy.array([numpy.mean(numpy.abs(d['m'] - d['m_out'])) for d in dat]))
        mean_reconstruction_error = numpy.mean(numpy.array([numpy.mean(numpy.abs(d['s'] - d['s_out'])) for d in dat]))
        data_mean.append((percent, step, mean_pred_error, mean_reconstruction_error))

    # 3D scatter of mean errors vs (held-out percent, training step).
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.view_init(elev=0.3, azim=0)
    ax.set_zlim(0.005,.03)
    ax.scatter(numpy.array([d[0] for d in data_mean]),numpy.array([d[1] for d in data_mean]),numpy.array([d[3] for d in data_mean]),c='b',
               label='Reconstruction Error (abu)')
    ax.scatter(numpy.array([d[0] for d in data_mean]), numpy.array([d[1] for d in data_mean]),
               numpy.array([d[2] for d in data_mean]),c='k',
               label='Average Prediction Error (kg/kg)')
    plt.legend()
    ax.set_xlabel('Held-out set percentage (%)')
    ax.set_ylabel('Training steps')
    plt.yticks(numpy.array(range(0,30000,10000)))
    ax.set_zlabel('Error')
    ax.set_zticks(1./1000.*numpy.array(range(10, 30, 5)))
    plt.show()
    #fig.savefig('Test_perf_test_percent_{}_id_{}_step_{}.png'.format(int(100*args['test_ratios']),id, current_step))  # save the figure to file
    #plt.close(fig)

    # Percentile curves of per-sample errors at the fixed step, one curve
    # per held-out percentage (darker = smaller held-out set).
    data_mean = []
    for percent in data_12000_steps.keys():
        dat = data_12000_steps[percent]
        mean_pred_errors = numpy.sort(numpy.concatenate([numpy.mean(numpy.abs(d['m'] - d['m_out']), axis=1) for d in dat]))
        mean_reconstruction_errors = numpy.sort(numpy.concatenate([numpy.mean(numpy.abs(d['s'] - d['s_out']), axis=1) for d in dat]))
        data_mean.append((percent, mean_pred_errors, mean_reconstruction_errors, numpy.array([100.*(1.- (i/len(mean_pred_errors))) for i in range(len(mean_pred_errors))])))

    fig = plt.figure()
    ax = fig.add_subplot(1, 2, 1)
    for percent, mean_pred_errors, mean_reconstruction_errors, percentiles in reversed(data_mean):
        ax.plot(percentiles, mean_reconstruction_errors,
                c='{:1.3f}'.format(1. - (percent / 100.)),
                label='{}% held-out'.format(percent))
    ax.legend()
    ax.set_xlabel('Percentile over 24 trials (%)')
    ax.set_ylabel('Average Reconstruction Error (abu)')

    ax = fig.add_subplot(1, 2, 2)
    for percent, mean_pred_errors, mean_reconstruction_errors, percentiles in reversed(data_mean):
        ax.plot(percentiles, mean_pred_errors,
                c='{:1.3f}'.format(1. - (percent / 100.)),
                label='{}% held-out'.format(percent))
    ax.set_xlabel('Percentile over 24 trials (%)')
    ax.set_ylabel('Average Prediction Error (kg/kg)')
    ax.set_ylim(0.002, 0.1)
    plt.show()

    # Predicted vs actual mass ratio, per component.
    data_mean = {}
    labels = ['LiPF6', 'EC', 'EMC', 'DMC', 'DEC']
    colors = {'LiPF6':'k', 'EC':'r', 'EMC':'g', 'DMC':'b', 'DEC':'c'}
    dat = data_40_percent_12000_steps
    for i in range(5):
        pred = numpy.concatenate([ d['m_out'][:,i] for d in dat])
        true = numpy.concatenate([d['m'][:, i] for d in dat])
        data_mean[labels[i]]= (pred,true)

    fig = plt.figure()
    ax = fig.add_subplot(1, 3, 1)
    for k in ['LiPF6']:
        pred,true = data_mean[k]
        ax.plot(true, true,
                   c=colors[k])
        ax.scatter(true, pred,
                   c=colors[k],
                   label=k)
    ax.set_xlabel('Actual Mass Ratio (kg/kg)')
    ax.set_ylabel('Predicted Mass Ratio (kg/kg)')
    ax.legend()
    ax = fig.add_subplot(1, 3, 2)
    for k in ['EC']:
        pred, true = data_mean[k]
        ax.plot(true, true,
                c=colors[k])
        ax.scatter(true, pred,
                   c=colors[k],
                   label=k)
    ax.set_xlabel('Actual Mass Ratio (kg/kg)')
    ax.set_ylabel('Predicted Mass Ratio (kg/kg)')
    ax.legend()
    ax = fig.add_subplot(1, 3, 3)
    for k in ['EMC', 'DMC','DEC']:
        pred, true = data_mean[k]
        ax.plot(true, true,
                c=colors[k])
        ax.scatter(true, pred,
                   c=colors[k],
                   label=k)
    ax.set_xlabel('Actual Mass Ratio (kg/kg)')
    ax.set_ylabel('Predicted Mass Ratio (kg/kg)')
    ax.legend()
    plt.show()

    # Sample reconstructions: take the wavenumber axis from any supervised
    # spectrum, then plot three random test spectra against their
    # reconstructions, five figures in a row.
    dat = data_40_percent_12000_steps
    for spec in FTIRSpectrum.objects.filter(supervised=True):
        wanted_wavenumbers= numpy.array([samp.wavenumber for samp in FTIRSample.objects.filter(spectrum=spec).order_by('index')])
        break
    pred_s = numpy.concatenate([d['s_out'] for d in dat], axis=0)
    true_s = numpy.concatenate([d['s'] for d in dat], axis=0)
    num = len( pred_s)
    for _ in range(5):
        fig = plt.figure()
        colors= ['r', 'g', 'b']
        for j in range(1):
            ax = fig.add_subplot(1, 1, j+1)
            for i in range(3):
                # BUG FIX: random.randint(0, num) is inclusive of num, so it
                # could index one past the end of pred_s/true_s and raise
                # IndexError; randrange(num) samples a valid row index.
                index = random.randrange(num)
                print(wanted_wavenumbers[:len(true_s[index,:])],true_s[index,:])
                ax.scatter(wanted_wavenumbers[:len(true_s[index,:])],true_s[index,:] ,
                        c=colors[i])
                ax.plot(wanted_wavenumbers[:len(true_s[index,:])],pred_s[index,:] ,
                        c=colors[i])
            ax.set_xlabel('Wavenumber')
            ax.set_ylabel('Absorbance (abu)')
        plt.show()
class Command(BaseCommand):
    """Django management command driving training, cross-validation and
    figure generation for the FTIR spectra model."""

    def add_arguments(self, parser):
        """Register the mode selector plus all training hyper-parameters."""
        parser.add_argument('--mode', choices=['train_on_all_data',
                                               'cross_validation',
                                               'run_on_directory',
                                               'paper_figures'
                                               ])
        parser.add_argument('--logdir', required=True)
        parser.add_argument('--cross_validation_dir')
        # Remaining options are declared as a table: (flag, add_argument kwargs),
        # registered in this exact order.
        option_specs = (
            ('--batch_size', dict(type=int, default=32)),
            ('--virtual_batches', dict(type=int, default=2)),
            ('--learning_rate', dict(type=float, default=5e-3)),
            ('--visuals', dict(type=bool, default=False)),
            ('--prob_supervised', dict(type=float, default=0.9)),
            ('--total_steps', dict(type=int, default=30000)),
            ('--checkpoint_every', dict(type=int, default=2000)),
            ('--log_every', dict(type=int, default=2000)),
            ('--dropout', dict(type=float, default=0.05)),
            ('--test_ratios', dict(type=float, default=0.9)),
            ('--noise_level', dict(type=float, default=0.001)),
            ('--largest_temp_exp', dict(type=float, default=-1.)),
            ('--prediction_coeff', dict(type=float, default=5.)),
            ('--normalization_coeff', dict(type=float, default=1.)),
            ('--positivity_coeff', dict(type=float, default=1.)),
            ('--small_x_coeff', dict(type=float, default=.1)),
            ('--global_norm_clip', dict(type=float, default=10.)),
            ('--seed', dict(type=int, default=0)),
            ('--datasets_file', dict(default='compiled_datasets.file')),
            ('--input_dir', dict(default='InputData')),
            ('--output_dir', dict(default='OutputData')),
        )
        for flag, kwargs in option_specs:
            parser.add_argument(flag, **kwargs)

    def handle(self, *args, **options):
        """Dispatch to the routine selected by --mode."""
        mode = options['mode']
        if mode == 'train_on_all_data':
            train_on_all_data(options)
        elif mode == 'cross_validation':
            cross_validation(options)
        elif mode == 'paper_figures':
            paper_figures(options)
        elif mode == 'run_on_directory':
            print(2)
            # run_on_directory(options)
| 41.762055
| 213
| 0.568359
|
4a108dda70f51ac61f80b9f918fa694a6bbd8d41
| 944
|
py
|
Python
|
setup.py
|
ixc/glamkit-url-breadcrumbs
|
4838df55fe65aff7d5cdd66316533bcc0a28805c
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
ixc/glamkit-url-breadcrumbs
|
4838df55fe65aff7d5cdd66316533bcc0a28805c
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
ixc/glamkit-url-breadcrumbs
|
4838df55fe65aff7d5cdd66316533bcc0a28805c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages

# Import the package once so the version and description stay in sync
# (the original imported it twice via __import__).
url_breadcrumbs = __import__('url_breadcrumbs')

# BUG FIX: long_description previously received the *path* to README.rst
# rather than its contents, so PyPI would have shown a filesystem path
# instead of the real description.  Read the file's text instead.
with open(os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'README.rst')) as readme:
    long_description = readme.read()

setup(
    name='glamkit-url-breadcrumbs',
    version=url_breadcrumbs.__version__,
    description=u' '.join(url_breadcrumbs.__doc__.splitlines()).strip(),
    long_description=long_description,
    author='James Murty',
    author_email='james@interaction.net.au',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    url='https://github.com/ixc/glamkit-url-breadcrumbs/',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
    test_suite='runtests.runtests',
)
| 31.466667
| 68
| 0.65678
|
4a108e588f1e3a0a3918ef124b3e4e06d0ae79eb
| 956
|
py
|
Python
|
Certification/Python (Basic)/shape_classes_with_area_method.py
|
xuedong/hacker-rank
|
ce8a60f80c2c6935b427f9409d7e826ee0d26a89
|
[
"MIT"
] | 1
|
2021-02-22T17:37:45.000Z
|
2021-02-22T17:37:45.000Z
|
Certification/Python (Basic)/shape_classes_with_area_method.py
|
xuedong/hacker-rank
|
ce8a60f80c2c6935b427f9409d7e826ee0d26a89
|
[
"MIT"
] | null | null | null |
Certification/Python (Basic)/shape_classes_with_area_method.py
|
xuedong/hacker-rank
|
ce8a60f80c2c6935b427f9409d7e826ee0d26a89
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import math
import os
import random
import re
import sys
class Rectangle:
    """Axis-aligned rectangle described by its two side lengths."""

    def __init__(self, length, width):
        # Keep both side lengths exactly as supplied by the caller.
        self.length, self.width = length, width

    def area(self):
        """Return the surface area (length times width)."""
        return self.width * self.length
class Circle:
    """Circle described by its radius."""

    def __init__(self, radius):
        # Radius as supplied by the caller; no validation, matching Rectangle.
        self.radius = radius

    def area(self):
        """Return the disc area (pi times radius squared)."""
        return self.radius * self.radius * math.pi
if __name__ == '__main__':
    # HackerRank harness: read q shape queries from stdin and write each
    # shape's area (2 decimals) to the file named by OUTPUT_PATH.
    # IMPROVED: the output file is now managed by a context manager, so it
    # is closed even if a query is malformed (the original leaked the
    # handle on the ValueError path); the unused `queries` list is gone.
    with open(os.environ['OUTPUT_PATH'], 'w') as fptr:
        q = int(input())

        for _ in range(q):
            args = input().split()
            shape_name, params = args[0], tuple(map(int, args[1:]))

            if shape_name == "rectangle":
                a, b = params[0], params[1]
                shape = Rectangle(a, b)
            elif shape_name == "circle":
                r = params[0]
                shape = Circle(r)
            else:
                raise ValueError("invalid shape type")

            fptr.write("%.2f\n" % shape.area())
| 21.244444
| 63
| 0.553347
|
4a108f01ed36c85ef98f9d0a0fd9f0590f2781d8
| 3,443
|
py
|
Python
|
massweb/pnk_net/find_post.py
|
acaceres2176/massweb
|
153d1e00ee293f467e88e7f5ce98617a5c13cfb7
|
[
"Apache-2.0"
] | null | null | null |
massweb/pnk_net/find_post.py
|
acaceres2176/massweb
|
153d1e00ee293f467e88e7f5ce98617a5c13cfb7
|
[
"Apache-2.0"
] | null | null | null |
massweb/pnk_net/find_post.py
|
acaceres2176/massweb
|
153d1e00ee293f467e88e7f5ce98617a5c13cfb7
|
[
"Apache-2.0"
] | null | null | null |
import os
from bs4 import BeautifulSoup, SoupStrainer
import sys
from urlparse import urlparse, urlunparse
import traceback
from requests.exceptions import ConnectionError
import urllib
import requests
from massweb.targets.fuzzy_target import FuzzyTarget
from massweb.targets.target import Target
from massweb.pnk_net.pnk_request import pnk_request_raw
from urlparse import urljoin
import codecs
import logging
from logging import StreamHandler
from bs4.element import Tag
logging.basicConfig(format='%(asctime)s %(name)s: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logger = logging.getLogger('find_post')
logger.setLevel(logging.INFO)
sys.stdin = codecs.getreader('utf-8')(sys.stdin)
sys.stderr = codecs.getwriter('utf-8')(sys.stderr)
GET = "get"
POST = "post"
def normalize_link(url_to_normalize, current_page_url):
    """Resolve a (possibly relative) link against the page it was found on.

    Returns a dict with "norm_url" (the absolute URL) and "netloc" (the
    host explicitly present in url_to_normalize, empty for relative links).
    Raises ValueError when either argument is empty/None.

    FIXME: path components are not themselves normalized here.
    """
    if not url_to_normalize or not current_page_url:
        raise ValueError("url_to_normalize and/or current_page_url is empty or None. It must be a URL string.")

    parsed = urlparse(url_to_normalize)

    # A link that already carries both a scheme and a host is absolute;
    # anything else is resolved relative to the current page.
    if parsed.scheme and parsed.netloc:
        full_url = url_to_normalize
    else:
        full_url = urljoin(current_page_url, url_to_normalize)

    return {"norm_url" : full_url, "netloc" : parsed.netloc}
def find_post_requests(**kwargs):
    """Extract POST targets from the <form> elements of a page.

    Keyword arguments:
        target           -- the Target whose page is being scanned
        response_text    -- the page HTML; fetched via pnk_request_raw
                            when not supplied
        strict_scope     -- skip forms whose action points at a different
                            host than the target (default True)
        hadoop_reporting -- emit logger.info/warn progress messages
                            (default False)

    Returns a list of Target objects with ttype=POST, one per accepted
    form, whose data dict maps input names to URL-quoted default values.
    """
    target = kwargs.get("target")
    response_text=kwargs.get("response_text")
    strict_scope=kwargs.get("strict_scope", True)
    hadoop_reporting=kwargs.get("hadoop_reporting", False)
    if hadoop_reporting:
        logger.info("Finding additional post requests in %s", target)
    # Fetch the page ourselves only when the caller did not hand us HTML.
    if not response_text:
        response_text = pnk_request_raw(target)[1].text
    if strict_scope:
        url_host = urlparse(unicode(target)).netloc
    post_requests = []
    # SoupStrainer restricts parsing to <form> elements only.
    for form in BeautifulSoup(response_text, 'html.parser', parse_only=SoupStrainer('form')):
        try:
            norm_link_dic = normalize_link(form.get("action"), unicode(target))
        except ValueError:
            # Form without a usable action attribute; skip it.
            continue
        norm_url = norm_link_dic["norm_url"]
        form_host = norm_link_dic["netloc"]
        if strict_scope:
            # If form explicitly specifies domain that doesn't match current host
            # then don't process it.
            if form_host and (url_host != form_host):
                continue
        # Input types whose name/value pairs become the POST body.
        listform = ["text", "radio", "checkbox", "password", "file", "image", "hidden"]
        _input = form.findAll('input', {'type' : listform})
        post_data = {}
        for elem in _input:
            try:
                input_name = elem["name"]
            except:
                # NOTE(review): bare except — skips nameless inputs, but would
                # also swallow unrelated errors.
                continue
            try:
                value = urllib.quote_plus(elem["value"])
            except:
                # Missing/unquotable value: fall back to empty string.
                if hadoop_reporting:
                    logger.warn("Handled exception: ", exc_info=True)
                value = ""
            post_data[input_name] = value
        target_post = Target(norm_url, data=post_data, ttype=POST)
        post_requests.append(target_post)
    if hadoop_reporting:
        logger.info("Found %s post requests on page %s", len(post_requests), target)
        logger.info(post_requests)
    return post_requests
| 40.988095
| 111
| 0.682544
|
4a108f01edefb1ee5f4d12d83df795f30b9a38b0
| 2,117
|
py
|
Python
|
python3.4Smartforest/lib/python3.4/site-packages/django/conf/locale/en_AU/formats.py
|
letouriste001/SmartForest_2.0
|
109b78bf1e8c8404800f377ab969395ccbb617be
|
[
"MIT"
] | null | null | null |
python3.4Smartforest/lib/python3.4/site-packages/django/conf/locale/en_AU/formats.py
|
letouriste001/SmartForest_2.0
|
109b78bf1e8c8404800f377ab969395ccbb617be
|
[
"MIT"
] | null | null | null |
python3.4Smartforest/lib/python3.4/site-packages/django/conf/locale/en_AU/formats.py
|
letouriste001/SmartForest_2.0
|
109b78bf1e8c8404800f377ab969395ccbb617be
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# Australian English (en_AU) locale formats for Django's localization layer.
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j M Y'             # '25 Oct 2006'
TIME_FORMAT = 'P'                 # '2:30 p.m.'
DATETIME_FORMAT = 'j M Y, P'      # '25 Oct 2006, 2:30 p.m.'
YEAR_MONTH_FORMAT = 'F Y'         # 'October 2006'
MONTH_DAY_FORMAT = 'j F'          # '25 October'
SHORT_DATE_FORMAT = 'd/m/Y'       # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 p.m.'
FIRST_DAY_OF_WEEK = 0             # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Day-first numeric dates are accepted; word-month forms are intentionally
# left disabled below.
DATE_INPUT_FORMATS = [
    '%d/%m/%Y', '%d/%m/%y',              # '25/10/2006', '25/10/06'
    # '%b %d %Y', '%b %d, %Y',            # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',            # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',            # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',            # '25 October 2006', '25 October, 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%d/%m/%Y %H:%M:%S',     # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M:%S.%f',  # '25/10/2006 14:30:59.000200'
    '%d/%m/%Y %H:%M',        # '25/10/2006 14:30'
    '%d/%m/%Y',              # '25/10/2006'
    '%d/%m/%y %H:%M:%S',     # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M:%S.%f',  # '25/10/06 14:30:59.000200'
    '%d/%m/%y %H:%M',        # '25/10/06 14:30'
    '%d/%m/%y',              # '25/10/06'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| 2,117
| 2,117
| 0.465753
|
4a1090557aebdd9ce4821d258c630f109f3a2877
| 9,903
|
py
|
Python
|
main.py
|
KitHaywood/Gecko
|
92f322d5248229ca11b1a5b7fd43028e0ac66f39
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
KitHaywood/Gecko
|
92f322d5248229ca11b1a5b7fd43028e0ac66f39
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
KitHaywood/Gecko
|
92f322d5248229ca11b1a5b7fd43028e0ac66f39
|
[
"Apache-2.0"
] | null | null | null |
try:
from pycoingecko import CoinGeckoAPI
except ImportError:
print('please install pycoingecko in the CLI')
import datetime as dt
try:
import pandas as pd
except ImportError:
print('please install pandas in the CLI')
import time
import requests
import json
import tqdm
import sys
import os
from utils import date_range_lister
import numpy as np
from scipy.fft import fft
from scipy.optimize import curve_fit
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
try:
from symfit import parameters, variables, sin, cos, Fit
except ImportError:
print('please install symfit')
# TODO - Write the dox uuuhhhhhh
# TODO - Write func to update data on increment
# TODO - Go back and use market_chart_range to get
class Gecko:
    """
    Class for interaction with CoinGecko API for data retreival and json write
    """
    def __init__(self):
        # Single piece of state: the CoinGecko API client.
        self.cg = CoinGeckoAPI()
        return None

    def get_all_coins(self):
        """returns list of crypto instruments in CoinGecko"""
        return self.cg.get_coins_list()

    def get_market_data(self, symbol, currency, days):
        """
        PARAMETERS:
            crypto-id(symbol) --> str (see CoinGecko dox for crypto-ids)
            currency --> str (industry standard for sovereign currency, i.e. 'usd')
            data resoltion --> '1','14','30','max'
        RETURNS: DataFrame indexed by datetime, values are prices
        """
        base_url = f'https://api.coingecko.com/api/v3/coins/{symbol}/market_chart?vs_currency={currency}&days={days}'
        response = requests.get(base_url)
        res = response.json()
        data = res['prices']
        # API timestamps are epoch milliseconds; convert to datetime objects.
        data = [[dt.datetime.fromtimestamp(x[0]/1000), x[1]] for x in data]
        df = pd.DataFrame(data)
        if days == 'max':
            # 'max' returns daily points: drop the time-of-day component.
            df[0] = df[0].apply(lambda x: dt.datetime.strptime(x.strftime("%d-%m-%Y"), "%d-%m-%Y"))
            df.index = df[0]
            df = df[1]
        else:
            # Intraday resolutions keep second-level precision.
            df[0] = df[0].apply(lambda x: dt.datetime.strptime(x.strftime("%d-%m-%YT%H:%M:%S"), "%d-%m-%YT%H:%M:%S"))
            df.index = df[0]
            df = df[1]
        return df

    def write_original_to_json(self, crypto, currency, days):
        """Fetch market data and write it to ``<crypto>.json`` in the CWD.

        NOTE: silently overwrites any existing file for the same crypto.
        """
        with open(os.path.join(os.getcwd(), f'{crypto}.json'), 'w') as f:
            data = self.get_market_data(crypto, currency, days) # CAREFUL - need an overwrite function
            json.dump(data.to_json(orient='split', date_format='iso'), f)

    def get_dates(self, crypto):
        """
        takes crypto - returns tuple of date extremities
        (max date first, min date second, as stored in the JSON index)
        """
        with open(os.path.join(os.getcwd(), f'{crypto}.json'), 'r') as f:
            data = json.load(f)
            # The file stores a JSON string inside JSON, hence the double decode.
            data = json.loads(data)
            res = pd.DataFrame.from_dict(data, orient='columns')
            res.index = res['index']
        return res.index.max(), res.index.min()

    def get_data_on_interval(self, symbol, currency, from_date, to_date):
        """
        takes symbol, target return currency, from and to dates and returns
        df of prices on the interval
        """
        base_url = f"https://api.coingecko.com/api/v3/coins/{symbol}/market_chart/range?vs_currency={currency}&from={from_date}&to={to_date}"
        response = requests.get(base_url)
        res = response.json()
        data = res['prices']
        # Epoch-millisecond timestamps -> datetime, same as get_market_data.
        data = [[dt.datetime.fromtimestamp(x[0]/1000), x[1]] for x in data]
        df = pd.DataFrame(data)
        return df

    def write_to_json(self, crypto, currency):
        # NOT WORKING YET - JUST SCRIBBLING
        # Walk consecutive date ranges, download each interval, concatenate,
        # and dump the result to <crypto>.json. Failed intervals are recorded
        # in ``tracker`` (currently local-only; never returned or persisted).
        dates = date_range_lister()
        tracker = {}
        tracker[crypto] = {}
        res = pd.DataFrame()
        for i in tqdm.tqdm(range(len(dates))):
            if i == 0: # careful
                pass
            else:
                try:
                    data = self.get_data_on_interval(crypto, currency, dates[i-1], dates[i])
                    res = pd.concat([res, data])
                except json.decoder.JSONDecodeError:
                    print(dates[i], dates[i-1])
                    tracker[crypto][dates[i]] = [dates[i], dates[i-1]]
                # Throttle requests to stay under the public API rate limit.
                time.sleep(1.2)
        if isinstance(res, pd.DataFrame):
            with open(f'{crypto}.json', 'w') as f:
                json.dump(res.to_json(orient='split', date_format='iso'), f)
        else:
            print('self.get_data_on_interval returned incorrect datatype')
        return res

    def load_from_json(self, crypto):
        """
        Parameters: str --> crypto ID as listed in CoinGeckoAPI
        Returns: DataFrame of loaded data from JSON
        """
        with open(f"{crypto}.json", 'r') as f_in:
            # Double decode: the file holds a JSON-encoded string of JSON.
            data = json.loads(json.load(f_in))
        return pd.DataFrame.from_dict(data['data'])
def gck_main():
    """Download market data for every crypto named on the command line
    (or the defaults) and write each to ``<crypto>.json``.

    Returns:
        int: 0 on completion.
    """
    # BUGFIX: ``cryptos`` used to be bound only inside the
    # ``sys.argv[0] == 'main.py'`` branch; running under any other entry
    # point (the old ``else: pass``) raised NameError at the loop below.
    cryptos = ['cardano', 'usd-coin']  # DEFAULTS
    if sys.argv[0] == 'main.py':
        if len(sys.argv[1:]) > 0:  # USER DEFINED
            print('\n', f"CLI cryptos passed, getting --> {', '.join([x.upper() for x in list(sys.argv[1:])])}", '\n')
            cryptos = list([x.lower() for x in sys.argv[1:]])
        else:
            print('\n', "No CLI cryptos passed, using default {} , {} ".format(
                str(cryptos[0]).upper(),
                str(cryptos[1]).upper()
            ), '\n')
    gck = Gecko()
    for crypto in tqdm.tqdm(cryptos):
        gck.write_to_json(crypto, 'usd')
    return 0
class Strategy:
    """Curve-fitting strategies for price series: symbolic Fourier-series
    fits (via symfit) and FFT-based extrapolation."""

    def __init__(self) -> None:
        pass

    def load_data(self, crypto):
        """Load ``data\\<crypto>.json`` (double-encoded JSON, as written by
        ``Gecko``) into a DataFrame.
        """
        with open(f'data\{crypto}.json') as f:
            data = json.loads(json.load(f))
        return pd.DataFrame.from_dict(data['data'])

    def fit_curve_fourier(self, data, window):
        """Fit symbolic Fourier series of orders 3..10 to the last ``window``
        points of ``data``.

        Parameters:
            data: DataFrame with ISO date strings in column 0, prices in column 1.
            window: number of trailing observations to fit.
        Returns:
            dict with keys ``"<n>_result"`` (symfit FitResults) and
            ``"<n>_yhat"`` (fitted values) per order n.
        """
        def fourier_series(x, f, n=0):
            """
            Returns a symbolic fourier series of order `n`.
            :param n: Order of the fourier series.
            :param x: Independent variable
            :param f: Frequency of the fourier series
            """
            # Make the parameter objects for all the terms
            a0, *cos_a = parameters(','.join(['a{}'.format(i) for i in range(0, n + 1)]))  # check range 0-->
            sin_b = parameters(','.join(['b{}'.format(i) for i in range(1, n + 1)]))  # check range 1-->
            # Construct the series
            series = a0 + sum(ai * cos(i * f * x) + bi * sin(i * f * x)
                              for i, (ai, bi) in enumerate(zip(cos_a, sin_b), start=1))
            return series

        def fitter(data, window, degree):
            """Fit one Fourier series of order ``degree`` over ``window`` points."""
            self.window = window
            x, y = variables('x,y')  # sets up variable for symfit object
            model_dict = {y: fourier_series(x, 4, n=degree)}  # required output format
            data['dates'] = [dt.datetime.strptime(x, "%Y-%m-%dT%H:%M:%S.%fZ") for x in data[0]]
            self.x_data = data['dates'].iloc[-self.window:].apply(lambda x: mdates.date2num(x))  # sort the dates out
            self.y_data = data[1].iloc[-window:]  # trim dataset to window size
            fit = Fit(model_dict, x=self.x_data, y=self.y_data)  # create fit object
            fit_result = fit.execute()
            y_hat = fit.model(x=self.x_data, **fit_result.params).y
            return fit_result, y_hat

        self.res = {}
        for i in range(3, 11, 1):  # Want to optimize on this
            # BUGFIX: each order was previously fitted TWICE (once per dict
            # entry) and the caller-supplied ``window`` was ignored in favour
            # of a hard-coded 250. Fit once and honour the parameter.
            fit_result, y_hat = fitter(data, window, i)
            self.res[f"{i}_result"] = fit_result
            self.res[f"{i}_yhat"] = y_hat
        return self.res

    def fourierExtrapolation(self, x, n_predict):
        """Extrapolate signal ``x`` by ``n_predict`` samples using its
        dominant FFT harmonics on top of the linear trend.

        Parameters:
            x: 1-D numpy array of samples.
            n_predict: number of future samples to append.
        Returns:
            numpy array of length ``x.size + n_predict``.
        """
        n = x.size
        n_harm = 50                     # number of harmonics in model
        t = np.arange(0, n)
        p = np.polyfit(t, x, 1)         # find linear trend in x
        x_notrend = x - p[0] * t        # detrended x
        # BUGFIX: the module imports ``from scipy.fft import fft`` (the
        # function), so ``fft.fft`` / ``fft.fftfreq`` raised AttributeError.
        # Use numpy's FFT helpers instead.
        x_freqdom = np.fft.fft(x_notrend)   # detrended x in frequency domain
        f = np.fft.fftfreq(n)               # frequencies
        indexes = list(range(n))
        # sort indexes by amplitude so the strongest harmonics come first
        indexes.sort(key=lambda i: np.absolute(x_freqdom[i]))
        indexes.reverse()
        t = np.arange(0, n + n_predict)
        restored_sig = np.zeros(t.size)
        for i in indexes[:1 + n_harm * 2]:
            ampli = np.absolute(x_freqdom[i]) / n   # amplitude
            phase = np.angle(x_freqdom[i])          # phase
            restored_sig += ampli * np.cos(2 * np.pi * f[i] * t + phase)
        return restored_sig + p[0] * t

    def create_coeff_matrix(self, res):
        """Build a rectangular matrix of fitted Fourier coefficients, one row
        per order, zero-padded on the right to the longest coefficient set.
        """
        tester = np.array([np.fromiter(v.params.values(), float) for k, v in res.items() if '_result' in k], dtype=object)
        max_len = max([len(i) for i in tester])
        last_res = np.array([np.pad(tester[i],
                                    (0, max_len - len(tester[i])),
                                    'constant',
                                    constant_values=0) for i in range(len(tester))])  # got to make A and B matrix
        return last_res

    def plot_result(self):
        """Plot every fitted curve (faint), the raw series, the mean fit and a
        20-period rolling mean. Must be called after ``fit_curve_fourier``.

        Returns:
            matplotlib Figure, or None when ``self.window`` is 0.
        """
        if self.window != 0:
            new_res = np.array([self.res[f"{k.split('_')[0]}_yhat"] for k, v in self.res.items()])
            mean = np.mean(new_res, axis=0)
            fig = plt.figure(figsize=(20, 10))
            ax = fig.add_subplot(111)
            for k, v in self.res.items():
                ax.plot(self.x_data, self.res[f"{k.split('_')[0]}_yhat"], alpha=0.1, color='k')
            ax.plot(self.x_data, self.y_data)
            ax.plot(self.x_data, mean)
            ax.plot(self.x_data, self.y_data.rolling(20).mean())
            ax.grid()  # BUGFIX: was ``ax.grid`` — bare attribute access, never called
            return fig
        # BUGFIX: the old code fell through to ``return fig`` with ``fig``
        # unbound, raising NameError; return None explicitly instead.
        print('Window must be > 0')
        return None
def main(window):
    """Run the Fourier-fit pipeline for ethereum over ``window`` points.

    Returns a (raw DataFrame, coefficient matrix) tuple.
    """
    # main = gck_main() # THIS GETS DATA AND WRITES TO JSON
    strategy = Strategy()
    frame = strategy.load_data('ethereum')
    fit_results = strategy.fit_curve_fourier(frame, window)
    coefficient_matrix = strategy.create_coeff_matrix(fit_results)
    strategy.plot_result()
    return frame, coefficient_matrix
if __name__ == "__main__":
    main(500)
    main(5000)
| 37.653992
| 141
| 0.562658
|
4a1090b4280f9677aab9c021fb43db231a0b5ed0
| 906
|
py
|
Python
|
setup.py
|
ladsantos/starcross
|
47b4258e5d4b3b7abc0559e3460a7ff7963baa77
|
[
"MIT"
] | null | null | null |
setup.py
|
ladsantos/starcross
|
47b4258e5d4b3b7abc0559e3460a7ff7963baa77
|
[
"MIT"
] | null | null | null |
setup.py
|
ladsantos/starcross
|
47b4258e5d4b3b7abc0559e3460a7ff7963baa77
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Packaging script for the ``starcross`` package."""
import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Shortcut: ``python setup.py publish`` builds and uploads a source dist.
if sys.argv[-1] == "publish":
    os.system("python setup.py sdist upload")
    sys.exit()

# BUGFIX: the file handle from open('requirements.txt') was never closed;
# read the dependency list inside a context manager instead.
with open('requirements.txt', 'r') as req_file:
    requirements = [line.strip() for line in req_file.readlines()]

setup(
    name="starcross",
    version="0.1a",
    author="Leonardo dos Santos",
    author_email="Leonardo.dosSantos@unige.ch",
    packages=["starcross"],
    url="https://github.com/ladsantos/starcross",
    license="MIT",
    description="High-energy luminosity of stars",
    install_requires=requirements,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
    ]
)
| 25.885714
| 65
| 0.625828
|
4a10918c85de089525569d378f36ec597b2f5b93
| 15,472
|
py
|
Python
|
featuretools/primitives/primitive_base.py
|
bentona/featuretools
|
798007d0bd137bc27e168480a10bc3bbee065b54
|
[
"BSD-3-Clause"
] | null | null | null |
featuretools/primitives/primitive_base.py
|
bentona/featuretools
|
798007d0bd137bc27e168480a10bc3bbee065b54
|
[
"BSD-3-Clause"
] | null | null | null |
featuretools/primitives/primitive_base.py
|
bentona/featuretools
|
798007d0bd137bc27e168480a10bc3bbee065b54
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import copy
import logging
from builtins import zip
from numpy import nan
from featuretools.entityset import Entity, EntitySet
from featuretools.utils.wrangle import (
_check_time_against_column,
_check_timedelta
)
from featuretools.variable_types import (
Datetime,
DatetimeTimeIndex,
Numeric,
NumericTimeIndex,
Variable
)
logger = logging.getLogger('featuretools')
class PrimitiveBase(object):
    """Base class for all features."""
    #: (str): Name of backend function used to compute this feature
    name = None
    #: (list): Variable types of inputs
    input_types = None
    #: (:class:`.Variable`): variable type of return
    return_type = None
    #: Default value this feature returns if no data found. defaults to np.nan
    default_value = nan
    #: (bool): True if feature needs to know what the current calculation time
    # is (provided to computational backend as "time_last")
    uses_calc_time = False
    #: (:class:`.PrimitiveBase`): Feature to condition this feature by in
    # computation (e.g. take the Count of products where the product_id is
    # "basketball".)
    where = None
    #: (bool): If True, allow where clauses in DFS
    allow_where = False
    #: (str or :class:`.Timedelta`): Use only some amount of previous data from
    # each time point during calculation
    use_previous = None
    #: (int): Maximum number of features in the largest chain proceeding
    # downward from this feature's base features.
    max_stack_depth = None
    rolling_function = False
    #: (bool): If True, feature will expand into multiple values during
    # calculation
    expanding = False
    _name = None
    # whitelist of primitives can have this primitive in input_types
    base_of = None
    # blacklist of primitives can have this primitive in input_types
    base_of_exclude = None
    # (bool) If True will only make one feature per unique set of base features
    commutative = False
    # (bool) If True, feature function depends on all values of entity
    # (and will receive these values as input, regardless of specified instance ids)
    uses_full_entity = False

    def __init__(self, entity, base_features, **kwargs):
        """Construct a feature on ``entity`` built from ``base_features``.

        Raises ValueError when the same base feature appears twice.
        """
        assert all(isinstance(f, PrimitiveBase) for f in base_features), \
            "All base features must be features"
        if len(set([bf.hash() for bf in base_features])) != len(base_features):
            raise ValueError(u"Duplicate base features ({}): {}".format(
                self.__class__, base_features))
        self.entity_id = entity.id
        # Hold the lightweight metadata entityset, not the full data copy.
        self.entityset = entity.entityset.metadata
        # P TODO: where should this logic go?
        # not all primitives support use previous so doesn't make sense to have
        # in base
        if self.use_previous:
            self.use_previous = _check_timedelta(self.use_previous)
            # NOTE(review): ``self.base_features`` is read here BEFORE the
            # assignment below — only safe while ``use_previous`` is unset at
            # construction time for this class; confirm against subclasses.
            assert len(self.base_features) > 0
            time_index = self.base_features[0].entity.time_index
            time_col = self.base_features[0].entity[time_index]
            assert time_index is not None, ("Use previous can only be defined "
                                            "on entities with a time index")
            assert _check_time_against_column(self.use_previous, time_col)
        self.base_features = base_features
        # variable type can be declared or inferred from first base feature
        self.additional_attributes = kwargs
        assert self._check_input_types(), ("Provided inputs don't match input "
                                           "type requirements")
        super(PrimitiveBase, self).__init__(**kwargs)

    @property
    def entity(self):
        """Entity this feature belongs too"""
        return self.entityset[self.entity_id]

    # P TODO: this should get refactored to return_type
    @property
    def variable_type(self):
        """Variable type of this feature's output, walking down base features
        until a declared return type is found."""
        feature = self
        return_type = self.return_type
        while return_type is None:
            feature = feature.base_features[0]
            return_type = feature.return_type
        # only the original time index should exist
        # so make this feature's return type just a Datetime
        if return_type == DatetimeTimeIndex:
            return_type = Datetime
        elif return_type == NumericTimeIndex:
            return_type = Numeric
        return return_type

    @property
    def base_hashes(self):
        """Hashes of the base features"""
        return [f.hash() for f in self.base_features]

    def _check_feature(self, feature):
        """Coerce a Variable into an IdentityFeature; pass features through."""
        if isinstance(feature, Variable):
            return IdentityFeature(feature)
        elif isinstance(feature, PrimitiveBase):
            return feature
        raise Exception("Not a feature")

    def __repr__(self):
        ret = "<Feature: %s>" % (self.get_name())
        # encode for python 2
        if type(ret) != str:
            ret = ret.encode("utf-8")
        return ret

    def hash(self):
        """Hash combining the feature's generated name and its entity id."""
        return hash(self.get_name() + self.entity.id)

    def __hash__(self):
        # logger.warning("To hash a feature, use feature.hash()")
        return self.hash()

    def __eq__(self, other_feature_or_val):
        """Compares to other_feature_or_val by equality
        See also:
            :meth:`PrimitiveBase.equal_to`
        """
        from .binary_transform import Equals
        return Equals(self, other_feature_or_val)

    def __ne__(self, other_feature_or_val):
        """Compares to other_feature_or_val by non-equality
        See also:
            :meth:`PrimitiveBase.not_equal_to`
        """
        from .binary_transform import NotEquals
        return NotEquals(self, other_feature_or_val)

    def __gt__(self, other_feature_or_val):
        """Compares if greater than other_feature_or_val
        See also:
            :meth:`PrimitiveBase.GT`
        """
        from .binary_transform import GreaterThan
        return GreaterThan(self, other_feature_or_val)

    def __ge__(self, other_feature_or_val):
        """Compares if greater than or equal to other_feature_or_val
        See also:
            :meth:`PrimitiveBase.greater_than_equal_to`
        """
        from .binary_transform import GreaterThanEqualTo
        return GreaterThanEqualTo(self, other_feature_or_val)

    def __lt__(self, other_feature_or_val):
        """Compares if less than other_feature_or_val
        See also:
            :meth:`PrimitiveBase.less_than`
        """
        from .binary_transform import LessThan
        return LessThan(self, other_feature_or_val)

    def __le__(self, other_feature_or_val):
        """Compares if less than or equal to other_feature_or_val
        See also:
            :meth:`PrimitiveBase.less_than_equal_to`
        """
        from .binary_transform import LessThanEqualTo
        return LessThanEqualTo(self, other_feature_or_val)

    def __add__(self, other_feature_or_val):
        """Add other_feature_or_val"""
        from .binary_transform import Add
        return Add(self, other_feature_or_val)

    def __radd__(self, other):
        from .binary_transform import Add
        return Add(other, self)

    def __sub__(self, other_feature_or_val):
        """Subtract other_feature_or_val
        See also:
            :meth:`PrimitiveBase.subtract`
        """
        from .binary_transform import Subtract
        return Subtract(self, other_feature_or_val)

    def __rsub__(self, other):
        from .binary_transform import Subtract
        return Subtract(other, self)

    def __div__(self, other_feature_or_val):
        """Divide by other_feature_or_val
        See also:
            :meth:`PrimitiveBase.divide`
        """
        from .binary_transform import Divide
        return Divide(self, other_feature_or_val)

    def __truediv__(self, other_feature_or_val):
        return self.__div__(other_feature_or_val)

    def __rtruediv__(self, other_feature_or_val):
        from .binary_transform import Divide
        return Divide(other_feature_or_val, self)

    def __rdiv__(self, other_feature_or_val):
        from .binary_transform import Divide
        return Divide(other_feature_or_val, self)

    def __mul__(self, other_feature_or_val):
        """Multiply by other_feature_or_val
        See also:
            :meth:`PrimitiveBase.multiply`
        """
        from .binary_transform import Multiply
        return Multiply(self, other_feature_or_val)

    def __rmul__(self, other):
        from .binary_transform import Multiply
        return Multiply(other, self)

    def __mod__(self, other_feature_or_val):
        """Take modulus of other_feature_or_val
        See also:
            :meth:`PrimitiveBase.modulo`
        """
        from .binary_transform import Mod
        return Mod(self, other_feature_or_val)

    def __and__(self, other):
        return self.AND(other)

    def __rand__(self, other):
        from .binary_transform import And
        return And(other, self)

    def __or__(self, other):
        return self.OR(other)

    def __ror__(self, other):
        from .binary_transform import Or
        return Or(other, self)

    def __not__(self, other):
        return self.NOT(other)

    def __abs__(self):
        from .transform_primitive import Absolute
        return Absolute(self)

    def __neg__(self):
        from .binary_transform import Negate
        return Negate(self)

    def AND(self, other_feature):
        """Logical AND with other_feature"""
        from .binary_transform import And
        return And(self, other_feature)

    def OR(self, other_feature):
        """Logical OR with other_feature"""
        from .binary_transform import Or
        return Or(self, other_feature)

    def NOT(self):
        """Creates inverse of feature"""
        from .transform_primitive import Not
        from .binary_transform import Compare
        # Comparisons know how to invert themselves without a Not wrapper.
        if isinstance(self, Compare):
            return self.invert()
        return Not(self)

    def LIKE(self, like_string, case_sensitive=False):
        """SQL-style LIKE match against ``like_string``."""
        from .transform_primitive import Like
        return Like(self, like_string,
                    case_sensitive=case_sensitive)

    def isin(self, list_of_output):
        """Membership test against ``list_of_output``."""
        from .transform_primitive import IsIn
        return IsIn(self, list_of_outputs=list_of_output)

    def is_null(self):
        """Compares feature to null by equality"""
        from .transform_primitive import IsNull
        return IsNull(self)

    def __invert__(self):
        return self.NOT()

    def rename(self, name):
        """Rename Feature, returns copy"""
        feature_copy = self.copy()
        feature_copy._name = name
        return feature_copy

    def copy(self):
        """Return copy of feature"""
        # Deep-copying feature/entityset attributes directly would drag whole
        # entitysets through deepcopy; detach them first, deepcopy the rest,
        # then re-attach on both the copy and the original.
        original_attrs = {}
        copied_attrs = {}
        for k, v in self.__dict__.items():
            list_like = False
            to_check = v
            if isinstance(v, (list, set, tuple)) and len(v):
                to_check = list(v)[0]
                list_like = True
            if isinstance(to_check, PrimitiveBase):
                if list_like:
                    copied_attrs[k] = [f.copy() for f in v]
                    original_attrs[k] = [f.copy() for f in v]
                else:
                    copied_attrs[k] = v.copy()
                    original_attrs[k] = v.copy()
                setattr(self, k, None)
            elif isinstance(to_check, (Variable, Entity, EntitySet)):
                copied_attrs[k] = v
                original_attrs[k] = v
                setattr(self, k, None)
        copied = copy.deepcopy(self)
        for k, v in copied_attrs.items():
            setattr(copied, k, v)
        for k, v in original_attrs.items():
            setattr(self, k, v)
        return copied

    def get_name(self):
        """Return the explicit name if set, else the generated one."""
        if self._name is not None:
            return self._name
        return self.generate_name()

    def get_function(self):
        raise NotImplementedError("Implement in subclass")

    def get_dependencies(self, deep=False, ignored=None, copy=True):
        """Returns features that are used to calculate this feature
        ..note::
            If you only want the features that make up the input to the feature
            function use the base_features attribute instead.
        """
        deps = []
        for d in self.base_features[:]:
            deps += [d]
        if self.where:
            deps += [self.where]
        # if self.use_previous and self.use_previous.is_absolute():
        #     entity = self.entity
        #     time_var = IdentityFeature(entity[entity.time_index])
        #     deps += [time_var]
        if ignored is None:
            ignored = set([])
        deps = [d for d in deps if d.hash() not in ignored]
        if deep:
            for dep in deps[:]:  # copy so we don't modify list we iterate over
                deep_deps = dep.get_dependencies(deep, ignored)
                deps += deep_deps
        return deps

    def get_deep_dependencies(self, ignored=None):
        """Transitive closure of get_dependencies."""
        return self.get_dependencies(deep=True, ignored=ignored)

    def get_depth(self, stop_at=None):
        """Returns depth of feature"""
        max_depth = 0
        stop_at_hash = set()
        if stop_at is not None:
            stop_at_hash = set([i.hash() for i in stop_at])
        if (stop_at is not None and
                self.hash() in stop_at_hash):
            return 0
        for dep in self.get_deep_dependencies(ignored=stop_at_hash):
            max_depth = max(dep.get_depth(stop_at=stop_at),
                            max_depth)
        return max_depth + 1

    def _check_input_types(self):
        """True when base_features match any declared input_types signature
        (or when no signature / no base features constrain the check)."""
        if len(self.base_features) == 0:
            return True
        input_types = self.input_types
        if input_types is not None:
            # A flat list is a single signature; normalize to a list of lists.
            if type(self.input_types[0]) != list:
                input_types = [input_types]
            for t in input_types:
                zipped = list(zip(t, self.base_features))
                if all([issubclass(f.variable_type, v) for v, f in zipped]):
                    return True
        else:
            return True
        return False
class IdentityFeature(PrimitiveBase):
    """Feature for entity that is equivalent to underlying variable"""

    def __init__(self, variable):
        # TODO: perhaps we can change the attributes of this class to
        # just entityset reference to original variable object
        metadata_entity = variable.entityset.metadata[variable.entity_id]
        self.variable = metadata_entity[variable.id]
        self.return_type = type(variable)
        self.base_feature = None
        super(IdentityFeature, self).__init__(variable.entity, [])

    def generate_name(self):
        """An identity feature is simply named after its variable."""
        return self.variable.name

    def get_depth(self, stop_at=None):
        """Identity features sit at the bottom of every feature chain."""
        return 0
class Feature(PrimitiveBase):
    """
    Alias for IdentityFeature and DirectFeature depending on arguments
    """
    def __new__(self, feature_or_var, entity=None):
        """Build the right feature type for ``feature_or_var``:

        - no ``entity``: wrap the Variable as an IdentityFeature
        - same entity: identity wrap (or pass an existing feature through)
        - different entity: a DirectFeature onto ``entity``
        """
        from . import direct_feature
        if entity is None:
            assert isinstance(feature_or_var, (Variable))
            return IdentityFeature(feature_or_var)
        assert isinstance(feature_or_var, (Variable, PrimitiveBase))
        assert isinstance(entity, Entity)
        if feature_or_var.entity.id == entity.id:
            # BUGFIX: previously returned IdentityFeature(entity), but
            # IdentityFeature expects a Variable (it reads .entity_id and
            # .entityset), so any same-entity call raised AttributeError.
            if isinstance(feature_or_var, Variable):
                return IdentityFeature(feature_or_var)
            return feature_or_var
        return direct_feature.DirectFeature(feature_or_var, entity)
| 32.504202
| 86
| 0.62849
|
4a1093092121c331a2a5dd34eafd7b5f48c16fa6
| 20,584
|
py
|
Python
|
rmgpy/speciesTest.py
|
pw0908/RMG-Py
|
3846fcce701f2a5fd12dbfa429687e9fcd647298
|
[
"MIT"
] | 1
|
2022-01-24T05:08:32.000Z
|
2022-01-24T05:08:32.000Z
|
rmgpy/speciesTest.py
|
pw0908/RMG-Py
|
3846fcce701f2a5fd12dbfa429687e9fcd647298
|
[
"MIT"
] | null | null | null |
rmgpy/speciesTest.py
|
pw0908/RMG-Py
|
3846fcce701f2a5fd12dbfa429687e9fcd647298
|
[
"MIT"
] | 1
|
2021-08-17T09:11:14.000Z
|
2021-08-17T09:11:14.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# #
# RMG - Reaction Mechanism Generator #
# #
# Copyright (c) 2002-2019 Prof. William H. Green (whgreen@mit.edu), #
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu) #
# #
# Permission is hereby granted, free of charge, to any person obtaining a #
# copy of this software and associated documentation files (the 'Software'), #
# to deal in the Software without restriction, including without limitation #
# the rights to use, copy, modify, merge, publish, distribute, sublicense, #
# and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING #
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER #
# DEALINGS IN THE SOFTWARE. #
# #
###############################################################################
"""
This module contains unit tests of the rmgpy.species module.
"""
import unittest
from rmgpy.species import Species
from rmgpy.transport import TransportData
from rmgpy.molecule import Molecule
from rmgpy.thermo import ThermoData
from rmgpy.statmech import Conformer, IdealGasTranslation, NonlinearRotor, HarmonicOscillator
################################################################################
class TestSpecies(unittest.TestCase):
"""
Contains unit tests for the Species class.
"""
def setUp(self):
"""
A method that is run before each unit test in this class.
"""
self.species = Species(
index=1,
label='C2H4',
thermo=ThermoData(
Tdata=([300.0,400.0,500.0,600.0,800.0,1000.0,1500.0],'K'),
Cpdata=([3.0,4.0,5.0,6.0,8.0,10.0,15.0],'cal/(mol*K)'),
H298=(-20.0,'kcal/mol'),
S298=(50.0,'cal/(mol*K)'),
Tmin=(300.0,'K'),
Tmax=(2000.0,'K'),
),
conformer=Conformer(
E0=(0.0,'kJ/mol'),
modes=[
IdealGasTranslation(mass=(28.03,'amu')),
NonlinearRotor(inertia=([5.6952e-47, 2.7758e-46, 3.3454e-46],'kg*m^2'), symmetry=1),
HarmonicOscillator(frequencies=([834.50, 973.31, 975.37, 1067.1, 1238.5, 1379.5, 1472.3, 1691.3, 3121.6, 3136.7, 3192.5, 3221.0],'cm^-1')),
],
spinMultiplicity=1,
opticalIsomers=1,
),
molecule=[Molecule().fromSMILES('C=C')],
transportData=TransportData(sigma=(1, 'angstrom'), epsilon=(100, 'K')),
molecularWeight=(28.03,'amu'),
reactive=True,
)
self.species2 = Species().fromAdjacencyList(
"""
1 C u0 p0 c0 {2,D} {6,S} {7,S}
2 C u0 p0 c0 {1,D} {3,S} {8,S}
3 C u0 p0 c0 {2,S} {4,D} {9,S}
4 C u0 p0 c0 {3,D} {5,S} {10,S}
5 C u0 p0 c0 {4,S} {6,D} {11,S}
6 C u0 p0 c0 {1,S} {5,D} {12,S}
7 H u0 p0 c0 {1,S}
8 H u0 p0 c0 {2,S}
9 H u0 p0 c0 {3,S}
10 H u0 p0 c0 {4,S}
11 H u0 p0 c0 {5,S}
12 H u0 p0 c0 {6,S}
""")
def testPickle(self):
"""
Test that a Species object can be pickled and unpickled.
...with no loss of information.
"""
import cPickle
species = cPickle.loads(cPickle.dumps(self.species,-1))
self.assertEqual(self.species.index, species.index)
self.assertEqual(self.species.label, species.label)
self.assertEqual(self.species.molecule[0].multiplicity, species.molecule[0].multiplicity)
self.assertEqual(self.species.getThermoData().H298.value_si, species.getThermoData().H298.value_si)
self.assertEqual(self.species.getThermoData().H298.units, species.getThermoData().H298.units)
self.assertEqual(len(self.species.conformer.modes), len(species.conformer.modes))
self.assertEqual(len(self.species.molecule), len(species.molecule))
self.assertTrue(self.species.molecule[0].isIsomorphic(species.molecule[0]))
self.assertEqual(self.species.conformer.E0.value_si, species.conformer.E0.value_si)
self.assertEqual(self.species.conformer.E0.units, species.conformer.E0.units)
self.assertEqual(self.species.transportData.sigma.value_si, species.transportData.sigma.value_si)
self.assertEqual(self.species.transportData.sigma.units, species.transportData.sigma.units)
self.assertAlmostEqual(self.species.transportData.epsilon.value_si / 1.381e-23, species.transportData.epsilon.value_si / 1.381e-23, 4)
self.assertEqual(self.species.transportData.epsilon.units, species.transportData.epsilon.units)
self.assertEqual(self.species.molecularWeight.value_si, species.molecularWeight.value_si)
self.assertEqual(self.species.molecularWeight.units, species.molecularWeight.units)
self.assertEqual(self.species.reactive, species.reactive)
def testOutput(self):
"""
Test that a Species object can be reconstructed from its repr().
...with no loss of information.
"""
species = None
exec('species = {0!r}'.format(self.species))
self.assertEqual(self.species.index, species.index)
self.assertEqual(self.species.label, species.label)
self.assertEqual(self.species.molecule[0].multiplicity, species.molecule[0].multiplicity)
self.assertEqual(self.species.getThermoData().H298.value_si, species.getThermoData().H298.value_si)
self.assertEqual(self.species.getThermoData().H298.units, species.getThermoData().H298.units)
self.assertEqual(len(self.species.conformer.modes), len(species.conformer.modes))
self.assertEqual(len(self.species.molecule), len(species.molecule))
self.assertTrue(self.species.molecule[0].isIsomorphic(species.molecule[0]))
self.assertEqual(self.species.conformer.E0.value_si, species.conformer.E0.value_si)
self.assertEqual(self.species.conformer.E0.units, species.conformer.E0.units)
self.assertEqual(self.species.transportData.sigma.value_si, species.transportData.sigma.value_si)
self.assertEqual(self.species.transportData.sigma.units, species.transportData.sigma.units)
self.assertAlmostEqual(self.species.transportData.epsilon.value_si, species.transportData.epsilon.value_si, 3)
self.assertEqual(self.species.transportData.epsilon.units, species.transportData.epsilon.units)
self.assertEqual(self.species.molecularWeight.value_si, species.molecularWeight.value_si)
self.assertEqual(self.species.molecularWeight.units, species.molecularWeight.units)
self.assertEqual(self.species.reactive, species.reactive)
def testToAdjacencyList(self):
"""
Test that toAdjacencyList() works as expected.
"""
string = self.species.toAdjacencyList()
self.assertTrue(string.startswith(self.species.molecule[0].toAdjacencyList(label=self.species.label,removeH=False)),string)
def testSpeciesProps(self):
"""
Test a key-value pair is added to the props attribute of Species.
"""
self.species.props['foo'] = 'bar'
self.assertIsInstance(self.species.props, dict)
self.assertEquals(self.species.props['foo'], 'bar')
def testSpeciesProps_object_attribute(self):
"""
Test that Species's props dictionaries are independent of each other.
Create a test in which is checked whether props is an object attribute rather
than a class attribute
"""
spc2 = Species()
self.species.props['foo'] = 'bar'
spc3 = Species()
spc3.props['foo'] = 'bla'
self.assertEquals(self.species.props['foo'], 'bar')
self.assertDictEqual(spc2.props, {})
self.assertDictEqual(spc3.props, {'foo': 'bla'})
def testResonanceIsomersGenerated(self):
"Test that 1-penten-3-yl makes 2-penten-1-yl resonance isomer"
spec = Species().fromSMILES('C=C[CH]CC')
spec.generate_resonance_structures()
self.assertEquals(len(spec.molecule), 2)
self.assertEquals(spec.molecule[1].toSMILES(), "[CH2]C=CCC")
def testResonaceIsomersRepresented(self):
"Test that both resonance forms of 1-penten-3-yl are printed by __repr__"
spec = Species().fromSMILES('C=C[CH]CC')
spec.generate_resonance_structures()
exec('spec2 = {0!r}'.format(spec))
self.assertEqual(len(spec.molecule), len(spec2.molecule))
for i, j in zip(spec.molecule, spec2.molecule):
self.assertTrue(j.isIsomorphic(i), msg='i is not isomorphic with j, where i is {} and j is {}'.format(i.toSMILES(), j.toSMILES()))
def test_is_isomorphic_to_filtered_resonance_structure(self):
"""
Test that a Species containing a non-representative resonance structure is isomorphic
with the "correct" Species containing only representative structures (which were not filtered out)
When generating resonance isomers for N/O/S atoms, a large number of resonance structures per species could
potentially be generated, yet most are filtered out and only the "correct" / "representative" structures
are kept. This test makes sure that if a non-representative structure (i.e., a structure that was filtered out)
is generated, RMG finds the Species it belongs to, if the last exists.
"""
spc1_correct = Species().fromSMILES('[O]N=O') # check charge separation with higher octet deviation
spc1_nonrepresentative = Species().fromAdjacencyList("""multiplicity 2
1 N u1 p1 c0 {2,S} {3,S}
2 O u0 p3 c-1 {1,S}
3 O u0 p2 c+1 {1,S}""")
spc2_correct = Species().fromSMILES('[N]=NON=O') # check atoms with val 6
spc2_nonrepresentative = Species().fromAdjacencyList("""multiplicity 2
1 O u0 p2 c0 {2,S} {3,S}
2 N u1 p1 c0 {1,S} {4,S}
3 N u0 p2 c-1 {1,S} {5,S}
4 N u0 p2 c0 {2,S}
5 O u0 p2 c+1 {3,S}""")
spc3_correct = Species().fromSMILES('[O]S(O)=O') # check O4tc penalty
spc3_nonrepresentative = Species().fromAdjacencyList("""multiplicity 2
1 S u0 p1 c-1 {2,S} {3,S} {4,T}
2 O u0 p2 c0 {1,S} {5,S}
3 O u1 p2 c0 {1,S}
4 O u0 p1 c+1 {1,T}
5 H u0 p0 c0 {2,S}""")
spc4_correct = Species().fromSMILES('OS(=[N+]=[N-])O') # check O4dc penalty
spc4_nonrepresentative = Species().fromAdjacencyList("""1 S u0 p0 c+1 {2,D} {3,D} {4,S}
2 N u0 p1 c0 {1,D} {5,S}
3 O u0 p1 c+1 {1,D} {6,S}
4 O u0 p2 c0 {1,S} {7,S}
5 N u0 p3 c-2 {2,S}
6 H u0 p0 c0 {3,S}
7 H u0 p0 c0 {4,S}""")
spc5_correct = Species().fromSMILES('[O][S]') # checks birad penalty
spc5_nonrepresentative = Species().fromAdjacencyList("""multiplicity 3
1 O u0 p2 c0 {2,D}
2 S u2 p1 c0 {1,D}""")
spc6_correct = Species().fromSMILES('[N-]=[N+]=S=S=O') # checks the S#S case
spc6_nonrepresentative = Species().fromAdjacencyList("""1 S u0 p1 c0 {2,S} {3,T}
2 N u0 p0 c+1 {1,S} {4,T}
3 S u0 p1 c0 {1,T} {5,S}
4 N u0 p1 c0 {2,T}
5 O u0 p3 c-1 {3,S}""")
# check that the structures are not isomorphic if resonance structures are not generated:
self.assertFalse(spc1_correct.isIsomorphic(spc1_nonrepresentative, strict=True))
# check that the nonrepresentative structure is isomorphic by generating resonance structures:
self.assertTrue(spc1_correct.isIsomorphic(spc1_nonrepresentative, strict=False))
self.assertTrue(spc2_correct.isIsomorphic(spc2_nonrepresentative, strict=False))
self.assertTrue(spc3_correct.isIsomorphic(spc3_nonrepresentative, strict=False))
self.assertTrue(spc4_correct.isIsomorphic(spc4_nonrepresentative, strict=False))
self.assertTrue(spc5_correct.isIsomorphic(spc5_nonrepresentative, strict=False))
self.assertTrue(spc6_correct.isIsomorphic(spc6_nonrepresentative, strict=False))
def testGetResonanceHybrid(self):
    """
    Check that getResonanceHybrid() yields an isomorphic structure whose
    bonds carry intermediate (hybrid) orders.

    C=C[CH]CC has a second resonance structure, [CH2]C=CC; merging the two
    should give C~C~CC, where '~' denotes a hybrid bond of order 1.5.
    """
    species = Species().fromSMILES('C=C[CH]CC')
    hybrid = species.getResonanceHybrid()
    # The hybrid must still describe the same connectivity as the parent.
    self.assertTrue(
        hybrid.toSingleBonds().isIsomorphic(species.molecule[0].toSingleBonds()))
    # Rough check on the bond orders present in the hybrid.
    expected_orders = [1, 1.5]
    seen_bonds = []
    # Every bond in the hybrid must have one of the expected orders.
    for first_atom in hybrid.atoms:
        for second_atom in first_atom.bonds:
            current = hybrid.getBond(first_atom, second_atom)
            self.assertTrue(
                any(current.isOrder(order) for order in expected_orders),
                'Unexpected bond order {}'.format(current.getOrderNum()))
            seen_bonds.append(current)
    # Conversely, each expected order must occur somewhere in the hybrid.
    for order in expected_orders:
        self.assertTrue(
            any(b.isOrder(order) for b in seen_bonds),
            'No bond of order {} found'.format(order))
def testCopy(self):
    """Verify that Species.copy() yields an equivalent but distinct object."""
    duplicate = self.species.copy()
    # It must be a genuine copy, not the same object.
    self.assertTrue(id(self.species) != id(duplicate))
    self.assertTrue(self.species.isIsomorphic(duplicate))
    # All simple attributes must carry over unchanged.
    self.assertEqual(self.species.label, duplicate.label)
    self.assertEqual(self.species.index, duplicate.index)
    self.assertTrue(self.species.molecularWeight.equals(duplicate.molecularWeight))
    self.assertEqual(self.species.reactive, duplicate.reactive)
def testCantera(self):
    """
    Test that a Cantera Species object is created correctly.

    Builds an Ar species with two-range NASA thermo and GRI-Mech transport
    data, converts it with toCantera(), and compares it attribute-by-attribute
    against the same species constructed natively from a Cantera CTI string.
    """
    from rmgpy.thermo import NASA, NASAPolynomial
    import cantera as ct
    # RMG-side definition: identical NASA polynomials over 200-1000 K and
    # 1000-6000 K, plus transport parameters labelled as from GRI-Mech.
    rmgSpecies = Species(label="Ar", thermo=NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), comment="""
Thermo library: primaryThermoLibrary
"""), molecule=[Molecule(SMILES="[Ar]")], transportData=TransportData(shapeIndex=0, epsilon=(1134.93,'J/mol'), sigma=(3.33,'angstrom'), dipoleMoment=(2,'De'), polarizability=(1,'angstrom^3'), rotrelaxcollnum=15.0, comment="""GRI-Mech"""))
    rmg_ctSpecies = rmgSpecies.toCantera(useChemkinIdentifier = True)
    # Reference species built directly by Cantera from a CTI definition.
    ctSpecies = ct.Species.fromCti("""species(name=u'Ar',
                    atoms='Ar:1',
                    thermo=(NASA([200.00, 1000.00],
                                 [ 2.50000000E+00, 0.00000000E+00, 0.00000000E+00,
                                   0.00000000E+00, 0.00000000E+00, -7.45375000E+02,
                                   4.37967000E+00]),
                            NASA([1000.00, 6000.00],
                                 [ 2.50000000E+00, 0.00000000E+00, 0.00000000E+00,
                                   0.00000000E+00, 0.00000000E+00, -7.45375000E+02,
                                   4.37967000E+00])),
                    transport=gas_transport(geom='atom',
                                            diam=3.33,
                                            well_depth=136.501,
                                            dipole=2.0,
                                            polar=1.0,
                                            rot_relax=15.0))""")
    # The converted species must match the native one in type and contents.
    self.assertEqual(type(rmg_ctSpecies),type(ctSpecies))
    self.assertEqual(rmg_ctSpecies.name, ctSpecies.name)
    self.assertEqual(rmg_ctSpecies.composition, ctSpecies.composition)
    self.assertEqual(rmg_ctSpecies.size, ctSpecies.size)
    self.assertEqual(type(rmg_ctSpecies.thermo), type(ctSpecies.thermo))
    self.assertEqual(type(rmg_ctSpecies.transport), type(ctSpecies.transport))
def testGetTransportData(self):
    """
    Test that transport data can be retrieved correctly via the getTransportData method.
    """
    # Species carrying GRI-Mech transport parameters for argon.
    argon = Species(label="Ar", molecule=[Molecule(SMILES="[Ar]")],
                    transportData=TransportData(shapeIndex=0, epsilon=(1134.93,'J/mol'),
                                                sigma=(3.33,'angstrom'), dipoleMoment=(2,'De'),
                                                polarizability=(1,'angstrom^3'),
                                                rotrelaxcollnum=15.0, comment="""GRI-Mech"""))
    # getTransportData() must hand back the very same object, not a copy.
    self.assertTrue(argon.getTransportData() is argon.transportData)
def test_fingerprint_property(self):
    """Test that the fingerprint property returns the expected value."""
    observed = self.species2.fingerprint
    self.assertEqual(observed, 'C6H6')
def test_inchi_property(self):
    """Test that the InChI property returns the expected identifier."""
    observed = self.species2.InChI
    self.assertEqual(observed, 'InChI=1S/C6H6/c1-2-4-6-5-3-1/h1-6H')
def test_multiplicity_property(self):
    """Test that the multiplicity property works"""
    self.assertEqual(self.species2.multiplicity, 1)
def test_smiles_property(self):
    """Test that the SMILES property works"""
    self.assertEqual(self.species2.SMILES, 'C1=CC=CC=C1')
def test_inchi_instantiation(self):
    """Test that a Species can be constructed from the InChI argument."""
    candidate = Species(InChI='InChI=1S/C6H6/c1-2-4-6-5-3-1/h1-6H')
    self.assertTrue(candidate.isIsomorphic(self.species2))
def test_smiles_instantiation(self):
    """Test that a Species can be constructed from the SMILES argument."""
    candidate = Species(SMILES='C1=CC=CC=C1')
    self.assertTrue(candidate.isIsomorphic(self.species2))
################################################################################
# Run this module's tests with verbose output when executed as a script.
if __name__ == '__main__':
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| 55.037433
| 288
| 0.56748
|
4a1093d3fabd9731581d487bcbee63acf0dbcea3
| 117
|
py
|
Python
|
saleor/search/backends/postgresql.py
|
angeles-ricardo-89/saleor
|
5fab7a883d025bff83320fbdd557ed7afa2923a9
|
[
"BSD-3-Clause"
] | 4
|
2021-03-27T16:38:48.000Z
|
2021-10-18T12:54:15.000Z
|
saleor/search/backends/postgresql.py
|
DuongHieuMAI/saleor
|
e20b6283182f3a2886fe36fcdef8e47e4fcf7a14
|
[
"CC-BY-4.0"
] | 51
|
2019-12-06T08:06:07.000Z
|
2021-05-06T02:10:50.000Z
|
saleor/search/backends/postgresql.py
|
DuongHieuMAI/saleor
|
e20b6283182f3a2886fe36fcdef8e47e4fcf7a14
|
[
"CC-BY-4.0"
] | 12
|
2019-03-21T03:24:58.000Z
|
2022-01-13T10:55:34.000Z
|
from . import postgresql_storefront
def search_storefront(phrase):
    """Run a storefront search for ``phrase`` via the PostgreSQL backend.

    Thin delegation wrapper; the result type is whatever
    ``postgresql_storefront.search`` returns — confirm against that module.
    """
    return postgresql_storefront.search(phrase)
| 19.5
| 47
| 0.820513
|
4a1093f24a67169d946e958ff7e9ea14bd5a5cb8
| 1,528
|
py
|
Python
|
cleaner.py
|
gokaykucuk/password_cleaner
|
5dd651f920d61f3ce4f7d85a75df053b40f93a5d
|
[
"MIT"
] | null | null | null |
cleaner.py
|
gokaykucuk/password_cleaner
|
5dd651f920d61f3ce4f7d85a75df053b40f93a5d
|
[
"MIT"
] | 8
|
2021-02-02T22:44:24.000Z
|
2022-03-12T00:22:09.000Z
|
cleaner.py
|
gokaykucuk/valut_cleaner
|
5dd651f920d61f3ce4f7d85a75df053b40f93a5d
|
[
"MIT"
] | null | null | null |
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %% [markdown]
# ### Setup
#
# %%
import re
import pandas as pd
import numpy as np
from tldextract import extract
# Input CSV exported from the password manager; load it into a DataFrame.
dirty_passwords_file = "in.csv"
passwords_df = pd.read_csv(dirty_passwords_file)
# %% [markdown]
# ### Clean Uri's
#
# %%
# NOTE(review): this regex extracts the host part of a URL but appears to be
# unused — clean_login_uri() below uses tldextract instead. Candidate for removal.
domain_extractor = re.compile(r'^(?:https?:)?(?:\/\/)?(?:[^@\n]+@)?(?:www\.)?([^:\/\n]+)')
# print(passwords_df.login_uri)
def clean_login_uri(login_uri):
    """Reduce a login URI to its registered domain plus suffix.

    E.g. ``https://accounts.example.co.uk/login`` -> ``example.co.uk``
    (subdomain is discarded by tldextract's three-way split).

    Non-string values (e.g. NaN cells from the CSV) yield None explicitly,
    so the later ``dropna(subset=['login_uri'])`` removes those rows.
    """
    if not isinstance(login_uri, str):
        # Was an implicit None fall-through; made explicit for clarity.
        return None
    _, domain, suffix = extract(login_uri)
    return domain + '.' + suffix
# Normalize every URI down to its registered domain before deduplicating.
passwords_df['login_uri'] = passwords_df['login_uri'].apply(clean_login_uri)
# %% [markdown]
# ### Find Duplicates
# %%
print("Before removing duplicates:")
print(passwords_df.count())
# Entries are considered duplicates when both domain and username match;
# the most recent occurrence (last row) wins.
passwords_df.drop_duplicates(subset=['login_uri','login_username'], inplace=True, keep='last')
print("After removing duplicates:")
print(passwords_df.count())
# %% [markdown]
# ### Remove Specific Keywords
# %%
# Substrings of usernames that should be filtered out of the export.
KEYWORDS = []
def isInKeywords(login_username):
    """Return True if any configured keyword occurs in the username."""
    return any(keyword in str(login_username) for keyword in KEYWORDS)
# Boolean Series marking rows whose username contains a blocked keyword.
username_keywords_filter = passwords_df.login_username.apply(isInKeywords)
print(passwords_df.count())
# mask() aligns the Series on the index and blanks (NaN) every column of the
# matching rows; the dropna below then removes them entirely.
passwords_df.mask(username_keywords_filter,inplace=True)
print(passwords_df.count())
# %% [markdown]
# ### Remove anything which is not login
# %% [markdown]
# ### Save the results
# %%
# Drop rows with no usable URI (includes the rows blanked by mask above).
passwords_df.dropna(subset=['login_uri'], inplace=True)
passwords_df.to_csv('out.csv')
# %% [markdown]
### END
| 20.931507
| 94
| 0.683901
|
4a1094d47e8a4e7b94679f78b5de6f4e6a5b2b18
| 518
|
py
|
Python
|
shopit/admin/customer.py
|
dinoperovic/djangoshop-shopit
|
b42a2bf0ec319817eb37ef939608b04498fc4ff2
|
[
"BSD-3-Clause"
] | 14
|
2016-11-25T16:06:20.000Z
|
2018-08-30T19:20:41.000Z
|
shopit/admin/customer.py
|
dinoperovic/django-shop
|
b42a2bf0ec319817eb37ef939608b04498fc4ff2
|
[
"BSD-3-Clause"
] | 3
|
2018-11-30T10:47:39.000Z
|
2019-10-21T10:21:24.000Z
|
shopit/admin/customer.py
|
dinoperovic/django-shop
|
b42a2bf0ec319817eb37ef939608b04498fc4ff2
|
[
"BSD-3-Clause"
] | 6
|
2019-04-07T23:52:54.000Z
|
2020-09-20T05:30:07.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from shop.admin.customer import CustomerAdminBase, CustomerInlineAdminBase, CustomerProxy
class CustomerInlineAdmin(CustomerInlineAdminBase):
    """Customer inline exposing number, salutation and phone as read-only."""
    readonly_fields = ['get_number', 'salutation', 'phone_number']
    fieldsets = [(None, {'fields': ['get_number', 'salutation', 'phone_number']})]
@admin.register(CustomerProxy)
class CustomerAdmin(CustomerAdminBase):
    """Admin for the customer proxy model, embedding the read-only inline."""
    inlines = [CustomerInlineAdmin]
| 32.375
| 89
| 0.774131
|
4a10974cefa65abd9b25e1f03e80f0460d4b0096
| 46,586
|
py
|
Python
|
src/util/k5test.py
|
Bhanuprakash-ch/kerberos
|
bb3c878d5034210c656a97562065612611c5a6d2
|
[
"Apache-2.0"
] | 2
|
2018-01-09T18:23:08.000Z
|
2018-07-24T23:14:15.000Z
|
src/util/k5test.py
|
Bhanuprakash-ch/kerberos
|
bb3c878d5034210c656a97562065612611c5a6d2
|
[
"Apache-2.0"
] | null | null | null |
src/util/k5test.py
|
Bhanuprakash-ch/kerberos
|
bb3c878d5034210c656a97562065612611c5a6d2
|
[
"Apache-2.0"
] | 3
|
2017-03-21T18:34:02.000Z
|
2020-01-22T19:11:53.000Z
|
# Copyright (C) 2010 by the Massachusetts Institute of Technology.
# All rights reserved.
# Export of this software from the United States of America may
# require a specific license from the United States Government.
# It is the responsibility of any person or organization contemplating
# export to obtain such a license before exporting.
#
# WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
# distribute this software and its documentation for any purpose and
# without fee is hereby granted, provided that the above copyright
# notice appear in all copies and that both that copyright notice and
# this permission notice appear in supporting documentation, and that
# the name of M.I.T. not be used in advertising or publicity pertaining
# to distribution of the software without specific, written prior
# permission. Furthermore if you modify this software you must label
# your software as modified software and not distribute it in such a
# fashion that it might be confused with the original M.I.T. software.
# M.I.T. makes no representations about the suitability of
# this software for any purpose. It is provided "as is" without express
# or implied warranty.
"""A module for krb5 test scripts
To run test scripts during "make check" (if Python 2.4 or later is
available), add rules like the following to Makefile.in:
check-pytests::
$(RUNPYTEST) $(srcdir)/t_testname.py $(PYTESTFLAGS)
A sample test script:
from k5test import *
# Run a test program under a variety of configurations:
for realm in multipass_realms():
realm.run(['./testprog', 'arg'])
# Run a test server and client under just the default configuration:
realm = K5Realm()
realm.start_server(['./serverprog'], 'starting...')
realm.run(['./clientprog', realm.host_princ])
# Inform framework that tests completed successfully.
success('World peace and cure for cancer')
By default, the realm will have:
* The name KRBTEST.COM
* Listener ports starting at 61000
* krb5.conf and kdc.conf files
* A fresh DB2 KDB
* Running krb5kdc (but not kadmind)
* Principals named realm.user_princ and realm.admin_princ; call
password('user') and password('admin') to get the password
* Credentials for realm.user_princ in realm.ccache
* Admin rights for realm.admin_princ in the kadmind acl file
* A host principal named realm.host_princ with a random key
* A keytab for the host principal in realm.keytab
The realm's behaviour can be modified with the following constructor
keyword arguments:
* realm='realmname': Override the realm name
* portbase=NNN: Override the listener port base; currently three ports are
used
* testdir='dirname': Override the storage area for the realm's files
(path may be specified relative to the current working dir)
* krb5_conf={ ... }: krb5.conf options, expressed as a nested
dictionary, to be merged with the default krb5.conf settings. A key
may be mapped to None to delete a setting from the defaults. A key
may be mapped to a list in order to create multiple settings for the
same variable name. Keys and values undergo the following template
substitutions:
- $realm: The realm name
- $testdir: The realm storage directory (absolute path)
- $buildtop: The root of the build directory
- $srctop: The root of the source directory
- $plugins: The plugin directory in the build tree
- $hostname: The FQDN of the host
- $port0: The first listener port (portbase)
- ...
- $port9: The tenth listener port (portbase + 9)
When choosing ports, note the following:
- port0 is used in the default krb5.conf for the KDC
- port1 is used in the default krb5.conf for kadmind
- port2 is used in the default krb5.conf for kpasswd
- port3 is used in the default krb5.conf for kpropd
- port4 is used in the default krb5.conf for iprop (in kadmind)
- port5 is the return value of realm.server_port()
* kdc_conf={...}: kdc.conf options, expressed as a nested dictionary,
to be merged with the default kdc.conf settings. The same
conventions and substitutions for krb5_conf apply.
* create_kdb=False: Don't create a KDB. Implicitly disables all of
the other options since they all require a KDB.
* krbtgt_keysalt='enctype:salttype': After creating the KDB,
regenerate the krbtgt key using the specified key/salt combination,
using a kadmin.local cpw query.
* create_user=False: Don't create the user principal. Implies
get_creds=False.
* create_host=False: Don't create the host principal or the associated
keytab.
* start_kdc=False: Don't start the KDC. Implies get_creds=False.
* start_kadmind=True: Start kadmind.
* get_creds=False: Don't get user credentials.
Scripts may use the following functions and variables:
* fail(message): Display message (plus leading marker and trailing
newline) and explanatory messages about debugging.
* success(message): Indicate that the test script has completed
successfully. Suppresses the display of explanatory debugging
messages in the on-exit handler. message should briefly summarize
the operations tested; it will only be displayed (with leading
marker and trailing newline) if the script is running verbosely.
* output(message, force_verbose=False): Place message (without any
added newline) in testlog, and write it to stdout if running
verbosely.
* which(progname): Return the location of progname in the executable
path, or None if it is not found.
* password(name): Return a weakly random password based on name. The
password will be consistent across calls with the same name.
* stop_daemon(proc): Stop a daemon process started with
realm.start_server() or realm.start_in_inetd(). Only necessary if
the port needs to be reused; daemon processes will be stopped
automatically when the script exits.
* multipass_realms(**keywords): This is an iterator function. Yields
a realm for each of the standard test passes, each of which alters
the default configuration in some way to exercise different parts of
the krb5 code base. keywords may contain any K5Realm initializer
keyword with the exception of krbtgt_keysalt, which will not be
honored. If keywords contains krb5_conf and/or kdc_conf fragments,
they will be merged with the default and per-pass specifications.
* cross_realms(num, xtgts=None, args=None, **keywords): This function
returns a list of num realms, where each realm's configuration knows
how to contact all of the realms. By default, each realm will
contain cross TGTs in both directions for all other realms; this
default may be overridden by specifying a collection of tuples in
the xtgts parameter, where each tuple is a pair of zero-based realm
indexes, indicating that the first realm can authenticate to the
second (i.e. krbtgt/secondrealm@firstrealm exists in both realm's
databases). If args is given, it should be a list of keyword
arguments specific to each realm; these will be merged with the
global keyword arguments passed to cross_realms, with specific
arguments taking priority.
* buildtop: The top of the build directory (absolute path).
* srctop: The top of the source directory (absolute path).
* plugins: The plugin directory in the build tree (absolute path).
* hostname: This machine's fully-qualified domain name.
* null_input: A file opened to read /dev/null.
* args: Positional arguments left over after flags are processed.
* runenv: The contents of $srctop/runenv.py, containing a dictionary
'env' which specifies additional variables to be added to the realm
environment, and a variable 'tls_impl', which indicates which TLS
implementation (if any) is being used by libkrb5's support for
contacting KDCs and kpasswd servers over HTTPS.
* verbose: Whether the script is running verbosely.
* testpass: The command-line test pass argument. The script does not
need to examine this argument in most cases; it will be honored in
multipass_realms().
* Pathname variables for programs within the build directory:
- krb5kdc
- kadmind
- kadmin
- kadmin_local
- kdb5_ldap_util
- kdb5_util
- ktutil
- kinit
- klist
- kswitch
- kvno
- kdestroy
- kpasswd
- t_inetd
- kproplog
- kpropd
- kprop
Scripts may use the following realm methods and attributes:
* realm.run(args, env=None, **keywords): Run a command in a specified
environment (or the realm's environment by default), obeying the
command-line debugging options. Fail if the command does not return
0. Log the command output appropriately, and return it as a single
multi-line string. Keyword arguments can contain input='string' to
send an input string to the command, and expected_code=N to expect a
return code other than 0.
* realm.kprop_port(): Returns a port number based on realm.portbase
intended for use by kprop and kpropd.
* realm.server_port(): Returns a port number based on realm.portbase
intended for use by server processes.
* realm.start_server(args, sentinel, env=None): Start a daemon
process. Wait until sentinel appears as a substring of a line in
the server process's stdout or stderr (which are folded together).
Returns a subprocess.Popen object which can be passed to
stop_daemon() to stop the server, or used to read from the server's
output.
* realm.start_in_inetd(args, port=None, env=None): Begin a t_inetd
process which will spawn a server process after accepting a client
connection. If port is not specified, realm.server_port() will be
used. Returns a process object which can be passed to stop_daemon()
to stop the server.
* realm.create_kdb(): Create a new KDB.
* realm.start_kdc(args=[], env=None): Start a krb5kdc process. Errors
if a KDC is already running. If args is given, it contains a list
of additional krb5kdc arguments.
* realm.stop_kdc(): Stop the krb5kdc process. Errors if no KDC is
running.
* realm.start_kadmind(env=None): Start a kadmind process. Errors if a
kadmind is already running.
* realm.stop_kadmind(): Stop the kadmind process. Errors if no
kadmind is running.
* realm.stop(): Stop any daemon processes running on behalf of the
realm.
* realm.addprinc(princname, password=None): Using kadmin.local, create
a principal in the KDB named princname, with either a random or
specified key.
* realm.extract_keytab(princname, keytab): Using kadmin.local, create
a keytab for princname in the filename keytab. Uses the -norandkey
option to avoid re-randomizing princname's key.
* realm.kinit(princname, password=None, flags=[]): Acquire credentials
for princname using kinit, with additional flags []. If password is
specified, it will be used as input to the kinit process; otherwise
flags must cause kinit not to need a password (e.g. by specifying a
keytab).
* realm.klist(client_princ, service_princ=None, ccache=None): Using
klist, list the credentials cache ccache (must be a filename;
self.ccache if not specified) and verify that the output shows
credentials for client_princ and service_princ (self.krbtgt_princ if
not specified).
* realm.klist_keytab(princ, keytab=None): Using klist, list keytab
(must be a filename; self.keytab if not specified) and verify that
the output shows the keytab name and principal name.
* realm.run_kadminl(query): Run the specified query in kadmin.local.
* realm.prep_kadmin(princname=None, password=None, flags=[]): Populate
realm.kadmin_ccache with a ticket which can be used to run kadmin.
If princname is not specified, realm.admin_princ and its default
password will be used.
* realm.run_kadmin(query, **keywords): Run the specified query in
kadmin, using realm.kadmin_ccache to authenticate. Accepts the same
keyword arguments as run.
* realm.special_env(name, has_kdc_conf, krb5_conf=None,
kdc_conf=None): Create an environment with a modified krb5.conf
and/or kdc.conf. The specified krb5_conf and kdc_conf fragments, if
any, will be merged with the realm's existing configuration. If
has_kdc_conf is false, the new environment will have no kdc.conf.
The environment returned by this method can be used with realm.run()
or similar methods.
* realm.start_kpropd(env, args=[]): Start a kpropd process. Pass an
environment created with realm.special_env() for the slave. If args
is given, it contains a list of additional kpropd arguments.
Returns a handle to the kpropd process.
* realm.realm: The realm's name.
* realm.testdir: The realm's storage directory (absolute path).
* realm.portbase: The realm's first listener port.
* realm.user_princ: The principal name user@<realmname>.
* realm.admin_princ: The principal name user/admin@<realmname>.
* realm.host_princ: The name of the host principal for this machine,
with realm.
* realm.nfs_princ: The name of the nfs principal for this machine,
with realm.
* realm.krbtgt_princ: The name of the krbtgt principal for the realm.
* realm.keytab: A keytab file in realm.testdir. Initially contains a
host keytab unless disabled by the realm construction options.
* realm.client_keytab: A keytab file in realm.testdir. Initially
nonexistent.
* realm.ccache: A ccache file in realm.testdir. Initially contains
credentials for user unless disabled by the realm construction
options.
* realm.kadmin_ccache: The ccache file initialized by prep_kadmin and
used by run_kadmin.
* env: The realm's environment, extended from os.environ to point at
the realm's config files and the build tree's shared libraries.
When the test script is run, its behavior can be modified with
command-line flags. These are documented in the --help output.
"""
import atexit
import optparse
import os
import shlex
import shutil
import signal
import socket
import string
import subprocess
import sys
import imp
# Used when most things go wrong (other than programming errors) so
# that the user sees an error message rather than a Python traceback,
# without help from the test script. The on-exit handler will display
# additional explanatory text.
def fail(msg):
    """Print a message and exit with failure."""
    global _current_pass
    print "*** Failure:", msg
    # If running inside multipass_realms(), name the failing pass so the
    # developer can re-run just that pass with -p.
    if _current_pass:
        print "*** Failed in test pass:", _current_pass
    sys.exit(1)
def success(msg):
    """Mark the script as passed; suppresses the failure hints at exit."""
    global _success
    output('*** Success: %s\n' % msg)
    _success = True
def output(msg, force_verbose=False):
    """Output a message to testlog, and to stdout if running verbosely."""
    # _outfile is a module-level handle — presumably the testlog opened during
    # module initialization (outside this view); confirm before relying on it.
    _outfile.write(msg)
    if verbose or force_verbose:
        sys.stdout.write(msg)
# Return the location of progname in the executable path, or None if
# it is not found.
def which(progname):
    """Return the full path of progname on the executable PATH, or None."""
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(directory, progname)
        if os.access(candidate, os.X_OK):
            return candidate
    return None
def password(name):
    """Choose a weakly random password from name, consistent across calls."""
    return '%s%d' % (name, os.getpid())
# Exit handler which ensures processes are cleaned up and, on failure,
# prints messages to help developers debug the problem.
def _onexit():
    """atexit handler: kill leftover daemons and print debugging hints on failure."""
    global _daemons, _success, verbose
    global _debug, _stop_before, _stop_after, _shell_before, _shell_after
    if _daemons is None:
        # In Python 2.5, if we exit as a side-effect of importing
        # k5test, _onexit will execute in an empty global namespace.
        # This can happen if argument processing fails or the build
        # root isn't valid. In this case we can safely assume that no
        # daemons have been launched and that we don't really need to
        # amend the error message. The bug is fixed in Python 2.6.
        return
    if _debug or _stop_before or _stop_after or _shell_before or _shell_after:
        # Wait before killing daemons in case one is being debugged.
        sys.stdout.write('*** Press return to kill daemons and exit script: ')
        sys.stdin.readline()
    for proc in _daemons:
        os.kill(proc.pid, signal.SIGTERM)
    if not _success:
        print
        if not verbose:
            print 'See testlog for details, or re-run with -v flag.'
            print
        print 'Use --debug=NUM to run a command under a debugger. Use'
        print '--stop-after=NUM to stop after a daemon is started in order to'
        print 'attach to it with a debugger. Use --help to see other options.'
# Find the parent of dir which is at the root of a build or source directory.
def _find_root(dir):
while True:
if os.path.exists(os.path.join(dir, 'lib', 'krb5', 'krb')):
break
parent = os.path.dirname(dir)
if (parent == dir):
return None
dir = parent
return dir
def _find_buildtop():
    """Locate the krb5 build tree containing the current directory, or fail."""
    root = _find_root(os.getcwd())
    if root is None:
        fail('Cannot find root of krb5 build directory.')
    # config.status is created by configure, so its absence means this is a
    # source tree rather than a configured build tree.
    if not os.path.exists(os.path.join(root, 'config.status')):
        # Looks like an unbuilt source directory.
        fail('This script must be run inside a krb5 build directory.')
    return root
def _find_srctop():
    """Locate the krb5 source tree containing this script, or fail."""
    scriptdir = os.path.abspath(os.path.dirname(sys.argv[0]))
    if not scriptdir:
        scriptdir = os.getcwd()
    root = _find_root(scriptdir)
    if root is None:
        fail('Cannot find root of krb5 source directory.')
    return os.path.abspath(root)
# Return the local hostname as it will be canonicalized by
# krb5_sname_to_principal. We can't simply use socket.getfqdn()
# because it explicitly prefers results containing periods and
# krb5_sname_to_principal doesn't care.
def _get_hostname():
    """Return the local hostname, canonicalized the way
    krb5_sname_to_principal would: forward-resolve it, then prefer the
    reverse-resolved name when the address maps back."""
    hostname = socket.gethostname()
    try:
        ai = socket.getaddrinfo(hostname, None, 0, 0, 0, socket.AI_CANONNAME)
    except socket.gaierror, (error, errstr):
        fail('Local hostname "%s" does not resolve: %s.' % (hostname, errstr))
    (family, socktype, proto, canonname, sockaddr) = ai[0]
    try:
        # NI_NAMEREQD makes getnameinfo raise instead of returning the
        # numeric address when there is no reverse mapping.
        name = socket.getnameinfo(sockaddr, socket.NI_NAMEREQD)
    except socket.gaierror:
        return canonname.lower()
    return name[0].lower()
# Parse command line arguments, setting global option variables. Also
# sets the global variable args to the positional arguments, which may
# be used by the test script.
def _parse_args():
    """Parse command-line options into the module-level option globals and
    leave any positional arguments in the global args list."""
    global args, verbose, testpass, _debug, _debugger_command
    global _stop_before, _stop_after, _shell_before, _shell_after
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
                      default=False, help='Display verbose output')
    parser.add_option('-p', '--pass', dest='testpass', metavar='PASS',
                      help='If a multi-pass test, run only PASS')
    parser.add_option('--debug', dest='debug', metavar='NUM',
                      help='Debug numbered command (or "all")')
    parser.add_option('--debugger', dest='debugger', metavar='COMMAND',
                      help='Debugger command (default is gdb --args)',
                      default='gdb --args')
    parser.add_option('--stop-before', dest='stopb', metavar='NUM',
                      help='Stop before numbered command (or "all")')
    parser.add_option('--stop-after', dest='stopa', metavar='NUM',
                      help='Stop after numbered command (or "all")')
    parser.add_option('--shell-before', dest='shellb', metavar='NUM',
                      help='Spawn shell before numbered command (or "all")')
    parser.add_option('--shell-after', dest='shella', metavar='NUM',
                      help='Spawn shell after numbered command (or "all")')
    (options, args) = parser.parse_args()
    verbose = options.verbose
    testpass = options.testpass
    # Command-number options accept "all" (-1 internally) or an integer.
    _debug = _parse_cmdnum('--debug', options.debug)
    _debugger_command = shlex.split(options.debugger)
    _stop_before = _parse_cmdnum('--stop-before', options.stopb)
    _stop_after = _parse_cmdnum('--stop-after', options.stopa)
    _shell_before = _parse_cmdnum('--shell-before', options.shellb)
    _shell_after = _parse_cmdnum('--shell-after', options.shella)
# Translate a command number spec. -1 means all, None means none.
def _parse_cmdnum(optname, str):
if not str:
return None
if str == 'all':
return -1
try:
return int(str)
except ValueError:
fail('%s value must be "all" or a number' % optname)
# Test if a command index matches a translated command number spec.
def _match_cmdnum(cmdnum, ind):
if cmdnum is None:
return False
elif cmdnum == -1:
return True
else:
return cmdnum == ind
# Return an environment suitable for running programs in the build
# tree. It is safe to modify the result.
def _build_env():
    """Return a copy of os.environ extended with the build tree's runenv
    variables; values beginning with './' are rooted at buildtop."""
    global buildtop, runenv
    env = os.environ.copy()
    for (k, v) in runenv.env.iteritems():
        if v.find('./') == 0:
            env[k] = os.path.join(buildtop, v)
        else:
            env[k] = v
    # Make sure we don't get confused by translated messages
    # or localized times.
    env['LC_ALL'] = 'C'
    return env
def _import_runenv():
    """Load $buildtop/runenv.py (generated by "make runenv.py") as a module."""
    global buildtop
    runenv_py = os.path.join(buildtop, 'runenv.py')
    if not os.path.exists(runenv_py):
        fail('You must run "make runenv.py" in %s first.' % buildtop)
    return imp.load_source('runenv', runenv_py)
# Merge the nested dictionaries cfg1 and cfg2 into a new dictionary.
# cfg1 or cfg2 may be None, in which case the other is returned. If
# cfg2 contains keys mapped to None, the corresponding keys will be
# mapped to None in the result. The result may contain references to
# parts of cfg1 or cfg2, so is not safe to modify.
def _cfg_merge(cfg1, cfg2):
if not cfg2:
return cfg1
if not cfg1:
return cfg2
result = cfg1.copy()
for key, value2 in cfg2.items():
if value2 is None or key not in result:
result[key] = value2
else:
value1 = result[key]
if isinstance(value1, dict):
if not isinstance(value2, dict):
raise TypeError()
result[key] = _cfg_merge(value1, value2)
else:
result[key] = value2
return result
# Python gives us shlex.split() to turn a shell command into a list of
# arguments, but oddly enough, not the easier reverse operation. For
# now, do a bad job of faking it.
def _shell_equiv(args):
return " ".join(args)
# Add a valgrind prefix to the front of args if specified in the
# environment. Under normal circumstances this just returns args.
def _valgrind(args):
valgrind = os.getenv('VALGRIND')
if valgrind:
args = shlex.split(valgrind) + args
return args
def _stop_or_shell(stop, shell, env, ind):
    """Honor the --stop-before/--stop-after and --shell-before/--shell-after
    specs for command number ind: pause for a keypress, and/or spawn an
    interactive shell in env."""
    if (_match_cmdnum(stop, ind)):
        sys.stdout.write('*** [%d] Waiting for return: ' % ind)
        sys.stdin.readline()
    if (_match_cmdnum(shell, ind)):
        output('*** [%d] Spawning shell\n' % ind, True)
        subprocess.call(os.getenv('SHELL'), env=env)
def _run_cmd(args, env, input=None, expected_code=0):
    """Run args in env, honoring the debugging command-line options.

    Folds stderr into stdout, logs everything, fails unless the exit status
    equals expected_code, and returns the captured output as one string."""
    global null_input, _cmd_index, _debug
    global _stop_before, _stop_after, _shell_before, _shell_after
    if (_match_cmdnum(_debug, _cmd_index)):
        # NOTE(review): the debugger path returns _debug_cmd's result,
        # which is None rather than the command output.
        return _debug_cmd(args, env, input)
    args = _valgrind(args)
    output('*** [%d] Executing: %s\n' % (_cmd_index, _shell_equiv(args)))
    _stop_or_shell(_stop_before, _shell_before, env, _cmd_index)
    if input:
        infile = subprocess.PIPE
    else:
        infile = null_input
    # Run the command and log the result, folding stderr into stdout.
    proc = subprocess.Popen(args, stdin=infile, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env)
    (outdata, dummy_errdata) = proc.communicate(input)
    code = proc.returncode
    output(outdata)
    output('*** [%d] Completed with return code %d\n' % (_cmd_index, code))
    _stop_or_shell(_stop_after, _shell_after, env, _cmd_index)
    _cmd_index += 1
    # Check the return code and return the output.
    if code != expected_code:
        fail('%s failed with code %d.' % (args[0], code))
    return outdata
def _debug_cmd(args, env, input):
    """Run args under the configured debugger command ("gdb --args" by
    default), echoing any canned input so the user can type it at the right
    moment.  Unlike _run_cmd, no output is captured or returned."""
    global _cmd_index, _debugger_command
    args = _debugger_command + list(args)
    output('*** [%d] Executing in debugger: %s\n' %
           (_cmd_index, _shell_equiv(args)), True)
    if input:
        print
        print '*** Enter the following input when appropriate:'
        print
        print input
        print
    code = subprocess.call(args, env=env)
    output('*** [%d] Completed in debugger with return code %d\n' %
           (_cmd_index, code))
    _cmd_index += 1
# Start a daemon process with the specified args and env. Wait until
# we see sentinel as a substring of a line on either stdout or stderr.
# Clean up the daemon process on exit.
def _start_daemon(args, env, sentinel):
    """Start a daemon process with args/env and wait until it is ready.

    Readiness is detected by watching for sentinel as a substring of a
    line on the daemon's stdout or stderr.  The process is recorded for
    cleanup at exit; the caller may stop it earlier with stop_daemon.
    """
    global null_input, _cmd_index, _debug
    global _stop_before, _stop_after, _shell_before, _shell_after
    # A daemon can be debugged, but the script cannot continue afterwards
    # since the sentinel is never observed.
    if (_match_cmdnum(_debug, _cmd_index)):
        output('*** [%d] Warning: ' % _cmd_index, True)
        output( 'test script cannot proceed after debugging a daemon\n', True)
        _debug_cmd(args, env, None)
        output('*** Exiting after debugging daemon\n', True)
        sys.exit(1)
    args = _valgrind(args)
    output('*** [%d] Starting: %s\n' %
           (_cmd_index, _shell_equiv(args)))
    _stop_or_shell(_stop_before, _shell_before, env, _cmd_index)
    # Start the daemon and look for the sentinel in stdout or stderr.
    proc = subprocess.Popen(args, stdin=null_input, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env)
    while True:
        line = proc.stdout.readline()
        if line == "":
            # EOF before the sentinel means the daemon died during startup.
            code = proc.wait()
            fail('%s failed to start with code %d.' % (args[0], code))
        output(line)
        if sentinel in line:
            break
    output('*** [%d] Started with pid %d\n' % (_cmd_index, proc.pid))
    _stop_or_shell(_stop_after, _shell_after, env, _cmd_index)
    _cmd_index += 1
    # Save the daemon in a list for cleanup.  Note that we won't read
    # any more of the daemon's output after the sentinel, which will
    # cause the daemon to block if it generates enough.  For now we
    # assume all daemon processes are quiet enough to avoid this
    # problem.  If it causes an issue, some alternatives are:
    #   - Output to a file and poll the file for the sentinel
    #     (undesirable because it slows down the test suite by the
    #     polling interval times the number of daemons started)
    #   - Create an intermediate subprocess which discards output
    #     after the sentinel.
    _daemons.append(proc)
    # Return the process; the caller can stop it with stop_daemon.
    return proc
def stop_daemon(proc):
    """Terminate daemon process proc with SIGTERM, reap it, and remove it
    from the exit-time cleanup list."""
    output('*** Terminating process %d\n' % proc.pid)
    os.kill(proc.pid, signal.SIGTERM)
    proc.wait()
    _daemons.remove(proc)
class K5Realm(object):
    """An object representing a functional krb5 test realm."""
    def __init__(self, realm='KRBTEST.COM', portbase=61000, testdir='testdir',
                 krb5_conf=None, kdc_conf=None, create_kdb=True,
                 krbtgt_keysalt=None, create_user=True, get_creds=True,
                 create_host=True, start_kdc=True, start_kadmind=False,
                 start_kpropd=False):
        """Create the realm's config files, database, principals, and
        daemons as requested by the keyword flags.  krb5_conf/kdc_conf
        are profile fragments merged over the module defaults."""
        global hostname, _default_krb5_conf, _default_kdc_conf
        self.realm = realm
        self.testdir = os.path.join(os.getcwd(), testdir)
        self.portbase = portbase
        self.user_princ = 'user@' + self.realm
        self.admin_princ = 'user/admin@' + self.realm
        self.host_princ = 'host/%s@%s' % (hostname, self.realm)
        self.nfs_princ = 'nfs/%s@%s' % (hostname, self.realm)
        self.krbtgt_princ = 'krbtgt/%s@%s' % (self.realm, self.realm)
        self.keytab = os.path.join(self.testdir, 'keytab')
        self.client_keytab = os.path.join(self.testdir, 'client_keytab')
        self.ccache = os.path.join(self.testdir, 'ccache')
        self.kadmin_ccache = os.path.join(self.testdir, 'kadmin_ccache')
        self._krb5_conf = _cfg_merge(_default_krb5_conf, krb5_conf)
        self._kdc_conf = _cfg_merge(_default_kdc_conf, kdc_conf)
        self._kdc_proc = None
        self._kadmind_proc = None
        self._kpropd_procs = []
        krb5_conf_path = os.path.join(self.testdir, 'krb5.conf')
        kdc_conf_path = os.path.join(self.testdir, 'kdc.conf')
        self.env = self._make_env(krb5_conf_path, kdc_conf_path)
        self._create_empty_dir()
        self._create_conf(self._krb5_conf, krb5_conf_path)
        self._create_conf(self._kdc_conf, kdc_conf_path)
        self._create_acl()
        self._create_dictfile()
        if create_kdb:
            self.create_kdb()
        if krbtgt_keysalt and create_kdb:
            self.run_kadminl('cpw -randkey -e %s %s' %
                             (krbtgt_keysalt, self.krbtgt_princ))
        if create_user and create_kdb:
            self.addprinc(self.user_princ, password('user'))
            self.addprinc(self.admin_princ, password('admin'))
        if create_host and create_kdb:
            self.addprinc(self.host_princ)
            self.extract_keytab(self.host_princ, self.keytab)
        if start_kdc and create_kdb:
            self.start_kdc()
        if start_kadmind and create_kdb:
            self.start_kadmind()
        if get_creds and create_kdb and create_user and start_kdc:
            self.kinit(self.user_princ, password('user'))
            self.klist(self.user_princ)
    def _create_empty_dir(self):
        """(Re)create an empty self.testdir, failing if removal fails."""
        dir = self.testdir
        shutil.rmtree(dir, True)
        if (os.path.exists(dir)):
            fail('Cannot remove %s to create test realm.' % dir)
        os.mkdir(dir)
    def _create_conf(self, profile, filename):
        """Write profile (a nested dict of sections) to filename."""
        file = open(filename, 'w')
        for section, contents in profile.items():
            file.write('[%s]\n' % section)
            self._write_cfg_section(file, contents, 1)
        file.close()
    def _write_cfg_section(self, file, contents, indent_level):
        """Recursively write one profile section with tab indentation."""
        indent = '\t' * indent_level
        for name, value in contents.items():
            name = self._subst_cfg_value(name)
            if isinstance(value, dict):
                # A dictionary value yields a list subsection.
                file.write('%s%s = {\n' % (indent, name))
                self._write_cfg_section(file, value, indent_level + 1)
                file.write('%s}\n' % indent)
            elif isinstance(value, list):
                # A list value yields multiple values for the same name.
                for item in value:
                    item = self._subst_cfg_value(item)
                    file.write('%s%s = %s\n' % (indent, name, item))
            elif isinstance(value, str):
                # A string value yields a straightforward variable setting.
                value = self._subst_cfg_value(value)
                file.write('%s%s = %s\n' % (indent, name, value))
            elif value is not None:
                raise TypeError()
    def _subst_cfg_value(self, value):
        """Expand $realm, $testdir, $port0..$port9, etc. in a config value."""
        global buildtop, srctop, hostname
        template = string.Template(value)
        return template.substitute(realm=self.realm,
                                   testdir=self.testdir,
                                   buildtop=buildtop,
                                   srctop=srctop,
                                   plugins=plugins,
                                   hostname=hostname,
                                   port0=self.portbase,
                                   port1=self.portbase + 1,
                                   port2=self.portbase + 2,
                                   port3=self.portbase + 3,
                                   port4=self.portbase + 4,
                                   port5=self.portbase + 5,
                                   port6=self.portbase + 6,
                                   port7=self.portbase + 7,
                                   port8=self.portbase + 8,
                                   port9=self.portbase + 9)
    def _create_acl(self):
        """Write the kadmind ACL file, granting all powers to admin_princ
        and propagation rights to the kiprop service principal."""
        global hostname
        filename = os.path.join(self.testdir, 'acl')
        file = open(filename, 'w')
        file.write('%s *\n' % self.admin_princ)
        file.write('kiprop/%s@%s p\n' % (hostname, self.realm))
        file.close()
    def _create_dictfile(self):
        """Write a one-entry password dictionary for dictionary checks."""
        filename = os.path.join(self.testdir, 'dictfile')
        file = open(filename, 'w')
        file.write('weak_password\n')
        file.close()
    def _make_env(self, krb5_conf_path, kdc_conf_path):
        """Return an environment dict pointing krb5 at this realm's files."""
        env = _build_env()
        env['KRB5_CONFIG'] = krb5_conf_path
        env['KRB5_KDC_PROFILE'] = kdc_conf_path or os.devnull
        env['KRB5CCNAME'] = self.ccache
        env['KRB5_KTNAME'] = self.keytab
        env['KRB5_CLIENT_KTNAME'] = self.client_keytab
        env['KRB5RCACHEDIR'] = self.testdir
        env['KPROPD_PORT'] = str(self.kprop_port())
        env['KPROP_PORT'] = str(self.kprop_port())
        return env
    def run(self, args, env=None, **keywords):
        """Run a command in this realm's environment (see _run_cmd)."""
        if env is None:
            env = self.env
        return _run_cmd(args, env, **keywords)
    def kprop_port(self):
        """Port used by kprop/kpropd for this realm."""
        return self.portbase + 3
    def server_port(self):
        """Generic port available for test server programs."""
        return self.portbase + 5
    def start_server(self, args, sentinel, env=None):
        """Start a daemon in this realm's environment (see _start_daemon)."""
        if env is None:
            env = self.env
        return _start_daemon(args, env, sentinel)
    def start_in_inetd(self, args, port=None, env=None):
        """Start args under the t_inetd helper, listening on port."""
        if not port:
            port = self.server_port()
        if env is None:
            env = self.env
        inetd_args = [t_inetd, str(port)] + args
        return _start_daemon(inetd_args, env, 'Ready!')
    def create_kdb(self):
        """Create the realm's stashed KDC database (master pw 'master')."""
        global kdb5_util
        self.run([kdb5_util, 'create', '-W', '-s', '-P', 'master'])
    def start_kdc(self, args=[], env=None):
        """Start krb5kdc for this realm; at most one instance may run."""
        global krb5kdc
        if env is None:
            env = self.env
        assert(self._kdc_proc is None)
        self._kdc_proc = _start_daemon([krb5kdc, '-n'] + args, env,
                                       'starting...')
    def stop_kdc(self):
        """Stop the running krb5kdc."""
        assert(self._kdc_proc is not None)
        stop_daemon(self._kdc_proc)
        self._kdc_proc = None
    def start_kadmind(self, env=None):
        """Start kadmind for this realm; at most one instance may run."""
        global krb5kdc
        if env is None:
            env = self.env
        assert(self._kadmind_proc is None)
        dump_path = os.path.join(self.testdir, 'dump')
        self._kadmind_proc = _start_daemon([kadmind, '-nofork', '-W',
                                            '-p', kdb5_util, '-K', kprop,
                                            '-F', dump_path], env,
                                           'starting...')
    def stop_kadmind(self):
        """Stop the running kadmind."""
        assert(self._kadmind_proc is not None)
        stop_daemon(self._kadmind_proc)
        self._kadmind_proc = None
    def start_kpropd(self, env, args=[]):
        """Start a kpropd (incremental propagation slave) and track it so
        stop() can clean it up; multiple instances may run."""
        global krb5kdc
        slavedump_path = os.path.join(self.testdir, 'incoming-slave-datatrans')
        kpropdacl_path = os.path.join(self.testdir, 'kpropd-acl')
        proc = _start_daemon([kpropd, '-D', '-P', str(self.kprop_port()),
                              '-f', slavedump_path, '-p', kdb5_util,
                              '-a', kpropdacl_path] + args, env, 'ready')
        self._kpropd_procs.append(proc)
        return proc
    def stop(self):
        """Stop all daemons started for this realm."""
        if self._kdc_proc:
            self.stop_kdc()
        if self._kadmind_proc:
            self.stop_kadmind()
        for p in self._kpropd_procs:
            stop_daemon(p)
        self._kpropd_procs = []
    def addprinc(self, princname, password=None):
        """Add a principal with the given password, or a random key."""
        if password:
            self.run_kadminl('addprinc -pw %s %s' % (password, princname))
        else:
            self.run_kadminl('addprinc -randkey %s' % princname)
    def extract_keytab(self, princname, keytab):
        """Extract princname's current keys into keytab without rekeying."""
        self.run_kadminl('ktadd -k %s -norandkey %s' % (keytab, princname))
    def kinit(self, princname, password=None, flags=[], **keywords):
        """Get initial credentials for princname, supplying password on
        stdin if given."""
        if password:
            input = password + "\n"
        else:
            input = None
        return self.run([kinit] + flags + [princname], input=input, **keywords)
    def klist(self, client_princ, service_princ=None, ccache=None, **keywords):
        """Run klist and verify that the expected ccache name, client
        principal, and service principal appear in the output."""
        if service_princ is None:
            service_princ = self.krbtgt_princ
        if ccache is None:
            ccache = self.ccache
        ccachestr = ccache
        # klist reports residual-only cache names with a FILE: prefix.
        if len(ccachestr) < 2 or ':' not in ccachestr[2:]:
            ccachestr = 'FILE:' + ccachestr
        output = self.run([klist, ccache], **keywords)
        if (('Ticket cache: %s\n' % ccachestr) not in output or
            ('Default principal: %s\n' % client_princ) not in output or
            service_princ not in output):
            fail('Unexpected klist output.')
    def klist_keytab(self, princ, keytab=None, **keywords):
        """Run klist -k and verify the keytab name and principal."""
        if keytab is None:
            keytab = self.keytab
        output = self.run([klist, '-k', keytab], **keywords)
        if (('Keytab name: FILE:%s\n' % keytab) not in output or
            'KVNO Principal\n----' not in output or
            princ not in output):
            fail('Unexpected klist output.')
    def run_kadminl(self, query, env=None):
        """Run a kadmin.local query directly against the realm database."""
        global kadmin_local
        return self.run([kadmin_local, '-q', query], env=env)
    def prep_kadmin(self, princname=None, pw=None, flags=[]):
        """Obtain a kadmin service ticket (default: as admin_princ) into
        kadmin_ccache for use with run_kadmin."""
        if princname is None:
            princname = self.admin_princ
            pw = password('admin')
        return self.kinit(princname, pw,
                          flags=['-S', 'kadmin/admin',
                                 '-c', self.kadmin_ccache] + flags)
    def run_kadmin(self, query, **keywords):
        """Run a kadmin query using the ccache from prep_kadmin."""
        return self.run([kadmin, '-c', self.kadmin_ccache, '-q', query],
                        **keywords)
    def special_env(self, name, has_kdc_conf, krb5_conf=None, kdc_conf=None):
        """Return an environment using config files merged with the given
        fragments, written under distinct names so the realm's regular
        configuration is left untouched."""
        krb5_conf_path = os.path.join(self.testdir, 'krb5.conf.%s' % name)
        krb5_conf = _cfg_merge(self._krb5_conf, krb5_conf)
        self._create_conf(krb5_conf, krb5_conf_path)
        if has_kdc_conf:
            kdc_conf_path = os.path.join(self.testdir, 'kdc.conf.%s' % name)
            kdc_conf = _cfg_merge(self._kdc_conf, kdc_conf)
            self._create_conf(kdc_conf, kdc_conf_path)
        else:
            kdc_conf_path = None
        return self._make_env(krb5_conf_path, kdc_conf_path)
def multipass_realms(**keywords):
    """Generate a K5Realm for each enctype/config pass, stopping it after use.

    Honors the selected testpass (if any) by skipping other passes.
    Caller-supplied krb5_conf/kdc_conf fragments are merged over each
    pass's own fragments.
    """
    global _current_pass, _passes, testpass
    caller_krb5_conf = keywords.get('krb5_conf')
    caller_kdc_conf = keywords.get('kdc_conf')
    for p in _passes:
        (name, krbtgt_keysalt, krb5_conf, kdc_conf) = p
        if testpass and name != testpass:
            continue
        output('*** Beginning pass %s\n' % name)
        keywords['krb5_conf'] = _cfg_merge(krb5_conf, caller_krb5_conf)
        keywords['kdc_conf'] = _cfg_merge(kdc_conf, caller_kdc_conf)
        keywords['krbtgt_keysalt'] = krbtgt_keysalt
        _current_pass = name
        realm = K5Realm(**keywords)
        yield realm
        realm.stop()
        _current_pass = None
def cross_realms(num, xtgts=None, args=None, **keywords):
    """Create and return a list of num K5Realms joined by cross-realm TGTs.

    xtgts is an iterable of (i, j) index pairs meaning realm i can
    authenticate to realm j; by default every ordered pair is included.
    args is an optional per-realm list of keyword dicts merged over the
    common keywords.
    """
    # Build keyword args for each realm.
    realm_args = []
    for i in range(num):
        realmnumber = i + 1
        # Start with any global keyword arguments to this function.
        a = keywords.copy()
        if args and args[i]:
            # Merge in specific arguments for this realm.  Use
            # _cfg_merge for config fragments.
            a.update(args[i])
            for cf in ('krb5_conf', 'kdc_conf'):
                if cf in keywords and cf in args[i]:
                    a[cf] = _cfg_merge(keywords[cf], args[i][cf])
        # Set defaults for the realm name, testdir, and portbase.
        if not 'realm' in a:
            a['realm'] = 'KRBTEST%d.COM' % realmnumber
        if not 'testdir' in a:
            a['testdir'] = os.path.join('testdir', str(realmnumber))
        if not 'portbase' in a:
            a['portbase'] = 61000 + 10 * realmnumber
        realm_args.append(a)
    # Build a [realms] config fragment containing all of the realms.
    realmsection = { '$realm' : None }
    for a in realm_args:
        name = a['realm']
        portbase = a['portbase']
        realmsection[name] = {
            'kdc' : '$hostname:%d' % portbase,
            'admin_server' : '$hostname:%d' % (portbase + 1),
            'kpasswd_server' : '$hostname:%d' % (portbase + 2)
        }
    realmscfg = {'realms': realmsection}
    # Set realmsection in each realm's krb5_conf keyword argument.
    for a in realm_args:
        a['krb5_conf'] = _cfg_merge(realmscfg, a.get('krb5_conf'))
    if xtgts is None:
        # Default to cross tgts for every pair of realms.
        # (itertools.permutations would work here but is new in 2.6.)
        xtgts = [(x,y) for x in range(num) for y in range(num) if x != y]
    # Create the realms.
    realms = []
    for i in range(num):
        r = K5Realm(**realm_args[i])
        # Create specified cross TGTs in this realm's db.
        for j in range(num):
            if j == i:
                continue
            iname = r.realm
            jname = realm_args[j]['realm']
            if (i, j) in xtgts:
                # This realm can authenticate to realm j.
                r.addprinc('krbtgt/%s' % jname, password('cr-%d-%d-' % (i, j)))
            if (j, i) in xtgts:
                # Realm j can authenticate to this realm.
                r.addprinc('krbtgt/%s@%s' % (iname, jname),
                           password('cr-%d-%d-' % (j, i)))
        realms.append(r)
    return realms
# Base krb5.conf profile for test realms; $-placeholders are expanded by
# K5Realm._subst_cfg_value via string.Template.
_default_krb5_conf = {
    'libdefaults': {
        'default_realm': '$realm',
        'dns_lookup_kdc': 'false',
        'plugin_base_dir': '$plugins'},
    'realms': {'$realm': {
        'kdc': '$hostname:$port0',
        'admin_server': '$hostname:$port1',
        'kpasswd_server': '$hostname:$port2'}}}
# Base kdc.conf profile: db2 database under $testdir, per-realm ports,
# and log files written inside the test directory.
_default_kdc_conf = {
    'realms': {'$realm': {
        'database_module': 'db',
        'iprop_port': '$port4',
        'key_stash_file': '$testdir/stash',
        'acl_file': '$testdir/acl',
        'dictfile': '$testdir/dictfile',
        'kadmind_port': '$port1',
        'kpasswd_port': '$port2',
        'kdc_ports': '$port0',
        'kdc_tcp_ports': '$port0'}},
    'dbmodules': {
        'db_module_dir': '$plugins/kdb',
        'db': {'db_library': 'db2', 'database_name' : '$testdir/db'}},
    'logging': {
        'admin_server': 'FILE:$testdir/kadmind5.log',
        'kdc': 'FILE:$testdir/kdc.log',
        'default': 'FILE:$testdir/others.log'}}
# A pass is a tuple of: name, krbtgt_keysalt, krb5_conf, kdc_conf.
_passes = [
    # No special settings; exercises AES256.
    ('default', None, None, None),
    # Exercise a DES enctype and the v4 salt type.
    ('desv4', None,
     {'libdefaults': {
                'default_tgs_enctypes': 'des-cbc-crc',
                'default_tkt_enctypes': 'des-cbc-crc',
                'permitted_enctypes': 'des-cbc-crc',
                'allow_weak_crypto': 'true'}},
     {'realms': {'$realm': {
                    'supported_enctypes': 'des-cbc-crc:v4',
                    'master_key_type': 'des-cbc-crc'}}}),
    # Exercise the DES3 enctype.
    ('des3', None,
     {'libdefaults': {
                'default_tgs_enctypes': 'des3',
                'default_tkt_enctypes': 'des3',
                'permitted_enctypes': 'des3'}},
     {'realms': {'$realm': {
                    'supported_enctypes': 'des3-cbc-sha1:normal',
                    'master_key_type': 'des3-cbc-sha1'}}}),
    # Exercise the arcfour enctype.
    ('arcfour', None,
     {'libdefaults': {
                'default_tgs_enctypes': 'rc4',
                'default_tkt_enctypes': 'rc4',
                'permitted_enctypes': 'rc4'}},
     {'realms': {'$realm': {
                    'supported_enctypes': 'arcfour-hmac:normal',
                    'master_key_type': 'arcfour-hmac'}}}),
    # Exercise the AES128 enctype.
    ('aes128', None,
     {'libdefaults': {
                'default_tgs_enctypes': 'aes128-cts',
                'default_tkt_enctypes': 'aes128-cts',
                'permitted_enctypes': 'aes128-cts'}},
     {'realms': {'$realm': {
                    'supported_enctypes': 'aes128-cts:normal',
                    'master_key_type': 'aes128-cts'}}}),
    # Exercise the camellia256-cts enctype.
    ('camellia256', None,
     {'libdefaults': {
                'default_tgs_enctypes': 'camellia256-cts',
                'default_tkt_enctypes': 'camellia256-cts',
                'permitted_enctypes': 'camellia256-cts'}},
     {'realms': {'$realm': {
                    'supported_enctypes': 'camellia256-cts:normal',
                    'master_key_type': 'camellia256-cts'}}}),
    # Test a setup with modern principal keys but an old TGT key.
    ('aes256.destgt', 'des-cbc-crc:normal',
     {'libdefaults': {'allow_weak_crypto': 'true'}},
     None)
]
# Module state: overall success flag (set by a successful run), the name of
# the multipass currently executing, and daemons to clean up at exit.
_success = False
_current_pass = None
_daemons = []
# Module initialization: parse command-line options, register exit-time
# cleanup, open the log file, and locate the build/source trees.
_parse_args()
atexit.register(_onexit)
_outfile = open('testlog', 'w')
_cmd_index = 1
buildtop = _find_buildtop()
srctop = _find_srctop()
plugins = os.path.join(buildtop, 'plugins')
runenv = _import_runenv()
hostname = _get_hostname()
null_input = open(os.devnull, 'r')
# Paths to the build tree's programs, for use by test scripts.
krb5kdc = os.path.join(buildtop, 'kdc', 'krb5kdc')
kadmind = os.path.join(buildtop, 'kadmin', 'server', 'kadmind')
kadmin = os.path.join(buildtop, 'kadmin', 'cli', 'kadmin')
kadmin_local = os.path.join(buildtop, 'kadmin', 'cli', 'kadmin.local')
kdb5_ldap_util = os.path.join(buildtop, 'plugins', 'kdb', 'ldap', 'ldap_util',
                              'kdb5_ldap_util')
kdb5_util = os.path.join(buildtop, 'kadmin', 'dbutil', 'kdb5_util')
ktutil = os.path.join(buildtop, 'kadmin', 'ktutil', 'ktutil')
kinit = os.path.join(buildtop, 'clients', 'kinit', 'kinit')
klist = os.path.join(buildtop, 'clients', 'klist', 'klist')
kswitch = os.path.join(buildtop, 'clients', 'kswitch', 'kswitch')
kvno = os.path.join(buildtop, 'clients', 'kvno', 'kvno')
kdestroy = os.path.join(buildtop, 'clients', 'kdestroy', 'kdestroy')
kpasswd = os.path.join(buildtop, 'clients', 'kpasswd', 'kpasswd')
t_inetd = os.path.join(buildtop, 'tests', 'dejagnu', 't_inetd')
kproplog = os.path.join(buildtop, 'slave', 'kproplog')
kpropd = os.path.join(buildtop, 'slave', 'kpropd')
kprop = os.path.join(buildtop, 'slave', 'kprop')
| 38.692691
| 79
| 0.645237
|
4a1097be4aecc12c4770815b2bc22cd38ddf8c06
| 2,818
|
py
|
Python
|
SSDBM_figures/runtime_eval/add_explicit_runtimes.py
|
dennlinger/hypergraph-document-store
|
72b90119b163b92254c73442bee52cde55e58517
|
[
"MIT"
] | null | null | null |
SSDBM_figures/runtime_eval/add_explicit_runtimes.py
|
dennlinger/hypergraph-document-store
|
72b90119b163b92254c73442bee52cde55e58517
|
[
"MIT"
] | 1
|
2019-12-12T09:20:00.000Z
|
2019-12-12T09:20:00.000Z
|
SSDBM_figures/runtime_eval/add_explicit_runtimes.py
|
dennlinger/hypergraph-document-store
|
72b90119b163b92254c73442bee52cde55e58517
|
[
"MIT"
] | 1
|
2021-07-22T14:16:47.000Z
|
2021-07-22T14:16:47.000Z
|
"""
Attempt at a first template script to evaluate runtimes for the explicit model in Postgres.
"""
from PostgresConnector_SSDBM import PostgresConnector
import json
if __name__ == "__main__":
    print("Evaluation for Full Explicit Model")
    # entities.json maps each entity label to its runtime records; it is both
    # the input (which entities to query) and the output (collected timings).
    fn = "./entities.json"
    with open(fn) as f:
        data = json.load(f)
    pc = PostgresConnector(port=5436)
    # Evaluate each co-occurrence window size over several iterations.
    for window in [0, 1, 2, 5]:
        print("", flush=True) # Dummy for proper carriage return
        print("Starting with window size {}.".format(window), flush=True)
        for iteration in range(7):
            print("", flush=True) # Dummy for proper carriage return
            print("Starting with iteration {}".format(iteration), flush=True)
            print("", flush=True) # Dummy for proper carriage return
            for i, (entity_label, properties) in enumerate(data.items()):
                print("Entity: {}/{}\t".format(i+1, len(data)), end="", flush=True)
                # NOTE(review): the query is built via str.format, so an
                # entity label containing a single quote would break the SQL.
                # Labels come from the local entities.json here, but a
                # parameterized query would be safer -- confirm.
                query = """
                EXPLAIN ANALYZE
                WITH s AS (SELECT term_id FROM terms
                WHERE term_text = '{}'),
                q AS (SELECT edge_id FROM full_{}_hyperedges eh
                WHERE eh.term_id = (SELECT s.term_id FROM s))
                SELECT term_text, counts.freq FROM terms t,
                (SELECT term_id, COUNT(*) as freq
                FROM full_{}_hyperedges eh
                WHERE eh.edge_id = ANY(ARRAY(SELECT * FROM q))
                GROUP BY term_id ORDER BY freq DESC) as counts
                WHERE counts.term_id = t.term_id
                AND counts.term_id != (SELECT term_id FROM s);""".format(entity_label, window, window)
                with pc as opc:
                    opc.cursor.execute(query)
                    res = opc.cursor.fetchall()
                # The final EXPLAIN ANALYZE row carries the execution time.
                if not res[-1][0].lower().startswith("execution time:"):
                    print("")
                    print(res, flush=True)
                    print("")
                else:
                    # clean data to extract float
                    # sample: ('Execution Time: 1352.866 ms',)
                    time_taken = float(res[-1][0].split(":")[1].strip().split(" ")[0])
                    print("{}\r".format(time_taken), end="", flush=True)
                    if iteration > 0: # take one round of cache warm-up
                        times = data[entity_label].get("explicit", {})
                        window_times = times.get(str(window), [])
                        window_times.append(time_taken)
                        times[str(window)] = window_times
                        data[entity_label]["explicit"] = times
            # Persist the accumulated timings after each full entity sweep.
            with open(fn, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
| 44.03125
| 104
| 0.518453
|
4a1099066a9494c23100bd6ba1b51dd90f9f600e
| 1,355
|
py
|
Python
|
pardal/keyboard.py
|
anapaulagomes/pardal-python
|
d67ba1e0e67677320f37cab1288881ad7845612b
|
[
"MIT"
] | 2
|
2019-01-30T22:35:44.000Z
|
2019-01-30T22:41:06.000Z
|
pardal/keyboard.py
|
anapaulagomes/pardal-python
|
d67ba1e0e67677320f37cab1288881ad7845612b
|
[
"MIT"
] | null | null | null |
pardal/keyboard.py
|
anapaulagomes/pardal-python
|
d67ba1e0e67677320f37cab1288881ad7845612b
|
[
"MIT"
] | null | null | null |
import configparser
from pynput import keyboard
# Maps the hotkey-string vocabulary used in the config file to pynput Key
# objects; 'command' and 'windows' both name the platform super key.
keyboard_mapping = {
    'command': keyboard.Key.cmd,
    'windows': keyboard.Key.cmd,
    'up': keyboard.Key.up,
    'down': keyboard.Key.down,
    'ctrl': keyboard.Key.ctrl,
    'esc': keyboard.Key.esc,
}
def string_to_hotkeys(string):
    """Convert a hotkey string like 'ctrl + up' to a set of Key objects.

    Spaces are ignored and components are separated by '+'.  Raises
    KeyNotFound if any component is not present in keyboard_mapping.
    """
    combination = []
    string_keys = string.replace(' ', '').split('+')
    for string_key in string_keys:
        key = keyboard_mapping.get(string_key)
        if key:
            combination.append(key)
        else:
            # Include the offending key name so the error is actionable
            # (the bare exception previously gave no hint which key failed).
            raise KeyNotFound(string_key)
    return set(combination)
class KeyNotFound(Exception):
    """Raised when a hotkey component has no entry in keyboard_mapping."""
    pass
class KeyboardConfiguration:
    """Loads hotkey bindings from an ini file and exposes each option in
    the [general] section as an attribute holding a set of Key objects."""
    def __init__(self):
        self._config = self.read_config()
    def read_config(self):
        """Return a ConfigParser with defaults for the up/down/exit hotkeys."""
        config = configparser.ConfigParser(
            {
                'up': 'alt+up+windows',
                'down': 'alt+down+windows',
                'exit': 'esc',
            }
        )
        # NOTE(review): this reads the sample file, not 'keyboard.ini' --
        # confirm that is intentional and not a development leftover.
        config.read('keyboard.ini.sample')
        return config
    def set_hotkeys(self):
        """Bind each [general] option as an attribute and return self."""
        for option in self._config.options('general'):
            string_hotkey = self._config.get('general', option)
            # Parse once; the original re-parsed the same string a second
            # time just for the print below.
            hotkey = string_to_hotkeys(string_hotkey)
            setattr(self, option, hotkey)
            print(option, hotkey)
        return self
| 25.092593
| 67
| 0.6
|
4a10991653585f00ea819164d0e8a2496f418ed6
| 4,237
|
py
|
Python
|
markdown_generator/talks.py
|
gijswijnholds/gijswijnholds.github.io
|
0d6e1b95e6e73539c4dd900e321aace6e613ec87
|
[
"MIT"
] | null | null | null |
markdown_generator/talks.py
|
gijswijnholds/gijswijnholds.github.io
|
0d6e1b95e6e73539c4dd900e321aace6e613ec87
|
[
"MIT"
] | 4
|
2020-02-25T16:08:46.000Z
|
2022-02-26T04:41:34.000Z
|
markdown_generator/talks.py
|
gijswijnholds/gijswijnholds.github.io
|
0d6e1b95e6e73539c4dd900e321aace6e613ec87
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# # Talks markdown generator for academicpages
#
# Takes a TSV of talks with metadata and converts them for use with [academicpages.github.io](academicpages.github.io). This is an interactive Jupyter notebook ([see more info here](http://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/what_is_jupyter.html)). The core python code is also in `talks.py`. Run either from the `markdown_generator` folder after replacing `talks.tsv` with one containing your data.
#
# TODO: Make this work with BibTex and other databases, rather than Stuart's non-standard TSV format and citation style.
# In[1]:
import pandas as pd
import os
# ## Data format
#
# The TSV needs to have the following columns: title, type, url_slug, venue, date, location, talk_url, description, with a header at the top. Many of these fields can be blank, but the columns must be in the TSV.
#
# - Fields that cannot be blank: `title`, `url_slug`, `date`. All else can be blank. `type` defaults to "Talk"
# - `date` must be formatted as YYYY-MM-DD.
# - `url_slug` will be the descriptive part of the .md file and the permalink URL for the page about the paper.
# - The .md file will be `YYYY-MM-DD-[url_slug].md` and the permalink will be `https://[yourdomain]/talks/YYYY-MM-DD-[url_slug]`
# - The combination of `url_slug` and `date` must be unique, as it will be the basis for your filenames
#
# ## Import TSV
#
# Pandas makes this easy with the read_csv function. We are using a TSV, so we specify the separator as a tab, or `\t`.
#
# I found it important to put this data in a tab-separated values format, because there are a lot of commas in this kind of data and comma-separated values can get messed up. However, you can modify the import statement, as pandas also has read_excel(), read_json(), and others.
# In[3]:
# Load the talks metadata; tab-separated to tolerate commas inside fields.
talks = pd.read_csv("talks.tsv", sep="\t", header=0)
# Notebook-style echo of the dataframe; a no-op when run as a script.
talks
# ## Escape special characters
#
# YAML is very picky about how it takes a valid string, so we are replacing single and double quotes (and ampersands) with their HTML encoded equivilents. This makes them look not so readable in raw format, but they are parsed and rendered nicely.
# In[4]:
# Characters that must be HTML-encoded so they survive YAML front matter.
html_escape_table = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&apos;"
}
def html_escape(text):
    """Encode &, double quote, and single quote in text as HTML entities.

    Non-string input (e.g. a pandas NaN cell) yields the string "False".
    """
    if type(text) is not str:
        return "False"
    encoded = [html_escape_table.get(ch, ch) for ch in text]
    return "".join(encoded)
# ## Creating the markdown files
#
# This is where the heavy lifting is done. This loops through all the rows in the TSV dataframe, then starts to concatentate a big string (```md```) that contains the markdown for each type. It does the YAML metadata first, then does the description for the individual page.
# In[5]:
# Clear out previously generated talk pages so removed talks don't linger.
old_files = os.listdir("../_talks/")
for f in old_files:
    os.remove("../_talks/" + f)
loc_dict = {}
for row, item in talks.iterrows():
    try:
        md_filename = str(item.date) + "-" + item.url_slug + ".md"
    except TypeError:
        print(str(item))
    html_filename = str(item.date) + "-" + item.url_slug
    year = item.date[:4]
    # Build the YAML front matter for this talk.
    md = "---\ntitle: \"" + item.title + '"\n'
    md += "collection: talks" + "\n"
    if len(str(item.type)) > 3:
        md += 'type: "' + item.type + '"\n'
    else:
        md += 'type: "Talk"\n'
    md += "permalink: /talks/" + html_filename + "\n"
    if len(str(item.venue)) > 3:
        md += 'venue: "' + item.venue + '"\n'
    # Bug fix: date/year were previously emitted only when the *location*
    # field was non-empty (a copy-paste of the guard below); they are
    # mandatory fields and must always be written.
    md += "date: " + str(item.date) + "\n"
    md += "year: " + str(year) + "\n"
    if len(str(item.location)) > 3:
        md += 'location: "' + str(item.location) + '"\n'
    if len(str(item.venue_url)) > 3:
        md += 'venue_url: "' + str(item.venue_url) + '"\n'
    if len(str(item.talk_url)) > 3:
        md += 'talk_url: "' + str(item.talk_url) + '"\n'
    # Bug fix: mangled HTML entities turned this line into implicit string
    # concatenation that wrote the literal text '" + html_escape(item.title)
    # + "' into the output instead of calling html_escape.  Emit the escaped
    # title wrapped in &quot; entities, as intended.
    md += "citation: '&quot;" + html_escape(item.title) + "&quot;.'\n"
    md += "---\n"
    if len(str(item.description)) > 3:
        md += "\n" + html_escape(item.description) + "\n"
    md_filename = os.path.basename(md_filename)
    with open("../_talks/" + md_filename, 'w') as f:
        f.write(md)
# These files are in the talks directory, one directory below where we're working from.
| 34.447154
| 420
| 0.650224
|
4a10994246362c25f988aa6fc6f3f4deae5af122
| 753
|
py
|
Python
|
hackerrank/Algorithms/Gaming Array/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | 4
|
2020-07-24T01:59:50.000Z
|
2021-07-24T15:14:08.000Z
|
hackerrank/Algorithms/Gaming Array/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
hackerrank/Algorithms/Gaming Array/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import os
#
# Complete the 'gamingArray' function below.
#
# The function is expected to return a STRING.
# The function accepts INTEGER_ARRAY arr as parameter.
#
def gamingArray(arr):
    """Return the winner ('BOB' or 'ANDY') of the prefix-maximum game.

    Each new running maximum encountered while scanning arr costs one
    turn; Bob moves first, so an odd number of prefix maxima means Bob
    makes the final (winning) move.
    """
    turns = 0
    best_so_far = 0
    for value in arr:
        if value > best_so_far:
            best_so_far = value
            turns += 1
    return 'ANDY' if turns % 2 == 0 else 'BOB'
if __name__ == '__main__':
    # HackerRank harness: read g games from stdin, write each winner to
    # the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    g = int(input().strip())
    for g_itr in range(g):
        arr_count = int(input().strip())  # declared length; not otherwise used
        arr = list(map(int, input().rstrip().split()))
        result = gamingArray(arr)
        fptr.write(result + '\n')
    fptr.close()
| 19.307692
| 54
| 0.583001
|
4a1099bc0b60e16602e56f680a7402e154277819
| 4,707
|
py
|
Python
|
core/controllers/classroom_test.py
|
davehenton/oppia
|
62a9e9ea8458632e39b8ab4cf15b0489ac1acad9
|
[
"Apache-2.0"
] | 2
|
2019-12-02T18:56:49.000Z
|
2020-03-14T17:14:15.000Z
|
core/controllers/classroom_test.py
|
Ketan-Suthar/oppia
|
0eed7bb069b55396d0908ba232d8ef4517231dc2
|
[
"Apache-2.0"
] | 2
|
2019-09-11T23:11:48.000Z
|
2019-11-29T06:04:52.000Z
|
core/controllers/classroom_test.py
|
Ketan-Suthar/oppia
|
0eed7bb069b55396d0908ba232d8ef4517231dc2
|
[
"Apache-2.0"
] | 2
|
2019-12-02T18:56:56.000Z
|
2020-03-16T08:03:45.000Z
|
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the classroom page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from constants import constants
from core.domain import config_domain
from core.domain import topic_domain
from core.domain import topic_services
from core.tests import test_utils
import feconf
class BaseClassroomControllerTests(test_utils.GenericTestBase):
    """Shared setup for the classroom page controller tests."""
    def setUp(self):
        """Completes the sign-up process for the various users."""
        super(BaseClassroomControllerTests, self).setUp()
        self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
        self.user_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
class ClassroomPageTests(BaseClassroomControllerTests):
    """Tests for serving the classroom HTML page."""
    def test_any_user_can_access_classroom_page(self):
        """A known classroom name serves the classroom-page shell."""
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', True):
            response = self.get_html_response(
                '%s/%s' % (feconf.CLASSROOM_URL_PREFIX, 'Math'))
            self.assertIn('<classroom-page></classroom-page>', response)
    def test_no_user_can_access_invalid_classroom_page(self):
        """An unknown classroom name results in a 404."""
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', True):
            self.get_html_response(
                '%s/%s' % (
                    feconf.CLASSROOM_URL_PREFIX, 'invalid_subject'),
                expected_status_int=404)
    def test_get_fails_when_new_structures_not_enabled(self):
        """The page is a 404 while the feature flag is disabled."""
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', False):
            self.get_html_response(
                '%s/%s' % (feconf.CLASSROOM_URL_PREFIX, 'Math'),
                expected_status_int=404)
class ClassroomDataHandlerTests(BaseClassroomControllerTests):
    """Tests for the JSON data handler backing the classroom page."""
    def test_get(self):
        """Only summaries of published topics are returned."""
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        topic_id_1 = topic_services.get_new_topic_id()
        topic_id_2 = topic_services.get_new_topic_id()
        private_topic = topic_domain.Topic.create_default_topic(
            topic_id_1, 'private_topic_name', 'abbrev')
        topic_services.save_new_topic(admin_id, private_topic)
        public_topic = topic_domain.Topic.create_default_topic(
            topic_id_2, 'public_topic_name', 'abbrev')
        topic_services.save_new_topic(admin_id, public_topic)
        topic_services.publish_topic(topic_id_2, admin_id)
        csrf_token = self.get_new_csrf_token()
        # Register both topics (one private, one public) with the Math
        # classroom via the admin config handler.
        new_config_value = [{
            'name': 'Math',
            'topic_ids': [topic_id_1, topic_id_2]
        }]
        payload = {
            'action': 'save_config_properties',
            'new_config_property_values': {
                config_domain.TOPIC_IDS_FOR_CLASSROOM_PAGES.name: (
                    new_config_value),
            }
        }
        self.post_json('/adminhandler', payload, csrf_token=csrf_token)
        self.logout()
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', True):
            json_response = self.get_json(
                '%s/%s' % (feconf.CLASSROOM_DATA_HANDLER, 'Math'))
            topic_summary_dict = (
                topic_services.get_topic_summary_by_id(topic_id_2).to_dict())
            # The private topic (topic_id_1) must not appear in the response.
            expected_dict = {
                'topic_summary_dicts': [topic_summary_dict]
            }
            self.assertDictContainsSubset(expected_dict, json_response)
    def test_get_fails_for_invalid_classroom_name(self):
        """An unknown classroom name results in a 404."""
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', True):
            self.get_json(
                '%s/%s' % (
                    feconf.CLASSROOM_DATA_HANDLER, 'invalid_subject'),
                expected_status_int=404)
    def test_get_fails_when_new_structures_not_enabled(self):
        """The handler is a 404 while the feature flag is disabled."""
        with self.swap(constants, 'ENABLE_NEW_STRUCTURE_PLAYERS', False):
            self.get_json(
                '%s/%s' % (feconf.CLASSROOM_DATA_HANDLER, 'Math'),
                expected_status_int=404)
| 40.930435
| 78
| 0.682813
|
4a109a257eb79270c0876f2813c66dfa92b0019e
| 1,585
|
py
|
Python
|
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GLE/exceptional.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GLE/exceptional.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GLE/exceptional.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
"""GLE exceptional functions (specialised signatures"""
from OpenGL.raw import GLE as raw
from OpenGL.raw.GLE import annotations as simple
from OpenGL import wrapper, arrays
class _lengthOfArgname( object ):
"""Calculates the length of a given argname over a divisor value"""
def __init__( self, arrayName, divisor, arrayType = arrays.GLdoubleArray ):
self.arrayName = arrayName
self.divisor = divisor
self.arrayType = arrayType
def finalise( self, wrapper ):
self.arrayIndex = wrapper.pyArgIndex( self.arrayName )
def __call__( self, pyArgs, index, wrappedOperation ):
"""Get the length of pyArgs[2], a glDoubleArray"""
return self.arrayType.arraySize( pyArgs[self.arrayIndex] )//self.divisor
def _baseWrap( base, lengthName='ncp', contourName='contour', divisor=2 ):
"""Do the basic wrapping operation for a GLE function"""
return wrapper.wrapper( base ).setPyConverter(
lengthName,
).setCConverter(
lengthName, _lengthOfArgname( contourName, divisor, arrays.GLdoubleArray ),
)
gleLathe = _baseWrap( simple.gleLathe )
glePolyCone = _baseWrap( simple.glePolyCone, 'npoints', 'point_array', 3)
glePolyCylinder = _baseWrap( simple.glePolyCylinder, 'npoints', 'point_array', 3)
gleScrew = _baseWrap( simple.gleScrew )
gleSpiral = _baseWrap( simple.gleSpiral )
gleExtrusion = _baseWrap(
_baseWrap( simple.gleExtrusion ),
'npoints', 'point_array', 3
)
gleSuperExtrusion = _baseWrap(
_baseWrap( simple.gleSuperExtrusion ),
'npoints', 'point_array', 3
)
gleTwistExtrusion = _baseWrap(
_baseWrap( simple.gleTwistExtrusion ),
'npoints', 'point_array', 3
)
| 36.022727
| 81
| 0.756467
|
4a109aada44d4c3ddb502144bb035c7d94c7d33f
| 2,463
|
py
|
Python
|
Blackjack/main.py
|
rahul1981b/PY
|
8b73e9e8df9cd75a28dc00f66b28b461c43d0352
|
[
"Apache-2.0"
] | null | null | null |
Blackjack/main.py
|
rahul1981b/PY
|
8b73e9e8df9cd75a28dc00f66b28b461c43d0352
|
[
"Apache-2.0"
] | null | null | null |
Blackjack/main.py
|
rahul1981b/PY
|
8b73e9e8df9cd75a28dc00f66b28b461c43d0352
|
[
"Apache-2.0"
] | null | null | null |
## The deck is unlimited in size.
## There are no jokers.
## The Jack/Queen/King all count as 10.
## The the Ace can count as 11 or 1.
## Use the following list as the deck of cards:
## cards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]
## The cards in the list have equal probability of being drawn.
## Cards are not removed from the deck as they are drawn.
## The computer is the dealer.
import random
from art import logo
from replit import clear
def deal_card():
cards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]
card = random.choice(cards)
return card
def calculate_score(cards):
"""Calculates the some of cards"""
if sum(cards) == 21 and len(cards) == 2:
return 0
if 11 in cards and sum(cards) > 21:
cards.remove(11)
cards.append(1)
return sum(cards)
def find_winner(user_score, computer_score):
if user_score == computer_score:
return "Draw 🙃"
elif computer_score == 0:
return "Lose, opponent has Blackjack 😱"
elif user_score == 0:
return "Win with a Blackjack 😎"
elif user_score > 21:
return "You went over. You lose 😭"
elif computer_score > 21:
return "Opponent went over. You win 😁"
elif user_score > computer_score:
return "You win 😃"
else:
return "You lose 😤"
def play_game():
print(logo)
user_cards = []
computer_cards = []
is_game_over = False
for _ in range(2):
user_cards.append(deal_card())
computer_cards.append(deal_card())
while is_game_over == False:
user_score = calculate_score(user_cards)
computer_score = calculate_score(computer_cards)
print(f" Your cards: {user_cards}, current score: {user_score}")
print(f" Computer's first card: {computer_cards[0]}")
if user_score == 0 or computer_score == 0 or user_score > 21:
is_game_over = True
else:
user_should_deal = input("You want to deal more? Pressy if yes: ")
if user_should_deal == 'y':
user_cards.append(deal_card())
else:
is_game_over = True
while computer_score != 0 and computer_score < 17:
computer_cards.append(deal_card())
computer_score = calculate_score(computer_cards)
print(f" Your final hand: {user_cards}, final score: {user_score}")
print(f" Computer's final hand: {computer_cards}, final score: {computer_score}")
print(find_winner(user_score, computer_score))
while input("Do you want to play a game of Blackjack? Type 'y' or 'n': ") == "y":
clear()
play_game()
| 30.036585
| 85
| 0.667479
|
4a109ae6759cd35826eda9107679c7e6484331db
| 7,828
|
py
|
Python
|
tests/unit/test_CountingUtils.py
|
ylipacbio/pbtranscript
|
6b4ef164f191ffd4201feb62b951d9eeac3315b6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/test_CountingUtils.py
|
ylipacbio/pbtranscript
|
6b4ef164f191ffd4201feb62b951d9eeac3315b6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/test_CountingUtils.py
|
ylipacbio/pbtranscript
|
6b4ef164f191ffd4201feb62b951d9eeac3315b6
|
[
"BSD-3-Clause"
] | 1
|
2021-02-26T10:08:09.000Z
|
2021-02-26T10:08:09.000Z
|
"""Test classes defined within pbtranscript.counting.CountUtils."""
import unittest
import os.path as op
from pbcore.util.Process import backticks
from pbtranscript.Utils import rmpath, mkdir
from pbtranscript.io import ReadStatReader, AbundanceReader
from pbtranscript.counting.CountingUtils import read_group_file, \
output_read_count_FL, output_read_count_nFL, make_abundance_file
from test_setpath import DATA_DIR, OUT_DIR, SIV_DATA_DIR
_SIV_DIR_ = op.join(SIV_DATA_DIR, "test_counting")
_DAT_DIR_ = op.join(DATA_DIR, "test_counting")
GROUP_FN = op.join(_SIV_DIR_, "group.txt")
class TEST_CountUtils(unittest.TestCase):
"""Test functions of pbtranscript.counting.CountUtils."""
def setUp(self):
"""Define input and output file."""
pass
def test_read_group_file(self):
"""Test read_group_file."""
cid_info = read_group_file(group_filename=GROUP_FN,
is_cid=True,
sample_prefixes=None)
self.assertEqual(len(cid_info[None].keys()), 846)
self.assertEqual(cid_info[None]['i1_HQ_sampleb92221|c1030'], 'PB.5.6')
self.assertEqual(cid_info[None]['i2_HQ_sampleb92221|c326'], 'PB.10.14')
sample_prefixes = ['i0_HQ_sampleb92221', 'i1_HQ_sampleb92221', 'i2_HQ_sampleb92221']
cid_info = read_group_file(group_filename=GROUP_FN,
is_cid=True,
sample_prefixes=sample_prefixes)
self.assertEqual(len(cid_info[sample_prefixes[0]].keys()), 165)
self.assertEqual(len(cid_info[sample_prefixes[1]].keys()), 297)
self.assertEqual(len(cid_info[sample_prefixes[2]].keys()), 384)
self.assertEqual(cid_info['i1_HQ_sampleb92221']['c1030'], 'PB.5.6')
self.assertEqual(cid_info['i2_HQ_sampleb92221']['c326'], 'PB.10.14')
def test_output_read_count_FL(self):
"""Test output_read_count_FL."""
d = op.join(SIV_DATA_DIR, "test_make_abundance")
bs = ["0to1kb_part0", "1to2kb_part0", "2to3kb_part0", "3to4kb_part0", "8to9kb_part0"]
sample_prefixes = ["i%d_HQ_sample18ba5d" % i for i in (0, 1, 2, 3, 4)]
pickles = [op.join(d, b, "cluster_out/output/final.pickle") for b in bs]
group_filename = op.join(SIV_DATA_DIR, "test_make_abundance", "group.txt")
prefix_pickle_tuples = zip(sample_prefixes, pickles)
output_filename = op.join(OUT_DIR, "test_output_read_count_FL.read_stat.txt")
output_mode = 'w'
cid_info = read_group_file(group_filename=group_filename,
is_cid=True, sample_prefixes=sample_prefixes)
restricted_movies = None
output_read_count_FL(cid_info=cid_info,
prefix_pickle_filename_tuples=prefix_pickle_tuples,
output_filename=output_filename,
output_mode=output_mode,
restricted_movies=restricted_movies)
self.assertTrue(op.exists(output_filename))
records = [r for r in ReadStatReader(output_filename)]
self.assertEqual(len(records), 4712)
expected_first = "m54006_160328_233933/39912051/31_505_CCS\t474\tY\tunique\tPB.1.1"
expected_last = "m54006_160328_233933/47383436/629_57_CCS\t572\tY\tunmapped\tNA"
self.assertEqual(str(records[0]), expected_first)
self.assertEqual(str(records[-1]), expected_last)
# Test with restricted movies
output_filename = op.join(OUT_DIR, "test_output_read_count_FL.2.read_stat.txt")
restricted_movies = ["m54006_160328_233933"]
output_read_count_FL(cid_info=cid_info,
prefix_pickle_filename_tuples=prefix_pickle_tuples,
output_filename=output_filename,
output_mode=output_mode,
restricted_movies=restricted_movies)
self.assertTrue(op.exists(output_filename))
records = [r for r in ReadStatReader(output_filename)]
self.assertEqual(len(records), 4712)
self.assertEqual(str(records[0]), expected_first)
self.assertEqual(str(records[-1]), expected_last)
def test_output_read_count_nFL(self):
"""Test output_read_count_FL."""
d = op.join(SIV_DATA_DIR, "test_make_abundance")
bs = ["0to1kb_part0", "1to2kb_part0", "2to3kb_part0", "3to4kb_part0", "8to9kb_part0"]
sample_prefixes = ["i%d_HQ_sample18ba5d" % i for i in (0, 1, 2, 3, 4)]
pickles = [op.join(d, b, "cluster_out/output/map_noFL/nfl.all.partial_uc.pickle") for b in bs]
group_filename = op.join(SIV_DATA_DIR, "test_make_abundance", "group.txt")
prefix_pickle_tuples = zip(sample_prefixes, pickles)
output_filename = op.join(OUT_DIR, "test_output_read_count_nFL.read_stat.txt")
output_mode = 'w'
cid_info = read_group_file(group_filename=group_filename,
is_cid=True, sample_prefixes=sample_prefixes)
restricted_movies = None
output_read_count_nFL(cid_info=cid_info,
prefix_pickle_filename_tuples=prefix_pickle_tuples,
output_filename=output_filename,
output_mode=output_mode,
restricted_movies=restricted_movies)
self.assertTrue(op.exists(output_filename))
records = [r for r in ReadStatReader(output_filename)]
self.assertEqual(len(records), 5703)
expected_first = "m54006_160328_233933/11993579/0_2060_CCS\t2060\tN\tambiguous\tPB.5.4"
expected_last = "m54006_160328_233933/23593293/0_1613_CCS\t1613\tN\tunmapped\tNA"
self.assertEqual(str(records[0]), expected_first)
self.assertEqual(str(records[-1]), expected_last)
# Test with restricted movies
output_filename = op.join(OUT_DIR, "test_output_read_count_nFL.2.read_stat.txt")
restricted_movies = ["m54006_160328_233933"]
output_read_count_nFL(cid_info=cid_info,
prefix_pickle_filename_tuples=prefix_pickle_tuples,
output_filename=output_filename,
output_mode=output_mode,
restricted_movies=restricted_movies)
self.assertTrue(op.exists(output_filename))
records = [r for r in ReadStatReader(output_filename)]
self.assertEqual(len(records), 5703)
expected_first = "m54006_160328_233933/11993579/0_2060_CCS\t2060\tN\tambiguous\tPB.5.4"
expected_last = "m54006_160328_233933/37224924/0_2549_CCS\t2549\tN\tunmapped\tNA"
self.assertEqual(str(records[0]), expected_first)
self.assertEqual(str(records[-1]), expected_last)
def test_make_abundance_file(self):
""""""
d = op.join(SIV_DATA_DIR, "test_make_abundance")
read_stat_filename = op.join(d, "read_stat.txt")
output_filename = op.join(OUT_DIR, "test_make_abundance_file.txt")
make_abundance_file(read_stat_filename=read_stat_filename,
output_filename=output_filename,
given_total=None,
restricted_movies=None,
write_header_comments=True)
print output_filename
self.assertTrue(op.exists(output_filename))
expected_first = "PB.1.1\t30\t30\t30.83\t6.3667e-03\t3.0367e-03\t3.0637e-03"
expected_last = "PB.12.5\t16\t22\t23.20\t3.3956e-03\t2.2269e-03\t2.3052e-03"
records = [r for r in AbundanceReader(output_filename)]
self.assertEqual(len(records), 38)
self.assertEqual(str(records[0]), expected_first)
self.assertEqual(str(records[-1]), expected_last)
| 52.536913
| 102
| 0.655468
|
4a109b410640620267f0d134ed68c60d0b592699
| 1,977
|
py
|
Python
|
src/aks-preview/azext_aks_preview/__init__.py
|
Santhoshstark06/azure-cli-extensions
|
bff1ad5cffea31a9e0f597a11b0c72c49d826f52
|
[
"MIT"
] | null | null | null |
src/aks-preview/azext_aks_preview/__init__.py
|
Santhoshstark06/azure-cli-extensions
|
bff1ad5cffea31a9e0f597a11b0c72c49d826f52
|
[
"MIT"
] | null | null | null |
src/aks-preview/azext_aks_preview/__init__.py
|
Santhoshstark06/azure-cli-extensions
|
bff1ad5cffea31a9e0f597a11b0c72c49d826f52
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
from azure.cli.core.profiles import register_resource_type, SDKProfile
# pylint: disable=unused-import
import azext_aks_preview._help
from azext_aks_preview._client_factory import CUSTOM_MGMT_AKS_PREVIEW
def register_aks_preview_resource_type():
register_resource_type(
"latest",
CUSTOM_MGMT_AKS_PREVIEW,
SDKProfile("2021-08-01", {"container_services": "2017-07-01"}),
)
class ContainerServiceCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType
register_aks_preview_resource_type()
acs_custom = CliCommandType(operations_tmpl='azext_aks_preview.custom#{}')
super(ContainerServiceCommandsLoader, self).__init__(cli_ctx=cli_ctx,
custom_command_type=acs_custom,
resource_type=CUSTOM_MGMT_AKS_PREVIEW)
def load_command_table(self, args):
super(ContainerServiceCommandsLoader, self).load_command_table(args)
from .commands import load_command_table
load_command_table(self, args)
return self.command_table
def load_arguments(self, command):
from sys import version_info
if version_info[0] < 3:
super(ContainerServiceCommandsLoader, self).load_arguments(command)
else:
super().load_arguments(command)
from ._params import load_arguments
load_arguments(self, command)
COMMAND_LOADER_CLS = ContainerServiceCommandsLoader
| 38.019231
| 99
| 0.634294
|
4a109b9a102b2bde3ba94b7d8c4cbdaa1038af58
| 239,859
|
py
|
Python
|
pysnmp-with-texts/DC-ISIS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/DC-ISIS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/DC-ISIS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module DC-ISIS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DC-ISIS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:37:06 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
IfOperStatus, IgpShortcutMetricType, NumericIndex, EntityIndexOrZero, AuthUserDataString = mibBuilder.importSymbols("DC-MASTER-TC", "IfOperStatus", "IgpShortcutMetricType", "NumericIndex", "EntityIndexOrZero", "AuthUserDataString")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
InetAddressIPv4, InetAddressType, InetAddress, InetAddressPrefixLength, InetAddressIPv6 = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressIPv4", "InetAddressType", "InetAddress", "InetAddressPrefixLength", "InetAddressIPv6")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Gauge32, Counter64, Integer32, Bits, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, TimeTicks, ObjectIdentity, Counter32, ModuleIdentity, IpAddress, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Counter64", "Integer32", "Bits", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "TimeTicks", "ObjectIdentity", "Counter32", "ModuleIdentity", "IpAddress", "NotificationType")
DisplayString, RowStatus, TextualConvention, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "RowStatus", "TextualConvention", "TruthValue")
dcIsisMib = ModuleIdentity((1, 2, 826, 0, 1, 1578918, 5, 63, 1))
dcIsisMib.setRevisions(('2014-07-03 00:00', '2011-07-13 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: dcIsisMib.setRevisionsDescriptions(('PROD00216772 - Set isisSysLspFullSuppress to None and isisSysLspFullSetDBOL to False in case of 256 LSP over spill', 'Change default value of isisSysCalcSoonAfterCircChange from false to true',))
if mibBuilder.loadTexts: dcIsisMib.setLastUpdated('201407030000Z')
if mibBuilder.loadTexts: dcIsisMib.setOrganization('Data Connection Ltd.')
if mibBuilder.loadTexts: dcIsisMib.setContactInfo('Postal: Data Connection Ltd. 100 Church Street Enfield Middlesex EN2 6BQ United Kingdom Tel: +44 20 83661177 E-mail: dcisis@dataconnection.com')
if mibBuilder.loadTexts: dcIsisMib.setDescription('The MIB module for management of the DC-ISIS product. This is closely based on the draft standard MIB draft-ietf-isis-wg-mib-16.')
isisObjects = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1))
isisNotifications = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2))
isisConformance = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3))
isisSystem = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1))
isisSysLevel = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2))
isisCirc = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3))
isisCircLevelValues = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4))
isisCounters = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5))
isisISAdj = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6))
isisReachAddr = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 7))
isisIPReachAddr = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8))
isisLSPDataBase = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9))
isisNotification = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10))
isisPmObjects = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11))
isisSdObjects = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12))
class OSINSAddress(TextualConvention, OctetString):
description = 'OSI Network Service Address, e.g. NSAP, SNPA, or Network Entity Title'
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 20)
class SystemID(TextualConvention, OctetString):
description = 'A system ID.'
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
fixedLength = 6
class LinkStatePDUID(TextualConvention, OctetString):
description = 'A Link State PDU Identifier.'
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8)
fixedLength = 8
class AdminState(TextualConvention, Integer32):
description = 'Type used in enabling and disabling a row. Values match those in RFC 2863.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("on", 1), ("off", 2))
class LSPBuffSize(TextualConvention, Integer32):
description = 'Integer sub range for LSP size.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(512, 16000)
class LevelState(TextualConvention, Integer32):
description = 'States of the IS-IS protocol.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("off", 1), ("on", 2), ("waiting", 3), ("overloaded", 4))
class SupportedProtocol(TextualConvention, Integer32):
description = 'Types of network protocol supported by Integrated IS-IS. The values for ISO8473 and IP are those registered for these protocols in ISO TR9577.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(129, 142, 204))
namedValues = NamedValues(("iso8473", 129), ("ipV6", 142), ("ip", 204))
class DefaultMetric(TextualConvention, Integer32):
description = "Integer sub-range for default metric for single hop. ISO 10589 provides for 4 types of metric. Only the 'default' metric is used in practice."
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 63)
class WideMetric(TextualConvention, Unsigned32):
description = 'Wide Metric for IS Neighbors. ISO 10589 provides a 6 bit metric. Traffic Engineering extensions provide 24 bit metrics.'
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 16777215)
class FullMetric(TextualConvention, Unsigned32):
description = 'Full Metric for IP Routes. Traffic Engineering extensions provide 32 bit metrics.'
status = 'current'
class MetricType(TextualConvention, Integer32):
description = 'Is this an Internal or External Metric?'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("internal", 1), ("external", 2))
class MetricStyle(TextualConvention, Integer32):
description = 'Do we use 1195 style Metrics or wide metrics.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("narrow", 1), ("wide", 2), ("both", 3))
class ISLevel(TextualConvention, Integer32):
description = 'Identifies a level.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
namedValues = NamedValues(("none", 0), ("area", 1), ("domain", 2))
class IsisAdjLevel(TextualConvention, Integer32):
description = 'Identifies one or more levels.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))
namedValues = NamedValues(("reserved", 0), ("level1", 1), ("level2", 2), ("level1and2", 3))
class IsisPDUHeader(TextualConvention, OctetString):
description = 'A block to contain the header from a PDU.'
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 64)
class CircuitID(TextualConvention, OctetString):
description = 'ID for a circuit.'
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(7, 7), )
class ISPriority(TextualConvention, Integer32):
description = 'Integer sub-range for IS-IS priority.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 127)
class Unsigned16TC(TextualConvention, Unsigned32):
description = 'An Unsigned32 further restricted to 16 Bits. Note that the ASN.1 BER encoding may still require 24 Bits for some values.'
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 65535)
class Unsigned8TC(TextualConvention, Unsigned32):
description = 'An Unsigned32 further restricted to 8 Bits. Note that the ASN.1 BER encoding may still require 16 Bits for some values.'
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 255)
class IsisAdminStatus(TextualConvention, Integer32):
description = 'The desired administrative state of a DC-ISIS entity.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("adminStatusUp", 1), ("adminStatusDown", 2))
class IsisOperStatus(TextualConvention, Integer32):
description = 'The current operational state of a DC-ISIS entity.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
namedValues = NamedValues(("operStatusUp", 1), ("operStatusDown", 2), ("operStatusGoingUp", 3), ("operStatusGoingDown", 4), ("operStatusActFailed", 5))
class IsisMjStatus(TextualConvention, Integer32):
description = 'The status of a Master Join.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
namedValues = NamedValues(("mjNotJoined", 1), ("mjSentAddJoin", 2), ("mjSentRegister", 3), ("mjJoinActive", 4), ("mjSentDelJoin", 5), ("mjSentUnregister", 6), ("mjJoinGone", 7), ("mjFailedToRegister", 8), ("mjFailingOver", 9), ("mjFailed", 10))
class IsisSjStatus(TextualConvention, Integer32):
description = 'The status of a Slave Join.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
namedValues = NamedValues(("sjNotJoined", 1), ("sjJoined", 2), ("sjJoinActive", 3), ("sjJoinUnreg", 4), ("sjJoinGone", 5), ("sjFailingOver", 6), ("sjFailed", 7))
class IsisPmInterfaceId(TextualConvention, Integer32):
description = 'The type of interface to which a PM join applies.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("ifInterfaceInfo", 1), ("ifRtProtoInput", 2), ("ifCSPF", 3), ("ifSDC", 4))
class IsisSdInterfaceId(TextualConvention, Integer32):
description = 'The type of interface to which an SD join applies.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("ifDataLink", 1), ("ifSubnetDependent", 2), ("ifBidirectionalForwarding", 3))
class IsisSdEntityType(TextualConvention, Integer32):
description = 'The type of entity to which an SDC Subnetwork Dependent join applies.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("ifProtocolManager", 1), ("ifPDUInjectionTool", 2))
class IsisAddrType(TextualConvention, Integer32):
description = 'The type of address used on an RPI join.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
namedValues = NamedValues(("none", 0), ("ipv4", 1), ("ipv6", 2), ("nsap", 3), ("ipx", 4))
class IsisAddrTypeBits(TextualConvention, Bits):
description = 'The type of address supported.'
status = 'current'
namedValues = NamedValues(("none", 0), ("ipv4", 1), ("ipv6", 2))
class IsisSysRestartType(TextualConvention, Integer32):
    # Textual convention selecting the restart procedure used on activation.
    status = 'current'
    description = 'The type of restart procedures to follow when DC-ISIS activates.'
    # Constrain the underlying INTEGER to the enumerated values below.
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("none", 0),
        ("start", 1),
        ("restart", 2))
# --- isisSysTable: one row per instance of the Integrated IS-IS protocol ---
isisSysTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1), )
if mibBuilder.loadTexts: isisSysTable.setStatus('current')
if mibBuilder.loadTexts: isisSysTable.setDescription('The set of instances of the Integrated IS-IS protocol existing on the system.')
# Conceptual row of isisSysTable, indexed by isisSysInstance.
isisSysEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"))
if mibBuilder.loadTexts: isisSysEntry.setReference('{ISIS.poi cLNSISISBasic-P (1)}')
if mibBuilder.loadTexts: isisSysEntry.setStatus('current')
if mibBuilder.loadTexts: isisSysEntry.setDescription('Each row defines information specific to a single instance of the IS-IS protocol existing on the system.')
# Columns 1-15: instance identity and basic protocol configuration.
isisSysInstance = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: isisSysInstance.setStatus('current')
if mibBuilder.loadTexts: isisSysInstance.setDescription('The unique identifier of the Integrated IS-IS instance to which this row corresponds. This object follows the index behavior.')
isisSysVersion = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("unknown", 0), ("one", 1))).clone('one')).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysVersion.setReference('{ISIS.aoi version (1)}')
if mibBuilder.loadTexts: isisSysVersion.setStatus('current')
if mibBuilder.loadTexts: isisSysVersion.setDescription('The version number of the IS-IS protocol that is implemented.')
isisSysType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("reserved", 0), ("level1IS", 1), ("level2IS", 2), ("level1L2IS", 3))).clone('level1L2IS')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysType.setReference('{ISIS.aoi iSType (2)}')
if mibBuilder.loadTexts: isisSysType.setStatus('current')
if mibBuilder.loadTexts: isisSysType.setDescription('At which levels is the Intermediate System running? This object follows the replaceOnlyWhileDisabled behavior.')
isisSysID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 4), SystemID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysID.setReference('{ISIS.aoi systemId (119)}')
if mibBuilder.loadTexts: isisSysID.setStatus('current')
if mibBuilder.loadTexts: isisSysID.setDescription('The ID for this instance of the Integrated IS-IS protocol. This value is appended to each of the area addresses to form the Network Entity Titles. The derivation of a value for this object is implementation-specific. Some implementations may automatically assign values and not permit an SNMP write, while others may require the value to be set manually. For the DC-ISIS implementation, this field is set through the MIB. This object follows the replaceOnlyWhileDisabled behavior.')
isisSysMaxPathSplits = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 32)).clone(4)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxPathSplits.setReference('{ISIS.aoi maximumPathSplits (3)}')
if mibBuilder.loadTexts: isisSysMaxPathSplits.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxPathSplits.setDescription('Maximum number of paths with equal routing metric value which it is permitted to split between. Note that, when IGP shortcut interfaces are in use, this maximum applies individually to IGP shortcut paths and non-IGP shortcut paths. That is, there may be a total of 2 * isisSysMaxPathSplits paths for a single route. This object follows the replaceOnlyWhileDisabled behavior.')
isisSysMaxLSPGenInt = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65235)).clone(900)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxLSPGenInt.setReference('{ISIS.aoi maximumLSPGenerationInterval (6)}')
if mibBuilder.loadTexts: isisSysMaxLSPGenInt.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxLSPGenInt.setDescription('Maximum interval, in seconds, between generated LSPs by this instance of the protocol. This object follows the resettingTimer behavior. The value must be greater than any value configured for isisSysLevelMinLSPGenInt, and should be at least 300 seconds less than isisSysMaxAge. Note that this value is specified in seconds whereas isisSysLevelMinLSPGenInt is specified in milliseconds.')
isisSysPollESHelloRate = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 7), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(50)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysPollESHelloRate.setReference('{ISIS.aoi pollESHelloRate (13)}')
if mibBuilder.loadTexts: isisSysPollESHelloRate.setStatus('current')
if mibBuilder.loadTexts: isisSysPollESHelloRate.setDescription('The value, in seconds, to be used for the suggested ES configuration timer in ISH PDUs when soliciting the ES configuration.')
isisSysWaitTime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 8), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(60)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysWaitTime.setReference('{ISIS.aoi waitingTime (15)}')
if mibBuilder.loadTexts: isisSysWaitTime.setStatus('current')
if mibBuilder.loadTexts: isisSysWaitTime.setDescription('Number of seconds to delay in waiting state before entering on state. This object follows the resettingTimer behavior.')
isisSysAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 9), AdminState().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysAdminState.setStatus('current')
if mibBuilder.loadTexts: isisSysAdminState.setDescription("The administrative state of this instance of the Integrated IS-IS protocol. Setting this object to the value 'on' when its current value is 'off' enables operation of this instance of the Integrated IS-IS protocol.")
isisSysL2toL1Leaking = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 10), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysL2toL1Leaking.setStatus('current')
if mibBuilder.loadTexts: isisSysL2toL1Leaking.setDescription('If true, allow the router to leak L2 routes into L1.')
isisSysMaxAge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 11), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(350, 65535)).clone(1200)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxAge.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxAge.setDescription('Value to place in RemainingLifeTime field of the LSPs we generate. This should be at least 300 seconds greater than isisSysMaxLSPGenInt.')
isisSysReceiveLSPBufferSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 12), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(1492, 16000)).clone(1492)).setUnits('bytes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysReceiveLSPBufferSize.setStatus('current')
if mibBuilder.loadTexts: isisSysReceiveLSPBufferSize.setDescription('Size of the largest Buffer we are designed or configured to store. This should be at least as big as the maximum isisSysOrigLSPBuffSize supported by the system. If resources allow, we will store and flood LSPs larger than isisSysReceiveLSPBufferSize, as this can help avoid problems in networks with different values for isisSysOrigLSPBuffSize.')
# Row-status / operational-status columns.
isisSysExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 13), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysExistState.setStatus('current')
if mibBuilder.loadTexts: isisSysExistState.setDescription("The state of the IS-IS router. Turning this to state 'destroy' forces the router to forget all the current configuration. Setting the state to 'notInService' stops protocol processing, but retains the configuration.")
isisSysOperStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 14), IsisOperStatus().clone('operStatusDown')).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysOperStatus.setStatus('current')
if mibBuilder.loadTexts: isisSysOperStatus.setDescription('The current operational status of this instance of the Protocol Manager component of DC-ISIS. Note that DC-ISIS can only activate if there is at least one active entry in the isisManAreaAddrTable.')
isisSysAllowAutoI3Config = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 15), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysAllowAutoI3Config.setStatus('current')
if mibBuilder.loadTexts: isisSysAllowAutoI3Config.setDescription("If 'true' then add IP addresses received from the I3 stub to the isisCircIPAddrTable with admin state 'on'. If 'false' then add them with admin state 'off'. This field follows the replaceOnlyWhileDisabled behavior.")
# Columns 16-25: routing-calculation scheduling, checksum checking and gauges.
isisSysCalcMaxDelay = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 16), Unsigned32().clone(5000)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcMaxDelay.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcMaxDelay.setDescription('The maximum delay before the Routing Table is recalculated following a change to the Link State Database. (Recalculation is delayed to reduce the frequency of recalculations of the Routing Table). This parameter has units of milliseconds. A value of 0 indicates that a routing calculation will immediately follow an update to the database.')
isisSysCalcThrshUpdStart = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 17), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967295)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcThrshUpdStart.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcThrshUpdStart.setDescription('This parameter can be used to override the routing calculation delay indicated by the isisSysCalcMaxDelay parameter when the number of updates to the Link State Database reaches a threshold value. This parameter specifies the threshold number of updates that can be made to the Link State Database such that any subsequent update to the database causes a full routing calculation to start immediately. - 0 indicates that a routing calculation will immediately follow an update to the database. - 0xFFFFFFFF indicates that this threshold is infinite, and hence the timing of a routing calculation is determined solely by the configured calculation delay.')
isisSysCalcThrshUpdRestart = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 18), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967295)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcThrshUpdRestart.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcThrshUpdRestart.setDescription('This parameter can be used to interrupt a full routing calculation when the number of pending updates to the Link State Database has reached a threshold value. This parameter specifies the threshold number of updates that can be made to the Link State Database such that any subsequent update to the database causes the current routing calculation to be interrupted, and a new calculation to start using an up to date Link State Database. - 0 indicates that an update to the Link State Database will cause any current routing calculation to be interrupted and a new one to start. - 0xFFFFFFFF indicates that this threshold is infinite, and hence no number of pending updates to the database will cause a routing calculation to be interrupted.')
isisSysCalcThrshRestartLimit = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 19), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)).clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcThrshRestartLimit.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcThrshRestartLimit.setDescription('This parameter limits the number of consecutive times a routing calculation can be interrupted by new updates. This guarantees that the routing calculation will actually complete. - 1 indicates that once a calculation has been interrupted once, it will not be interrupted again. - 0xFFFFFFFF indicates that the calculation can always be interrupted.')
isisSysCalcPauseFreq = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 20), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(10000)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcPauseFreq.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcPauseFreq.setDescription('This value determines how regularly a Routing Calculation is paused. It is measured in points. The points scale roughly maps to a time scale, so that the larger this value is, the longer the Routing Calculation runs before pausing. See the DC-ISIS Configuration and Management Interface Specification for more information on how to set this parameter. - 0 indicates that the routing calculation is paused after every calculation step. - 0xFFFFFFFF indicates that the Routing Calculation is never paused.')
isisSysCheckChecksums = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 21), Unsigned32().clone(900)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCheckChecksums.setStatus('current')
if mibBuilder.loadTexts: isisSysCheckChecksums.setDescription('This value determines how often the checksums of LSPs in the Link State Database are checked. If 0, no checksums in the database are checked.')
isisSysZeroAgeLifetime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 22), Integer32().clone(60)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysZeroAgeLifetime.setStatus('current')
if mibBuilder.loadTexts: isisSysZeroAgeLifetime.setDescription('This is the minimum amount of time in seconds for which the header of an expired LSP shall be retained after it has been flooded with zero Remaining Lifetime. All that is required is that the header be retained until the zero Remaining Lifetime LSP has been safely propagated to all the neighbors.')
# Read-only gauges/counters reporting pending work in the Link State Database.
isisSysNumUpdPending = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 23), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysNumUpdPending.setStatus('current')
if mibBuilder.loadTexts: isisSysNumUpdPending.setDescription('The number of updates that are pending addition to the Link State Database.')
isisSysNumUpdMerged = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysNumUpdMerged.setStatus('current')
if mibBuilder.loadTexts: isisSysNumUpdMerged.setDescription('The number of updates that have been merged into the Link State Database since the last routing calculation.')
isisSysNumCksumsPending = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 25), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysNumCksumsPending.setStatus('current')
if mibBuilder.loadTexts: isisSysNumCksumsPending.setDescription('The number of LSPs in the Link State Database which are now due to have their checksum checked.')
# Columns 26-32: minimum percentage-change thresholds that trigger LSP
# re-origination for Traffic Engineering attributes fed by the I3 Stub.
isisSysTEMetricPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 26), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysTEMetricPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysTEMetricPcntge.setDescription('The Traffic Engineering metric is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of the TE metric that causes a new LSP to be originated.')
isisSysMaxBwidthPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 27), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxBwidthPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxBwidthPcntge.setDescription('The maximum bandwidth on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of maximum bandwidth that causes a new LSP to be originated.')
isisSysMaxResBwidthPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 28), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxResBwidthPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxResBwidthPcntge.setDescription('The maximum reservable bandwidth on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of maximum reservable bandwidth that causes a new LSP to be originated.')
isisSysUnresBwidthPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 29), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysUnresBwidthPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysUnresBwidthPcntge.setDescription('The unreserved bandwidth on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of unreserved bandwidth that causes a new LSP to be originated.')
isisSysMaxLSPBwidthPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 30), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxLSPBwidthPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxLSPBwidthPcntge.setDescription('The maximum LSP bandwidth for the various switching descriptors on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of maximum LSP bandwidth that causes a new LSP to be originated.')
isisSysMinLSPBwidthPcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 31), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMinLSPBwidthPcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysMinLSPBwidthPcntge.setDescription('The minimum LSP bandwidth for the various switching descriptors on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of minmimum LSP bandwidth that causes a new LSP to be originated.')
isisSysMTUSizePcntge = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 32), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMTUSizePcntge.setStatus('current')
if mibBuilder.loadTexts: isisSysMTUSizePcntge.setDescription('The Maximum Transmission Unit size for the various switching descriptors on an interface is updated in real-time by the I3 Stub. In order to increase performance and reduce network traffic, this parameter determines the minimal percentage change of the Maximum Transmission Unit that causes a new LSP to be originated.')
# Columns 33-34: stable TE router identifiers (IPv4 and IPv6).
isisSysTERouterID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 33), InetAddressIPv4()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysTERouterID.setStatus('current')
if mibBuilder.loadTexts: isisSysTERouterID.setDescription("The Local IPv4 TE Router ID. This is a single stable IPv4 address that can always be referenced in a path that will be reachable from multiple hops away, regardless of the state of the node's interfaces. This object must be set if isisSysLevelTEEnabled is 'true' at either level, but is otherwise ignored.")
isisSysIPv6TERouterID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 34), InetAddressIPv6()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysIPv6TERouterID.setStatus('current')
if mibBuilder.loadTexts: isisSysIPv6TERouterID.setDescription("The Local IPv6 TE Router ID. This is a single stable IPv6 global address that can always be referenced in a path that will be reachable from multiple hops away, regardless of the state of the node's interfaces. This object must be set if isisSysLevelIPv6TEEnabled is 'true' at level 1, but is otherwise ignored.")
# Columns 35-38: external-route limits and local-LSP-full handling.
isisSysMaxExternalRoutes = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 35), Unsigned32().clone(4294967295)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxExternalRoutes.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxExternalRoutes.setDescription('The maximum number of static routes and routes redistributed from other protocols that DC-ISIS will accept. The action that DC-ISIS will take when the maximum is exceeded is controlled by isisSysMaxExternalRoutesAction. 0xFFFFFFFF is a special value indicating that the threshold is infinite.')
isisSysMaxExternalRoutesAction = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 36), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("log", 1), ("suppressExternal", 2))).clone('suppressExternal')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxExternalRoutesAction.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxExternalRoutesAction.setDescription("The action that DC-ISIS will take when the number of external routes exceeds isisSysMaxExternalRoutes. If set to 'log', DC-ISIS will alert the administrator by logging that the limit has been exceeded. If set to 'suppressExternal', DC-ISIS will remove all external routes from the local LSP (at all active levels) in addition to logging the problem. In both cases, the administrator is responsible for correcting the configuration in each Address Family Manager (for example, DC-RTM), to reduce the number of redistributed external routes. In the 'suppressExternal' case, the Address Family Manager is responsible for resynchronizing the set of routes with DC-ISIS.")
isisSysLspFullSuppress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 37), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("external", 1), ("none", 2))).clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLspFullSuppress.setStatus('current')
if mibBuilder.loadTexts: isisSysLspFullSuppress.setDescription("The set of routes (if any) that will be removed from the local LSP by DC-ISIS when the local LSP becomes full at either level. When set to 'external', all static routes and routes redistributed from other protocols will be removed from the local LSP (at all active levels) when the local LSP is full. If set to 'none', DC-ISIS will be unable to accept any additional configuration that may increase the size of the local LSP. The system administrator should modify the system configuration to reduce the local LSP size - for example, by reducing the number of addresses redistributed from other routing protocols, or by deleting circuit configuration. Once this has been done, if isisSysLspFullSuppress is set to 'external', the Address Family Manager is responsible for resynchronizing the set of routes with DC-ISIS.")
isisSysLspFullSetDBOL = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 38), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLspFullSetDBOL.setStatus('current')
if mibBuilder.loadTexts: isisSysLspFullSetDBOL.setDescription("If set to 'true', DC-ISIS will set the database overload flag in the local LSP (at all active levels) when the local LSP becomes full (at either level). The administrator can clear the database overload flag for a level by setting isisSysLevelSetOverload to 'false' for that level.")
# Columns 39-43: graceful restart (RFC 3847) behavior.
isisSysRestartHelpPeer = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 39), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRestartHelpPeer.setStatus('current')
if mibBuilder.loadTexts: isisSysRestartHelpPeer.setDescription('Indicates whether DC-ISIS implements the procedures defined in the IS-IS restart RFC (3847) for helping a peer to restart. Note that this object has no effect on the local restart behavior, and so may be set independently of isisSysRestartActivationType and isisSysRestartAutoResetType.')
isisSysRestartActivationType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 40), IsisSysRestartType().clone('start')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRestartActivationType.setStatus('current')
if mibBuilder.loadTexts: isisSysRestartActivationType.setDescription("This object is only used when DC-ISIS is manually activated or deactivated (in other words, enters or leaves the state with row status 'active' and admin status 'on'), and indicates which restart procedures (if any) are followed. During the activation period, DC-ISIS will use the value that was configured at the start of activation. The value may be changed at any time, but the new value will only take effect the next time that manual activation takes place. During deactivation, DC-ISIS will purge the local LSP from remote nodes if this object is set to 'none' or 'start'. Setting the object to 'restart' before deactivation will prevent the local LSP from being purged. A planned restart may be initiated by setting isisSysAdminState to 'off' and later to 'on'. Graceful restart procedures will only function correctly if the local LSP has not been purged, and so to initiate a planned restart, isisSysRestartActivationType should be set to 'restart' before deactivation.")
isisSysRestartAutoResetType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 41), IsisSysRestartType().clone('start')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRestartAutoResetType.setStatus('current')
if mibBuilder.loadTexts: isisSysRestartAutoResetType.setDescription('There are cases where the IS-IS protocol requires the local node to automatically deactivate and later reactivate. This object indicates which restart procedures (if any) are followed during such an automatic reset. During the activation period, DC-ISIS will use the value that was configured at the start of activation. The value may be changed at any time, but the new value will only take effect the next time that automatic re-activation takes place.')
isisSysRestartAdjacencyWait = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 42), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3600)).clone(10)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRestartAdjacencyWait.setStatus('current')
if mibBuilder.loadTexts: isisSysRestartAdjacencyWait.setDescription("This object is only used when DC-ISIS activates with the activation type (isisSysRestartActivationType or isisSysRestartAutoResetType) set to 'start' or 'restart'. It defines how long DC-ISIS will wait to establish adjacencies before completing the start/restart. This object follows the resettingTimer behavior.")
isisSysMaxRecoveryTime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 43), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(65535)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysMaxRecoveryTime.setStatus('current')
if mibBuilder.loadTexts: isisSysMaxRecoveryTime.setDescription("This object is only used when DC-ISIS activates with the activation type (isisSysRestartActivationType or isisSysRestartAutoResetType) set to 'restart'. It defines the maximum time that DC-ISIS will take before completing restart procedures. The value specified puts an upper bound on the duration of the T3 timer described in the IS-IS restart RFC (3847). The actual duration of the timer is the minimum of the value specified and the minimum remaining holding time received on an adjacency. This object follows the resettingTimer behavior.")
# Columns 44-50: statistics reset, attached-bit policy, supported protocols,
# LAN adjacency restriction, hostname, circuit-change calc trigger and the
# notification enable mask.
isisSysClearStats = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 44), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysClearStats.setStatus('current')
if mibBuilder.loadTexts: isisSysClearStats.setDescription("Set to 'true' to clear all system statistics, including the isisSystemCounter and isisPacketCounter tables. Note that isisSysStatsLSPCount is not reset by this object. Reading the value of this field has no meaning.")
isisSysSetAttached = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 45), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("attachNoOverlapOrRedist", 1), ("attachNoOverlapOnly", 2), ("attachSet", 3), ("attachClear", 4))).clone('attachNoOverlapOrRedist')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysSetAttached.setStatus('current')
if mibBuilder.loadTexts: isisSysSetAttached.setDescription("If DC-ISIS is operating at both level 1 and level 2, this field indicates how DC-ISIS should decide whether to set the attached bit in its level 1 LSP. - 'attachNoOverlapOrRedist' indicates that the attached bit should be set if either of the following are true. - The IS can reach at least one other area (the IS is adjacent with a L2 router whose area addresses do not overlap with the area addresses we know about at L1). - The IS is redistributing one or more external routes into the AS. - 'attachNoOverlapOnly' indicates that the attached bit should be set only if the IS can reach at least one other area. - 'attachSet' indicates that the attached bit should always be set. - 'attachClear' indicates that the attached bit should never be set.")
isisSysProtSupported = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 46), IsisAddrTypeBits().clone(namedValues=NamedValues(("ipv4", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysProtSupported.setStatus('current')
if mibBuilder.loadTexts: isisSysProtSupported.setDescription('This attribute contains the set of protocols supported by this Intermediate System.')
isisSysRestrictLanAdjsToSubnet = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 47), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRestrictLanAdjsToSubnet.setStatus('current')
if mibBuilder.loadTexts: isisSysRestrictLanAdjsToSubnet.setDescription("This object only affects IPv4 broadcast circuits. If this is set to 'true', DC-ISIS will only form adjacencies with intermediate systems that are on the same subnet as the local circuit. This object only has effect when the I3 Stub is used to determine subnet addresses and is ignored otherwise.")
isisSysHostName = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 48), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysHostName.setStatus('current')
if mibBuilder.loadTexts: isisSysHostName.setDescription('The string that this instance of IS-IS will use as the local hostname. This is advertised to other Intermediate Systems in the Dynamic Hostname TLV.')
isisSysCalcSoonAfterCircChange = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 49), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysCalcSoonAfterCircChange.setStatus('current')
if mibBuilder.loadTexts: isisSysCalcSoonAfterCircChange.setDescription("Set this object to 'true' to minimize the delay before triggering a routing calculation that includes any relevant circuit change. This means a change to the local neighbors (including pseudonodes), or to the reachable addresses received from the I3 stub. There are two aspects to minimizing the delay. - DC-ISIS overrides the setting of the isisSysLevelMinLSPGenInt object when such a change occurs, and usually regenerates the local LSP(s) immediately. The only exception is when DC-ISIS is performing restart procedures as defined in RFC3847. This RFC specifies when DC-ISIS can update the local LSP during a restart. - DC-ISIS overrides all of the objects that affect the scheduling of routing calculations, with the exception of the isisSysCalcThrshRestartLimit object. It ensures that a routing calculation including the updated LSPs takes place as soon as possible. It abandons an existing route calculation if necessary, unless more than isisSysCalcThrshRestartLimit successive calculations would have been interrupted.")
isisSysSendNotifications = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 50), Bits().clone(namedValues=NamedValues(("circuitIndication", 0), ("databaseOverload", 1), ("manualAreaAddressDrops", 2), ("idLengthMismatch", 3), ("maxAreaAddressMismatch", 4), ("ownLspPurge", 5), ("areaMismatch", 6), ("rejectedAdjacency", 7), ("adjacencyChange", 8), ("lspErrorDetected", 9), ("attemptToExceedMaxSequence", 10), ("sequenceNumberSkip", 11), ("extPassCircuitInd", 12), ("operStateChange", 13), ("disChange", 14), ("lspAuthFailure", 15), ("helloAuthFailure", 16), ("attachStateChange", 17))).clone(namedValues=NamedValues(("circuitIndication", 0)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysSendNotifications.setStatus('current')
if mibBuilder.loadTexts: isisSysSendNotifications.setDescription('This attribute contains the set of notifications generated by this Intermediate System. The notifications that may be enabled are the isisCircIndTable, isisDatabaseOverload, isisManualAddressDrops, isisIDLenMismatch, isisMaxAreaAddressesMismatch, isisOwnLSPPurge, isisAreaMismatch, isisRejectedAdjacency, isisAdjacencyChange, isisLSPErrorDetected, isisAttemptToExceedMaxSequence, isisSequenceNumberSkip, isisDisChange, isisOperStateChange, isisLspAuthFailure, isisHelloAuthFailure, isisExtPassCircuitInd and isisAttachStateChange notifications.')
isisSysEnableIgpShortcut = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 51), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysEnableIgpShortcut.setStatus('current')
if mibBuilder.loadTexts: isisSysEnableIgpShortcut.setDescription("Set isisSysEnableIgpShortcut to 'true' to enable IS-IS routing over IGP shortcut interfaces. Set isisSysEnableIgpShortcut to 'false' to disable IS-IS routing over IGP shortcut interfaces. IS-IS will ignore any interface that is identified as an IGP shortcut.")
isisSysI3EntityIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 52), NumericIndex().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysI3EntityIndex.setStatus('current')
if mibBuilder.loadTexts: isisSysI3EntityIndex.setDescription('Identifies the I3 stub instance to which this IS-IS instance should join. The status of the join is given by the row in the isisPmMjTable with isisPmMjInterface equal to ifInterfaceInfo. Note that IS-IS will not become active unless the join to the specified I3 stub is activated successfully. This field follows the replaceOnlyWhileDisabled behavior.')
isisSysRtmPurgeTime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 1, 1, 53), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(60)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysRtmPurgeTime.setStatus('current')
if mibBuilder.loadTexts: isisSysRtmPurgeTime.setDescription('This object defines the maximum time that DC-ISIS will wait when a Routing Table Manager (such as DC-RTM) fails before purging routes. In some systems, the Routing Table Manager may restart independently of DC-ISIS. For example, it may be administratively deactivated and reactivated, or may be distributed to a separate process that can restart independently of the DC-ISIS process. If such a restart does not affect the forwarding table, then DC-ISIS should continue to advertise routes that it has learnt from the Routing Table Manager. This is achieved by setting isisSysRtmPurgeTime to a value that gives the Routing Table Manager sufficient time to restart. A value of zero indicates that routes will be purged as soon as the join to RTM fails. Any change to the configured timer value will take effect when the timer is next started. RTM purge timers that are already running will not be affected by the change.')
# --- isisMtSysTable: per-topology support for multi-topology (MT) IS-IS ---
# Indexed by (isisSysInstance, isisMtSysMtId); only used when MT IS-IS is supported.
isisMtSysTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8), )
if mibBuilder.loadTexts: isisMtSysTable.setStatus('current')
if mibBuilder.loadTexts: isisMtSysTable.setDescription('A row in this table represents the level of support for a topology on this instance of the Integrated IS-IS protocol. This table is only used when multi-topology IS-IS is supported.')
isisMtSysEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisMtSysMtId"))
if mibBuilder.loadTexts: isisMtSysEntry.setStatus('current')
if mibBuilder.loadTexts: isisMtSysEntry.setDescription('Each row defines information specific to a single topology on a single instance of the IS-IS protocol existing on the system.')
# MT ID of the topology this row represents (0 = base topology).
isisMtSysMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisMtSysMtId.setStatus('current')
if mibBuilder.loadTexts: isisMtSysMtId.setDescription('The topology that this row represents.')
# RowStatus: read-only for MT ID zero, whose row tracks the isisSysTable row.
isisMtSysExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtSysExistState.setStatus('current')
if mibBuilder.loadTexts: isisMtSysExistState.setDescription("The state of the MIB row. Turning this to state 'destroy' forces the router to forget all the current topology configuration. Setting the state to 'notInService' stops protocol processing for this topology, but retains the configuration. For multi-topology ID zero (the base topology), this field is read only. The row for the base topology is automatically created and destroyed along with the corresponding isisSysTable row.")
isisMtSysAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 4), AdminState().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtSysAdminState.setStatus('current')
if mibBuilder.loadTexts: isisMtSysAdminState.setDescription("The desired operational state of this topology on this instance of the Integrated IS-IS protocol. Setting this object to the value 'on' when its current value is 'off' enables operation of this topology for this instance of the Integrated IS-IS protocol.")
isisMtSysOperState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 5), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisMtSysOperState.setStatus('current')
if mibBuilder.loadTexts: isisMtSysOperState.setDescription("The current operational status of this topology on this instance of the Integrated IS-IS protocol. If this is 'down' then the topology is not supported.")
# Protocols supported within this topology (RFC 5120 pre-assigns MT IDs 0-5).
isisMtSysProtSupported = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 6), IsisAddrTypeBits().clone(namedValues=NamedValues(("ipv4", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtSysProtSupported.setStatus('current')
if mibBuilder.loadTexts: isisMtSysProtSupported.setDescription('This attribute contains the set of protocols supported by this topology on this Intermediate System. Note that RFC 5120 assigns certain topologies to serve pre-determined purposes (MT IDs 0 - 5). This limits the set of allowed values this field should take.')
# Default oper status for corresponding isisMtCircStatusTable rows without manual config.
isisMtSysDefaultActive = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 8, 1, 7), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtSysDefaultActive.setStatus('current')
if mibBuilder.loadTexts: isisMtSysDefaultActive.setDescription("When set to 'true', corresponding rows in the isisMtCircStatusTable have oper status 'up', unless a corresponding row exists in the isisMtCircManConfigTable. When set to 'false', corresponding rows in the isisMtCircStatusTable have oper status 'down', unless a corresponding row exists in the isisMtCircManConfigTable.")
# --- isisManAreaAddrTable: manually configured area addresses ---
# Indexed by (isisSysInstance, isisManAreaAddr).
isisManAreaAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 2), )
if mibBuilder.loadTexts: isisManAreaAddrTable.setReference('{ISIS.aoi manualAreaAddresses (10)}')
if mibBuilder.loadTexts: isisManAreaAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisManAreaAddrTable.setDescription('The set of manual area addresses configured on this Intermediate System.')
isisManAreaAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisManAreaAddr"))
if mibBuilder.loadTexts: isisManAreaAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisManAreaAddrEntry.setDescription('Each entry contains one area address manually configured on this system')
# Index column: the configured area address (must be non-zero length in DC-ISIS).
isisManAreaAddr = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 2, 1, 1), OSINSAddress())
if mibBuilder.loadTexts: isisManAreaAddr.setStatus('current')
if mibBuilder.loadTexts: isisManAreaAddr.setDescription('A manually configured area address for this system. This object follows the index behavior. Note: an index for the entry {1, {49.0001} active} in this table would be the ordered pair (1, (0x03 0x49 0x00 0x01)), as the length of an Octet string is part of the OID. For the DC-ISIS implementation, the area address must have a non-zero length.')
# RowStatus: destroying the last active row while the instance is 'on' is rejected.
isisManAreaAddrExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 2, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisManAreaAddrExistState.setStatus('current')
if mibBuilder.loadTexts: isisManAreaAddrExistState.setDescription("The state of the isisManAreaAddrEntry. This object follows the Row Status behavior. If the isisSysAdminState for this instance of the IS-IS protocol is 'on', and an attempt is made to set this object to the value 'destroy' or 'notInService' when this is the only isisManAreaAddrEntry in state 'active' for this instance of the IS-IS protocol should return inconsistentValue.")
# --- isisAreaAddrTable: area addresses learned from received Level 1 LSPs (read-only) ---
# Indexed by (isisSysInstance, isisAreaAddr).
isisAreaAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 3), )
if mibBuilder.loadTexts: isisAreaAddrTable.setReference('{ISIS.aoi areaAddresses (18)}')
if mibBuilder.loadTexts: isisAreaAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisAreaAddrTable.setDescription('The union of the sets of area addresses reported in all Level 1 LSPs with segment number zero received by this instance of the protocol from Intermediate Systems which are reachable via Level 1 routing.')
isisAreaAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 3, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisAreaAddr"))
if mibBuilder.loadTexts: isisAreaAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisAreaAddrEntry.setDescription('Each entry contains one area address reported in a Level 1 LSP received by this instance of the IS-IS protocol. The DC-ISIS implementation ignores received area addresses with a length of zero.')
# Index column: the learned area address.
isisAreaAddr = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 3, 1, 1), OSINSAddress())
if mibBuilder.loadTexts: isisAreaAddr.setStatus('current')
if mibBuilder.loadTexts: isisAreaAddr.setDescription('An area address reported in a Level 1 LSP received by this instance of the IS-IS protocol. The DC-ISIS implementation ignores received area addresses with a length of zero.')
# 'true' if this is one of the three numerically lowest L1 area addresses
# (those advertised in the level 2 LSP fragment zero per ISO10589 7.1.5).
isisAreaAddrInLSP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 3, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisAreaAddrInLSP.setStatus('current')
if mibBuilder.loadTexts: isisAreaAddrInLSP.setDescription("Following ISO10589 section 7.1.5, we advertise the three numerically lowest level 1 area addresses in the level 2 LSP fragment zero. If 'true', then this area address is one of the three numerically lowest area addresses, and if this router is active at level 2, it is therefore one of those area addresses advertised in the level 2 LSP fragment 0.")
# --- isisSummAddrTable: IP summary addresses used to form summary TLVs ---
# Indexed by (instance, MT ID, address type, address, prefix length).
isisSummAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5), )
if mibBuilder.loadTexts: isisSummAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrTable.setDescription('The set of IP summary addresses to use in forming summary TLVs originated by this Intermediate System. An administrator may use a summary address to combine and modify IP Reachability announcements. If the Intermediate system can reach any subset of the summary address, the summary address will be announced instead, at the configured metric.')
isisSummAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisSummAddrMtId"), (0, "DC-ISIS-MIB", "isisSummAddressType"), (0, "DC-ISIS-MIB", "isisSummAddress"), (0, "DC-ISIS-MIB", "isisSummAddrPrefixLen"))
if mibBuilder.loadTexts: isisSummAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrEntry.setDescription('Each entry contains one IP summary address.')
# Index column: topology the summary applies to (0 when MT IS-IS is disabled).
isisSummAddrMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisSummAddrMtId.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrMtId.setDescription('The topology that this summary address applies to. This should be set to zero if multi-topology IS-IS is not enabled. This object follows the index behavior.')
isisSummAddressType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 1), InetAddressType())
if mibBuilder.loadTexts: isisSummAddressType.setStatus('current')
if mibBuilder.loadTexts: isisSummAddressType.setDescription('The Type of IP address for this summary address. This object follows the index behavior.')
# Index column: 4-octet (IPv4) or 16-octet (IPv6) address; host bits must be clear.
isisSummAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 2), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: isisSummAddress.setStatus('current')
if mibBuilder.loadTexts: isisSummAddress.setDescription('The IP Address value for this summary address. This object follows the index behavior. The address must not contain any set host bits (bits set after the address prefix determined by isisSummAddrPrefixLen).')
isisSummAddrPrefixLen = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 3), InetAddressPrefixLength().subtype(subtypeSpec=ValueRangeConstraint(0, 128)))
if mibBuilder.loadTexts: isisSummAddrPrefixLen.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrPrefixLen.setDescription('The Length of the IP NetMask for this summary address.')
isisSummAddrExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSummAddrExistState.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrExistState.setDescription('The existence state of this summary address. This object follows the row status behavior.')
# Narrow-style metric (1-63, default 20) used when announcing the summary.
isisSummAddrMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63)).clone(20)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSummAddrMetric.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrMetric.setDescription('The metric value to announce this summary address with in LSPs generated by this system.')
# Wide-style metric equivalent (default 20).
isisSummAddrFullMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 5, 1, 6), FullMetric().clone(20)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSummAddrFullMetric.setStatus('current')
if mibBuilder.loadTexts: isisSummAddrFullMetric.setDescription('The wide metric value to announce this summary address with in LSPs generated by this system.')
# --- isisRedistributeAddrTable: criteria for leaking L2 routes into L1 ---
# Only consulted when Domain Wide Prefix leaking (isisSysL2toL1Leaking) is enabled;
# matching routes are announced as-is (not summarized), non-matching are not announced.
isisRedistributeAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6), )
if mibBuilder.loadTexts: isisRedistributeAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrTable.setDescription('This table provides criteria to decide if a route should be leaked from L2 to L1 when Domain Wide Prefix leaking is enabled. Addresses that match the summary mask in the table will be announced at L1 by routers when isisSysL2toL1Leaking is enabled. Routes that fall into the ranges specified are announced as is, without being summarized. Routes that do not match a summary mask are not announced.')
isisRedistributeAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisRedistributeAddrMtId"), (0, "DC-ISIS-MIB", "isisRedistributeAddrType"), (0, "DC-ISIS-MIB", "isisRedistributeAddrAddress"), (0, "DC-ISIS-MIB", "isisRedistributeAddrPrefixLen"))
if mibBuilder.loadTexts: isisRedistributeAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrEntry.setDescription('Each entry contains one IP summary address to manage leaking L2 addresses into L1.')
# Index column: topology this entry applies to (0 when MT IS-IS is disabled).
isisRedistributeAddrMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisRedistributeAddrMtId.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrMtId.setDescription('The topology that this redistribution address applies to. This should be set to zero if multi-topology IS-IS is not enabled. This object follows the index behavior.')
isisRedistributeAddrType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1, 1), InetAddressType())
if mibBuilder.loadTexts: isisRedistributeAddrType.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrType.setDescription('The Type of IP address for this summary address. This object follows the index behavior.')
# Index column: 4-octet (IPv4) or 16-octet (IPv6) address; host bits must be clear.
isisRedistributeAddrAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1, 2), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: isisRedistributeAddrAddress.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrAddress.setDescription('The IP Address value for this summary address. This object follows the index behavior. The address must not contain any set host bits (bits set after the address prefix determined by isisRedistributeAddrPrefixLen).')
isisRedistributeAddrPrefixLen = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1, 3), InetAddressPrefixLength().subtype(subtypeSpec=ValueRangeConstraint(0, 128)))
if mibBuilder.loadTexts: isisRedistributeAddrPrefixLen.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrPrefixLen.setDescription('The Length of the IP NetMask for this summary address.')
isisRedistributeAddrExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 6, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisRedistributeAddrExistState.setStatus('current')
if mibBuilder.loadTexts: isisRedistributeAddrExistState.setDescription('The existence state of this summary address. This object follows the row status behavior.')
# --- isisRouterTable: hostnames and router IDs learned from peer LSPs (read-only) ---
# Indexed by (isisSysInstance, isisRouterSysID, isisRouterLevel).
isisRouterTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7), )
if mibBuilder.loadTexts: isisRouterTable.setStatus('current')
if mibBuilder.loadTexts: isisRouterTable.setDescription('The set of hostnames and router ID.')
isisRouterEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisRouterSysID"), (0, "DC-ISIS-MIB", "isisRouterLevel"))
if mibBuilder.loadTexts: isisRouterEntry.setStatus('current')
if mibBuilder.loadTexts: isisRouterEntry.setDescription('Each entry tracks information about one peer at one level.')
# Index column: System ID of the peer.
isisRouterSysID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1, 1), SystemID())
if mibBuilder.loadTexts: isisRouterSysID.setStatus('current')
if mibBuilder.loadTexts: isisRouterSysID.setDescription('The System ID of the Router Peer.')
# Index column: IS-IS level of the peer.
isisRouterLevel = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1, 2), ISLevel())
if mibBuilder.loadTexts: isisRouterLevel.setStatus('current')
if mibBuilder.loadTexts: isisRouterLevel.setDescription('The level of this Intermediate System.')
isisRouterHostName = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisRouterHostName.setStatus('current')
if mibBuilder.loadTexts: isisRouterHostName.setDescription('The hostname listed in LSP, or NULL if none.')
isisRouterID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisRouterID.setStatus('current')
if mibBuilder.loadTexts: isisRouterID.setDescription('The Router ID of the Peer found in LSP, or NULL if none.')
isisRouterIPv6ID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 1, 7, 1, 5), InetAddressIPv6()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisRouterIPv6ID.setStatus('current')
if mibBuilder.loadTexts: isisRouterIPv6ID.setDescription('The IPv6 TE Router ID found in the LSP, if any.')
# --- isisSysLevelTable: per-level (L1 area / L2 domain) configuration and state ---
# Indexed by (isisSysInstance, isisSysLevelIndex).
isisSysLevelTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1), )
if mibBuilder.loadTexts: isisSysLevelTable.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelTable.setDescription('Level specific information about an instance of IS-IS.')
isisSysLevelEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisSysLevelIndex"))
if mibBuilder.loadTexts: isisSysLevelEntry.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelEntry.setDescription('Describe variables defined for Area or Domain.')
# Index column: which level this row describes (1 = level-1 IS, 2 = level-2 IS).
isisSysLevelIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("level1IS", 1), ("level2IS", 2))))
if mibBuilder.loadTexts: isisSysLevelIndex.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelIndex.setDescription('The level that this entry describes.')
# Maximum size of LSPs/SNPs originated at this level (default 1492).
isisSysLevelOrigLSPBuffSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 2), LSPBuffSize().clone(1492)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelOrigLSPBuffSize.setReference('{ISIS.aoi originatingL1LSPBufferSize (9)}')
if mibBuilder.loadTexts: isisSysLevelOrigLSPBuffSize.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelOrigLSPBuffSize.setDescription('The maximum size of LSPs and SNPs originated by this Intermediate System at this level. This object follows the replaceOnlyWhileDisabled behavior.')
# Minimum interval (ms) between successive generations of an LSP with the same LSPID.
isisSysLevelMinLSPGenInt = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535000)).clone(30)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelMinLSPGenInt.setReference('{ISIS.aoi minimumLSPGenerationInterval (11)}')
if mibBuilder.loadTexts: isisSysLevelMinLSPGenInt.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelMinLSPGenInt.setDescription('Minimum interval, in milliseconds, between successive generation of LSPs with the same LSPID at this level by this instance of the protocol. This object follows the resettingTimer behavior.')
# Read-only database overload state; waiting/overloaded sets the LSP Overload bit.
isisSysLevelOverloadState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 4), LevelState().clone('off')).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysLevelOverloadState.setReference('{ISIS.aoi l1State (17)}')
if mibBuilder.loadTexts: isisSysLevelOverloadState.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelOverloadState.setDescription("The Overload state of the database at this level. The value 'overloaded' indicates a database that is low on an essential resource, such as memory. The administrator may indirectly force the state to 'waiting' when the router is initializing by setting the object isisSysLevelSetOverload. If the state is waiting or overloaded, we originate LSPs with the Overload bit set.")
# Administrative override to force the Overload bit on for this level.
isisSysLevelSetOverload = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 5), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelSetOverload.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelSetOverload.setDescription('Administratively set the overload bit for the level. The overload bit will continue to be set if the implementation runs out of memory, independent of this variable.')
# Optional sysUpTime deadline after which the administratively-set Overload bit clears.
isisSysLevelSetOverloadUntil = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 6), TimeTicks()).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelSetOverloadUntil.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelSetOverloadUntil.setDescription('If set, the overload bit should be set, and cleared after sysUpTime exceeds this value.')
# Metric style generated in locally-originated LSPs at this level.
isisSysLevelMetricStyle = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 7), MetricStyle().clone('narrow')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelMetricStyle.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelMetricStyle.setDescription('Which style of Metric do we generate in our LSPs at this level?')
# Metric style considered in the SPF computation at this level.
isisSysLevelSPFConsiders = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 8), MetricStyle().clone('narrow')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelSPFConsiders.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelSPFConsiders.setDescription('Which style of Metric do we consider in our SPF computation at this level?')
# IPv4 Traffic Engineering switch; requires wide/both metric style and a TE router ID.
isisSysLevelTEEnabled = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 9), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelTEEnabled.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelTEEnabled.setDescription("Do we do Traffic Engineering for IPv4 at this level? If Traffic Engineering is enabled, isisSysLevelMetricStyle must be set to 'wide' or 'both' for this level, and a value must be configured for isisSysTERouterID. When Traffic Engineering is enabled for IPv4, this IS will advertise the IPv4 TE router ID in the local LSP, and will advertise traffic engineering parameters (where available) for links configured to support IPv4. This object is ignored if the 'ipv4' bit is not set in the value of isisSysProtSupported.")
# IPv6 Traffic Engineering switch; DC-ISIS only supports IPv6 TE at level 1.
isisSysLevelIPv6TEEnabled = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 10), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelIPv6TEEnabled.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelIPv6TEEnabled.setDescription("Do we do Traffic Engineering for IPv6 at this level? DC-ISIS only supports Traffic Engineering for IPv6 at level 1. If Traffic Engineering is enabled, a value must be configured for isisSysIPv6TERouterID. When Traffic Engineering is enabled for IPv6, this IS will advertise the IPv6 TE router ID in the local LSP, and will advertise traffic engineering parameters (where available) for links configured to support IPv6. This object is ignored if the 'ipv6' bit is not set in the value of isisSysProtSupported.")
# Graceful-restart T2 timer: how long to wait for database sync at this level.
isisSysLevelRestartT2Duration = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3600)).clone(60)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelRestartT2Duration.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelRestartT2Duration.setDescription("This object is only used when DC-ISIS activates with the activation type (isisSysRestartActivationType or isisSysRestartAutoResetType) set to 'start' or 'restart'. It defines how long DC-ISIS will wait to complete database synchronization at this level before completing the start/restart. This object follows the resettingTimer behavior.")
# Opaque user data passed to the Metaswitch authentication interface for this level.
isisSysLevelAuthUser = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 2, 1, 1, 12), AuthUserDataString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSysLevelAuthUser.setStatus('current')
if mibBuilder.loadTexts: isisSysLevelAuthUser.setDescription('Authentication user data for area/domain level authentication. This data is passed opaquely to the Metaswitch authentication interface where it can be used to assist with authentication decisions.')
# --- isisCircTable: circuits used by each IS-IS instance ---
# Indexed by (isisSysInstance, isisCircIndex); the isisCircExtDomain column's
# definition continues beyond this chunk.
isisCircTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2), )
if mibBuilder.loadTexts: isisCircTable.setStatus('current')
if mibBuilder.loadTexts: isisCircTable.setDescription('The table of circuits used by each instance of Integrated IS-IS on this system.')
isisCircEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"))
if mibBuilder.loadTexts: isisCircEntry.setStatus('current')
if mibBuilder.loadTexts: isisCircEntry.setDescription('An isisCircEntry exists for each circuit used by Integrated IS-IS on this system.')
# Index column: circuit identifier; in DC-ISIS always equal to the circuit ifIndex.
isisCircIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: isisCircIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircIndex.setDescription('The identifier of this circuit, unique within the instance of the IS-IS protocol. This object follows the index behavior. This is for SNMP Indexing purposes only and need not have any relation to any protocol value. In DC-ISIS this is always equal to the circuit ifIndex.')
# ifIndex of the underlying interface; immutable after creation, must equal isisCircIndex.
isisCircIfIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 2), InterfaceIndex()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIfIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircIfIndex.setDescription('The value of ifIndex for the interface to which this circuit corresponds. This object cannot be modified after creation. In DC-ISIS this is also used as the 3-way circuit ID on point-to-point circuits, and must equal isisCircIndex. The default value is the same as isisCircIndex. A create request using any other value will fail.')
# Sub-interface specifier (e.g. DLCI or VPI/VCI); ignored by DC-ISIS.
isisCircIfSubIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 3), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIfSubIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircIfSubIndex.setDescription('A specifier for the part of the interface ifIndex to which this circuit corresponds, such as a DLCI or VPI/VCI. This object cannot be modified after creation. In DC-ISIS this field is ignored.')
# Admin state of the circuit; default depends on isisSysAllowAutoI3Config.
isisCircAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 4), AdminState().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircAdminState.setStatus('current')
if mibBuilder.loadTexts: isisCircAdminState.setDescription("The administrative state of the circuit. This object follows the AdminState behavior. In DC-ISIS the default admin state for a new automatic circuit is 'on' if the isisSysAllowAutoI3Config field is 'true', otherwise the default admin state is 'off'.")
# RowStatus; automatic (I3-stub) circuits survive Destroy with fields reset.
isisCircExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircExistState.setStatus('current')
if mibBuilder.loadTexts: isisCircExistState.setDescription("The existence state of this circuit. This object follows the Row Status behavior. Setting the state to 'notInService' halts the generation and processing of IS-IS protocol PDUs on this circuit. Setting the state to destroy will also erase any configuration associated with the circuit. In DC-ISIS, automatic circuits from the I3 stub appear as passive circuits with existence state 'active'. These circuits can be configured via the MIB like manual circuits, except that they will survive a Destroy request, with all fields reset to their automatic values. MIB configuration overrides automatic configuration.")
# Circuit type; DC-ISIS supports only 'broadcast' and 'ptToPt'.
isisCircType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unknown", 0), ("broadcast", 1), ("ptToPt", 2), ("staticIn", 3), ("staticOut", 4), ("dA", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircType.setReference('{ISIS.aoi type (33)}')
if mibBuilder.loadTexts: isisCircType.setStatus('current')
if mibBuilder.loadTexts: isisCircType.setDescription("The type of the circuit. This object follows the replaceOnlyWhileDisabled behavior. The type specified must be compatible with the type of the interface defined by the value of isisCircIfIndex. In DC-ISIS only 'broadcast' and 'ptToPt' circuits are supported. An automatic circuit can have type 'unknown' until the correct MIB type is defined.")
# 'true' if the circuit is in the external routing domain (ISIS.aoi externalDomain).
isisCircExtDomain = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 7), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircExtDomain.setReference('{ISIS.aoi externalDomain (46)}')
if mibBuilder.loadTexts: isisCircExtDomain.setStatus('current')
if mibBuilder.loadTexts: isisCircExtDomain.setDescription('If true, suppress normal transmission of and interpretation of Intra-domain IS-IS PDUs on this circuit.')
isisCircLevel = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("reserved", 0), ("level1", 1), ("level2", 2), ("level1L2", 3))).clone('level1L2')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevel.setStatus('current')
if mibBuilder.loadTexts: isisCircLevel.setDescription('Indicates which type of packets will be sent and accepted on this circuit. The values used will be modified by the settings of isisSysType. This object follows the replaceOnlyWhileDisabled behavior.')
isisCircPassiveCircuit = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 9), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircPassiveCircuit.setStatus('current')
if mibBuilder.loadTexts: isisCircPassiveCircuit.setDescription("Should we include this interface in LSPs, even if it is not running the IS-IS Protocol? In DC-ISIS, circuits with isisCircExtDomain 'true' will only be included in LSPs if this field is also 'true', and the circuit is active.")
isisCircMeshGroupEnabled = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("blocked", 2), ("set", 3))).clone('inactive')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircMeshGroupEnabled.setReference('{ RFC 2973 }')
if mibBuilder.loadTexts: isisCircMeshGroupEnabled.setStatus('current')
if mibBuilder.loadTexts: isisCircMeshGroupEnabled.setDescription('Is this port a member of a mesh group, or blocked? Circuits in the same mesh group act as a virtual multiaccess network. LSPs seen on one circuit in a mesh group will not be flooded to another circuit in the same mesh group.')
isisCircMeshGroup = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 11), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircMeshGroup.setReference('{ RFC 2973 }')
if mibBuilder.loadTexts: isisCircMeshGroup.setStatus('current')
if mibBuilder.loadTexts: isisCircMeshGroup.setDescription('Circuits in the same mesh group act as a virtual multiaccess network. LSPs seen on one circuit in a mesh group will not be flooded to another circuit in the same mesh group. If isisCircMeshGroupEnabled is inactive or blocked, this value is ignored.')
isisCircSmallHellos = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 12), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircSmallHellos.setStatus('current')
if mibBuilder.loadTexts: isisCircSmallHellos.setDescription('Can we send unpadded hellos on LAN circuits? False means LAN Hellos must be padded. Implementations should allow the administrator to read this value. An implementation need not be able to support unpadded hellos to be conformant. DC-ISIS does support unpadded hellos.')
isisCircLastUpTime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 13), TimeTicks()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLastUpTime.setStatus('current')
if mibBuilder.loadTexts: isisCircLastUpTime.setDescription('If the circuit is enabled, the value of sysUpTime when isisCircAdminState most recently entered the state on. If the circuit is not on, the value of sysUpTime when the circuit last entered state on, 0 if the circuit has never been on.')
isisCirc3WayEnabled = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 14), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCirc3WayEnabled.setStatus('current')
if mibBuilder.loadTexts: isisCirc3WayEnabled.setDescription('Is this circuit enabled to run 3Way handshake? DC-ISIS will always run the 3-way handshake on point to point circuits, and so this object is read-only.')
isisCircExtendedCircID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircExtendedCircID.setStatus('current')
if mibBuilder.loadTexts: isisCircExtendedCircID.setDescription('The value to be used as the extended circuit ID in 3Way handshake. This value is only used if isisCirc3WayEnabled is true, and must be unique across all circuits on this IS. DC-ISIS uses the value for isisCircIndex as the extended circuit ID, and so this object is read-only.')
isisCircOperState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 16), IsisOperStatus().clone('operStatusDown')).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircOperState.setStatus('current')
if mibBuilder.loadTexts: isisCircOperState.setDescription('The operational state of this circuit.')
isisCircSdEntityIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircSdEntityIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircSdEntityIndex.setDescription('Identifies the SDC entity which will handle this circuit. This object must be specified before the circuit can run the IS-IS protocol. It cannot be changed after it has been specified.')
isisCircDlBuffPoolSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 18), Unsigned32().clone(150)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircDlBuffPoolSize.setStatus('current')
if mibBuilder.loadTexts: isisCircDlBuffPoolSize.setDescription('Capacity of the buffer pool used by the DL stub to send data signals to the SDC. This object follows the replaceOnlyWhileDisabled behavior.')
isisCircSdPDUBuffPoolSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 19), Unsigned32().clone(200)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircSdPDUBuffPoolSize.setStatus('current')
if mibBuilder.loadTexts: isisCircSdPDUBuffPoolSize.setDescription('Capacity of the buffer pool into which SDC transfers data signals from the DL Stub. This object follows the replaceOnlyWhileDisabled behavior.')
isisCircSdIndBuffPoolSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 20), Unsigned32().clone(20)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircSdIndBuffPoolSize.setStatus('current')
if mibBuilder.loadTexts: isisCircSdIndBuffPoolSize.setDescription('Capacity of the buffer pool used by SDC to send indications to PM. This object follows the replaceOnlyWhileDisabled behavior.')
isisCircDataLinkBlockSize = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 21), Unsigned32().clone(1492)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircDataLinkBlockSize.setStatus('current')
if mibBuilder.loadTexts: isisCircDataLinkBlockSize.setDescription('Maximum size of PDU that can be sent or received over this circuit (MTU). This object follows the replaceOnlyWhileDisabled behavior.')
isisCircPhysicalAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 22), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircPhysicalAddress.setStatus('current')
if mibBuilder.loadTexts: isisCircPhysicalAddress.setDescription('The physical address of the network interface (for example a MAC address on an Ethernet card). This value is only relevant to a broadcast circuit and is ignored on a point-to-point circuit. It needs to be specified in the MIB if the information is not obtained from the I3 stub. This object follows the replaceOnlyWhileDisabled behavior.')
isisCircManualOrAutomatic = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("automatic", 2), ("both", 3))).clone('manual')).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircManualOrAutomatic.setStatus('current')
if mibBuilder.loadTexts: isisCircManualOrAutomatic.setDescription("Has this circuit been configured by MIB (manual), I3 information (automatic) or both? MIB configuration overrides I3 configuration. Automatic circuits cannot be destroyed. Destroying a manual circuit removes all configuration from that circuit from DC-ISIS. Destroying a circuit in state 'both' destroys any MIB configuration and returns the circuit to automatic state.")
isisCircT1TimerRunning = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 24), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircT1TimerRunning.setStatus('current')
if mibBuilder.loadTexts: isisCircT1TimerRunning.setDescription("Is the T1 timer running on this circuit? This object is only valid on a circuit that is currently running the IS-IS protocol (isisCircExtDomain is 'false' and isisCircOperState is 'operStatusUp'). When set to 'true', this indicates that the local node is running starting or restarting node procedures.")
isisCircProtSupported = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 25), IsisAddrTypeBits().clone(namedValues=NamedValues(("ipv4", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircProtSupported.setStatus('current')
if mibBuilder.loadTexts: isisCircProtSupported.setDescription("Which protocols are supported on this circuit? Note that the configured value is used in conjunction with the value of isisSysProtSupported. In particular: - IPv4 is supported on the circuit if both isisSysProtSupported and this object have the 'ipv4' bit set - IPv6 is supported on the circuit if both isisSysProtSupported and this object have the 'ipv6' bit set.")
isisCircPtToPtOverLAN = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 26), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircPtToPtOverLAN.setStatus('current')
if mibBuilder.loadTexts: isisCircPtToPtOverLAN.setDescription("Allows a broadcast circuit to be configured to operate point-to-point over LAN procedures. This is used in conjunction with the isisCircType object as follows. - If isisCircType = 'ptToPt', the circuit is a standard point-point circuit, and isisCircPtToPtOverLAN is ignored. - If isisCircType = 'broadcast' and isisCircPtToPtOverLAN is 'false', the circuit is used as a normal LAN. - If isisCircType = 'broadcast' and isisCircPtToPtOverLAN is 'true', point-point over LAN procedures are followed. Point-to-point over LAN procedures should only be configured when there are just two Intermediate Systems operating on the LAN. This object follows the replaceOnlyWhileDisabled behavior.")
isisCircProtBfdDesired = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 2, 1, 28), IsisAddrTypeBits()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircProtBfdDesired.setStatus('current')
if mibBuilder.loadTexts: isisCircProtBfdDesired.setDescription('Indicates the protocols running on this circuit for which Bidirectional Forwarding Detection (BFD) is desired. If isisCircProtBfdDesired is clear for an address type, then BFD sessions are not established to peers for that address type. If isisCircProtBfdDesired is set for an address type, then a BFD session is established to each peer that supports BFD for that address type. The object isisISAdjProtSuppBfdStatus provides BFD status for each address type. This object can be modified at any time.')
# ---------------------------------------------------------------------------
# isisMtCircManConfigTable (OID 1.2.826.0.1.1578918.5.63.1.1.3.4):
# manual multi-topology configuration, one row per (instance, circuit, MT ID).
# pysmi-generated: table, row (with its INDEX names), then its columns.
# ---------------------------------------------------------------------------
isisMtCircManConfigTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4), )
if mibBuilder.loadTexts: isisMtCircManConfigTable.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManConfigTable.setDescription('A row in this table represents the manual configuration of support for a topology on this circuit. Rows in this table are created and destroyed by an administrator to provide per-circuit multi-topology configuration. Note that the presence of a row in this table overrides the default multi-topology settings for the given circuit and topology.')
# Conceptual row indexed by (isisSysInstance, isisCircIndex, isisMtCircManMtId).
isisMtCircManConfigEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisMtCircManMtId"))
if mibBuilder.loadTexts: isisMtCircManConfigEntry.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManConfigEntry.setDescription('An isisMtCircManConfigEntry is created to provide multi-topology specifc circuit configuration.')
isisMtCircManMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisMtCircManMtId.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManMtId.setDescription('The topology that this row represents.')
isisMtCircManExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtCircManExistState.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManExistState.setDescription("The state of the MIB row. Turning this to state 'destroy' forces the router to forget the manually configured multi-topology configuration for this circuit. Setting the state to 'notInService' stops the manual multi-topology configuration being used, but retains the configuration.")
isisMtCircManAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 5), AdminState().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtCircManAdminState.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManAdminState.setDescription("The desired operational state for the manual configuration of this topology on this circuit. Setting this object to the value 'on' when its current value is 'off' enables use of the manual configuration for this topology on this circuit.")
isisMtCircManOperState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 6), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisMtCircManOperState.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManOperState.setDescription('The current operational status of the manual configuration for this topology on this circuit.')
isisMtCircManL1WideMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 7), WideMetric().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtCircManL1WideMetric.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManL1WideMetric.setDescription('The level 1 wide metric value of this circuit for this topology. Note that topologies other than MT ID 0 always use wide metrics. For multi-topology ID zero (the base topology), this field is read only. The level 1 metric for the base topology is configured using the isisCircLevelWideMetric field in the isisCircLevelTable.')
isisMtCircManL2WideMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 4, 1, 8), WideMetric().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisMtCircManL2WideMetric.setStatus('current')
if mibBuilder.loadTexts: isisMtCircManL2WideMetric.setDescription('The level 2 wide metric value of this circuit for this topology. Note that topologies other than MT ID 0 always use wide metrics. For multi-topology ID zero (the base topology), this field is read only. The level 2 metric for the base topology is configured using the isisCircLevelWideMetric field in the isisCircLevelTable.')
# ---------------------------------------------------------------------------
# isisMtCircStatusTable (OID 1.2.826.0.1.1578918.5.63.1.1.3.5):
# read-only per-topology status for each circuit. pysmi-generated.
# ---------------------------------------------------------------------------
isisMtCircStatusTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5), )
if mibBuilder.loadTexts: isisMtCircStatusTable.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusTable.setDescription('A row in this table represents the current support for a topology on this circuit. A row exists in this table for every row in the isisCircTable and every row in the isisMtSysTable.')
# Conceptual row indexed by (isisSysInstance, isisCircIndex, isisMtCircStatusMtId).
isisMtCircStatusEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisMtCircStatusMtId"))
if mibBuilder.loadTexts: isisMtCircStatusEntry.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusEntry.setDescription('An isisMtCircStatusEntry details the status of a topology on a circuit.')
isisMtCircStatusMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisMtCircStatusMtId.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusMtId.setDescription('The topology that this row represents.')
isisMtCircStatusOperState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5, 1, 4), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisMtCircStatusOperState.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusOperState.setDescription('The current operational state of this topology on this circuit. If no corresponding row exists in the isisMtCircManConfigTable then this is derived from the isisMtSysDefaultActive field in the isisMtSysTable row. Otherwise, this is derived from a combination of the isisMtSysOperState field in the isisMtSysTable and the isisMtCircManOperState field in the isisMtCircManConfigTable row.')
isisMtCircStatusL1WideMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5, 1, 5), WideMetric()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisMtCircStatusL1WideMetric.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusL1WideMetric.setDescription('The level 1 wide metric value of this circuit for this topology. Note that topologies other than MT ID 0 always use wide metrics. If no corresponding row exists in the isisMtCircManConfigTable then this is derived from the isisCircLevelWideMetric field in the level 1 isisCircLevelTable. Otherwise, this is derived from the isisMtCircManL1WideMetric field in the isisMtCircManConfigTable row.')
isisMtCircStatusL2WideMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 5, 1, 6), WideMetric()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisMtCircStatusL2WideMetric.setStatus('current')
if mibBuilder.loadTexts: isisMtCircStatusL2WideMetric.setDescription('The level 2 wide metric value of this circuit for this topology. Note that topologies other than MT ID 0 always use wide metrics. If no corresponding row exists in the isisMtCircManConfigTable then this is derived from the isisCircLevelWideMetric field in the level 2 isisCircLevelTable. Otherwise, this is derived from the isisMtCircManL2WideMetric field in the isisMtCircManConfigTable row.')
# ---------------------------------------------------------------------------
# isisCircIPAddrTable (OID 1.2.826.0.1.1578918.5.63.1.1.3.3):
# the set of IP addresses configured on all circuits, split by whether each
# address was configured manually or learned automatically. pysmi-generated.
# (Declared after the .3.4/.3.5 tables above — the generator does not emit
# tables in OID order.)
# ---------------------------------------------------------------------------
isisCircIPAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3), )
if mibBuilder.loadTexts: isisCircIPAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrTable.setDescription('This table contains the set of IP Addresses configured on all circuits.')
# Conceptual row indexed by (isisSysInstance, isisCircIndex,
# isisCircIPAddrManOrAuto, isisCircIPAddrIndex).
isisCircIPAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisCircIPAddrManOrAuto"), (0, "DC-ISIS-MIB", "isisCircIPAddrIndex"))
if mibBuilder.loadTexts: isisCircIPAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrEntry.setDescription('Each entry contains one IP Address configured on a circuit.')
isisCircIPAddrManOrAuto = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("automatic", 2), ("both", 3))))
if mibBuilder.loadTexts: isisCircIPAddrManOrAuto.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrManOrAuto.setDescription('The type of IP address. Those of type manual are created by the network manager. Those of type automatic are created through propagation of interface information from the I3 stub. This object cannot be modified after creation, and follows the manualOrAutomatic behavior.')
isisCircIPAddrIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2000000000)))
if mibBuilder.loadTexts: isisCircIPAddrIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrIndex.setDescription('An index to this table which identifies the IP addresss to which this entry belongs. This object follows the manualOrAutomatic behavior.')
isisCircIPAddrRowStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIPAddrRowStatus.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrRowStatus.setDescription('The existence state of this IP address. This object follows the RowStatus and manualOrAutomatic behaviors.')
isisCircIPAddrAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 4), AdminState().clone('on')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIPAddrAdminState.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrAdminState.setDescription("Administrative state of this IP address. This object follows the AdminState and manualOrAutomatic behaviors. The system level isisSysAllowAutoI3Config field defines the initial administrative state for IP addresses that are configured automatically from the I3 stub. If isisSysAllowAutoI3Config is 'true' then automatic addresses initially have administrative state 'on'; otherwise they initially have administrative state 'off'.")
isisCircIPAddrAddressType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 5), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIPAddrAddressType.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrAddressType.setDescription('The Inet type of this IP Address. This object follows the manualOrAutomatic behavior.')
# Address size is constrained to exactly 4 (IPv4) or exactly 16 (IPv6) octets.
isisCircIPAddrAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 6), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIPAddrAddress.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrAddress.setDescription('One IP Address configured on this circuit. This field must be present on row creation and cannot be modified after creation. This address must be unique among all manual IP addresses configured on this circuit. If a manual address matches an automatic IP address on this circuit then the manual address takes precedence, and the automatic address is not used by DC-ISIS. This address is guaranteed to be unique among all automatic addresses on this circuit.')
isisCircIPAddrInLSP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 3, 3, 1, 7), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircIPAddrInLSP.setStatus('current')
if mibBuilder.loadTexts: isisCircIPAddrInLSP.setDescription('Is this IP address advertised in TLV 132 in LSPs sent out by this intermediate system? This object follows the manualOrAutomatic behavior.')
# ---------------------------------------------------------------------------
# isisCircLevelTable (OID 1.2.826.0.1.1578918.5.63.1.1.4.1):
# per-level (L1/L2) circuit configuration and status — metrics, DIS priority,
# circuit IDs and the hello/LSP/CSNP timer knobs. pysmi-generated.
# NOTE(review): this table's columns continue beyond the end of this chunk.
# ---------------------------------------------------------------------------
isisCircLevelTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1), )
if mibBuilder.loadTexts: isisCircLevelTable.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelTable.setDescription('Level specific information about circuits used by IS-IS.')
# Conceptual row indexed by (isisSysInstance, isisCircIndex, isisCircLevelIndex).
isisCircLevelEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisCircLevelIndex"))
if mibBuilder.loadTexts: isisCircLevelEntry.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelEntry.setDescription('An isisCircLevelEntry exists for each level on each circuit used by Integrated IS-IS on this system.')
isisCircLevelIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("level1IS", 1), ("level2IS", 2))))
if mibBuilder.loadTexts: isisCircLevelIndex.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelIndex.setDescription('The level that this entry describes.')
# Narrow (1-63) metric, default 10.
isisCircLevelMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63)).clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelMetric.setReference('{ISIS.aoi l1DefaultMetric (35)}')
if mibBuilder.loadTexts: isisCircLevelMetric.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelMetric.setDescription('The metric value of this circuit for this level.')
isisCircLevelWideMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 3), WideMetric().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelWideMetric.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelWideMetric.setDescription('The wide metric value of this circuit for this level.')
isisCircLevelISPriority = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 4), ISPriority().clone(64)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelISPriority.setReference('{ISIS.aoi l2IntermediateSystemPriority (73)}')
if mibBuilder.loadTexts: isisCircLevelISPriority.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelISPriority.setDescription('The priority for becoming LAN Designated Intermediate System at this level.')
isisCircLevelIDOctet = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLevelIDOctet.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelIDOctet.setDescription('A one byte identifier for the circuit selected by the Intermediate System. On point-to-point circuits, the value is used as the Local Circuit ID in point-to-point IIH PDUs transmitted on this circuit. In this case, values of isisCircLevelIDOctet do not need to be unique. For broadcast circuits, the value is used to generate the LAN ID that will be used if this Intermediate System is elected as the Designated IS on this circuit. The value is required to differ on LANs where the Intermediate System is the Designated Intermediate System.')
isisCircLevelID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 6), CircuitID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLevelID.setReference('{ISIS.aoi ptPtCircuitID (51)}')
if mibBuilder.loadTexts: isisCircLevelID.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelID.setDescription('On a point to point circuit with a fully initialized adjacency to a peer IS, the value of this object is the circuit ID negotiated during adjacency initialization. On a point to point circuit without such an adjacency, the value is the concatenation of the local system ID and the one byte isisCircLevelIDOctet for this circuit i.e. the value that would be proposed for the circuit ID. On other circuit types, the value returned is the zero length OCTET STRING.')
isisCircLevelDesIS = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 7), CircuitID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLevelDesIS.setReference('{ISIS.aoi l2DesignatedIntermediateSystem (75)}')
if mibBuilder.loadTexts: isisCircLevelDesIS.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelDesIS.setDescription('The ID of the LAN Designated Intermediate System on this circuit at this level. If, for any reason, this system is not partaking in the relevant Designated Intermediate System election process, then the value returned is the zero length OCTET STRING.')
isisCircLevelHelloMultiplier = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 100)).clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelHelloMultiplier.setReference('{ISIS.aoi iSISHelloTimer (45)}')
if mibBuilder.loadTexts: isisCircLevelHelloMultiplier.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelHelloMultiplier.setDescription('This value is multiplied by the corresponding HelloTimer and the result in seconds (rounded up) is used as the holding time in transmitted hellos, to be used by receivers of hello packets from this IS.')
isisCircLevelHelloTimer = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 600000)).clone(3000)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelHelloTimer.setReference('{ISIS.aoi iSISHelloTimer (45)}')
if mibBuilder.loadTexts: isisCircLevelHelloTimer.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelHelloTimer.setDescription('Maximum period, in milliseconds, between IIH PDUs on multiaccess networks at this level for LANs. The value at L1 is used as the period between Hellos on L1L2 point to point circuits. Setting this value at level 2 on an L1L2 point to point circuit will result in an error of InconsistentValue. This object follows the resettingTimer behavior.')
isisCircLevelDRHelloTimer = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 120000)).clone(1000)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelDRHelloTimer.setReference('{ISIS.aoi iSISHelloTimer (45)}')
if mibBuilder.loadTexts: isisCircLevelDRHelloTimer.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelDRHelloTimer.setDescription('Period, in milliseconds, between Hello PDUs on multiaccess networks when this IS is the Designated Intermediate System. This object follows the resettingTimer behavior.')
isisCircLevelLSPThrottle = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 11), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(30)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelLSPThrottle.setReference('{ISIS.aoi minimumBroadcastLSPTransmissionInterval (5)}')
if mibBuilder.loadTexts: isisCircLevelLSPThrottle.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelLSPThrottle.setDescription('Minimal interval of time, in milliseconds, between transmissions of LSPs on an interface at this level.')
isisCircLevelMinLSPRetransInt = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 300000)).clone(5)).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelMinLSPRetransInt.setReference('{ISIS.aoi minimumLSPTransmissionInterval (5)}')
if mibBuilder.loadTexts: isisCircLevelMinLSPRetransInt.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelMinLSPRetransInt.setDescription('Minimum interval, in milliseconds, between re-transmission of an LSP at this level. This object follows the resettingTimer behavior. Note that isisCircLevelLSPThrottle controls how fast we send back to back LSPs. This variable controls how fast we re-send the same LSP.')
isisCircLevelCSNPInterval = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 600)).clone(10)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelCSNPInterval.setReference('{ISIS.aoi completeSNPInterval (8)}')
if mibBuilder.loadTexts: isisCircLevelCSNPInterval.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelCSNPInterval.setDescription('Interval of time, in seconds, between periodic transmission of a complete set of CSNPs on multiaccess networks if this router is the designated router at this level. This object follows the resettingTimer behavior.')
isisCircLevelPartSNPInterval = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120)).clone(2)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelPartSNPInterval.setReference('{ISIS.aoi partialSNPInterval (14)}')
if mibBuilder.loadTexts: isisCircLevelPartSNPInterval.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelPartSNPInterval.setDescription('Minimum interval in seconds between sending Partial Sequence Number PDUs at this level. This object follows the resettingTimer behavior.')
isisCircLevelStickyDIS = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelStickyDIS.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelStickyDIS.setDescription('Boost to add to the priority of this router (up to the maximum permitted value) when it becomes DIS at this level to make it more likely to remain DIS.')
isisCircLevelAuthUser = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 16), AuthUserDataString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelAuthUser.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelAuthUser.setDescription("Authentication user data for this level's authentication. This data is passed opaquely to the Metaswitch authentication interface where it can be used to assist with authentication decisions.")
isisCircLevelIDHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 17), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLevelIDHostname.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelIDHostname.setDescription('The hostname corresponding to the system ID part of the isisCircLevelID object. This is the null string if the isisCircLevelID object is null, or if no hostname is known.')
isisCircLevelDesISHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 18), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLevelDesISHostname.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelDesISHostname.setDescription('The hostname corresponding to the system ID part of the isisCircLevelDesIS object. This is the null string if the isisCircLevelDesIS object is null, or if no hostname is known.')
isisCircLevelMinLSPArrivalInt = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 4, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 60000))).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisCircLevelMinLSPArrivalInt.setStatus('current')
if mibBuilder.loadTexts: isisCircLevelMinLSPArrivalInt.setDescription('The minimum interval between reception of new instances of a particular LSP. LSP instances received at higher frequencies are discarded. If set to zero then no check is made on the frequency of LSP reception. This object follows the resettingTimer behavior.')
# --- isisSystemCounterTable --------------------------------------------------
# Read-only, system-wide IS-IS statistics, indexed by protocol instance and
# level (isisSysInstance, isisSysStatLevel). Auto-generated pysnmp objects
# (DC-ISIS-MIB); OIDs under 1.2.826.0.1.1578918.5.63.1.1.5.1.
isisSystemCounterTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1), )
if mibBuilder.loadTexts: isisSystemCounterTable.setStatus('current')
if mibBuilder.loadTexts: isisSystemCounterTable.setDescription('System wide counters for one instance of the IS-IS protocol on the system.')
isisSystemCounterEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisSysStatLevel"))
if mibBuilder.loadTexts: isisSystemCounterEntry.setStatus('current')
if mibBuilder.loadTexts: isisSystemCounterEntry.setDescription('System-wide IS-IS counters.')
# Index column: which IS-IS level (none/L1/L2) this row counts.
isisSysStatLevel = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("level1IS", 1), ("level2IS", 2))))
if mibBuilder.loadTexts: isisSysStatLevel.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLevel.setDescription('The level that this entry describes.')
isisSysStatCorrLSPs = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 2), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatCorrLSPs.setReference('{ISIS.aoi corruptedLSPsDetected (19)}')
if mibBuilder.loadTexts: isisSysStatCorrLSPs.setStatus('current')
if mibBuilder.loadTexts: isisSysStatCorrLSPs.setDescription('Number of corrupted in-memory LSPs detected. LSPs received from the wire with a bad checksum are silently dropped and not counted. LSPs received from the wire with parse errors are counted by isisSysStatLSPError.')
isisSysStatAuthTypeFails = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 3), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatAuthTypeFails.setStatus('current')
if mibBuilder.loadTexts: isisSysStatAuthTypeFails.setDescription('The number of authentication type mismatches recognized by this instance of the protocol.')
isisSysStatAuthFails = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 4), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatAuthFails.setStatus('current')
if mibBuilder.loadTexts: isisSysStatAuthFails.setDescription('The number of authentication failures recognized by this instance of the protocol.')
isisSysStatLSPDbaseOloads = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPDbaseOloads.setReference('{ISIS.aoi lSPL1DatabaseOverloads (20)}')
if mibBuilder.loadTexts: isisSysStatLSPDbaseOloads.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPDbaseOloads.setDescription('Number of times the LSP database has become overloaded.')
isisSysStatManAddrDropFromAreas = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatManAddrDropFromAreas.setReference('{ISIS.aoi manualAddressesDroppedFromArea (21)}')
if mibBuilder.loadTexts: isisSysStatManAddrDropFromAreas.setStatus('current')
if mibBuilder.loadTexts: isisSysStatManAddrDropFromAreas.setDescription('Number of times a manual address has been dropped from the area.')
isisSysStatAttmptToExMaxSeqNums = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatAttmptToExMaxSeqNums.setReference('{ISIS.aoi attemptsToExceedmaximumSequenceNumber (22)}')
if mibBuilder.loadTexts: isisSysStatAttmptToExMaxSeqNums.setStatus('current')
if mibBuilder.loadTexts: isisSysStatAttmptToExMaxSeqNums.setDescription('Number of times the IS has attempted to exceed the maximum sequence number.')
isisSysStatSeqNumSkips = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatSeqNumSkips.setReference('{ISIS.aoi sequenceNumberSkips (23)}')
if mibBuilder.loadTexts: isisSysStatSeqNumSkips.setStatus('current')
if mibBuilder.loadTexts: isisSysStatSeqNumSkips.setDescription('Number of times a sequence number skip has occurred.')
isisSysStatOwnLSPPurges = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatOwnLSPPurges.setReference('{ISIS.aoi ownLSPPurges (24)}')
if mibBuilder.loadTexts: isisSysStatOwnLSPPurges.setStatus('current')
if mibBuilder.loadTexts: isisSysStatOwnLSPPurges.setDescription("Number of times a zero-aged copy of the system's own LSP is received from some other node.")
isisSysStatIDFieldLenMismatches = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 10), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatIDFieldLenMismatches.setReference('{ISIS.aoi iDFieldLengthMismatches (25)}')
if mibBuilder.loadTexts: isisSysStatIDFieldLenMismatches.setStatus('current')
if mibBuilder.loadTexts: isisSysStatIDFieldLenMismatches.setDescription('Number of times a PDU is received with a different value for ID field length to that of the receiving system.')
isisSysStatPartChanges = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPartChanges.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPartChanges.setDescription('Partition changes.')
isisSysStatSPFRuns = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatSPFRuns.setStatus('current')
if mibBuilder.loadTexts: isisSysStatSPFRuns.setDescription('Number of times we ran SPF at this level.')
# PDU parse-error and queueing counters (PM component).
isisSysStatLSPError = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPError.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPError.setDescription('The count of badly formed LSPs discarded by the PM component of DC-ISIS.')
isisSysStatCSNPError = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatCSNPError.setStatus('current')
if mibBuilder.loadTexts: isisSysStatCSNPError.setDescription('The count of badly formed CSNPs discarded by the PM component of DC-ISIS.')
isisSysStatPSNPError = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPSNPError.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPSNPError.setDescription('The count of badly formed PSNPs discarded by the PM component of DC-ISIS.')
isisSysStatLSPQueueLen = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPQueueLen.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPQueueLen.setDescription('The count of received LSPs that are queued internally in the PM component of DC-ISIS, awaiting processing.')
isisSysStatFragsRebuilt = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatFragsRebuilt.setStatus('current')
if mibBuilder.loadTexts: isisSysStatFragsRebuilt.setDescription('Number of LSP fragments that the local system has generated.')
isisSysStatLSPRexmits = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPRexmits.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPRexmits.setDescription('Number of LSPs that have been retransmitted.')
isisSysStatLSPRegens = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPRegens.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPRegens.setDescription('Number of LSPs that have been regenerated. An LSP is regenerated when it is nearing the end of its lifetime and it has not changed.')
# Purge accounting; note column index 22 is absent from this view (skips
# from 21 to 23) — presumably deprecated/removed in the MIB; TODO confirm.
isisSysStatPurgesInitiated = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesInitiated.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesInitiated.setDescription('Number of LSP purges that have been initiated by this instance of IS-IS.')
isisSysStatLSPCount = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatLSPCount.setStatus('current')
if mibBuilder.loadTexts: isisSysStatLSPCount.setDescription('The number of LSPs in the database at this level.')
isisSysStatPurgesIniLocal = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesIniLocal.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesIniLocal.setDescription('Number of LSP purges initiated by this instance of IS-IS, of LSPs that were originated locally. This field behaves like an SNMP counter. It only increases, never decreases, and is subject to wrap. The absolute value of an SNMP counter has no meaning. Applications use the difference between counter values at different times to display count changes. The absolute value of this counter does have meaning, until it first wraps.')
isisSysStatPurgesIniRemote = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesIniRemote.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesIniRemote.setDescription('Number of LSP purges initiated by this instance of IS-IS, of LSPs that were originated remotely. This field behaves like an SNMP counter. It only increases, never decreases, and is subject to wrap. The absolute value of an SNMP counter has no meaning. Applications use the difference between counter values at different times to display count changes. The absolute value of this counter does have meaning, until it first wraps.')
isisSysStatPurgesIniRemSNP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesIniRemSNP.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesIniRemSNP.setDescription('Number of LSP purges initiated by this instance of IS-IS, of LSPs that were originated remotely, due to receiving a SNP. This field behaves like an SNMP counter. It only increases, never decreases, and is subject to wrap. The absolute value of an SNMP counter has no meaning. Applications use the difference between counter values at different times to display count changes. The absolute value of this counter does have meaning, until it first wraps.')
isisSysStatPurgesIniRemExp = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesIniRemExp.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesIniRemExp.setDescription('Number of LSP purges initiated by this instance of IS-IS, of LSPs that were originated remotely, because they have expired. This field behaves like an SNMP counter. It only increases, never decreases, and is subject to wrap. The absolute value of an SNMP counter has no meaning. Applications use the difference between counter values at different times to display count changes. The absolute value of this counter does have meaning, until it first wraps.')
isisSysStatPurgesIniRemPrs = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSysStatPurgesIniRemPrs.setStatus('current')
if mibBuilder.loadTexts: isisSysStatPurgesIniRemPrs.setDescription('Number of LSP purges initiated by this instance of IS-IS, of LSPs that were originated remotely, due to peer restart. This field behaves like an SNMP counter. It only increases, never decreases, and is subject to wrap. The absolute value of an SNMP counter has no meaning. Applications use the difference between counter values at different times to display count changes. The absolute value of this counter does have meaning, until it first wraps.')
# --- isisCircuitCounterTable -------------------------------------------------
# Read-only, per-circuit IS-IS statistics, indexed by (isisSysInstance,
# isisCircIndex, isisCircuitType). Auto-generated pysnmp objects
# (DC-ISIS-MIB); OIDs under 1.2.826.0.1.1578918.5.63.1.1.5.2.
isisCircuitCounterTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2), )
if mibBuilder.loadTexts: isisCircuitCounterTable.setStatus('current')
if mibBuilder.loadTexts: isisCircuitCounterTable.setDescription('Circuit specific counters for this Intermediate System.')
isisCircuitCounterEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisCircuitType"))
if mibBuilder.loadTexts: isisCircuitCounterEntry.setStatus('current')
if mibBuilder.loadTexts: isisCircuitCounterEntry.setDescription('An isisCircuitCounterEntry exists for each circuit used by Integrated IS-IS on this system.')
# Index column: LAN L1, LAN L2, or point-to-point (p2p counts are combined
# across levels, per the description).
isisCircuitType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("lanlevel1", 1), ("lanlevel2", 2), ("p2pcircuit", 3))))
if mibBuilder.loadTexts: isisCircuitType.setStatus('current')
if mibBuilder.loadTexts: isisCircuitType.setDescription('What type of circuit saw these counts? The point to point Hello PDU includes both L1 and L2, and ISs form a single adjacency on point to point links. Thus we combine counts on point to point links into one group.')
isisCircAdjChanges = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircAdjChanges.setReference('{ISIS.aoi changesInAdjacencyState (40)}')
if mibBuilder.loadTexts: isisCircAdjChanges.setStatus('current')
if mibBuilder.loadTexts: isisCircAdjChanges.setDescription('The number of times an adjacency state change has occurred on this circuit.')
isisCircNumAdj = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircNumAdj.setReference('{ISIS.aoi changesInAdjacencyState (40)}')
if mibBuilder.loadTexts: isisCircNumAdj.setStatus('current')
if mibBuilder.loadTexts: isisCircNumAdj.setDescription('The number of adjacencies on this circuit.')
isisCircInitFails = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircInitFails.setStatus('current')
if mibBuilder.loadTexts: isisCircInitFails.setDescription('The number of times initialization of this circuit has failed. This counts events such as PPP NCP failures. Failures to form an adjacency are counted by isisCircRejAdjs.')
isisCircRejAdjs = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircRejAdjs.setReference('{ISIS.aoi rejectedAdjacencies (42)}')
if mibBuilder.loadTexts: isisCircRejAdjs.setStatus('current')
if mibBuilder.loadTexts: isisCircRejAdjs.setDescription('The number of times an adjacency has been rejected on this circuit.')
# Per-circuit PDU mismatch / authentication failure counters.
isisCircIDFieldLenMismatches = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 6), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircIDFieldLenMismatches.setReference('{ISIS.aoi iDFieldLengthMismatches (25)}')
if mibBuilder.loadTexts: isisCircIDFieldLenMismatches.setStatus('current')
if mibBuilder.loadTexts: isisCircIDFieldLenMismatches.setDescription('The number of times an IS-IS control PDU with an ID field length different to that for this system has been received.')
isisCircMaxAreaAddrMismatches = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircMaxAreaAddrMismatches.setReference('{ISIS.aoi iDFieldLengthMismatches (25)}')
if mibBuilder.loadTexts: isisCircMaxAreaAddrMismatches.setStatus('current')
if mibBuilder.loadTexts: isisCircMaxAreaAddrMismatches.setDescription('The number of times an IS-IS control PDU with a max area address field different to that for this system has been received.')
isisCircAuthTypeFails = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircAuthTypeFails.setStatus('current')
if mibBuilder.loadTexts: isisCircAuthTypeFails.setDescription('The number of times an IS-IS control PDU with an auth type field different to that for this system has been received.')
isisCircAuthFails = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircAuthFails.setStatus('current')
if mibBuilder.loadTexts: isisCircAuthFails.setDescription('The number of times an IS-IS control PDU with the correct auth type has failed to pass authentication validation.')
isisCircLANDesISChanges = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisCircLANDesISChanges.setStatus('current')
if mibBuilder.loadTexts: isisCircLANDesISChanges.setDescription('The number of times the Designated IS has changed on this circuit at this level. If the circuit is point to point, this count is zero.')
# --- isisPacketCounterTable --------------------------------------------------
# Read-only PDU traffic counters per (instance, circuit, level, direction).
# Auto-generated pysnmp objects (DC-ISIS-MIB); OIDs under
# 1.2.826.0.1.1578918.5.63.1.1.5.3.
isisPacketCounterTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3), )
if mibBuilder.loadTexts: isisPacketCounterTable.setStatus('current')
if mibBuilder.loadTexts: isisPacketCounterTable.setDescription('Information about IS-IS protocol traffic at one level on one circuit in one direction.')
isisPacketCounterEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisPacketCountLevel"), (0, "DC-ISIS-MIB", "isisPacketCountDirection"))
if mibBuilder.loadTexts: isisPacketCounterEntry.setStatus('current')
if mibBuilder.loadTexts: isisPacketCounterEntry.setDescription('Information about IS-IS protocol traffic at one level on one circuit in one direction')
# Index columns: level (none/L1/L2) and direction (sending/receiving).
isisPacketCountLevel = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("level1", 1), ("level2", 2))))
if mibBuilder.loadTexts: isisPacketCountLevel.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountLevel.setDescription('The level at which these PDU counts have been collected.')
isisPacketCountDirection = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sending", 1), ("receiving", 2))))
if mibBuilder.loadTexts: isisPacketCountDirection.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountDirection.setDescription('Were we sending or receiving these PDUs?')
# Counts by PDU type.
isisPacketCountIIHello = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 3), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountIIHello.setReference('{ISIS.aoi iSISControlPDUsSent (43)}')
if mibBuilder.loadTexts: isisPacketCountIIHello.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountIIHello.setDescription('The number of IS-IS Hello PDUs seen in this direction at this level. Point-to-Point IIH PDUs are counted at the lowest enabled level: at L1 on L1 or L1L2 circuits, and at L2 otherwise.')
isisPacketCountISHello = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 4), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountISHello.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountISHello.setDescription('The number of ES-IS Hello PDUs seen in this direction. ISH PDUs are counted at the lowest enabled level: at L1 on L1 or L1L2 circuits, and at L2 otherwise.')
isisPacketCountESHello = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 5), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountESHello.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountESHello.setDescription('The number of ES Hello PDUs seen in this direction. ESH PDUs are counted at the lowest enabled level: at L1 on L1 or L1L2 circuits, and at L2 otherwise.')
isisPacketCountLSP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 6), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountLSP.setReference('{ISIS.aoi iSISControlPDUsSent (43)}')
if mibBuilder.loadTexts: isisPacketCountLSP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountLSP.setDescription('The number of IS-IS LSPs seen in this direction at this level.')
isisPacketCountCSNP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 7), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountCSNP.setReference('{ISIS.aoi iSISControlPDUsSent (43)}')
if mibBuilder.loadTexts: isisPacketCountCSNP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountCSNP.setDescription('The number of IS-IS CSNPs seen in this direction at this level.')
isisPacketCountPSNP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 8), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountPSNP.setReference('{ISIS.aoi iSISControlPDUsSent (43)}')
if mibBuilder.loadTexts: isisPacketCountPSNP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountPSNP.setDescription('The number of IS-IS PSNPs seen in this direction at this level.')
isisPacketCountUnknown = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 9), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountUnknown.setReference('{ISIS.aoi iSISControlPDUsSent (43)}')
if mibBuilder.loadTexts: isisPacketCountUnknown.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountUnknown.setDescription('The number of unknown IS-IS PDUs seen at this level.')
# Discard counters (receive direction only; always zero for outgoing).
isisPacketCountDiscardedIIH = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 10), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountDiscardedIIH.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountDiscardedIIH.setDescription('The number of IIH PDUs that have been discarded. This field is always zero for outgoing PDUs.')
isisPacketCountDiscardedLSP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 11), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountDiscardedLSP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountDiscardedLSP.setDescription('The number of LSP PDUs that have been discarded by the SDC component of DC-ISIS. This field is always zero for outgoing PDUs.')
isisPacketCountDiscardedCSNP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 12), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountDiscardedCSNP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountDiscardedCSNP.setDescription('The number of CSNP PDUs that have been discarded by the SDC component of DC-ISIS. This field is always zero for outgoing PDUs.')
isisPacketCountDiscardedPSNP = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 5, 3, 1, 13), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPacketCountDiscardedPSNP.setStatus('current')
if mibBuilder.loadTexts: isisPacketCountDiscardedPSNP.setDescription('The number of PSNP PDUs that have been discarded by the SDC component of DC-ISIS. This field is always zero for outgoing PDUs.')
# --- isisISAdjTable ----------------------------------------------------------
# Adjacencies to neighboring Intermediate Systems, indexed by
# (isisSysInstance, isisCircIndex, isisISAdjIndex). Auto-generated pysnmp
# objects (DC-ISIS-MIB); OIDs under 1.2.826.0.1.1578918.5.63.1.1.6.1.
isisISAdjTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1), )
if mibBuilder.loadTexts: isisISAdjTable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjTable.setDescription('The table of adjacencies to Intermediate Systems.')
isisISAdjEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisISAdjIndex"))
if mibBuilder.loadTexts: isisISAdjEntry.setStatus('current')
if mibBuilder.loadTexts: isisISAdjEntry.setDescription('Each entry corresponds to one adjacency to an Intermediate System on this system.')
isisISAdjIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2000000000)))
if mibBuilder.loadTexts: isisISAdjIndex.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIndex.setDescription('A unique value identifying the IS adjacency from all other such adjacencies on this circuit. This value is automatically assigned by the system when the adjacency is created.')
isisISAdjState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("down", 1), ("initializing", 2), ("up", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjState.setReference('{ISIS.aoi adjacencyState (78)}')
if mibBuilder.loadTexts: isisISAdjState.setStatus('current')
if mibBuilder.loadTexts: isisISAdjState.setDescription('The state of the adjacency.')
# NOTE: the 3-way state deliberately uses the RFC 3373 on-the-wire encoding
# (up=0, init=1, down=2), so its numbering differs from isisISAdjState above.
isisISAdj3WayState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("up", 0), ("initializing", 1), ("down", 2), ("failed", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdj3WayState.setReference('{ RFC 3373 }')
if mibBuilder.loadTexts: isisISAdj3WayState.setStatus('current')
if mibBuilder.loadTexts: isisISAdj3WayState.setDescription('The 3Way state of the adjacency. These are picked to match the historical on-the-wire representation of the 3Way state, and are not intended to match isisISAdjState.')
isisISAdjNeighSNPAAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 4), OSINSAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighSNPAAddress.setReference('{ISIS.aoi neighbourSNPAAddress (79)}')
if mibBuilder.loadTexts: isisISAdjNeighSNPAAddress.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighSNPAAddress.setDescription('The SNPA address of the neighboring system.')
isisISAdjNeighSysType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("l1IntermediateSystem", 1), ("l2IntermediateSystem", 2), ("l1L2IntermediateSystem", 3), ("unknown", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighSysType.setReference('{ISIS.aoi neighbourSystemType (80)}')
if mibBuilder.loadTexts: isisISAdjNeighSysType.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighSysType.setDescription('The type of the neighboring system.')
isisISAdjNeighSysID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 6), SystemID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighSysID.setReference('{ISIS.aoi neighbourSystemIds (83)}')
if mibBuilder.loadTexts: isisISAdjNeighSysID.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighSysID.setDescription('The system ID of the neighboring Intermediate System.')
isisISAdjNbrExtendedCircID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNbrExtendedCircID.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNbrExtendedCircID.setDescription('The 4 byte Extended Circuit ID learned from the Neighbor during 3-way handshake, or 0.')
isisISAdjUsage = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("reserved", 0), ("level1", 1), ("level2", 2), ("level1and2", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjUsage.setReference('{ISIS.aoi adjacencyUsage (82)}')
if mibBuilder.loadTexts: isisISAdjUsage.setStatus('current')
if mibBuilder.loadTexts: isisISAdjUsage.setDescription('How is the adjacency used? On a point-to-point link, this might be level1and2, but on a LAN, the usage will be level1 on the adjacency between peers at L1, and level2 for the adjacency between peers at L2.')
isisISAdjHoldTimer = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 9), Unsigned16TC().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjHoldTimer.setReference('{ISIS.aoi holdingTimer (85)}')
if mibBuilder.loadTexts: isisISAdjHoldTimer.setStatus('current')
if mibBuilder.loadTexts: isisISAdjHoldTimer.setDescription('The holding time in seconds for this adjacency. This value is based on received IIH PDUs and the elapsed time since receipt.')
isisISAdjNeighPriority = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 10), ISPriority()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighPriority.setReference('{ISIS.aoi lANPriority (86)}')
if mibBuilder.loadTexts: isisISAdjNeighPriority.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighPriority.setDescription('Priority of the neighboring Intermediate System for becoming the Designated Intermediate System.')
isisISAdjLastUpTime = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 11), TimeTicks()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjLastUpTime.setStatus('current')
if mibBuilder.loadTexts: isisISAdjLastUpTime.setDescription("If the isisISAdjState is in state 'up', the value of sysUpTime when the adjacency most recently entered the state 'up', or 0 if it has never been in state 'up'.")
isisISAdjRestartCapable = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 12), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjRestartCapable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjRestartCapable.setDescription("Does the neighbor support restart signalling? This object is set to 'true' if the peer includes the restart TLV in IIH PDUs.")
isisISAdjPeerRestartState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notRestarting", 1), ("restartingNoHelp", 2), ("helpingRestart", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjPeerRestartState.setStatus('current')
if mibBuilder.loadTexts: isisISAdjPeerRestartState.setDescription("Is the peer currently restarting? Set to 'notRestarting' if the neighbor is not restarting (is not including a restart TLV in the IIH, or is not setting the RR bit). Set to 'restartingNoHelp' if the neighbor is restarting (is including a restart TLV with the RR bit in IIH PDUs), but the local node is not helping the restart. Set to 'helpingRestart' if the neighbor is restarting and the local node is helping the restart.")
isisISAdjSuppressed = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 14), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjSuppressed.setStatus('current')
if mibBuilder.loadTexts: isisISAdjSuppressed.setDescription("Has the peer requested that the adjacency be suppressed? If set to 'true', the adjacency will not be added to the local LSP. This object is set to 'true' if the peer sets the SA bit in the restart TLV in IIH PDUs.")
isisISAdjNeighLanID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighLanID.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighLanID.setDescription('On a broadcast circuit, the LAN ID reported by the neighbor for the Designated Intermediate System on this circuit at this level. For a non-broadcast circuit, or if, for any reason, the neighbor is not partaking in the relevant Designated Intermediate System election process, then the value returned is the zero length OCTET STRING.')
isisISAdjNeighHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighHostname.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighHostname.setDescription("The hostname corresponding to isisISAdjNeighSysID. This is the null string if isisSdEntMapHostnames is 'false' or if no hostname is known.")
isisISAdjNeighLanIDHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 1, 1, 17), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjNeighLanIDHostname.setStatus('current')
if mibBuilder.loadTexts: isisISAdjNeighLanIDHostname.setDescription("The hostname corresponding to the system ID part of the isisISAdjNeighLanID object. This is the null string if the isisISAdjNeighLanID object is null, if isisSdEntMapHostnames is 'false', or if no hostname is known.")
# --- isisISAdjMtSupportedTable ---
# Topologies supported by neighboring Intermediate Systems, as reported
# in received IIH PDUs. Indexed by (instance, circuit, adjacency, MT id).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisISAdjMtSupportedTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 5), )
if mibBuilder.loadTexts: isisISAdjMtSupportedTable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjMtSupportedTable.setDescription('This table contains the set of topologies supported by neighboring Intermediate Systems as reported in received IIH PDUs.')
isisISAdjMtSupportedEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 5, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisISAdjIndex"), (0, "DC-ISIS-MIB", "isisISAdjMtSuppMtId"))
if mibBuilder.loadTexts: isisISAdjMtSupportedEntry.setStatus('current')
if mibBuilder.loadTexts: isisISAdjMtSupportedEntry.setDescription('Each entry contains one topology reported by a neighboring Intermediate System in its IIH PDUs.')
# Index column: no max-access set (not directly readable, per SMI
# convention for index-only columns).
isisISAdjMtSuppMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisISAdjMtSuppMtId.setStatus('current')
if mibBuilder.loadTexts: isisISAdjMtSuppMtId.setDescription('A topology that is supported on this adjacency.')
isisISAdjMtSuppLocalSupport = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 5, 1, 5), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjMtSuppLocalSupport.setStatus('current')
if mibBuilder.loadTexts: isisISAdjMtSuppLocalSupport.setDescription("Whether this topology is also supported locally on this circuit. The value is 'true' if there is a corresponding row in the isisMtCircStatusTable with isisMtCircStatusOperState set to 'operStatusUp'. In this case, IS-IS will advertise reachability to this neighbor in this topology.")
# --- isisISAdjAreaAddrTable ---
# Area Addresses of neighboring Intermediate Systems, as reported in
# received IIH PDUs. Indexed by (instance, circuit, adjacency, addr index).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisISAdjAreaAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 2), )
if mibBuilder.loadTexts: isisISAdjAreaAddrTable.setReference('{ISIS.aoi areaAddressesOfNeighbour (84)}')
if mibBuilder.loadTexts: isisISAdjAreaAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjAreaAddrTable.setDescription('This table contains the set of Area Addresses of neighboring Intermediate Systems as reported in received IIH PDUs.')
isisISAdjAreaAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisISAdjIndex"), (0, "DC-ISIS-MIB", "isisISAdjAreaAddrIndex"))
if mibBuilder.loadTexts: isisISAdjAreaAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisISAdjAreaAddrEntry.setDescription('Each entry contains one Area Address reported by a neighboring Intermediate System in its IIH PDUs.')
isisISAdjAreaAddrIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2000000000)))
if mibBuilder.loadTexts: isisISAdjAreaAddrIndex.setStatus('current')
if mibBuilder.loadTexts: isisISAdjAreaAddrIndex.setDescription('An index for the areas associated with one neighbor. This provides a simple way to walk the table.')
isisISAdjAreaAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 2, 1, 2), OSINSAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjAreaAddress.setStatus('current')
if mibBuilder.loadTexts: isisISAdjAreaAddress.setDescription('One Area Address as reported in IIH PDUs received from the neighbor.')
# --- isisISAdjIPAddrTable ---
# IP Addresses of neighboring Intermediate Systems, as reported in
# received IIH PDUs. Addresses are constrained to 4 octets (IPv4) or
# 16 octets (IPv6). Indexed by (instance, circuit, adjacency, addr index).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisISAdjIPAddrTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 3), )
if mibBuilder.loadTexts: isisISAdjIPAddrTable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIPAddrTable.setDescription("This table contains the set of IP Addresses of neighboring Intermediate Systems as reported in received IIH PDUs. If the isisSysRestrictLanAdjsToSubnet object in isisSysTable is set to 'true' then for an IPv4 broadcast circuit, only IP addresses on the same subnet as the local circuit are included in this table.")
isisISAdjIPAddrEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 3, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisISAdjIndex"), (0, "DC-ISIS-MIB", "isisISAdjIPAddrIndex"))
if mibBuilder.loadTexts: isisISAdjIPAddrEntry.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIPAddrEntry.setDescription('Each entry contains one IP Address reported by a neighboring Intermediate System in its IIH PDUs.')
isisISAdjIPAddrIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2000000000)))
if mibBuilder.loadTexts: isisISAdjIPAddrIndex.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIPAddrIndex.setDescription('An index to this table which identifies the IP addresss to which this entry belongs.')
isisISAdjIPAddrType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 3, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjIPAddrType.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIPAddrType.setDescription('The type of one IP Address as reported in IIH PDUs received from the neighbor.')
isisISAdjIPAddrAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 3, 1, 3), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjIPAddrAddress.setStatus('current')
if mibBuilder.loadTexts: isisISAdjIPAddrAddress.setDescription('One IP Address as reported in IIH PDUs received from the neighbor.')
# --- isisISAdjProtSuppTable ---
# Protocols supported by neighboring Intermediate Systems, as reported
# in received IIH PDUs, plus local usability and BFD session state.
# Indexed by (instance, circuit, adjacency, protocol).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisISAdjProtSuppTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 4), )
if mibBuilder.loadTexts: isisISAdjProtSuppTable.setStatus('current')
if mibBuilder.loadTexts: isisISAdjProtSuppTable.setDescription('This table contains the set of protocols supported by neighboring Intermediate Systems as reported in received IIH PDUs.')
isisISAdjProtSuppEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 4, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisCircIndex"), (0, "DC-ISIS-MIB", "isisISAdjIndex"), (0, "DC-ISIS-MIB", "isisISAdjProtSuppProtocol"))
if mibBuilder.loadTexts: isisISAdjProtSuppEntry.setStatus('current')
if mibBuilder.loadTexts: isisISAdjProtSuppEntry.setDescription('Each entry contains one protocol supported by a neighboring Intermediate System as reported in its IIH PDUs.')
isisISAdjProtSuppProtocol = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 4, 1, 1), SupportedProtocol())
if mibBuilder.loadTexts: isisISAdjProtSuppProtocol.setStatus('current')
if mibBuilder.loadTexts: isisISAdjProtSuppProtocol.setDescription('One supported protocol as reported in IIH PDUs received from the neighbor.')
isisISAdjProtSuppLocalSupport = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 4, 1, 5), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjProtSuppLocalSupport.setStatus('current')
if mibBuilder.loadTexts: isisISAdjProtSuppLocalSupport.setDescription("Whether local configuration allows use of this protocol to this neighbor. The value 'false' indicates that the protocol is unusable.")
isisISAdjProtSuppBfdStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 6, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("notRequired", 0), ("active", 1), ("inactive", 2), ("adminDown", 3), ("noContact", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisISAdjProtSuppBfdStatus.setStatus('current')
if mibBuilder.loadTexts: isisISAdjProtSuppBfdStatus.setDescription('BFD session state for this protocol. The state inactive(2) indicates that the protocol is unusable for forwarding because a BFD session cannot be established or has failed.')
# --- isisIPRATable ---
# IP Reachable Addresses (networks/subnets/hosts) learned automatically.
# Read-only in this implementation; one row per equal-cost next hop.
# Indexed by (instance, dest type, dest, prefix len, next-hop index).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisIPRATable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1), )
if mibBuilder.loadTexts: isisIPRATable.setStatus('current')
if mibBuilder.loadTexts: isisIPRATable.setDescription('The table of IP Reachable Addresses to networks, subnetworks or hosts, learned automatically. DC-ISIS does not support the manual addition of entries using this table. This table is read-only. In order to specify reachable addresses manually, configure a static route in the Address Family Manager, and configure the AFM to pass the static route into DC-ISIS. In DC-RTM, this can be done using the static routes table (rtmStaticRtTable), and the redistribution table (rtmRedistTable).')
isisIPRAEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisIPRADestType"), (0, "DC-ISIS-MIB", "isisIPRADest"), (0, "DC-ISIS-MIB", "isisIPRADestPrefixLen"), (0, "DC-ISIS-MIB", "isisIPRANextHopIndex"))
if mibBuilder.loadTexts: isisIPRAEntry.setStatus('current')
if mibBuilder.loadTexts: isisIPRAEntry.setDescription('Each entry defines an IP Reachable Address to a network, subnetwork or host. Each IP Reachable Address may have multiple entries in the table, one for each equal cost path to the reachable address.')
isisIPRADestType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 1), InetAddressType())
if mibBuilder.loadTexts: isisIPRADestType.setStatus('current')
if mibBuilder.loadTexts: isisIPRADestType.setDescription('The type of this IP Reachable Address.')
# Destination: 4-octet IPv4 or 16-octet IPv6 address.
isisIPRADest = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 2), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: isisIPRADest.setStatus('current')
if mibBuilder.loadTexts: isisIPRADest.setDescription('The destination of this IP Reachable Address. This is either a network address, subnetwork address or host address.')
isisIPRADestPrefixLen = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 3), InetAddressPrefixLength().subtype(subtypeSpec=ValueRangeConstraint(0, 128)))
if mibBuilder.loadTexts: isisIPRADestPrefixLen.setStatus('current')
if mibBuilder.loadTexts: isisIPRADestPrefixLen.setDescription('The length of the IP Netmask for Reachability Address.')
isisIPRANextHopIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: isisIPRANextHopIndex.setStatus('current')
if mibBuilder.loadTexts: isisIPRANextHopIndex.setDescription('Index of next hop. Used when there are multiple Equal Cost Multipath alternatives for the same destination.')
isisIPRANextHopType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 5), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRANextHopType.setStatus('current')
if mibBuilder.loadTexts: isisIPRANextHopType.setDescription('The type of the IP next hop address. For the DC-ISIS implementation, this field is not stored and is therefore not returned in GETs for this MIB.')
isisIPRANextHop = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 6), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRANextHop.setStatus('current')
if mibBuilder.loadTexts: isisIPRANextHop.setDescription('The IP next hop to this destination. For the DC-ISIS implementation, this field is not stored and is therefore not returned in GETs for this MIB.')
isisIPRAType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("automatic", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAType.setStatus('current')
if mibBuilder.loadTexts: isisIPRAType.setDescription('The type of this IP Reachable Address. Those of type manual are created by the network manager. Those of type automatic are created through propagation of routing information from another routing protocol. DC-ISIS only supports automatic entries in this table. These are learnt from an Address Family Manager (for example, DC-RTM), or from the I3 stub.')
isisIPRAExistState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 8), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAExistState.setStatus('current')
if mibBuilder.loadTexts: isisIPRAExistState.setDescription('The state of this IP Reachable Address. This object follows the ExistenceState behavior.')
isisIPRAAdminState = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 9), AdminState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAAdminState.setStatus('current')
if mibBuilder.loadTexts: isisIPRAAdminState.setDescription('The administrative state of the IP Reachable Address. This object follows the AdminState behavior.')
isisIPRAMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 10), DefaultMetric()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAMetric.setStatus('current')
if mibBuilder.loadTexts: isisIPRAMetric.setDescription('The metric value for reaching the specified destination over this circuit.')
isisIPRAMetricType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 11), MetricType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAMetricType.setStatus('current')
if mibBuilder.loadTexts: isisIPRAMetricType.setDescription('Indicates whether the metric is internal or external.')
isisIPRAFullMetric = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 12), FullMetric()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAFullMetric.setStatus('current')
if mibBuilder.loadTexts: isisIPRAFullMetric.setDescription('The wide metric value for reaching the specified destination over this circuit.')
isisIPRASNPAAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 13), OSINSAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRASNPAAddress.setStatus('current')
if mibBuilder.loadTexts: isisIPRASNPAAddress.setDescription('The SNPA Address to which a PDU may be forwarded in order to reach a destination which matches this IP Reachable Address. DC-ISIS does not support this field.')
isisIPRASourceType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("static", 1), ("direct", 2), ("ospfv2", 3), ("ospfv3", 4), ("isis", 5), ("rip", 6), ("igrp", 7), ("eigrp", 8), ("bgp", 9), ("other", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRASourceType.setStatus('current')
if mibBuilder.loadTexts: isisIPRASourceType.setDescription('The origin of this route.')
# Note: sub-identifier jumps from 14 to 16 here; 15 is not defined in
# this chunk — presumably reserved/obsoleted in the MIB source. TODO confirm.
isisIPRAMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 8, 1, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisIPRAMtId.setStatus('current')
if mibBuilder.loadTexts: isisIPRAMtId.setDescription('The topology that this reachable address is in.')
# --- isisLSPSummaryTable ---
# Summary (header) information for each LSP stored in the system:
# sequence number, purge flag, checksum, remaining lifetime, PDU length,
# attribute flags, and the hostname for the LSP's system ID.
# Indexed by (instance, level, LSP ID).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisLSPSummaryTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1), )
if mibBuilder.loadTexts: isisLSPSummaryTable.setStatus('current')
if mibBuilder.loadTexts: isisLSPSummaryTable.setDescription('The table of LSP Headers.')
isisLSPSummaryEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisLSPLevel"), (0, "DC-ISIS-MIB", "isisLSPID"))
if mibBuilder.loadTexts: isisLSPSummaryEntry.setStatus('current')
if mibBuilder.loadTexts: isisLSPSummaryEntry.setDescription('Each entry provides a summary describing an LSP currently stored in the system.')
isisLSPLevel = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 1), ISLevel())
if mibBuilder.loadTexts: isisLSPLevel.setStatus('current')
if mibBuilder.loadTexts: isisLSPLevel.setDescription('At which level does this LSP appear?')
isisLSPID = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 2), LinkStatePDUID())
if mibBuilder.loadTexts: isisLSPID.setStatus('current')
if mibBuilder.loadTexts: isisLSPID.setDescription('The 8 byte LSP ID, consisting of the SystemID, Circuit ID, and Fragment Number.')
isisLSPSeq = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPSeq.setStatus('current')
if mibBuilder.loadTexts: isisLSPSeq.setDescription('The sequence number for this LSP.')
isisLSPZeroLife = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPZeroLife.setStatus('current')
if mibBuilder.loadTexts: isisLSPZeroLife.setDescription('Is this LSP being purged by this System?')
isisLSPChecksum = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 5), Unsigned16TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPChecksum.setStatus('current')
if mibBuilder.loadTexts: isisLSPChecksum.setDescription('The 16 bit Fletcher Checksum.')
isisLSPLifetimeRemain = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 6), Unsigned16TC()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPLifetimeRemain.setStatus('current')
if mibBuilder.loadTexts: isisLSPLifetimeRemain.setDescription("The remaining lifetime in seconds for this LSP. For a current LSP (isisLSPZeroLife is 'false'), this indicates the time remaining before the LSP will expire. For an LSP being purged from the system (isisLSPZeroLife is 'true'), the LSP remains in the database for ZeroAgeLifetime, and this will indicate the time remaining before final deletion of the LSP.")
isisLSPPDULength = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 7), Unsigned16TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPPDULength.setStatus('current')
if mibBuilder.loadTexts: isisLSPPDULength.setDescription('The length of this LSP.')
isisLSPAttributes = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 8), Unsigned8TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPAttributes.setStatus('current')
if mibBuilder.loadTexts: isisLSPAttributes.setDescription('Flags carried by the LSP.')
isisLSPIDHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 1, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPIDHostname.setStatus('current')
if mibBuilder.loadTexts: isisLSPIDHostname.setDescription('The hostname corresponding to the system ID part of the isisLSPID object. This is the null string if no hostname is known.')
# --- isisLSPTLVTable ---
# Per-TLV contents of each LSP (type, length, raw value), indexed by
# (instance, level, LSP ID, TLV index).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisLSPTLVTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2), )
if mibBuilder.loadTexts: isisLSPTLVTable.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVTable.setDescription('The contents of each LSP.')
isisLSPTLVEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisLSPLevel"), (0, "DC-ISIS-MIB", "isisLSPID"), (0, "DC-ISIS-MIB", "isisLSPTLVIndex"))
if mibBuilder.loadTexts: isisLSPTLVEntry.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVEntry.setDescription('Each entry describes an LSP current stored in the system.')
isisLSPTLVIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 1), Unsigned32())
if mibBuilder.loadTexts: isisLSPTLVIndex.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVIndex.setDescription('The index of this TLV in the LSP. The first TLV has index 1 and the Nth TLV has an index of N.')
isisLSPTLVSeq = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVSeq.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVSeq.setDescription('The sequence number for this LSP.')
isisLSPTLVChecksum = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 3), Unsigned16TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVChecksum.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVChecksum.setDescription('The 16 bit Fletcher Checksum.')
isisLSPTLVType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 4), Unsigned8TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVType.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVType.setDescription('The type of this TLV.')
isisLSPTLVLen = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 5), Unsigned8TC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVLen.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVLen.setDescription('The length of this TLV.')
isisLSPTLVValue = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVValue.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVValue.setDescription('The value of this TLV.')
isisLSPTLVHostname = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 9, 2, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisLSPTLVHostname.setStatus('current')
if mibBuilder.loadTexts: isisLSPTLVHostname.setDescription('The hostname corresponding to the system ID part of the isisLSPID object. This is the null string if no hostname is known.')
# --- isisPmSjTable ---
# Protocol Manager slave joins: one row per join on which the Protocol
# Manager acts as slave. Indexed by (instance, interface, master entity).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
isisPmSjTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 1), )
if mibBuilder.loadTexts: isisPmSjTable.setStatus('current')
if mibBuilder.loadTexts: isisPmSjTable.setDescription('This table contains information on joins on which the Protocol Manager acts as a slave. Each row in this table represents one slave join in the Protocol Manager.')
isisPmSjEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisPmSjInterface"), (0, "DC-ISIS-MIB", "isisPmSjMasterEntity"))
if mibBuilder.loadTexts: isisPmSjEntry.setStatus('current')
if mibBuilder.loadTexts: isisPmSjEntry.setDescription('Represents a join for which the Protocol Manager is slave.')
isisPmSjInterface = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 1, 1, 1), IsisPmInterfaceId())
if mibBuilder.loadTexts: isisPmSjInterface.setStatus('current')
if mibBuilder.loadTexts: isisPmSjInterface.setDescription('Identifies the interface provided on this slave join.')
isisPmSjMasterEntity = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 1, 1, 2), Unsigned32())
if mibBuilder.loadTexts: isisPmSjMasterEntity.setStatus('current')
if mibBuilder.loadTexts: isisPmSjMasterEntity.setDescription('Index which uniquely identifies the master entity among other entities of the same component within this instance of DC-ISIS.')
isisPmSjJoinStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 1, 1, 3), IsisSjStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPmSjJoinStatus.setStatus('current')
if mibBuilder.loadTexts: isisPmSjJoinStatus.setDescription('The status of the slave join.')
# --- isisPmMjTable ---
# Protocol Manager master joins: which entities the Protocol Manager
# should join to as master. Routing-protocol joins are read-create
# (RowStatus/AdminStatus); other join types are read-only.
# Indexed by (instance, interface, slave entity, addr type, MT id).
# NOTE(review): generated pysnmp code — do not edit OID tuples by hand.
# This table's column definitions continue beyond this chunk.
isisPmMjTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2), )
if mibBuilder.loadTexts: isisPmMjTable.setStatus('current')
if mibBuilder.loadTexts: isisPmMjTable.setDescription('This table controls which entities the Protocol Manager should join to as master. Each join is represented by a row in this table. The status of each join is represented by a read-only object within each row. Only routing protocol joins may be configured in this MIB. Rows in this table for other join types are read only.')
isisPmMjEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisPmMjInterface"), (0, "DC-ISIS-MIB", "isisPmMjSlaveEntity"), (0, "DC-ISIS-MIB", "isisPmMjAddrType"), (0, "DC-ISIS-MIB", "isisPmMjMtId"))
if mibBuilder.loadTexts: isisPmMjEntry.setStatus('current')
if mibBuilder.loadTexts: isisPmMjEntry.setDescription('Represents a join for which the Protocol Manager is master.')
isisPmMjInterface = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 1), IsisPmInterfaceId())
if mibBuilder.loadTexts: isisPmMjInterface.setStatus('current')
if mibBuilder.loadTexts: isisPmMjInterface.setDescription('Identifies the interface required of this master join. This object follows the index behavior.')
isisPmMjSlaveEntity = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 2), Unsigned32())
if mibBuilder.loadTexts: isisPmMjSlaveEntity.setStatus('current')
if mibBuilder.loadTexts: isisPmMjSlaveEntity.setDescription('Identifies the slave entity to join with. This index is used in the join user data, to enable FTI-specific code within System Manager to select a suitable slave entity. This object follows the index behavior.')
isisPmMjAddrType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 3), IsisAddrType())
if mibBuilder.loadTexts: isisPmMjAddrType.setStatus('current')
if mibBuilder.loadTexts: isisPmMjAddrType.setDescription("Identifies the slave entity to join with. This is used along with the isisPmMjSlaveEntity (with address subtype 'none') in the join user data on an RPI join, to enable FTI-specific code within System Manager to select a suitable slave entity. The value is 'none' if isisPmMjInterface is not ifRtProtoInput. This object follows the index behavior.")
# Index column with sub-identifier 9, defined here out of numeric order
# because it is part of the table's INDEX clause.
isisPmMjMtId = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)))
if mibBuilder.loadTexts: isisPmMjMtId.setStatus('current')
if mibBuilder.loadTexts: isisPmMjMtId.setDescription('The topology that this join is for. Note that this only applies to RPI joins. If multi-topology IS-IS is not supported then this field should be set to zero. This object follows the index behavior.')
isisPmMjRowStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisPmMjRowStatus.setStatus('current')
if mibBuilder.loadTexts: isisPmMjRowStatus.setDescription('The row status for this master join, used to create and destroy table entries.')
# Default value for the admin status is adminStatusUp (via clone()).
isisPmMjAdminStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 5), IsisAdminStatus().clone('adminStatusUp')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisPmMjAdminStatus.setStatus('current')
if mibBuilder.loadTexts: isisPmMjAdminStatus.setDescription('The administrative status of this master join, used to start and stop the join.')
isisPmMjOperStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 6), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPmMjOperStatus.setStatus('current')
if mibBuilder.loadTexts: isisPmMjOperStatus.setDescription('The current operational status of this master join.')
isisPmMjJoinStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 7), IsisMjStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisPmMjJoinStatus.setStatus('current')
if mibBuilder.loadTexts: isisPmMjJoinStatus.setDescription('The status of the master join.')
isisPmMjSubAddrFamily = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisPmMjSubAddrFamily.setStatus('current')
if mibBuilder.loadTexts: isisPmMjSubAddrFamily.setDescription('Identifies the sub address family this join is for. Note that this only applies to RPI joins. The value is ignored if isisPmMjInterface is not ifRtProtoInput.')
# --- IGP shortcut tunnel table (isisIgpShortcutTable / isisShortcut*) ---
# Generated pysmi output: one read-only row per IGP shortcut tunnel
# interface known to DC-ISIS, indexed by (isisSysInstance, ifIndex).
# All data columns are "readonly" — the table is reporting-only.
isisIgpShortcutTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3), )
if mibBuilder.loadTexts: isisIgpShortcutTable.setStatus('current')
if mibBuilder.loadTexts: isisIgpShortcutTable.setDescription('The table of IGP shortcut tunnel interfaces known to DC-ISIS.')
isisIgpShortcutEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSysInstance"), (0, "DC-ISIS-MIB", "isisShortcutIfIndex"))
if mibBuilder.loadTexts: isisIgpShortcutEntry.setStatus('current')
if mibBuilder.loadTexts: isisIgpShortcutEntry.setDescription('Each entry corresponds to one IGP shortcut tunnel interface known to DC-ISIS on this system.')
isisShortcutIfIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: isisShortcutIfIndex.setStatus('current')
if mibBuilder.loadTexts: isisShortcutIfIndex.setDescription('The value of ifIndex for the IGP shortcut tunnel interface.')
isisShortcutRemoteAddressType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutRemoteAddressType.setStatus('current')
if mibBuilder.loadTexts: isisShortcutRemoteAddressType.setDescription('The Type of IP address for this remote address.')
# Size constraint 4 or 16 octets: IPv4 or IPv6 address forms only.
isisShortcutRemoteAddress = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 3), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutRemoteAddress.setStatus('current')
if mibBuilder.loadTexts: isisShortcutRemoteAddress.setDescription('The IP Address value for this remote address.')
isisShortcutMetricType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 4), IgpShortcutMetricType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutMetricType.setStatus('current')
if mibBuilder.loadTexts: isisShortcutMetricType.setDescription("The type of metric value assigned to this IGP shortcut. The type of metric value can be one of the following. - 'metricTypeAbsolute'; the metric defined by isisShortcutMetricValue is used as the cost of the IGP shortcut. - 'metricTypeRelative'; the metric value defined by isisShortcutMetricValue is added to the IGP-calculated cost of reaching the egress of the shortcut to determine the cost of the IGP shortcut. Note that, in this case, the metric value may be zero, positive or negative. Note also that a minimum value of 1 is defined for the result of the sum of the IGP-calculated cost and a relative metric value.")
isisShortcutMetricValue = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutMetricValue.setStatus('current')
if mibBuilder.loadTexts: isisShortcutMetricValue.setDescription("The metric assigned to this IGP shortcut. If isisShortcutMetricType is set to 'metricTypeAbsolute' then this field takes a value in the range of 1 - 16777215. If isisShortcutMetricType is set to 'metricTypeRelative' then this field takes a value in the range of -16777214 to 16777215.")
isisShortcutOperStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 6), IfOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutOperStatus.setStatus('current')
if mibBuilder.loadTexts: isisShortcutOperStatus.setDescription("The operational status of the IGP shortcut interface. The IGP shortcut interface is only usable by DC-ISIS if isisShortcutOperStatus is set to 'up'.")
isisShortcutPendingDeletion = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 11, 3, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisShortcutPendingDeletion.setStatus('current')
if mibBuilder.loadTexts: isisShortcutPendingDeletion.setDescription("If 'true' then the IGP shortcut interface has been deleted but has not yet been removed from DC-ISIS because it might still be in use as a TE-tunnel next hop.")
# --- SDC entity table (isisSdEntTable / isisSdEnt*) ---
# Generated pysmi output: one row per SDC entity, indexed by
# isisSdEntIndex. Configuration columns are "readcreate" (with defaults
# applied via .clone(...)); operational state is "readonly".
isisSdEntTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1), )
if mibBuilder.loadTexts: isisSdEntTable.setStatus('current')
if mibBuilder.loadTexts: isisSdEntTable.setDescription('Each entry in this table represents an SDC entity.')
isisSdEntEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSdEntIndex"))
if mibBuilder.loadTexts: isisSdEntEntry.setStatus('current')
if mibBuilder.loadTexts: isisSdEntEntry.setDescription('Each entry represents an SDC entity.')
isisSdEntIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: isisSdEntIndex.setStatus('current')
if mibBuilder.loadTexts: isisSdEntIndex.setDescription('Non-zero index which identifies an SDC entity.')
isisSdEntRowStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntRowStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdEntRowStatus.setDescription('The row status for this SDC Entity Table entry, used to create and destroy table entries.')
isisSdEntAdminStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 3), IsisAdminStatus().clone('adminStatusUp')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntAdminStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdEntAdminStatus.setDescription('The administrative status of this SDC entity, used to activate and deactivate SDC.')
isisSdEntOperStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 4), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSdEntOperStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdEntOperStatus.setDescription('The current operational status of this SDC entity.')
isisSdEntMapHostnames = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 5), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntMapHostnames.setStatus('current')
if mibBuilder.loadTexts: isisSdEntMapHostnames.setDescription("Should SDC map system IDs to hostnames? If 'true', SDC will fill in the isisISAdjNeighHostname and isisISAdjNeighLanIDHostname objects (if a hostname is known), and will map system IDs to hostnames in PD logs. Setting this object to 'true' will cause a small occupancy increase, because SDC will set up a database of mappings between the system IDs and the corresponding hostnames.")
isisSdEntAuthSNPs = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 6), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntAuthSNPs.setStatus('current')
if mibBuilder.loadTexts: isisSdEntAuthSNPs.setDescription("Should SDC authenticate SNP packets? If 'false', SDC will not check the authentication on SNP packets. If 'true' then it will. Setting this field is sometimes required for interoperability with other routers.")
isisSdEntBfdProviderIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 7), EntityIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntBfdProviderIndex.setStatus('current')
if mibBuilder.loadTexts: isisSdEntBfdProviderIndex.setDescription('The entity index for the Bidirectional Forwarding Detection (BFD) provider to which this SDC entity should join. Zero indicates that no BFD provider is available, or that SDC should not join to any BFD provider. This field follows the replaceOnlyWhileDisabled behavior.')
isisSdEntStopAdjDuplIDs = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 1, 1, 8), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdEntStopAdjDuplIDs.setStatus('current')
if mibBuilder.loadTexts: isisSdEntStopAdjDuplIDs.setDescription('Flag indicating whether this SDC should prevent an adjacency forming to a neighbor with the same System ID but a different SNPA address as an existing neighbor over the same circuit and level. This field can be changed while SDC is active. Doing so will not affect existing adjacencies, but will affect whether further adjacencies form.')
# --- SDC master-join table (isisSdMjTable / isisSdMj*) ---
# Generated pysmi output: one row per join for which an SDC entity is
# master, indexed by (isisSdEntIndex, isisSdMjInterface). Writable
# columns are "readcreate"; join/operational status is "readonly".
isisSdMjTable = MibTable((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2), )
if mibBuilder.loadTexts: isisSdMjTable.setStatus('current')
if mibBuilder.loadTexts: isisSdMjTable.setDescription('This table controls which entities an SDC entity should join to as master. Each join is represented by a row in this table. The status of each join is represented by a read-only object within each row.')
isisSdMjEntry = MibTableRow((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1), ).setIndexNames((0, "DC-ISIS-MIB", "isisSdEntIndex"), (0, "DC-ISIS-MIB", "isisSdMjInterface"))
if mibBuilder.loadTexts: isisSdMjEntry.setStatus('current')
if mibBuilder.loadTexts: isisSdMjEntry.setDescription('Represents a join for which this SDC entity is master.')
isisSdMjInterface = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 1), IsisSdInterfaceId())
if mibBuilder.loadTexts: isisSdMjInterface.setStatus('current')
if mibBuilder.loadTexts: isisSdMjInterface.setDescription('Identifies the interface required of this master join. Since an SDC only has a single join on any of its interfaces, this serves with the SDC entity index in uniquely identifying the join. This field follows the replaceOnlyWhileDisabled behavior.')
isisSdMjRowStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdMjRowStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdMjRowStatus.setDescription('The row status for this master join, used to create and destroy table entries.')
isisSdMjAdminStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 3), IsisAdminStatus().clone('adminStatusUp')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdMjAdminStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdMjAdminStatus.setDescription('The administrative status of this master join, used to start and stop the join.')
isisSdMjOperStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 4), IsisOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSdMjOperStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdMjOperStatus.setDescription('The current operational status of this master join.')
isisSdMjEntityIndex = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 5), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdMjEntityIndex.setStatus('current')
if mibBuilder.loadTexts: isisSdMjEntityIndex.setDescription('Identifies the slave entity to join with. This index (possibly along with isisSdMjEntityType - see below) is used in the join user data, to enable FTI-specific code within System Manager to select a suitable slave entity. This field follows the replaceOnlyWhileDisabled behavior.')
isisSdMjEntityType = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 6), IsisSdEntityType().clone('ifProtocolManager')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: isisSdMjEntityType.setStatus('current')
if mibBuilder.loadTexts: isisSdMjEntityType.setDescription('If isisSdMjInterface is ifSubnetDependent, this field indicates the type of entity to join with. This index along with isisSdMjEntityType is used in the join user data to enable FTI-specific code within System Manager to select a suitable slave entity. This field follows the replaceOnlyWhileDisabled behavior.')
isisSdMjJoinStatus = MibTableColumn((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 12, 2, 1, 7), IsisMjStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isisSdMjJoinStatus.setStatus('current')
if mibBuilder.loadTexts: isisSdMjJoinStatus.setDescription('The status of the master join.')
# --- Notification payload objects (isisNotificationEntry subtree) ---
# Generated pysmi output: MibScalar objects with max-access
# "accessiblefornotify" — they exist only to be carried as varbinds in
# the NotificationType definitions further below, never polled directly.
isisNotificationEntry = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1))
isisPduLspId = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 1), LinkStatePDUID()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisPduLspId.setStatus('current')
if mibBuilder.loadTexts: isisPduLspId.setDescription('An Octet String that uniquely identifies a Link State PDU.')
isisPduFragment = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 2), IsisPDUHeader()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisPduFragment.setStatus('current')
if mibBuilder.loadTexts: isisPduFragment.setDescription('Holds up to 64 initial bytes of a PDU that triggered the notification.')
isisPduFieldLen = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 3), Unsigned8TC()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisPduFieldLen.setStatus('current')
if mibBuilder.loadTexts: isisPduFieldLen.setDescription('Holds the System ID length reported in PDU we received.')
isisPduMaxAreaAddress = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 4), Unsigned8TC()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisPduMaxAreaAddress.setStatus('current')
if mibBuilder.loadTexts: isisPduMaxAreaAddress.setDescription('Holds the Max Area Addresses reported in a PDU we received.')
isisAdjState = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("down", 1), ("initializing", 2), ("up", 3), ("failed", 4)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisAdjState.setStatus('current')
if mibBuilder.loadTexts: isisAdjState.setDescription('The current state of an adjacency.')
isisErrorOffset = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 6), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisErrorOffset.setStatus('current')
if mibBuilder.loadTexts: isisErrorOffset.setDescription('An offset to a problem in a PDU. If the problem is a malformed TLV, this points to the begining of the TLV. If the problem is in the header, this points to the byte that is suspicious.')
isisErrorTLVType = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisErrorTLVType.setStatus('current')
if mibBuilder.loadTexts: isisErrorTLVType.setDescription('The type for a malformed TLV.')
isisNotificationSysInstance = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationSysInstance.setStatus('current')
if mibBuilder.loadTexts: isisNotificationSysInstance.setDescription('The unique identifier of the Integrated IS-IS instance for this notification.')
isisNotificationSysLevelIndex = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 9), IsisAdjLevel()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationSysLevelIndex.setStatus('current')
if mibBuilder.loadTexts: isisNotificationSysLevelIndex.setDescription('The system level for this notification.')
isisNotificationAreaAddress = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 10), OSINSAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationAreaAddress.setStatus('current')
if mibBuilder.loadTexts: isisNotificationAreaAddress.setDescription('An Area Address.')
isisNotificationISAdjIndex = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2000000000))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationISAdjIndex.setStatus('current')
if mibBuilder.loadTexts: isisNotificationISAdjIndex.setDescription('A unique value identifying the IS adjacency from all other such adjacencies on this circuit.')
isisNotificationDisState = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("local", 1), ("remote", 2), ("none", 3)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationDisState.setStatus('current')
if mibBuilder.loadTexts: isisNotificationDisState.setDescription('The current state of the DIS.')
isisNotificationAttachedState = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("attached", 1), ("notAttached", 2)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationAttachedState.setStatus('current')
if mibBuilder.loadTexts: isisNotificationAttachedState.setDescription('The current state of attachment.')
isisNotificationNeighSysID = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 14), SystemID()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationNeighSysID.setStatus('current')
if mibBuilder.loadTexts: isisNotificationNeighSysID.setDescription('The system ID of the neighboring Intermediate System.')
isisNotificationMtId = MibScalar((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 1, 10, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: isisNotificationMtId.setStatus('current')
if mibBuilder.loadTexts: isisNotificationMtId.setDescription('The topology for which the IS-IS instance is entering or leaving attached state.')
# --- Notification (trap) definitions under isisTrapPrefix ---
# Generated pysmi output: each NotificationType lists, via setObjects,
# the varbinds (mostly the accessible-for-notify scalars defined above)
# that accompany the trap.
isisTrapPrefix = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0))
isisDatabaseOverload = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 1)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisSysLevelOverloadState"))
if mibBuilder.loadTexts: isisDatabaseOverload.setStatus('current')
if mibBuilder.loadTexts: isisDatabaseOverload.setDescription('This notification is generated when the system enters or leaves the Overload state. The number of times this has been generated and cleared is kept track of by isisSysStatLSPDbaseOloads.')
isisManualAddressDrops = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 2)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationAreaAddress"))
if mibBuilder.loadTexts: isisManualAddressDrops.setStatus('current')
if mibBuilder.loadTexts: isisManualAddressDrops.setDescription('This notification is generated when one of the manual areaAddresses assigned to this system is ignored when computing routes. The object isisNotificationAreaAddress describes the area that has been dropped. The number of times this event has been generated is counted by isisSysManAddrDropFromAreas. This notification is edge triggered, and should not be regenerated until an address that was used in the previous computation has been dropped.')
isisIDLenMismatch = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 3)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisPduFieldLen"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduFragment"))
if mibBuilder.loadTexts: isisIDLenMismatch.setStatus('current')
if mibBuilder.loadTexts: isisIDLenMismatch.setDescription('A notification sent when we receive a PDU with a different value of the System ID Length. This notification includes an index to identify the circuit where we saw the PDU and the header of the PDU which may help a network manager identify the source of the confusion. This should be an edge-triggered notification. We should not send a second notification about PDUs received on the same circuit.')
isisMaxAreaAddressesMismatch = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 4)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisPduMaxAreaAddress"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduFragment"))
if mibBuilder.loadTexts: isisMaxAreaAddressesMismatch.setStatus('current')
if mibBuilder.loadTexts: isisMaxAreaAddressesMismatch.setDescription('A notification sent when we receive a PDU with a different value of the Maximum Area Addresses. This notification includes the header of the packet, which may help a network manager identify the source of the confusion. This should be an edge-triggered notification. We should not send a second notification about PDUs received from the same circuit.')
isisOwnLSPPurge = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 5)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduLspId"))
if mibBuilder.loadTexts: isisOwnLSPPurge.setStatus('current')
if mibBuilder.loadTexts: isisOwnLSPPurge.setDescription('A notification sent when we receive a PDU with our systemID and zero age. This notification includes the circuit Index and router ID from the LSP, if available, which may help a network manager identify the source of the confusion.')
isisAreaMismatch = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 6)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduFragment"))
if mibBuilder.loadTexts: isisAreaMismatch.setStatus('current')
if mibBuilder.loadTexts: isisAreaMismatch.setDescription('A notification sent when we receive a Hello PDU from an IS which does not share any area address. This notification includes the header of the packet, which may help a network manager identify the source of the confusion. This should be an edge-triggered notification. We should not send a second notification about PDUs received from the same circuit.')
isisRejectedAdjacency = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 7)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduFragment"))
if mibBuilder.loadTexts: isisRejectedAdjacency.setStatus('current')
if mibBuilder.loadTexts: isisRejectedAdjacency.setDescription('A notification sent when we receive a Hello PDU from an IS, but do not establish an adjacency for some reason. This should be an edge-triggered notification. We should not send a second notification about PDUs received from the circuit.')
isisAdjacencyChange = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 8)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduLspId"), ("DC-ISIS-MIB", "isisAdjState"), ("DC-ISIS-MIB", "isisNotificationISAdjIndex"), ("DC-ISIS-MIB", "isisISAdjNeighSNPAAddress"))
if mibBuilder.loadTexts: isisAdjacencyChange.setStatus('current')
if mibBuilder.loadTexts: isisAdjacencyChange.setDescription('A notification sent when an adjacency changes state, entering or leaving state up. The first 6 bytes of the isisPduLspId are the SystemID of the adjacent IS. The isisAdjState is the new state of the adjacency.')
isisLSPErrorDetected = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 9)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisPduLspId"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduFragment"), ("DC-ISIS-MIB", "isisErrorOffset"), ("DC-ISIS-MIB", "isisErrorTLVType"))
if mibBuilder.loadTexts: isisLSPErrorDetected.setStatus('current')
if mibBuilder.loadTexts: isisLSPErrorDetected.setDescription('This notification is generated when we receive an LSP with a parse error. The isisCircIfIndex holds in index of the circuit on which the PDU arrived. The isisPduFragment holds start of the LSP, and the isisErrorOffset points to the problem. If the problem is a malformed TLV, isisErrorOffset points to start of the TLV and isisErrorTLVType holds the value of the type. If the problem is with the LSP header, isisErrorOffset points to the suspicious byte. The number of such LSPs is accumulated in isisSysStatLSPErrors.')
isisAttemptToExceedMaxSequence = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 10)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisPduLspId"))
if mibBuilder.loadTexts: isisAttemptToExceedMaxSequence.setStatus('current')
if mibBuilder.loadTexts: isisAttemptToExceedMaxSequence.setDescription('When the sequence number on an LSP we generate wraps the 32-bit sequence counter, we purge and wait to re-announce this information. This notification describes that event. Since these should not be generated rapidly, we generate an event each time this happens. While the first 6 bytes of the LSPID are ours, the other two contain useful information.')
isisSequenceNumberSkip = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 11)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduLspId"))
if mibBuilder.loadTexts: isisSequenceNumberSkip.setStatus('current')
if mibBuilder.loadTexts: isisSequenceNumberSkip.setDescription('When we receive an LSP with our System ID and different contents, we may need to reissue the LSP with a higher sequence number. We send this notification if we need to increase the sequence number by more than one. If two Intermediate Systems are configured with the same System ID, this notification will fire.')
# NOTE(review): the name isisCircIndTable looks table-like but the object
# is a NotificationType for circuit state changes — presumably the name
# comes straight from the source MIB; confirm against DC-ISIS-MIB text.
isisCircIndTable = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 12)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisCircExistState"), ("DC-ISIS-MIB", "isisCircAdminState"), ("DC-ISIS-MIB", "isisCircOperState"), ("DC-ISIS-MIB", "isisCircT1TimerRunning"), ("DC-ISIS-MIB", "isisCircLevel"))
if mibBuilder.loadTexts: isisCircIndTable.setStatus('current')
if mibBuilder.loadTexts: isisCircIndTable.setDescription('A notification sent when the value of isisCircOperState for a circuit changes to operStatusUp, operStatusDown or operStatusActFailed.')
isisExtPassCircuitInd = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 13)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisCircExistState"), ("DC-ISIS-MIB", "isisCircAdminState"), ("DC-ISIS-MIB", "isisCircOperState"), ("DC-ISIS-MIB", "isisCircLevel"))
if mibBuilder.loadTexts: isisExtPassCircuitInd.setStatus('current')
if mibBuilder.loadTexts: isisExtPassCircuitInd.setDescription('A notification sent when the value of isisCircOperState for a circuit not configured to run the IS-IS protocol (such as an external, TE-only or passive circuit) changes to operStatusUp, operStatusDown or operStatusActFailed.')
isisOperStateChange = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 14)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisSysOperStatus"))
if mibBuilder.loadTexts: isisOperStateChange.setStatus('current')
if mibBuilder.loadTexts: isisOperStateChange.setDescription('This notification is generated when the IS-IS system changes operational state to operStatusUp, operStatusDown or operStatusActFailed.')
isisLspAuthFailure = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 15)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisPduLspId"))
if mibBuilder.loadTexts: isisLspAuthFailure.setStatus('current')
if mibBuilder.loadTexts: isisLspAuthFailure.setDescription('An IS-IS instance sends this notification if a received LSP has failed to authenticate.')
isisHelloAuthFailure = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 16)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisNotificationNeighSysID"))
if mibBuilder.loadTexts: isisHelloAuthFailure.setStatus('current')
if mibBuilder.loadTexts: isisHelloAuthFailure.setDescription('An IS-IS instance sends this notification if a received Hello has failed to authenticate.')
isisAttachedStateChange = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 17)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationMtId"), ("DC-ISIS-MIB", "isisNotificationAttachedState"))
if mibBuilder.loadTexts: isisAttachedStateChange.setStatus('current')
if mibBuilder.loadTexts: isisAttachedStateChange.setDescription('This notification is generated when the Attached state for a topology changes and the topology is operationally up.')
isisDisChange = NotificationType((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 2, 0, 18)).setObjects(("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisNotificationDisState"), ("DC-ISIS-MIB", "isisCircLevelDesIS"))
if mibBuilder.loadTexts: isisDisChange.setStatus('current')
if mibBuilder.loadTexts: isisDisChange.setDescription('An IS-IS instance sends this notification if the DIS changes at level 1 or level 2.')
# --- Conformance area: object groups and compliance statements ---
# Generated pysmi output. The `getattr(mibBuilder, 'version', ...)` guards
# are presumably pysmi's compatibility shim: on newer pysnmp builds
# setStatus returns the object, so it is rebound — TODO confirm against
# the pysmi code-generation templates.
isisGroups = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1))
isisCompliances = MibIdentifier((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 2))
isisCompliance = ModuleCompliance((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 2, 1)).setObjects(("DC-ISIS-MIB", "isisSystemGroup"), ("DC-ISIS-MIB", "isisCircuitGroup"), ("DC-ISIS-MIB", "isisISAdjGroup"), ("DC-ISIS-MIB", "isisNotificationObjectGroup"), ("DC-ISIS-MIB", "isisNotificationGroup"), ("DC-ISIS-MIB", "isisDCAdditionGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    isisCompliance = isisCompliance.setStatus('current')
if mibBuilder.loadTexts: isisCompliance.setDescription('The compliance statement for agents that support the IS-IS MIB')
isisAdvancedCompliance = ModuleCompliance((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 2, 2)).setObjects(("DC-ISIS-MIB", "isisSystemGroup"), ("DC-ISIS-MIB", "isisCircuitGroup"), ("DC-ISIS-MIB", "isisISAdjGroup"), ("DC-ISIS-MIB", "isisNotificationObjectGroup"), ("DC-ISIS-MIB", "isisNotificationGroup"), ("DC-ISIS-MIB", "isisISPDUCounterGroup"), ("DC-ISIS-MIB", "isisISIPRADestGroup"), ("DC-ISIS-MIB", "isisLSPGroup"), ("DC-ISIS-MIB", "isisDCAdditionGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    isisAdvancedCompliance = isisAdvancedCompliance.setStatus('current')
if mibBuilder.loadTexts: isisAdvancedCompliance.setDescription('The advanced compliance statement for agents that support the IS-IS MIB')
isisSystemGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 1)).setObjects(("DC-ISIS-MIB", "isisSysVersion"), ("DC-ISIS-MIB", "isisSysType"), ("DC-ISIS-MIB", "isisSysID"), ("DC-ISIS-MIB", "isisSysMaxPathSplits"), ("DC-ISIS-MIB", "isisSysMaxLSPGenInt"), ("DC-ISIS-MIB", "isisSysPollESHelloRate"), ("DC-ISIS-MIB", "isisSysWaitTime"), ("DC-ISIS-MIB", "isisSysAdminState"), ("DC-ISIS-MIB", "isisSysL2toL1Leaking"), ("DC-ISIS-MIB", "isisSysMaxAge"), ("DC-ISIS-MIB", "isisSysLevelOrigLSPBuffSize"), ("DC-ISIS-MIB", "isisSysLevelMinLSPGenInt"), ("DC-ISIS-MIB", "isisSysLevelOverloadState"), ("DC-ISIS-MIB", "isisSysLevelSetOverload"), ("DC-ISIS-MIB", "isisSysLevelSetOverloadUntil"), ("DC-ISIS-MIB", "isisSysLevelMetricStyle"), ("DC-ISIS-MIB", "isisSysLevelSPFConsiders"), ("DC-ISIS-MIB", "isisSysLevelTEEnabled"), ("DC-ISIS-MIB", "isisSysReceiveLSPBufferSize"), ("DC-ISIS-MIB", "isisManAreaAddrExistState"), ("DC-ISIS-MIB", "isisAreaAddrInLSP"), ("DC-ISIS-MIB", "isisSummAddrExistState"), ("DC-ISIS-MIB", "isisSummAddrMetric"), ("DC-ISIS-MIB", "isisSummAddrFullMetric"), ("DC-ISIS-MIB", "isisRedistributeAddrExistState"), ("DC-ISIS-MIB", "isisRouterHostName"), ("DC-ISIS-MIB", "isisRouterID"), ("DC-ISIS-MIB", "isisSysStatCorrLSPs"), ("DC-ISIS-MIB", "isisSysStatLSPDbaseOloads"), ("DC-ISIS-MIB", "isisSysStatManAddrDropFromAreas"), ("DC-ISIS-MIB", "isisSysStatAttmptToExMaxSeqNums"), ("DC-ISIS-MIB", "isisSysStatSeqNumSkips"), ("DC-ISIS-MIB", "isisSysStatOwnLSPPurges"), ("DC-ISIS-MIB", "isisSysStatIDFieldLenMismatches"), ("DC-ISIS-MIB", "isisSysStatPartChanges"), ("DC-ISIS-MIB", "isisSysStatSPFRuns"), ("DC-ISIS-MIB", "isisSysStatAuthTypeFails"), ("DC-ISIS-MIB", "isisSysStatAuthFails"), ("DC-ISIS-MIB", "isisSysStatLSPError"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisSystemGroup = isisSystemGroup.setStatus('current')
if mibBuilder.loadTexts: isisSystemGroup.setDescription('The collections of objects used to manage an IS-IS router.')
isisCircuitGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 2)).setObjects(("DC-ISIS-MIB", "isisCircIfIndex"), ("DC-ISIS-MIB", "isisCircIfSubIndex"), ("DC-ISIS-MIB", "isisCircAdminState"), ("DC-ISIS-MIB", "isisCircExistState"), ("DC-ISIS-MIB", "isisCircType"), ("DC-ISIS-MIB", "isisCircExtDomain"), ("DC-ISIS-MIB", "isisCircAdjChanges"), ("DC-ISIS-MIB", "isisCircNumAdj"), ("DC-ISIS-MIB", "isisCircInitFails"), ("DC-ISIS-MIB", "isisCircRejAdjs"), ("DC-ISIS-MIB", "isisCircIDFieldLenMismatches"), ("DC-ISIS-MIB", "isisCircMaxAreaAddrMismatches"), ("DC-ISIS-MIB", "isisCircAuthTypeFails"), ("DC-ISIS-MIB", "isisCircAuthFails"), ("DC-ISIS-MIB", "isisCircLANDesISChanges"), ("DC-ISIS-MIB", "isisCircLevel"), ("DC-ISIS-MIB", "isisCircPassiveCircuit"), ("DC-ISIS-MIB", "isisCircMeshGroupEnabled"), ("DC-ISIS-MIB", "isisCircMeshGroup"), ("DC-ISIS-MIB", "isisCircSmallHellos"), ("DC-ISIS-MIB", "isisCircLastUpTime"), ("DC-ISIS-MIB", "isisCirc3WayEnabled"), ("DC-ISIS-MIB", "isisCircExtendedCircID"), ("DC-ISIS-MIB", "isisCircLevelMetric"), ("DC-ISIS-MIB", "isisCircLevelWideMetric"), ("DC-ISIS-MIB", "isisCircLevelISPriority"), ("DC-ISIS-MIB", "isisCircLevelIDOctet"), ("DC-ISIS-MIB", "isisCircLevelID"), ("DC-ISIS-MIB", "isisCircLevelDesIS"), ("DC-ISIS-MIB", "isisCircLevelHelloMultiplier"), ("DC-ISIS-MIB", "isisCircLevelHelloTimer"), ("DC-ISIS-MIB", "isisCircLevelDRHelloTimer"), ("DC-ISIS-MIB", "isisCircLevelLSPThrottle"), ("DC-ISIS-MIB", "isisCircLevelMinLSPRetransInt"), ("DC-ISIS-MIB", "isisCircLevelCSNPInterval"), ("DC-ISIS-MIB", "isisCircLevelPartSNPInterval"), ("DC-ISIS-MIB", "isisCircIPAddrRowStatus"), ("DC-ISIS-MIB", "isisCircIPAddrAdminState"), ("DC-ISIS-MIB", "isisCircIPAddrAddressType"), ("DC-ISIS-MIB", "isisCircIPAddrAddress"), ("DC-ISIS-MIB", "isisCircIPAddrInLSP"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisCircuitGroup = isisCircuitGroup.setStatus('current')
if mibBuilder.loadTexts: isisCircuitGroup.setDescription('The collection of objects used to describe an IS-IS Circuit.')
isisISAdjGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 3)).setObjects(("DC-ISIS-MIB", "isisISAdjState"), ("DC-ISIS-MIB", "isisISAdj3WayState"), ("DC-ISIS-MIB", "isisISAdjNeighSNPAAddress"), ("DC-ISIS-MIB", "isisISAdjNeighSysType"), ("DC-ISIS-MIB", "isisISAdjNeighSysID"), ("DC-ISIS-MIB", "isisISAdjNbrExtendedCircID"), ("DC-ISIS-MIB", "isisISAdjUsage"), ("DC-ISIS-MIB", "isisISAdjHoldTimer"), ("DC-ISIS-MIB", "isisISAdjNeighPriority"), ("DC-ISIS-MIB", "isisISAdjLastUpTime"), ("DC-ISIS-MIB", "isisISAdjAreaAddress"), ("DC-ISIS-MIB", "isisISAdjIPAddrType"), ("DC-ISIS-MIB", "isisISAdjIPAddrAddress"), ("DC-ISIS-MIB", "isisISAdjProtSuppLocalSupport"), ("DC-ISIS-MIB", "isisISAdjProtSuppBfdStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisISAdjGroup = isisISAdjGroup.setStatus('current')
if mibBuilder.loadTexts: isisISAdjGroup.setDescription('The collections of objects used to manage an IS-IS Adjacency.')
isisNotificationObjectGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 4)).setObjects(("DC-ISIS-MIB", "isisPduLspId"), ("DC-ISIS-MIB", "isisPduFragment"), ("DC-ISIS-MIB", "isisPduFieldLen"), ("DC-ISIS-MIB", "isisPduMaxAreaAddress"), ("DC-ISIS-MIB", "isisAdjState"), ("DC-ISIS-MIB", "isisErrorOffset"), ("DC-ISIS-MIB", "isisErrorTLVType"), ("DC-ISIS-MIB", "isisNotificationSysInstance"), ("DC-ISIS-MIB", "isisNotificationSysLevelIndex"), ("DC-ISIS-MIB", "isisNotificationAreaAddress"), ("DC-ISIS-MIB", "isisNotificationISAdjIndex"), ("DC-ISIS-MIB", "isisNotificationDisState"), ("DC-ISIS-MIB", "isisNotificationAttachedState"), ("DC-ISIS-MIB", "isisNotificationNeighSysID"), ("DC-ISIS-MIB", "isisNotificationMtId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisNotificationObjectGroup = isisNotificationObjectGroup.setStatus('current')
if mibBuilder.loadTexts: isisNotificationObjectGroup.setDescription('The objects used to record notification parameters.')
isisNotificationGroup = NotificationGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 5)).setObjects(("DC-ISIS-MIB", "isisDatabaseOverload"), ("DC-ISIS-MIB", "isisManualAddressDrops"), ("DC-ISIS-MIB", "isisIDLenMismatch"), ("DC-ISIS-MIB", "isisMaxAreaAddressesMismatch"), ("DC-ISIS-MIB", "isisOwnLSPPurge"), ("DC-ISIS-MIB", "isisAreaMismatch"), ("DC-ISIS-MIB", "isisRejectedAdjacency"), ("DC-ISIS-MIB", "isisAdjacencyChange"), ("DC-ISIS-MIB", "isisLSPErrorDetected"), ("DC-ISIS-MIB", "isisAttemptToExceedMaxSequence"), ("DC-ISIS-MIB", "isisSequenceNumberSkip"), ("DC-ISIS-MIB", "isisCircIndTable"), ("DC-ISIS-MIB", "isisExtPassCircuitInd"), ("DC-ISIS-MIB", "isisDisChange"), ("DC-ISIS-MIB", "isisOperStateChange"), ("DC-ISIS-MIB", "isisHelloAuthFailure"), ("DC-ISIS-MIB", "isisLspAuthFailure"), ("DC-ISIS-MIB", "isisAttachedStateChange"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisNotificationGroup = isisNotificationGroup.setStatus('current')
if mibBuilder.loadTexts: isisNotificationGroup.setDescription('The collections of notifications sent by an IS.')
isisISPDUCounterGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 6)).setObjects(("DC-ISIS-MIB", "isisPacketCountIIHello"), ("DC-ISIS-MIB", "isisPacketCountISHello"), ("DC-ISIS-MIB", "isisPacketCountESHello"), ("DC-ISIS-MIB", "isisPacketCountLSP"), ("DC-ISIS-MIB", "isisPacketCountCSNP"), ("DC-ISIS-MIB", "isisPacketCountPSNP"), ("DC-ISIS-MIB", "isisPacketCountUnknown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisISPDUCounterGroup = isisISPDUCounterGroup.setStatus('current')
if mibBuilder.loadTexts: isisISPDUCounterGroup.setDescription('The collections of objects used to count protocol PDUs.')
isisISIPRADestGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 8)).setObjects(("DC-ISIS-MIB", "isisIPRANextHopType"), ("DC-ISIS-MIB", "isisIPRANextHop"), ("DC-ISIS-MIB", "isisIPRAType"), ("DC-ISIS-MIB", "isisIPRAExistState"), ("DC-ISIS-MIB", "isisIPRAAdminState"), ("DC-ISIS-MIB", "isisIPRAMetric"), ("DC-ISIS-MIB", "isisIPRAFullMetric"), ("DC-ISIS-MIB", "isisIPRAMetricType"), ("DC-ISIS-MIB", "isisIPRASNPAAddress"), ("DC-ISIS-MIB", "isisIPRASourceType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisISIPRADestGroup = isisISIPRADestGroup.setStatus('current')
if mibBuilder.loadTexts: isisISIPRADestGroup.setDescription('The collections of objects used to manage configured IP addresses.')
isisLSPGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 9)).setObjects(("DC-ISIS-MIB", "isisLSPSeq"), ("DC-ISIS-MIB", "isisLSPZeroLife"), ("DC-ISIS-MIB", "isisLSPChecksum"), ("DC-ISIS-MIB", "isisLSPLifetimeRemain"), ("DC-ISIS-MIB", "isisLSPPDULength"), ("DC-ISIS-MIB", "isisLSPAttributes"), ("DC-ISIS-MIB", "isisLSPTLVSeq"), ("DC-ISIS-MIB", "isisLSPTLVChecksum"), ("DC-ISIS-MIB", "isisLSPTLVType"), ("DC-ISIS-MIB", "isisLSPTLVLen"), ("DC-ISIS-MIB", "isisLSPTLVValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisLSPGroup = isisLSPGroup.setStatus('current')
if mibBuilder.loadTexts: isisLSPGroup.setDescription('The collections of objects used to observe the LSP Data Base.')
isisDCAdditionGroup = ObjectGroup((1, 2, 826, 0, 1, 1578918, 5, 63, 1, 3, 1, 10)).setObjects(("DC-ISIS-MIB", "isisSysExistState"), ("DC-ISIS-MIB", "isisSysOperStatus"), ("DC-ISIS-MIB", "isisSysAllowAutoI3Config"), ("DC-ISIS-MIB", "isisSysCalcMaxDelay"), ("DC-ISIS-MIB", "isisSysCalcThrshUpdStart"), ("DC-ISIS-MIB", "isisSysCalcThrshUpdRestart"), ("DC-ISIS-MIB", "isisSysCalcThrshRestartLimit"), ("DC-ISIS-MIB", "isisSysCalcPauseFreq"), ("DC-ISIS-MIB", "isisSysCheckChecksums"), ("DC-ISIS-MIB", "isisSysZeroAgeLifetime"), ("DC-ISIS-MIB", "isisSysNumUpdPending"), ("DC-ISIS-MIB", "isisSysNumUpdMerged"), ("DC-ISIS-MIB", "isisSysNumCksumsPending"), ("DC-ISIS-MIB", "isisSysTEMetricPcntge"), ("DC-ISIS-MIB", "isisSysMaxBwidthPcntge"), ("DC-ISIS-MIB", "isisSysMaxResBwidthPcntge"), ("DC-ISIS-MIB", "isisSysUnresBwidthPcntge"), ("DC-ISIS-MIB", "isisSysMaxLSPBwidthPcntge"), ("DC-ISIS-MIB", "isisSysMinLSPBwidthPcntge"), ("DC-ISIS-MIB", "isisSysMTUSizePcntge"), ("DC-ISIS-MIB", "isisSysTERouterID"), ("DC-ISIS-MIB", "isisSysIPv6TERouterID"), ("DC-ISIS-MIB", "isisSysMaxExternalRoutes"), ("DC-ISIS-MIB", "isisSysMaxExternalRoutesAction"), ("DC-ISIS-MIB", "isisSysLspFullSuppress"), ("DC-ISIS-MIB", "isisSysLspFullSetDBOL"), ("DC-ISIS-MIB", "isisSysRestartHelpPeer"), ("DC-ISIS-MIB", "isisSysRestartActivationType"), ("DC-ISIS-MIB", "isisSysRestartAutoResetType"), ("DC-ISIS-MIB", "isisSysRestartAdjacencyWait"), ("DC-ISIS-MIB", "isisSysMaxRecoveryTime"), ("DC-ISIS-MIB", "isisSysClearStats"), ("DC-ISIS-MIB", "isisSysSetAttached"), ("DC-ISIS-MIB", "isisSysProtSupported"), ("DC-ISIS-MIB", "isisSysRestrictLanAdjsToSubnet"), ("DC-ISIS-MIB", "isisSysHostName"), ("DC-ISIS-MIB", "isisSysCalcSoonAfterCircChange"), ("DC-ISIS-MIB", "isisSysSendNotifications"), ("DC-ISIS-MIB", "isisSysEnableIgpShortcut"), ("DC-ISIS-MIB", "isisSysI3EntityIndex"), ("DC-ISIS-MIB", "isisSysRtmPurgeTime"), ("DC-ISIS-MIB", "isisRouterIPv6ID"), ("DC-ISIS-MIB", "isisSysLevelIPv6TEEnabled"), ("DC-ISIS-MIB", 
"isisSysLevelRestartT2Duration"), ("DC-ISIS-MIB", "isisSysLevelAuthUser"), ("DC-ISIS-MIB", "isisCircOperState"), ("DC-ISIS-MIB", "isisCircSdEntityIndex"), ("DC-ISIS-MIB", "isisCircDlBuffPoolSize"), ("DC-ISIS-MIB", "isisCircSdPDUBuffPoolSize"), ("DC-ISIS-MIB", "isisCircSdIndBuffPoolSize"), ("DC-ISIS-MIB", "isisCircDataLinkBlockSize"), ("DC-ISIS-MIB", "isisCircPhysicalAddress"), ("DC-ISIS-MIB", "isisCircManualOrAutomatic"), ("DC-ISIS-MIB", "isisCircT1TimerRunning"), ("DC-ISIS-MIB", "isisCircProtSupported"), ("DC-ISIS-MIB", "isisCircPtToPtOverLAN"), ("DC-ISIS-MIB", "isisCircProtBfdDesired"), ("DC-ISIS-MIB", "isisCircLevelStickyDIS"), ("DC-ISIS-MIB", "isisCircLevelAuthUser"), ("DC-ISIS-MIB", "isisCircLevelIDHostname"), ("DC-ISIS-MIB", "isisCircLevelDesISHostname"), ("DC-ISIS-MIB", "isisCircLevelMinLSPArrivalInt"), ("DC-ISIS-MIB", "isisSysStatPSNPError"), ("DC-ISIS-MIB", "isisSysStatCSNPError"), ("DC-ISIS-MIB", "isisSysStatLSPQueueLen"), ("DC-ISIS-MIB", "isisSysStatFragsRebuilt"), ("DC-ISIS-MIB", "isisSysStatLSPRexmits"), ("DC-ISIS-MIB", "isisSysStatLSPRegens"), ("DC-ISIS-MIB", "isisSysStatPurgesInitiated"), ("DC-ISIS-MIB", "isisSysStatLSPCount"), ("DC-ISIS-MIB", "isisSysStatPurgesIniLocal"), ("DC-ISIS-MIB", "isisSysStatPurgesIniRemote"), ("DC-ISIS-MIB", "isisSysStatPurgesIniRemSNP"), ("DC-ISIS-MIB", "isisSysStatPurgesIniRemExp"), ("DC-ISIS-MIB", "isisSysStatPurgesIniRemPrs"), ("DC-ISIS-MIB", "isisPacketCountDiscardedIIH"), ("DC-ISIS-MIB", "isisPacketCountDiscardedLSP"), ("DC-ISIS-MIB", "isisPacketCountDiscardedCSNP"), ("DC-ISIS-MIB", "isisPacketCountDiscardedPSNP"), ("DC-ISIS-MIB", "isisISAdjRestartCapable"), ("DC-ISIS-MIB", "isisISAdjPeerRestartState"), ("DC-ISIS-MIB", "isisISAdjSuppressed"), ("DC-ISIS-MIB", "isisISAdjNeighLanID"), ("DC-ISIS-MIB", "isisISAdjNeighHostname"), ("DC-ISIS-MIB", "isisISAdjNeighLanIDHostname"), ("DC-ISIS-MIB", "isisLSPIDHostname"), ("DC-ISIS-MIB", "isisLSPTLVHostname"), ("DC-ISIS-MIB", "isisPmSjJoinStatus"), ("DC-ISIS-MIB", 
"isisPmMjRowStatus"), ("DC-ISIS-MIB", "isisPmMjAdminStatus"), ("DC-ISIS-MIB", "isisPmMjOperStatus"), ("DC-ISIS-MIB", "isisPmMjJoinStatus"), ("DC-ISIS-MIB", "isisSdEntRowStatus"), ("DC-ISIS-MIB", "isisSdEntAdminStatus"), ("DC-ISIS-MIB", "isisSdEntOperStatus"), ("DC-ISIS-MIB", "isisSdEntMapHostnames"), ("DC-ISIS-MIB", "isisSdEntAuthSNPs"), ("DC-ISIS-MIB", "isisSdEntBfdProviderIndex"), ("DC-ISIS-MIB", "isisSdEntStopAdjDuplIDs"), ("DC-ISIS-MIB", "isisSdMjRowStatus"), ("DC-ISIS-MIB", "isisSdMjAdminStatus"), ("DC-ISIS-MIB", "isisSdMjOperStatus"), ("DC-ISIS-MIB", "isisSdMjEntityIndex"), ("DC-ISIS-MIB", "isisSdMjEntityType"), ("DC-ISIS-MIB", "isisSdMjJoinStatus"), ("DC-ISIS-MIB", "isisMtSysExistState"), ("DC-ISIS-MIB", "isisMtSysAdminState"), ("DC-ISIS-MIB", "isisMtSysOperState"), ("DC-ISIS-MIB", "isisMtSysProtSupported"), ("DC-ISIS-MIB", "isisMtSysDefaultActive"), ("DC-ISIS-MIB", "isisMtCircManExistState"), ("DC-ISIS-MIB", "isisMtCircManAdminState"), ("DC-ISIS-MIB", "isisMtCircManOperState"), ("DC-ISIS-MIB", "isisMtCircManL1WideMetric"), ("DC-ISIS-MIB", "isisMtCircManL2WideMetric"), ("DC-ISIS-MIB", "isisMtCircStatusOperState"), ("DC-ISIS-MIB", "isisMtCircStatusL1WideMetric"), ("DC-ISIS-MIB", "isisMtCircStatusL2WideMetric"), ("DC-ISIS-MIB", "isisISAdjMtSuppLocalSupport"), ("DC-ISIS-MIB", "isisIPRAMtId"), ("DC-ISIS-MIB", "isisPmMjSubAddrFamily"), ("DC-ISIS-MIB", "isisShortcutRemoteAddressType"), ("DC-ISIS-MIB", "isisShortcutRemoteAddress"), ("DC-ISIS-MIB", "isisShortcutMetricType"), ("DC-ISIS-MIB", "isisShortcutMetricValue"), ("DC-ISIS-MIB", "isisShortcutOperStatus"), ("DC-ISIS-MIB", "isisShortcutPendingDeletion"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
isisDCAdditionGroup = isisDCAdditionGroup.setStatus('current')
if mibBuilder.loadTexts: isisDCAdditionGroup.setDescription('The collection of Data Connection additions to the MIB.')
mibBuilder.exportSymbols("DC-ISIS-MIB", isisGroups=isisGroups, isisMtSysDefaultActive=isisMtSysDefaultActive, isisSysLevelOverloadState=isisSysLevelOverloadState, isisCircManualOrAutomatic=isisCircManualOrAutomatic, isisManAreaAddrTable=isisManAreaAddrTable, isisMtSysOperState=isisMtSysOperState, isisReachAddr=isisReachAddr, isisPmMjSlaveEntity=isisPmMjSlaveEntity, isisNotificationSysInstance=isisNotificationSysInstance, isisTrapPrefix=isisTrapPrefix, isisSummAddrMetric=isisSummAddrMetric, isisCircLevelHelloMultiplier=isisCircLevelHelloMultiplier, isisSystemGroup=isisSystemGroup, isisCircIndTable=isisCircIndTable, isisRedistributeAddrMtId=isisRedistributeAddrMtId, isisLSPTLVHostname=isisLSPTLVHostname, isisLSPPDULength=isisLSPPDULength, isisISAdjNeighPriority=isisISAdjNeighPriority, isisSysLevelSetOverloadUntil=isisSysLevelSetOverloadUntil, isisNotificationNeighSysID=isisNotificationNeighSysID, isisPmMjInterface=isisPmMjInterface, isisISAdjIPAddrAddress=isisISAdjIPAddrAddress, isisCircAuthFails=isisCircAuthFails, AdminState=AdminState, isisRouterSysID=isisRouterSysID, isisISAdjSuppressed=isisISAdjSuppressed, isisSysCheckChecksums=isisSysCheckChecksums, isisISAdjNeighLanID=isisISAdjNeighLanID, isisISAdj3WayState=isisISAdj3WayState, isisNotifications=isisNotifications, ISLevel=ISLevel, isisRouterIPv6ID=isisRouterIPv6ID, isisCircIPAddrRowStatus=isisCircIPAddrRowStatus, isisPacketCountDiscardedCSNP=isisPacketCountDiscardedCSNP, isisISAdjProtSuppProtocol=isisISAdjProtSuppProtocol, isisSysNumUpdPending=isisSysNumUpdPending, isisSysCalcThrshRestartLimit=isisSysCalcThrshRestartLimit, isisNotificationObjectGroup=isisNotificationObjectGroup, isisISAdjProtSuppEntry=isisISAdjProtSuppEntry, isisSysPollESHelloRate=isisSysPollESHelloRate, CircuitID=CircuitID, isisLSPDataBase=isisLSPDataBase, LSPBuffSize=LSPBuffSize, isisMtSysAdminState=isisMtSysAdminState, isisSysLevelSetOverload=isisSysLevelSetOverload, isisCirc3WayEnabled=isisCirc3WayEnabled, 
isisAreaAddrTable=isisAreaAddrTable, isisIPRASourceType=isisIPRASourceType, isisRedistributeAddrEntry=isisRedistributeAddrEntry, isisConformance=isisConformance, isisIPRADestPrefixLen=isisIPRADestPrefixLen, isisMtSysTable=isisMtSysTable, isisNotification=isisNotification, isisShortcutRemoteAddress=isisShortcutRemoteAddress, isisLSPTLVChecksum=isisLSPTLVChecksum, isisMtCircStatusOperState=isisMtCircStatusOperState, isisSysReceiveLSPBufferSize=isisSysReceiveLSPBufferSize, isisLSPChecksum=isisLSPChecksum, IsisMjStatus=IsisMjStatus, isisSysRestartActivationType=isisSysRestartActivationType, isisMtCircStatusMtId=isisMtCircStatusMtId, isisISPDUCounterGroup=isisISPDUCounterGroup, isisCircLevelWideMetric=isisCircLevelWideMetric, IsisOperStatus=IsisOperStatus, IsisSysRestartType=IsisSysRestartType, isisSysLevelOrigLSPBuffSize=isisSysLevelOrigLSPBuffSize, isisSysProtSupported=isisSysProtSupported, isisSysStatPurgesIniRemSNP=isisSysStatPurgesIniRemSNP, isisMtCircStatusL2WideMetric=isisMtCircStatusL2WideMetric, isisShortcutMetricValue=isisShortcutMetricValue, isisPmMjSubAddrFamily=isisPmMjSubAddrFamily, isisMtCircManConfigTable=isisMtCircManConfigTable, isisCircLevel=isisCircLevel, isisSystemCounterEntry=isisSystemCounterEntry, isisSysStatLSPRegens=isisSysStatLSPRegens, isisRedistributeAddrType=isisRedistributeAddrType, isisRedistributeAddrTable=isisRedistributeAddrTable, isisCircLevelMinLSPRetransInt=isisCircLevelMinLSPRetransInt, isisManualAddressDrops=isisManualAddressDrops, isisSysSetAttached=isisSysSetAttached, LinkStatePDUID=LinkStatePDUID, isisIPRADestType=isisIPRADestType, isisSdMjOperStatus=isisSdMjOperStatus, isisSysStatAttmptToExMaxSeqNums=isisSysStatAttmptToExMaxSeqNums, isisSummAddrPrefixLen=isisSummAddrPrefixLen, isisLSPErrorDetected=isisLSPErrorDetected, isisPacketCountLSP=isisPacketCountLSP, isisSystemCounterTable=isisSystemCounterTable, isisCircNumAdj=isisCircNumAdj, isisSysTEMetricPcntge=isisSysTEMetricPcntge, isisSysCalcMaxDelay=isisSysCalcMaxDelay, 
isisPacketCounterTable=isisPacketCounterTable, isisSysSendNotifications=isisSysSendNotifications, isisCircuitCounterTable=isisCircuitCounterTable, isisIPRADest=isisIPRADest, isisIPRAMetricType=isisIPRAMetricType, isisLSPLifetimeRemain=isisLSPLifetimeRemain, isisMtSysMtId=isisMtSysMtId, isisCircMeshGroup=isisCircMeshGroup, isisCircIPAddrTable=isisCircIPAddrTable, IsisSjStatus=IsisSjStatus, OSINSAddress=OSINSAddress, isisPacketCountUnknown=isisPacketCountUnknown, isisISAdjMtSupportedTable=isisISAdjMtSupportedTable, isisCircLevelDesISHostname=isisCircLevelDesISHostname, isisNotificationGroup=isisNotificationGroup, isisShortcutIfIndex=isisShortcutIfIndex, isisCircLevelIndex=isisCircLevelIndex, isisSysTable=isisSysTable, isisExtPassCircuitInd=isisExtPassCircuitInd, isisISAdjNeighHostname=isisISAdjNeighHostname, isisDisChange=isisDisChange, isisLSPZeroLife=isisLSPZeroLife, ISPriority=ISPriority, isisSysRestartAdjacencyWait=isisSysRestartAdjacencyWait, isisISAdjIPAddrTable=isisISAdjIPAddrTable, isisIPReachAddr=isisIPReachAddr, isisMtCircStatusL1WideMetric=isisMtCircStatusL1WideMetric, isisSdEntEntry=isisSdEntEntry, isisPduFragment=isisPduFragment, isisSysLevelAuthUser=isisSysLevelAuthUser, isisCircSdPDUBuffPoolSize=isisCircSdPDUBuffPoolSize, IsisAdminStatus=IsisAdminStatus, isisLSPTLVType=isisLSPTLVType, isisSummAddrExistState=isisSummAddrExistState, isisSysStatOwnLSPPurges=isisSysStatOwnLSPPurges, isisSysStatPSNPError=isisSysStatPSNPError, isisIPRASNPAAddress=isisIPRASNPAAddress, isisCircLANDesISChanges=isisCircLANDesISChanges, isisMaxAreaAddressesMismatch=isisMaxAreaAddressesMismatch, isisSysLevelIndex=isisSysLevelIndex, isisSdEntAuthSNPs=isisSdEntAuthSNPs, isisMtCircManConfigEntry=isisMtCircManConfigEntry, isisSysLevelMetricStyle=isisSysLevelMetricStyle, isisCircEntry=isisCircEntry, isisShortcutOperStatus=isisShortcutOperStatus, isisPduLspId=isisPduLspId, PYSNMP_MODULE_ID=dcIsisMib, isisCompliance=isisCompliance, isisSdMjAdminStatus=isisSdMjAdminStatus, 
isisErrorTLVType=isisErrorTLVType, isisSysStatPartChanges=isisSysStatPartChanges, isisCircLevelPartSNPInterval=isisCircLevelPartSNPInterval, isisISAdjNeighLanIDHostname=isisISAdjNeighLanIDHostname, MetricType=MetricType, isisPmSjEntry=isisPmSjEntry, isisPacketCountESHello=isisPacketCountESHello, isisSysStatLSPQueueLen=isisSysStatLSPQueueLen, isisMtSysExistState=isisMtSysExistState, isisAreaAddrInLSP=isisAreaAddrInLSP, isisPmMjAdminStatus=isisPmMjAdminStatus, isisRouterHostName=isisRouterHostName, isisCounters=isisCounters, isisISAdjProtSuppBfdStatus=isisISAdjProtSuppBfdStatus, isisLSPSummaryEntry=isisLSPSummaryEntry, IsisAddrTypeBits=IsisAddrTypeBits, isisSysCalcThrshUpdRestart=isisSysCalcThrshUpdRestart, isisLSPSummaryTable=isisLSPSummaryTable, isisSysMaxResBwidthPcntge=isisSysMaxResBwidthPcntge, isisISAdjIPAddrType=isisISAdjIPAddrType, isisSummAddrEntry=isisSummAddrEntry, isisMtCircManAdminState=isisMtCircManAdminState, isisSdMjInterface=isisSdMjInterface, isisSysStatPurgesIniRemPrs=isisSysStatPurgesIniRemPrs, isisSysNumUpdMerged=isisSysNumUpdMerged, isisCircProtBfdDesired=isisCircProtBfdDesired, isisSysRestrictLanAdjsToSubnet=isisSysRestrictLanAdjsToSubnet, isisRedistributeAddrPrefixLen=isisRedistributeAddrPrefixLen, isisCirc=isisCirc, isisPacketCountDiscardedIIH=isisPacketCountDiscardedIIH, isisShortcutPendingDeletion=isisShortcutPendingDeletion, isisIPRANextHopIndex=isisIPRANextHopIndex, isisCircIfSubIndex=isisCircIfSubIndex, isisSysAllowAutoI3Config=isisSysAllowAutoI3Config, isisSysL2toL1Leaking=isisSysL2toL1Leaking, isisSysStatIDFieldLenMismatches=isisSysStatIDFieldLenMismatches, isisCircLevelHelloTimer=isisCircLevelHelloTimer, isisIPRAEntry=isisIPRAEntry, isisISAdjPeerRestartState=isisISAdjPeerRestartState, isisLSPTLVValue=isisLSPTLVValue, isisAdjacencyChange=isisAdjacencyChange, isisISAdjIPAddrEntry=isisISAdjIPAddrEntry, isisMtCircManL2WideMetric=isisMtCircManL2WideMetric, isisCircT1TimerRunning=isisCircT1TimerRunning, 
isisSysRestartHelpPeer=isisSysRestartHelpPeer, isisObjects=isisObjects, isisSysWaitTime=isisSysWaitTime, isisSysStatAuthFails=isisSysStatAuthFails, isisIPRAMtId=isisIPRAMtId, isisSummAddrTable=isisSummAddrTable, isisPacketCountISHello=isisPacketCountISHello, isisSdMjEntityType=isisSdMjEntityType, isisCircLastUpTime=isisCircLastUpTime, isisPmSjInterface=isisPmSjInterface, isisSysMaxExternalRoutes=isisSysMaxExternalRoutes, IsisPmInterfaceId=IsisPmInterfaceId, isisSysMinLSPBwidthPcntge=isisSysMinLSPBwidthPcntge, isisISAdjAreaAddrEntry=isisISAdjAreaAddrEntry, isisSdMjEntityIndex=isisSdMjEntityIndex, isisOperStateChange=isisOperStateChange, isisCircPassiveCircuit=isisCircPassiveCircuit, isisSysStatAuthTypeFails=isisSysStatAuthTypeFails, SupportedProtocol=SupportedProtocol, IsisAddrType=IsisAddrType, isisCircLevelLSPThrottle=isisCircLevelLSPThrottle, isisCircTable=isisCircTable, isisPacketCountLevel=isisPacketCountLevel, isisSysMaxLSPBwidthPcntge=isisSysMaxLSPBwidthPcntge, isisPmMjAddrType=isisPmMjAddrType, isisCircLevelMetric=isisCircLevelMetric, isisCircLevelValues=isisCircLevelValues, isisSysMaxExternalRoutesAction=isisSysMaxExternalRoutesAction, isisCircIPAddrIndex=isisCircIPAddrIndex, isisSysTERouterID=isisSysTERouterID, isisRedistributeAddrExistState=isisRedistributeAddrExistState, isisISAdjMtSupportedEntry=isisISAdjMtSupportedEntry, isisSystem=isisSystem, isisErrorOffset=isisErrorOffset, isisSysI3EntityIndex=isisSysI3EntityIndex, isisRouterLevel=isisRouterLevel, isisNotificationISAdjIndex=isisNotificationISAdjIndex, isisCircIPAddrAdminState=isisCircIPAddrAdminState, isisIgpShortcutTable=isisIgpShortcutTable, isisSysLevelRestartT2Duration=isisSysLevelRestartT2Duration, isisISAdjLastUpTime=isisISAdjLastUpTime, isisManAreaAddr=isisManAreaAddr, isisSysCalcThrshUpdStart=isisSysCalcThrshUpdStart, isisPacketCountIIHello=isisPacketCountIIHello, isisRouterID=isisRouterID, SystemID=SystemID, isisSysStatManAddrDropFromAreas=isisSysStatManAddrDropFromAreas, 
isisSdEntRowStatus=isisSdEntRowStatus, isisSdMjRowStatus=isisSdMjRowStatus, isisNotificationAttachedState=isisNotificationAttachedState, isisNotificationAreaAddress=isisNotificationAreaAddress, isisCircIndex=isisCircIndex, isisPacketCountDiscardedPSNP=isisPacketCountDiscardedPSNP, isisAreaAddr=isisAreaAddr, isisISAdjProtSuppLocalSupport=isisISAdjProtSuppLocalSupport, isisLSPAttributes=isisLSPAttributes, isisCircLevelCSNPInterval=isisCircLevelCSNPInterval, isisISAdj=isisISAdj, isisPacketCounterEntry=isisPacketCounterEntry, isisShortcutRemoteAddressType=isisShortcutRemoteAddressType, isisAttachedStateChange=isisAttachedStateChange, isisAreaMismatch=isisAreaMismatch, isisSdEntAdminStatus=isisSdEntAdminStatus, isisPmSjJoinStatus=isisPmSjJoinStatus, isisCircDlBuffPoolSize=isisCircDlBuffPoolSize, isisISAdjProtSuppTable=isisISAdjProtSuppTable, isisCircAdminState=isisCircAdminState, isisISAdjNeighSysType=isisISAdjNeighSysType, isisLSPID=isisLSPID, isisISAdjAreaAddrTable=isisISAdjAreaAddrTable)
mibBuilder.exportSymbols("DC-ISIS-MIB", isisOwnLSPPurge=isisOwnLSPPurge, isisSysLspFullSetDBOL=isisSysLspFullSetDBOL, isisSysExistState=isisSysExistState, isisPmObjects=isisPmObjects, isisCircPtToPtOverLAN=isisCircPtToPtOverLAN, isisCircLevelIDOctet=isisCircLevelIDOctet, isisSysClearStats=isisSysClearStats, isisNotificationDisState=isisNotificationDisState, isisSysStatLSPError=isisSysStatLSPError, isisSdEntIndex=isisSdEntIndex, isisMtCircManExistState=isisMtCircManExistState, isisSdEntMapHostnames=isisSdEntMapHostnames, Unsigned8TC=Unsigned8TC, isisCircIPAddrManOrAuto=isisCircIPAddrManOrAuto, IsisSdEntityType=IsisSdEntityType, Unsigned16TC=Unsigned16TC, isisISAdjAreaAddress=isisISAdjAreaAddress, isisCircLevelStickyDIS=isisCircLevelStickyDIS, isisISIPRADestGroup=isisISIPRADestGroup, isisSysMaxRecoveryTime=isisSysMaxRecoveryTime, isisLSPTLVLen=isisLSPTLVLen, isisCircIPAddrInLSP=isisCircIPAddrInLSP, isisSysStatPurgesIniRemote=isisSysStatPurgesIniRemote, isisMtSysEntry=isisMtSysEntry, isisCircRejAdjs=isisCircRejAdjs, isisSysMaxAge=isisSysMaxAge, isisIgpShortcutEntry=isisIgpShortcutEntry, isisPmMjEntry=isisPmMjEntry, isisCircuitGroup=isisCircuitGroup, isisCircIPAddrEntry=isisCircIPAddrEntry, isisPduMaxAreaAddress=isisPduMaxAreaAddress, WideMetric=WideMetric, isisSysVersion=isisSysVersion, LevelState=LevelState, dcIsisMib=dcIsisMib, isisPacketCountDirection=isisPacketCountDirection, isisCircSdIndBuffPoolSize=isisCircSdIndBuffPoolSize, isisCircLevelID=isisCircLevelID, isisSdMjTable=isisSdMjTable, isisMtCircStatusTable=isisMtCircStatusTable, isisSdEntTable=isisSdEntTable, isisMtSysProtSupported=isisMtSysProtSupported, isisCompliances=isisCompliances, isisLSPTLVIndex=isisLSPTLVIndex, isisPmMjRowStatus=isisPmMjRowStatus, isisSysLevelEntry=isisSysLevelEntry, isisLSPLevel=isisLSPLevel, isisCircLevelIDHostname=isisCircLevelIDHostname, isisISAdjIPAddrIndex=isisISAdjIPAddrIndex, isisNotificationSysLevelIndex=isisNotificationSysLevelIndex, isisISAdjGroup=isisISAdjGroup, 
isisAreaAddrEntry=isisAreaAddrEntry, isisSysLevelSPFConsiders=isisSysLevelSPFConsiders, isisSysLspFullSuppress=isisSysLspFullSuppress, isisSdMjEntry=isisSdMjEntry, isisManAreaAddrExistState=isisManAreaAddrExistState, isisShortcutMetricType=isisShortcutMetricType, isisISAdjRestartCapable=isisISAdjRestartCapable, isisPduFieldLen=isisPduFieldLen, isisSysMaxBwidthPcntge=isisSysMaxBwidthPcntge, isisCircDataLinkBlockSize=isisCircDataLinkBlockSize, isisSysStatSPFRuns=isisSysStatSPFRuns, isisIPRAExistState=isisIPRAExistState, isisSdEntOperStatus=isisSdEntOperStatus, isisSysEntry=isisSysEntry, isisPacketCountDiscardedLSP=isisPacketCountDiscardedLSP, isisCircExtendedCircID=isisCircExtendedCircID, isisIDLenMismatch=isisIDLenMismatch, isisAttemptToExceedMaxSequence=isisAttemptToExceedMaxSequence, isisSdEntBfdProviderIndex=isisSdEntBfdProviderIndex, isisCircuitType=isisCircuitType, isisCircLevelDRHelloTimer=isisCircLevelDRHelloTimer, isisSummAddrFullMetric=isisSummAddrFullMetric, isisISAdjUsage=isisISAdjUsage, isisSummAddressType=isisSummAddressType, isisSysInstance=isisSysInstance, isisCircMeshGroupEnabled=isisCircMeshGroupEnabled, isisSysStatPurgesIniLocal=isisSysStatPurgesIniLocal, isisSysEnableIgpShortcut=isisSysEnableIgpShortcut, isisCircOperState=isisCircOperState, isisSysRestartAutoResetType=isisSysRestartAutoResetType, isisCircSmallHellos=isisCircSmallHellos, isisLSPTLVSeq=isisLSPTLVSeq, isisSysStatFragsRebuilt=isisSysStatFragsRebuilt, isisLSPTLVEntry=isisLSPTLVEntry, isisLSPSeq=isisLSPSeq, isisSysZeroAgeLifetime=isisSysZeroAgeLifetime, isisPmMjTable=isisPmMjTable, isisMtCircManMtId=isisMtCircManMtId, isisLspAuthFailure=isisLspAuthFailure, DefaultMetric=DefaultMetric, isisPmMjMtId=isisPmMjMtId, isisSdMjJoinStatus=isisSdMjJoinStatus, isisLSPTLVTable=isisLSPTLVTable, isisSysLevelTEEnabled=isisSysLevelTEEnabled, isisSysStatLSPCount=isisSysStatLSPCount, isisCircExistState=isisCircExistState, isisPacketCountPSNP=isisPacketCountPSNP, isisISAdjEntry=isisISAdjEntry, 
isisCircLevelDesIS=isisCircLevelDesIS, isisSysID=isisSysID, isisSysMaxPathSplits=isisSysMaxPathSplits, isisCircLevelISPriority=isisCircLevelISPriority, isisPmSjMasterEntity=isisPmSjMasterEntity, isisMtCircManL1WideMetric=isisMtCircManL1WideMetric, isisPacketCountCSNP=isisPacketCountCSNP, isisCircLevelMinLSPArrivalInt=isisCircLevelMinLSPArrivalInt, isisSdEntStopAdjDuplIDs=isisSdEntStopAdjDuplIDs, isisSysAdminState=isisSysAdminState, isisCircInitFails=isisCircInitFails, isisRejectedAdjacency=isisRejectedAdjacency, isisISAdjTable=isisISAdjTable, isisCircIPAddrAddressType=isisCircIPAddrAddressType, isisCircLevelTable=isisCircLevelTable, isisCircMaxAreaAddrMismatches=isisCircMaxAreaAddrMismatches, isisRouterEntry=isisRouterEntry, isisIPRANextHopType=isisIPRANextHopType, isisCircLevelAuthUser=isisCircLevelAuthUser, isisISAdjNeighSNPAAddress=isisISAdjNeighSNPAAddress, isisPmMjOperStatus=isisPmMjOperStatus, isisNotificationMtId=isisNotificationMtId, isisIPRAType=isisIPRAType, isisISAdjAreaAddrIndex=isisISAdjAreaAddrIndex, isisSdObjects=isisSdObjects, isisCircAuthTypeFails=isisCircAuthTypeFails, isisSysIPv6TERouterID=isisSysIPv6TERouterID, isisCircType=isisCircType, IsisAdjLevel=IsisAdjLevel, isisIPRAFullMetric=isisIPRAFullMetric, isisSysHostName=isisSysHostName, isisSummAddrMtId=isisSummAddrMtId, isisSysStatLevel=isisSysStatLevel, isisSummAddress=isisSummAddress, isisISAdjMtSuppMtId=isisISAdjMtSuppMtId, isisSysUnresBwidthPcntge=isisSysUnresBwidthPcntge, isisSysOperStatus=isisSysOperStatus, isisRedistributeAddrAddress=isisRedistributeAddrAddress, isisManAreaAddrEntry=isisManAreaAddrEntry, isisSysLevelMinLSPGenInt=isisSysLevelMinLSPGenInt, isisSysStatSeqNumSkips=isisSysStatSeqNumSkips, isisSysRtmPurgeTime=isisSysRtmPurgeTime, isisCircIPAddrAddress=isisCircIPAddrAddress, MetricStyle=MetricStyle, isisCircuitCounterEntry=isisCircuitCounterEntry, isisSysStatPurgesIniRemExp=isisSysStatPurgesIniRemExp, isisSysStatPurgesInitiated=isisSysStatPurgesInitiated, 
isisSysLevelTable=isisSysLevelTable, isisSysStatCSNPError=isisSysStatCSNPError, isisIPRAAdminState=isisIPRAAdminState, isisISAdjMtSuppLocalSupport=isisISAdjMtSuppLocalSupport, isisAdjState=isisAdjState, isisHelloAuthFailure=isisHelloAuthFailure, isisSysNumCksumsPending=isisSysNumCksumsPending, IsisPDUHeader=IsisPDUHeader, isisISAdjIndex=isisISAdjIndex, FullMetric=FullMetric, isisSequenceNumberSkip=isisSequenceNumberSkip, isisSysStatCorrLSPs=isisSysStatCorrLSPs, isisMtCircStatusEntry=isisMtCircStatusEntry, isisISAdjHoldTimer=isisISAdjHoldTimer, isisISAdjNbrExtendedCircID=isisISAdjNbrExtendedCircID, isisISAdjNeighSysID=isisISAdjNeighSysID, isisSysMaxLSPGenInt=isisSysMaxLSPGenInt, isisDatabaseOverload=isisDatabaseOverload, isisSysLevel=isisSysLevel, isisSysStatLSPDbaseOloads=isisSysStatLSPDbaseOloads, isisSysMTUSizePcntge=isisSysMTUSizePcntge, IsisSdInterfaceId=IsisSdInterfaceId, isisSysLevelIPv6TEEnabled=isisSysLevelIPv6TEEnabled, isisCircAdjChanges=isisCircAdjChanges, isisNotificationEntry=isisNotificationEntry, isisSysCalcPauseFreq=isisSysCalcPauseFreq, isisSysCalcSoonAfterCircChange=isisSysCalcSoonAfterCircChange, isisSysStatLSPRexmits=isisSysStatLSPRexmits, isisSysType=isisSysType, isisISAdjState=isisISAdjState, isisPmMjJoinStatus=isisPmMjJoinStatus, isisCircIfIndex=isisCircIfIndex, isisLSPGroup=isisLSPGroup, isisIPRATable=isisIPRATable, isisIPRANextHop=isisIPRANextHop, isisLSPIDHostname=isisLSPIDHostname, isisCircSdEntityIndex=isisCircSdEntityIndex, isisCircPhysicalAddress=isisCircPhysicalAddress, isisIPRAMetric=isisIPRAMetric, isisDCAdditionGroup=isisDCAdditionGroup, isisCircIDFieldLenMismatches=isisCircIDFieldLenMismatches, isisAdvancedCompliance=isisAdvancedCompliance, isisMtCircManOperState=isisMtCircManOperState, isisCircProtSupported=isisCircProtSupported, isisCircExtDomain=isisCircExtDomain, isisCircLevelEntry=isisCircLevelEntry, isisPmSjTable=isisPmSjTable, isisRouterTable=isisRouterTable)
| 163.169388
| 10,882
| 0.786587
|
4a109c3263b87f4410d949343dc5e48ad36945b9
| 7,056
|
py
|
Python
|
ask-smapi-model/ask_smapi_model/v1/audit_logs/request_filters.py
|
Signal-Kinetics/alexa-apis-for-python
|
abb8d3dce18a5510c48b215406ed36c024f01495
|
[
"Apache-2.0"
] | null | null | null |
ask-smapi-model/ask_smapi_model/v1/audit_logs/request_filters.py
|
Signal-Kinetics/alexa-apis-for-python
|
abb8d3dce18a5510c48b215406ed36c024f01495
|
[
"Apache-2.0"
] | null | null | null |
ask-smapi-model/ask_smapi_model/v1/audit_logs/request_filters.py
|
Signal-Kinetics/alexa-apis-for-python
|
abb8d3dce18a5510c48b215406ed36c024f01495
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union
from datetime import datetime
from ask_smapi_model.v1.audit_logs.resource_filter import ResourceFilterV1
from ask_smapi_model.v1.audit_logs.client_filter import ClientFilterV1
from ask_smapi_model.v1.audit_logs.operation_filter import OperationFilterV1
from ask_smapi_model.v1.audit_logs.requester_filter import RequesterFilterV1
class RequestFilters(object):
    """
    Request Filters for filtering audit logs.

    :param clients: List of Client IDs for filtering.
    :type clients: (optional) list[ask_smapi_model.v1.audit_logs.client_filter.ClientFilter]
    :param operations: Filters for a list of operation names and versions.
    :type operations: (optional) list[ask_smapi_model.v1.audit_logs.operation_filter.OperationFilter]
    :param resources: Filters for a list of resources and/or their types. See documentation for allowed types.
    :type resources: (optional) list[ask_smapi_model.v1.audit_logs.resource_filter.ResourceFilter]
    :param requesters:
    :type requesters: (optional) list[ask_smapi_model.v1.audit_logs.requester_filter.RequesterFilter]
    :param start_time: Sets the start time for this search. Any audit logs with timestamps after this time (inclusive) will be included in the response.
    :type start_time: (optional) datetime
    :param end_time: Sets the end time for this search. Any audit logs with timestamps before this time (exclusive) will be included in the result.
    :type end_time: (optional) datetime
    :param http_response_codes: Filters for HTTP response codes. For example, '200' or '503'
    :type http_response_codes: (optional) list[str]
    """
    # Maps attribute name -> declared type, used to drive serialization.
    deserialized_types = {
        'clients': 'list[ask_smapi_model.v1.audit_logs.client_filter.ClientFilter]',
        'operations': 'list[ask_smapi_model.v1.audit_logs.operation_filter.OperationFilter]',
        'resources': 'list[ask_smapi_model.v1.audit_logs.resource_filter.ResourceFilter]',
        'requesters': 'list[ask_smapi_model.v1.audit_logs.requester_filter.RequesterFilter]',
        'start_time': 'datetime',
        'end_time': 'datetime',
        'http_response_codes': 'list[str]'
    }  # type: Dict

    # Maps python attribute name -> wire (JSON) key.
    attribute_map = {
        'clients': 'clients',
        'operations': 'operations',
        'resources': 'resources',
        'requesters': 'requesters',
        'start_time': 'startTime',
        'end_time': 'endTime',
        'http_response_codes': 'httpResponseCodes'
    }  # type: Dict
    supports_multiple_types = False

    def __init__(self, clients=None, operations=None, resources=None, requesters=None, start_time=None, end_time=None, http_response_codes=None):
        # type: (Optional[List[ClientFilterV1]], Optional[List[OperationFilterV1]], Optional[List[ResourceFilterV1]], Optional[List[RequesterFilterV1]], Optional[datetime], Optional[datetime], Optional[List[str]]) -> None
        """Request Filters for filtering audit logs.

        :param clients: List of Client IDs for filtering.
        :type clients: (optional) list[ask_smapi_model.v1.audit_logs.client_filter.ClientFilter]
        :param operations: Filters for a list of operation names and versions.
        :type operations: (optional) list[ask_smapi_model.v1.audit_logs.operation_filter.OperationFilter]
        :param resources: Filters for a list of resources and/or their types. See documentation for allowed types.
        :type resources: (optional) list[ask_smapi_model.v1.audit_logs.resource_filter.ResourceFilter]
        :param requesters:
        :type requesters: (optional) list[ask_smapi_model.v1.audit_logs.requester_filter.RequesterFilter]
        :param start_time: Sets the start time for this search (inclusive).
        :type start_time: (optional) datetime
        :param end_time: Sets the end time for this search (exclusive).
        :type end_time: (optional) datetime
        :param http_response_codes: Filters for HTTP response codes. For example, '200' or '503'
        :type http_response_codes: (optional) list[str]
        """
        self.__discriminator_value = None  # type: str
        self.clients = clients
        self.operations = operations
        self.resources = resources
        self.requesters = requesters
        self.start_time = start_time
        self.end_time = end_time
        self.http_response_codes = http_response_codes

    def to_dict(self):
        # type: () -> Dict[str, object]
        """Returns the model properties as a dict."""
        def _serialize(item):
            # Model objects know how to serialize themselves; enums collapse
            # to their value; everything else passes through unchanged.
            if hasattr(item, "to_dict"):
                return item.to_dict()
            if isinstance(item, Enum):
                return item.value
            return item

        serialized = {}  # type: Dict
        for attr in self.deserialized_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                serialized[attr] = [_serialize(element) for element in value]
            elif isinstance(value, dict):
                serialized[attr] = {key: _serialize(val)
                                    for key, val in value.items()}
            else:
                serialized[attr] = _serialize(value)
        return serialized

    def to_str(self):
        # type: () -> str
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        # type: () -> str
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are equal."""
        if isinstance(other, RequestFilters):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are not equal."""
        return not self == other
| 45.522581
| 224
| 0.671627
|
4a109c9c1614774e3067e716f300377c9e54b0a7
| 24,167
|
py
|
Python
|
dopamine/agents/dqn/dqn_agent.py
|
szk9876/dopamine
|
7176aefbdaa880893bbde89350de7a9b42d81a31
|
[
"Apache-2.0"
] | null | null | null |
dopamine/agents/dqn/dqn_agent.py
|
szk9876/dopamine
|
7176aefbdaa880893bbde89350de7a9b42d81a31
|
[
"Apache-2.0"
] | null | null | null |
dopamine/agents/dqn/dqn_agent.py
|
szk9876/dopamine
|
7176aefbdaa880893bbde89350de7a9b42d81a31
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2018 The Dopamine Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compact implementation of a DQN agent."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
import random
from absl import logging
from dopamine.discrete_domains import atari_lib
from dopamine.replay_memory import circular_replay_buffer
import numpy as np
import tensorflow as tf
import gin.tf
# These are aliases which are used by other classes.
NATURE_DQN_OBSERVATION_SHAPE = atari_lib.NATURE_DQN_OBSERVATION_SHAPE
NATURE_DQN_DTYPE = atari_lib.NATURE_DQN_DTYPE
NATURE_DQN_STACK_SIZE = atari_lib.NATURE_DQN_STACK_SIZE
nature_dqn_network = atari_lib.NatureDQNNetwork
@gin.configurable
def linearly_decaying_epsilon(decay_period, step, warmup_steps, epsilon):
  """Returns the current epsilon for the agent's epsilon-greedy policy.

  Implements the Nature DQN schedule (Mnih et al., 2015): epsilon stays at
  1. until `warmup_steps` steps have been taken, then decays linearly from
  1. to `epsilon` over `decay_period` steps, and stays at `epsilon` from
  there on.

  Args:
    decay_period: float, the period over which epsilon is decayed.
    step: int, the number of training steps completed so far.
    warmup_steps: int, the number of steps taken before epsilon is decayed.
    epsilon: float, the final value to which to decay the epsilon parameter.

  Returns:
    A float, the current epsilon value computed according to the schedule.
  """
  remaining = decay_period + warmup_steps - step
  # The clip simultaneously enforces the warmup plateau (upper bound) and the
  # final floor (lower bound of 0 bonus).
  decay_bonus = np.clip((1.0 - epsilon) * remaining / decay_period,
                        0., 1. - epsilon)
  return epsilon + decay_bonus
@gin.configurable
def identity_epsilon(unused_decay_period, unused_step, unused_warmup_steps,
                     epsilon):
  """Returns `epsilon` unchanged; a no-decay drop-in for `epsilon_fn`."""
  return epsilon
@gin.configurable
class DQNAgent(object):
"""An implementation of the DQN agent."""
def __init__(self,
sess,
num_actions,
observation_shape=atari_lib.NATURE_DQN_OBSERVATION_SHAPE,
observation_dtype=atari_lib.NATURE_DQN_DTYPE,
stack_size=atari_lib.NATURE_DQN_STACK_SIZE,
network=atari_lib.NatureDQNNetwork,
gamma=0.99,
update_horizon=1,
min_replay_history=20000,
update_period=4,
target_update_period=8000,
epsilon_fn=linearly_decaying_epsilon,
epsilon_train=0.01,
epsilon_eval=0.001,
epsilon_decay_period=250000,
tf_device='/cpu:*',
eval_mode=False,
use_staging=False,
max_tf_checkpoints_to_keep=4,
optimizer=tf.compat.v1.train.AdamOptimizer(
learning_rate=6.25e-5, beta1=0.9,
beta2=0.999, epsilon=1.5e-4, centered=False
),
summary_writer=None,
summary_writing_frequency=500,
allow_partial_reload=False,
K=1,
reg_weight=0.0,
noise_stddev=0.0,
gradient_input='state'):
"""Initializes the agent and constructs the components of its graph.
Args:
sess: `tf.compat.v1.Session`, for executing ops.
num_actions: int, number of actions the agent can take at any state.
observation_shape: tuple of ints describing the observation shape.
observation_dtype: tf.DType, specifies the type of the observations. Note
that if your inputs are continuous, you should set this to tf.float32.
stack_size: int, number of frames to use in state stack.
network: tf.Keras.Model, expecting 2 parameters: num_actions,
network_type. A call to this object will return an instantiation of the
network provided. The network returned can be run with different inputs
to create different outputs. See
dopamine.discrete_domains.atari_lib.NatureDQNNetwork as an example.
gamma: float, discount factor with the usual RL meaning.
update_horizon: int, horizon at which updates are performed, the 'n' in
n-step update.
min_replay_history: int, number of transitions that should be experienced
before the agent begins training its value function.
update_period: int, period between DQN updates.
target_update_period: int, update period for the target network.
epsilon_fn: function expecting 4 parameters:
(decay_period, step, warmup_steps, epsilon). This function should return
the epsilon value used for exploration during training.
epsilon_train: float, the value to which the agent's epsilon is eventually
decayed during training.
epsilon_eval: float, epsilon used when evaluating the agent.
epsilon_decay_period: int, length of the epsilon decay schedule.
tf_device: str, Tensorflow device on which the agent's graph is executed.
eval_mode: bool, True for evaluation and False for training.
use_staging: bool, when True use a staging area to prefetch the next
training batch, speeding training up by about 30%.
max_tf_checkpoints_to_keep: int, the number of TensorFlow checkpoints to
keep.
optimizer: `tf.compat.v1.train.Optimizer`, for training the value
function.
summary_writer: SummaryWriter object for outputting training statistics.
Summary writing disabled if set to None.
summary_writing_frequency: int, frequency with which summaries will be
written. Lower values will result in slower training.
allow_partial_reload: bool, whether we allow reloading a partial agent
(for instance, only the network parameters).
K: Number of states to sample for the regularization term.
reg_weight: The weight on the regularization term.
noise_stddev: The standard deviation for the Gaussian noise used to sample states.
gradient_input: 'state' or 'penultimate'
tf.compat.v1.train.RMSPropOptimizer(
learning_rate=0.00025,
decay=0.95,
momentum=0.0,
epsilon=0.00001,
centered=True)
"""
assert isinstance(observation_shape, tuple)
logging.info('Creating %s agent with the following parameters:',
self.__class__.__name__)
logging.info('\t gamma: %f', gamma)
logging.info('\t update_horizon: %f', update_horizon)
logging.info('\t min_replay_history: %d', min_replay_history)
logging.info('\t update_period: %d', update_period)
logging.info('\t target_update_period: %d', target_update_period)
logging.info('\t epsilon_train: %f', epsilon_train)
logging.info('\t epsilon_eval: %f', epsilon_eval)
logging.info('\t epsilon_decay_period: %d', epsilon_decay_period)
logging.info('\t tf_device: %s', tf_device)
logging.info('\t use_staging: %s', use_staging)
logging.info('\t optimizer: %s', optimizer)
logging.info('\t max_tf_checkpoints_to_keep: %d',
max_tf_checkpoints_to_keep)
self.num_actions = num_actions
self.observation_shape = tuple(observation_shape)
self.observation_dtype = observation_dtype
self.stack_size = stack_size
self.network = network
self.gamma = gamma
self.update_horizon = update_horizon
self.cumulative_gamma = math.pow(gamma, update_horizon)
self.min_replay_history = min_replay_history
self.target_update_period = target_update_period
self.epsilon_fn = epsilon_fn
self.epsilon_train = epsilon_train
self.epsilon_eval = epsilon_eval
self.epsilon_decay_period = epsilon_decay_period
self.update_period = update_period
self.eval_mode = eval_mode
self.training_steps = 0
self.optimizer = optimizer
self.summary_writer = summary_writer
self.summary_writing_frequency = summary_writing_frequency
self.allow_partial_reload = allow_partial_reload
self.K = K
self.reg_weight = reg_weight
self.noise_stddev = noise_stddev
self.gradient_input = gradient_input
with tf.device(tf_device):
# Create a placeholder for the state input to the DQN network.
# The last axis indicates the number of consecutive frames stacked.
state_shape = (1,) + self.observation_shape + (stack_size,)
self.state = np.zeros(state_shape)
self.state_ph = tf.compat.v1.placeholder(
self.observation_dtype, state_shape, name='state_ph')
self._replay = self._build_replay_buffer(use_staging)
self._build_networks()
self._train_op = self._build_train_op()
self._sync_qt_ops = self._build_sync_op()
if self.summary_writer is not None:
# All tf.summaries should have been defined prior to running this.
self._merged_summaries = tf.compat.v1.summary.merge_all()
self._sess = sess
var_map = atari_lib.maybe_transform_variable_names(
tf.compat.v1.global_variables())
self._saver = tf.compat.v1.train.Saver(
var_list=var_map, max_to_keep=max_tf_checkpoints_to_keep)
# Variables to be initialized by the agent once it interacts with the
# environment.
self._observation = None
self._last_observation = None
def _create_network(self, name):
"""Builds the convolutional network used to compute the agent's Q-values.
Args:
name: str, this name is passed to the tf.keras.Model and used to create
variable scope under the hood by the tf.keras.Model.
Returns:
network: tf.keras.Model, the network instantiated by the Keras model.
"""
network = self.network(self.num_actions, name=name)
return network
  def _build_networks(self):
    """Builds the Q-value network computations needed for acting and training.

    These are:
      self.online_convnet: For computing the current state's Q-values.
      self.target_convnet: For computing the next state's target Q-values.
      self._net_outputs: The actual Q-values.
      self._q_argmax: The action maximizing the current state's Q-values.
      self._replay_net_outputs: The replayed states' Q-values.
      self._replay_next_target_net_outputs: The replayed next states' target
      Q-values (see Mnih et al., 2015 for details).
    """
    # _network_template instantiates the model and returns the network object.
    # The network object can be used to generate different outputs in the graph.
    # At each call to the network, the parameters will be reused.
    self.online_convnet = self._create_network(name='Online')
    # The target network's weights change only via _build_sync_op, never via
    # the optimizer.
    self.target_convnet = self._create_network(name='Target')
    self._net_outputs = self.online_convnet(self.state_ph)
    # TODO(bellemare): Ties should be broken. They are unlikely to happen when
    # using a deep network, but may affect performance with a linear
    # approximation scheme.
    self._q_argmax = tf.argmax(self._net_outputs.q_values, axis=1)[0]
    self._replay_net_outputs = self.online_convnet(self._replay.states)
    self._replay_next_target_net_outputs = self.target_convnet(
        self._replay.next_states)
  def _build_replay_buffer(self, use_staging):
    """Creates the replay buffer used by the agent.

    Args:
      use_staging: bool, if True, uses a staging area to prefetch data for
        faster training.

    Returns:
      A WrapperReplayBuffer object.
    """
    return circular_replay_buffer.WrappedReplayBuffer(
        observation_shape=self.observation_shape,
        stack_size=self.stack_size,
        use_staging=use_staging,
        update_horizon=self.update_horizon,
        gamma=self.gamma,
        # The buffer stores numpy arrays, so convert the TF dtype.
        observation_dtype=self.observation_dtype.as_numpy_dtype)
def _build_target_q_op(self):
"""Build an op used as a target for the Q-value.
Returns:
target_q_op: An op calculating the Q-value.
"""
# Get the maximum Q-value across the actions dimension.
replay_next_qt_max = tf.reduce_max(
self._replay_next_target_net_outputs.q_values, 1)
# Calculate the Bellman target value.
# Q_t = R_t + \gamma^N * Q'_t+1
# where,
# Q'_t+1 = \argmax_a Q(S_t+1, a)
# (or) 0 if S_t is a terminal state,
# and
# N is the update horizon (by default, N=1).
return self._replay.rewards + self.cumulative_gamma * replay_next_qt_max * (
1. - tf.cast(self._replay.terminals, tf.float32))
def _build_reg_op(self):
noisy_outputs = self.online_convnet.call_reg(self._replay.states, self.K, self.noise_stddev)
noisy_states = noisy_outputs.noisy_states
noisy_q_values = noisy_outputs.q_values
noisy_penultimate_out = noisy_outputs.penultimate_output
# Select random q-value for each state.
# Shape of batch_indices: self.K x 1.
batch_indices = tf.cast(tf.range(self.K)[:, None], tf.int64)
num_q_values = noisy_q_values.shape[-1]
selected_q_value_indices = tf.random.uniform(shape=(self.K, 1), minval=0, maxval=num_q_values, dtype=tf.int64)
batch_selected_q_value_indices = tf.concat([batch_indices, selected_q_value_indices], axis=1)
selected_q_values = tf.gather_nd(noisy_q_values, batch_selected_q_value_indices)
# Compute the gradient of each network output with respect to each state input.
reg_loss = tf.zeros(shape=(), dtype=tf.dtypes.float32)
for k in range(self.K):
if self.gradient_input == 'state':
gradients = tf.expand_dims(tf.gradients(selected_q_values[k], [noisy_states], stop_gradients=noisy_states)[0][k], axis=0)
elif self.gradient_input == 'penultimate':
gradients = tf.expand_dims(tf.gradients(selected_q_values[k], [noisy_penultimate_out])[0][k], axis=0)
gradients_reshaped = tf.compat.v1.layers.flatten(gradients)
reg_loss += tf.squeeze(tf.reduce_sum(tf.square(gradients_reshaped), axis=1))
reg_loss /= self.K
return reg_loss
  def _build_train_op(self):
    """Builds a training op.

    Returns:
      train_op: An op performing one step of training from replay data.
    """
    # Q(s, a) for the action actually taken in each replayed transition.
    replay_action_one_hot = tf.one_hot(
        self._replay.actions, self.num_actions, 1., 0., name='action_one_hot')
    replay_chosen_q = tf.reduce_sum(
        self._replay_net_outputs.q_values * replay_action_one_hot,
        axis=1,
        name='replay_chosen_q')
    # stop_gradient: the target network side of the Bellman error is treated
    # as a constant.
    target = tf.stop_gradient(self._build_target_q_op())
    huber_loss = tf.compat.v1.losses.huber_loss(
        target, replay_chosen_q, reduction=tf.losses.Reduction.NONE)
    if self.reg_weight > 0:
      reg_loss = self._build_reg_op()
      loss = tf.reduce_mean(huber_loss) + self.reg_weight * reg_loss
    else:
      # Per-sample (unreduced) loss; the reduce_mean below handles both cases.
      loss = huber_loss
    if self.summary_writer is not None:
      with tf.compat.v1.variable_scope('Losses'):
        tf.compat.v1.summary.scalar('HuberLoss', tf.reduce_mean(huber_loss))
        if self.reg_weight > 0:
          tf.compat.v1.summary.scalar('RegLoss', reg_loss)
        tf.compat.v1.summary.scalar('Loss', tf.reduce_mean(loss))
    return self.optimizer.minimize(tf.reduce_mean(loss))
  def _build_sync_op(self):
    """Builds ops for assigning weights from online to target network.

    Returns:
      ops: A list of ops assigning weights from online to target network.
    """
    # Get trainable variables from online and target DQNs
    sync_qt_ops = []
    # The 'Online'/'Target' sub-scopes are nested under whatever name scope
    # the agent's graph was built in.
    scope = tf.compat.v1.get_default_graph().get_name_scope()
    trainables_online = tf.compat.v1.get_collection(
        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES,
        scope=os.path.join(scope, 'Online'))
    trainables_target = tf.compat.v1.get_collection(
        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES,
        scope=os.path.join(scope, 'Target'))
    # NOTE(review): relies on both collections listing variables in the same
    # order — holds because both networks are created by the same code path.
    for (w_online, w_target) in zip(trainables_online, trainables_target):
      # Assign weights from online to target network.
      sync_qt_ops.append(w_target.assign(w_online, use_locking=True))
    return sync_qt_ops
  def begin_episode(self, observation):
    """Returns the agent's first action for this episode.

    Args:
      observation: numpy array, the environment's initial observation.

    Returns:
      int, the selected action.
    """
    self._reset_state()
    self._record_observation(observation)
    if not self.eval_mode:
      # No transition to store yet (no previous action/reward), but training
      # still ticks so schedules stay in sync with environment steps.
      self._train_step()
    self.action = self._select_action()
    return self.action
  def step(self, reward, observation):
    """Records the most recent transition and returns the agent's next action.

    We store the observation of the last time step since we want to store it
    with the reward.

    Args:
      reward: float, the reward received from the agent's most recent action.
      observation: numpy array, the most recent observation.

    Returns:
      int, the selected action.
    """
    self._last_observation = self._observation
    self._record_observation(observation)
    if not self.eval_mode:
      # The transition for the *previous* action is only complete now that
      # its reward is known.
      self._store_transition(self._last_observation, self.action, reward, False)
      self._train_step()
    self.action = self._select_action()
    return self.action
  def end_episode(self, reward):
    """Signals the end of the episode to the agent.

    We store the observation of the current time step, which is the last
    observation of the episode.

    Args:
      reward: float, the last reward from the environment.
    """
    if not self.eval_mode:
      # Terminal transition: is_terminal=True zeroes the bootstrap target.
      self._store_transition(self._observation, self.action, reward, True)
def _select_action(self):
"""Select an action from the set of available actions.
Chooses an action randomly with probability self._calculate_epsilon(), and
otherwise acts greedily according to the current Q-value estimates.
Returns:
int, the selected action.
"""
if self.eval_mode:
epsilon = self.epsilon_eval
else:
epsilon = self.epsilon_fn(
self.epsilon_decay_period,
self.training_steps,
self.min_replay_history,
self.epsilon_train)
if random.random() <= epsilon:
# Choose a random action with probability epsilon.
return random.randint(0, self.num_actions - 1)
else:
# Choose the action with highest Q-value at the current state.
return self._sess.run(self._q_argmax, {self.state_ph: self.state})
  def _train_step(self):
    """Runs a single training step.

    Runs a training op if both:
      (1) A minimum number of frames have been added to the replay buffer.
      (2) `training_steps` is a multiple of `update_period`.

    Also, syncs weights from online to target network if training steps is a
    multiple of target update period.
    """
    # Run a train op at the rate of self.update_period if enough training steps
    # have been run. This matches the Nature DQN behaviour.
    if self._replay.memory.add_count > self.min_replay_history:
      if self.training_steps % self.update_period == 0:
        self._sess.run(self._train_op)
        if (self.summary_writer is not None and
            self.training_steps > 0 and
            self.training_steps % self.summary_writing_frequency == 0):
          summary = self._sess.run(self._merged_summaries)
          self.summary_writer.add_summary(summary, self.training_steps)
      if self.training_steps % self.target_update_period == 0:
        self._sess.run(self._sync_qt_ops)
    # The counter advances every environment step, including during warm-up.
    self.training_steps += 1
def _record_observation(self, observation):
"""Records an observation and update state.
Extracts a frame from the observation vector and overwrites the oldest
frame in the state buffer.
Args:
observation: numpy array, an observation from the environment.
"""
# Set current observation. We do the reshaping to handle environments
# without frame stacking.
self._observation = np.reshape(observation, self.observation_shape)
# Swap out the oldest frame with the current frame.
self.state = np.roll(self.state, -1, axis=-1)
self.state[0, ..., -1] = self._observation
  def _store_transition(self, last_observation, action, reward, is_terminal):
    """Stores an experienced transition.

    Executes a tf session and executes replay buffer ops in order to store the
    following tuple in the replay buffer:
      (last_observation, action, reward, is_terminal).

    Pedantically speaking, this does not actually store an entire transition
    since the next state is recorded on the following time step.

    Args:
      last_observation: numpy array, last observation.
      action: int, the action taken.
      reward: float, the reward.
      is_terminal: bool, indicating if the current state is a terminal state.
    """
    # Thin delegation; the replay buffer handles frame stacking internally.
    self._replay.add(last_observation, action, reward, is_terminal)
  def _reset_state(self):
    """Resets the agent state by filling it with zeros."""
    # In-place fill keeps the same ndarray object the feed dict references.
    self.state.fill(0)
  def bundle_and_checkpoint(self, checkpoint_dir, iteration_number):
    """Returns a self-contained bundle of the agent's state.

    This is used for checkpointing. It will return a dictionary containing all
    non-TensorFlow objects (to be saved into a file by the caller), and it saves
    all TensorFlow objects into a checkpoint file.

    Args:
      checkpoint_dir: str, directory where TensorFlow objects will be saved.
      iteration_number: int, iteration number to use for naming the checkpoint
        file.

    Returns:
      A dict containing additional Python objects to be checkpointed by the
        experiment. If the checkpoint directory does not exist, returns None.
    """
    if not tf.io.gfile.exists(checkpoint_dir):
      return None
    # Call the Tensorflow saver to checkpoint the graph.
    self._saver.save(
        self._sess,
        os.path.join(checkpoint_dir, 'tf_ckpt'),
        global_step=iteration_number)
    # Checkpoint the out-of-graph replay buffer.
    self._replay.save(checkpoint_dir, iteration_number)
    # Only the state stack and step counter need bundling; everything else is
    # reconstructed from the constructor or the TF checkpoint.
    bundle_dictionary = {}
    bundle_dictionary['state'] = self.state
    bundle_dictionary['training_steps'] = self.training_steps
    return bundle_dictionary
  def unbundle(self, checkpoint_dir, iteration_number, bundle_dictionary):
    """Restores the agent from a checkpoint.

    Restores the agent's Python objects to those specified in bundle_dictionary,
    and restores the TensorFlow objects to those specified in the
    checkpoint_dir. If the checkpoint_dir does not exist, will not reset the
    agent's state.

    Args:
      checkpoint_dir: str, path to the checkpoint saved by tf.Save.
      iteration_number: int, checkpoint version, used when restoring the replay
        buffer.
      bundle_dictionary: dict, containing additional Python objects owned by
        the agent.

    Returns:
      bool, True if unbundling was successful.
    """
    try:
      # self._replay.load() will throw a NotFoundError if it does not find all
      # the necessary files.
      self._replay.load(checkpoint_dir, iteration_number)
    except tf.errors.NotFoundError:
      if not self.allow_partial_reload:
        # If we don't allow partial reloads, we will return False.
        return False
      logging.warning('Unable to reload replay buffer!')
    if bundle_dictionary is not None:
      # Restore only attributes the agent already defines; unknown keys in the
      # bundle are ignored.
      for key in self.__dict__:
        if key in bundle_dictionary:
          self.__dict__[key] = bundle_dictionary[key]
    elif not self.allow_partial_reload:
      return False
    else:
      logging.warning("Unable to reload the agent's parameters!")
    # Restore the agent's TensorFlow graph.
    self._saver.restore(self._sess,
                        os.path.join(checkpoint_dir,
                                     'tf_ckpt-{}'.format(iteration_number)))
    return True
| 40.211314
| 129
| 0.703645
|
4a109e75b30c9ffd59ff2445e9567bc5bf2d6287
| 1,384
|
py
|
Python
|
test/ts4/abstract/common.py
|
tonred/auction
|
a5660d3d901708e28a84d7af79aa2c711d7d759c
|
[
"Apache-2.0"
] | 1
|
2021-05-13T20:25:30.000Z
|
2021-05-13T20:25:30.000Z
|
test/ts4/abstract/common.py
|
tonred/auction
|
a5660d3d901708e28a84d7af79aa2c711d7d759c
|
[
"Apache-2.0"
] | null | null | null |
test/ts4/abstract/common.py
|
tonred/auction
|
a5660d3d901708e28a84d7af79aa2c711d7d759c
|
[
"Apache-2.0"
] | 1
|
2022-01-12T10:35:26.000Z
|
2022-01-12T10:35:26.000Z
|
from tonos_ts4 import ts4
from abstract.base import BaseAuctionTest
from utils.phase import Phase
class CommonAuctionTest(BaseAuctionTest):
    """Shared phase/time tests for the common (open/close) auction."""

    START_TIME = 10
    OPEN_DURATION = 40

    def test_open_time(self):
        """The contract getter must report the configured open window."""
        open_time = self.contract.call_getter('getOpenTime')
        open_time_expected = {
            'startTime': self.START_TIME,
            'finishTime': self.START_TIME + self.OPEN_DURATION,
        }
        self.assertEqual(open_time_expected, open_time, 'Open time is wrong')

    def test_phases(self):
        """Walk the emulated clock through WAIT -> OPEN -> CLOSE."""
        self.assertEqual(Phase.WAIT, self._phase(), 'Phase must be WAIT')
        self._setup_phase_time(Phase.OPEN, update=True)
        self.assertEqual(Phase.OPEN, self._phase(), 'Phase must be OPEN')
        self._setup_phase_time(Phase.CLOSE, update=True)
        self.assertEqual(Phase.CLOSE, self._phase(), 'Phase must be CLOSE')

    def _setup_phase_time(self, phase: Phase, update: bool = False):
        """Move the emulated clock into the requested phase.

        Args:
            phase: target phase; CONFIRMATION is invalid for a common auction.
            update: when True, refresh cached contract state afterwards.
        """
        # Consistency fix: the last branch compared against `phase.CLOSE`
        # (member lookup through the instance) — it worked, but obscured
        # intent; all branches now use the `Phase` enum directly, and the
        # mutually-exclusive checks are an elif chain.
        if phase == Phase.WAIT:
            ts4.core.set_now(self.START_TIME - 1)
        elif phase == Phase.OPEN:
            ts4.core.set_now(self.START_TIME + self.OPEN_DURATION // 2)
        elif phase == Phase.CONFIRMATION:
            raise Exception('Undefined phase for "CommonAuction"')
        elif phase == Phase.CLOSE:
            ts4.core.set_now(self.START_TIME + self.OPEN_DURATION + 1)
        if update:
            self._update()
| 34.6
| 77
| 0.653902
|
4a109eda8fafdbb2313fd6b82b0bdd80f618301d
| 162
|
py
|
Python
|
src/robusta/runner/not_found_exception.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 273
|
2021-12-28T20:48:48.000Z
|
2022-03-31T16:03:13.000Z
|
src/robusta/runner/not_found_exception.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 103
|
2022-01-10T11:45:47.000Z
|
2022-03-31T16:31:11.000Z
|
src/robusta/runner/not_found_exception.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 35
|
2021-12-30T15:30:14.000Z
|
2022-03-28T11:43:57.000Z
|
from requests import RequestException
# Subclasses requests' RequestException, so existing
# `except RequestException` handlers still catch it.
class NotFoundException(RequestException):
    """The resource was not found, and the operation could not be completed"""
| 27
| 78
| 0.790123
|
4a109ee9bd01c9b75eb2cd26c53a2496907b7cfe
| 3,274
|
py
|
Python
|
base/gui.py
|
jandecaluwe/ca
|
33a763a8a8d181801d06fc4f12250173de29a8ca
|
[
"MIT"
] | 2
|
2015-03-08T04:39:51.000Z
|
2016-07-25T21:30:44.000Z
|
base/gui.py
|
jandecaluwe/ca
|
33a763a8a8d181801d06fc4f12250173de29a8ca
|
[
"MIT"
] | null | null | null |
base/gui.py
|
jandecaluwe/ca
|
33a763a8a8d181801d06fc4f12250173de29a8ca
|
[
"MIT"
] | null | null | null |
from Tkinter import *
import colorsys
from ca.base.universe import Universe
class CellGui(Label):
    """Tk label displaying one cell; its background color encodes the cell's age."""
    def __init__(self, parent, cell, colormap):
        Label.__init__(self, parent, relief="raised", width=2, borderwidth=1)
        self.cell = cell
        self.colormap = colormap
        # Left-click ages the cell interactively (e.g. to seed a pattern).
        self.bind("<Button-1>", self.make_older)
        self.display()
    def make_older(self, event):
        # Click handler: age the underlying cell, then repaint.
        self.cell.make_older()
        self.display()
    def display(self):
        # Map the cell's current age to a background color.
        self.config(bg=self.colormap[self.cell.age])
def tk_colormap(states, colorrange):
colormap = [None] * states
colormap[0] = "black"
colormap[1] = "#%02x%02x%02x" % (255, 1, 1)
if states > 2:
step = colorrange / (states-2)
for i in range(2, states):
h = step * (i-1)
r, g, b = colorsys.hsv_to_rgb(h, 1, 1)
r, g, b = int(r*255), int(g*255), int(b*255)
colormap[i] = "#%02x%02x%02x" % (r, g, b)
return colormap
class GridGUI(object):
    """Grid of CellGui widgets wired to a Universe; drives the step/run loop."""
    def __init__(self, parent, cellType, rows, cols, steps, colorrange=0.7):
        """Build the widget grid.

        Args:
            parent: Tk container widget.
            cellType: cell class; `cellType.states` sizes the colormap.
            rows, cols: grid dimensions.
            steps: Tk IntVar used as the generation counter.
            colorrange: upper HSV hue bound for the age colormap.
        """
        self.parent = parent
        self.rows = rows
        self.cols = cols
        self.steps = steps
        # Bug fix: the Universe used to be constructed twice and the first
        # instance thrown away; build it exactly once.
        self.universe = universe = Universe(cellType, rows, cols)
        colormap = tk_colormap(cellType.states, colorrange)
        self.cells = []
        for r in range(rows):
            rowcells = []
            for c in range(cols):
                # Fixed-size frame per cell; pack_propagate(0) stops the
                # label from resizing it.
                fr_cell = Frame(parent, width=10, height=10)
                fr_cell.pack_propagate(0)
                fr_cell.grid(row=r, column=c)
                cell = CellGui(fr_cell, universe.grid[r][c], colormap)
                cell.pack(fill=BOTH, expand=1)
                rowcells.append(cell)
            self.cells.append(rowcells)
    def step(self):
        """Advance the universe one generation and refresh the display."""
        self.universe.step()
        self.steps.set(self.steps.get() + 1)
        self.display()
    def clear(self):
        """Reset the universe and the generation counter."""
        self.universe.clear()
        self.steps.set(0)
        self.display()
    def run(self):
        """Start the animation loop."""
        self.running = 1
        self.do_run()
    def do_run(self):
        # Re-schedules itself every 50 ms until stop() clears the flag.
        self.step()
        if self.running:
            self.parent.after(50, self.do_run)
    def stop(self):
        """Stop the animation loop after the current step."""
        self.running = 0
    def display(self):
        """Repaint every cell widget."""
        for r in range(self.rows):
            for c in range(self.cols):
                self.cells[r][c].display()
def play(cellType, rows=48, cols=64):
root = Tk()
frame = Frame(root)
frame.pack()
steps = IntVar()
grid = GridGUI(frame, cellType, rows, cols, steps)
bottomFrame = Frame(root)
bottomFrame.pack(side=BOTTOM)
buttonStep = Button(bottomFrame, text="Step", command=grid.step)
buttonStep.pack(side=LEFT)
buttonClear = Button(bottomFrame, text="Clear", command=grid.clear)
buttonClear.pack(side=LEFT, after=buttonStep)
buttonRun = Button(bottomFrame, text="Run", command=grid.run)
buttonRun.pack(side=LEFT, after=buttonClear)
buttonStop = Button(bottomFrame, text="Stop", command=grid.stop)
buttonStop.pack(side=LEFT, after=buttonRun)
labelSteps = Label(bottomFrame, textvariable=steps)
labelSteps.pack(side=LEFT, after=buttonStop)
root.mainloop()
| 30.314815
| 77
| 0.587049
|
4a109f54c53b7dd059210fa97e248ec7fb176f85
| 1,035
|
py
|
Python
|
build/_downloads/dad14b12bc3f8f308412fda21618b628/two_layer_net_numpy.py
|
ScorpioDoctor/antares02
|
631b817d2e98f351d1173b620d15c4a5efed11da
|
[
"BSD-3-Clause"
] | null | null | null |
build/_downloads/dad14b12bc3f8f308412fda21618b628/two_layer_net_numpy.py
|
ScorpioDoctor/antares02
|
631b817d2e98f351d1173b620d15c4a5efed11da
|
[
"BSD-3-Clause"
] | null | null | null |
build/_downloads/dad14b12bc3f8f308412fda21618b628/two_layer_net_numpy.py
|
ScorpioDoctor/antares02
|
631b817d2e98f351d1173b620d15c4a5efed11da
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
热身: numpy
--------------
一个完全连接的ReLU网络,只有一个隐藏层,没有偏置,最小化欧氏误差训练从x预测y。
此实现使用numpy手动计算向前传递、损失和反向传递。
numpy数组是一个通用的n维数组;它不知道任何关于深度学习、梯度或计算图的知识,它只是执行通用数字计算的一种方法。
"""
import numpy as np
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# 产生随机输入和输出数据
x = np.random.randn(N, D_in)
y = np.random.randn(N, D_out)
# 随机初始化权重
w1 = np.random.randn(D_in, H)
w2 = np.random.randn(H, D_out)
learning_rate = 1e-6
for t in range(500):
# 前向传递: 计算 predicted y
h = x.dot(w1)
h_relu = np.maximum(h, 0)
y_pred = h_relu.dot(w2)
# 计算和输出损失
loss = np.square(y_pred - y).sum()
print(t, loss)
# 反向传播(Backprop) 去计算 w1 和 w2 相对于loss的梯度
grad_y_pred = 2.0 * (y_pred - y)
grad_w2 = h_relu.T.dot(grad_y_pred)
grad_h_relu = grad_y_pred.dot(w2.T)
grad_h = grad_h_relu.copy()
grad_h[h < 0] = 0
grad_w1 = x.T.dot(grad_h)
# 更新权重
w1 -= learning_rate * grad_w1
w2 -= learning_rate * grad_w2
| 21.5625
| 58
| 0.646377
|
4a109f7b5508f50e52f108016556109a39e36468
| 1,339
|
py
|
Python
|
oui_lookup.py
|
bahbyega/arp-sniffer
|
fc1ce391d953aadf40e7417f98a1c4f908797782
|
[
"MIT"
] | null | null | null |
oui_lookup.py
|
bahbyega/arp-sniffer
|
fc1ce391d953aadf40e7417f98a1c4f908797782
|
[
"MIT"
] | null | null | null |
oui_lookup.py
|
bahbyega/arp-sniffer
|
fc1ce391d953aadf40e7417f98a1c4f908797782
|
[
"MIT"
] | null | null | null |
import os
import re
import time
from getpass import getuser
from typing import ByteString
from urllib.request import urlopen
USER_HOME = "~" + getuser()
OUI_FILE_STORE = os.path.join(os.path.expanduser(USER_HOME), ".oui-cache")
CACHE_TIME = 2592000 # 30 days update
IEEE_URL = "http://standards-oui.ieee.org/oui/oui.txt"
def strip_mac(mac: ByteString) -> str:
"""
Clean MAC address byte string and convert it to str
"""
return "-".join(mac.decode("ascii").split("-")[:3])
def update_cached_oui():
"""
Download oui file and update its local version
"""
print("Updating oui_file...")
with open(OUI_FILE_STORE, "wb") as oui_file:
for line in urlopen(IEEE_URL).readlines():
oui_file.write(line)
print("Finished.")
def get_mac_vendor(mac: ByteString) -> str:
"""
Return MAC address manufacturer
"""
mac_to_search_for = strip_mac(mac)
try:
if time.time() - os.stat(OUI_FILE_STORE).st_ctime > CACHE_TIME:
update_cached_oui()
except OSError as err:
if err.errno == 2:
update_cached_oui()
with open(OUI_FILE_STORE, "r", encoding="utf-8") as oui_file:
for line in iter(oui_file):
if re.search(mac_to_search_for, line, re.IGNORECASE):
return line.split("\t")[2].rstrip()
| 24.796296
| 74
| 0.643017
|
4a109f8f12dbf45618f8e68495fb94901d112c0d
| 11,071
|
py
|
Python
|
Gaussian_Mixture_Model.py
|
BrunoDatoMeneses/Pattern-Recognition-vowel-work
|
9eed7db4fb8818880339341d9599fa3e1df61ec5
|
[
"CC0-1.0"
] | null | null | null |
Gaussian_Mixture_Model.py
|
BrunoDatoMeneses/Pattern-Recognition-vowel-work
|
9eed7db4fb8818880339341d9599fa3e1df61ec5
|
[
"CC0-1.0"
] | null | null | null |
Gaussian_Mixture_Model.py
|
BrunoDatoMeneses/Pattern-Recognition-vowel-work
|
9eed7db4fb8818880339341d9599fa3e1df61ec5
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: Bruno Dato
"""
import math
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import itertools
from sklearn.metrics import confusion_matrix
from mpl_toolkits.mplot3d import Axes3D
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.mixture import GaussianMixture
from sklearn.model_selection import StratifiedKFold
from scipy.io.wavfile import read
print(__doc__)
colors = ['blue', 'green', 'orange', 'yellow', 'red', 'purple', 'cyan', 'grey', 'black']
def make_ellipses(gmm, ax):
for n, color in enumerate(colors):
if gmm.covariance_type == 'full':
covariances = gmm.covariances_[n][:2, :2]
elif gmm.covariance_type == 'tied':
covariances = gmm.covariances_[:2, :2]
elif gmm.covariance_type == 'diag':
covariances = np.diag(gmm.covariances_[n][:2])
elif gmm.covariance_type == 'spherical':
covariances = np.eye(gmm.means_.shape[1]) * gmm.covariances_[n]
v, w = np.linalg.eigh(covariances)
u = w[0] / np.linalg.norm(w[0])
angle = np.arctan2(u[1], u[0])
angle = 180 * angle / np.pi # convert to degrees
v = 2. * np.sqrt(2.) * np.sqrt(v)
ell = mpl.patches.Ellipse(gmm.means_[n, :2], v[0], v[1],
180 + angle, color=color)
ell.set_clip_box(ax.bbox)
ell.set_alpha(0.5)
ax.add_artist(ell)
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('Vraies classes')
plt.xlabel('Predictions')
aa = np.zeros([100,1024])
ee = np.zeros([100,1024])
eh = np.zeros([100,1024])
ii = np.zeros([100,1024])
oe = np.zeros([100,1024])
oh = np.zeros([100,1024])
oo = np.zeros([100,1024])
uu = np.zeros([100,1024])
yy = np.zeros([100,1024])
# Read de wav files #
for i in range(0,100,1):
if i<10:
aa[i] = read('data/aa0'+str(i)+'.wav','r')[1]
ee[i] = read('data/ee0'+str(i)+'.wav','r')[1]
eh[i] = read('data/eh0'+str(i)+'.wav','r')[1]
ii[i] = read('data/ii0'+str(i)+'.wav','r')[1]
oe[i] = read('data/oe0'+str(i)+'.wav','r')[1]
oh[i] = read('data/oh0'+str(i)+'.wav','r')[1]
oo[i] = read('data/oo0'+str(i)+'.wav','r')[1]
uu[i] = read('data/uu0'+str(i)+'.wav','r')[1]
yy[i] = read('data/yy0'+str(i)+'.wav','r')[1]
else:
aa[i] = read('data/aa'+str(i)+'.wav','r')[1]
ee[i] = read('data/ee'+str(i)+'.wav','r')[1]
eh[i] = read('data/eh'+str(i)+'.wav','r')[1]
ii[i] = read('data/ii'+str(i)+'.wav','r')[1]
oe[i] = read('data/oe'+str(i)+'.wav','r')[1]
oh[i] = read('data/oh'+str(i)+'.wav','r')[1]
oo[i] = read('data/oo'+str(i)+'.wav','r')[1]
uu[i] = read('data/uu'+str(i)+'.wav','r')[1]
yy[i] = read('data/yy'+str(i)+'.wav','r')[1]
data = np.concatenate((aa,ee,eh,ii,oe,eh,oo,uu,yy))
# FFT and real ceptrum of sounds #
fft_dim = 32
voyelles_FFT=np.zeros([900,1024])
voyelles_FFT_reduit=np.zeros([900,fft_dim])
log_FFT=np.zeros([900,1024])
voyelles_CEPSTR=np.zeros([900,1024])
voyelles_CEPSTR_reduit=np.zeros([900,31])
for j in range(0,900,1):
voyelles_FFT[j] = abs(np.fft.fft(np.hamming(1024)*data[j],1024))
voyelles_FFT_reduit[j] = abs(np.fft.fft(np.hamming(1024)*data[j],fft_dim))
for j in range(0,900,1):
for k in range(0,1024,1):
log_FFT[j,k] = math.log(voyelles_FFT[j,k])
for j in range(0,900,1):
voyelles_CEPSTR[j] = abs(np.fft.ifft(log_FFT[j],1024))
voyelles_CEPSTR_reduit[j] = voyelles_CEPSTR[j,1:32]
# Target #
voyelles_target_names=np.zeros([9], dtype='a2')
voyelles_target_names[0]="aa"
voyelles_target_names[1]="ee"
voyelles_target_names[2]="eh"
voyelles_target_names[3]="ii"
voyelles_target_names[4]="oe"
voyelles_target_names[5]="oh"
voyelles_target_names[6]="oo"
voyelles_target_names[7]="uu"
voyelles_target_names[8]="yy"
voyelles_target=np.zeros([900], dtype='i')
for m in range(0,900,1):
if m>=0 and m<100:
voyelles_target[m] = 0
if m>=100 and m<200:
voyelles_target[m] = 1
if m>=200 and m<300:
voyelles_target[m] = 2
if m>=300 and m<400:
voyelles_target[m] = 3
if m>=400 and m<500:
voyelles_target[m] = 4
if m>=500 and m<600:
voyelles_target[m] = 5
if m>=600 and m<700:
voyelles_target[m] = 6
if m>=700 and m<800:
voyelles_target[m] = 7
if m>=800 and m<900:
voyelles_target[m] = 8
# Preprocessing #
#voyelles_data_scaled = scale(voyelles_FFT_reduit);
voyelles_data_scaled = scale(voyelles_CEPSTR_reduit);
# PCA
voyelles_pca = PCA(n_components=len(np.unique(voyelles_target))).fit_transform(voyelles_data_scaled)
# LDA
voyelles_lda = LinearDiscriminantAnalysis(n_components=len(np.unique(voyelles_target)))
voyelles_lda_data = voyelles_lda.fit(voyelles_data_scaled, voyelles_target).transform(voyelles_data_scaled)
# DATA USED #
voyelles_data = voyelles_lda_data
# 2D displays
fig0 = plt.figure(0,figsize=(8,6))
for color, i, target_name_voyelles in zip(colors, [0, 1, 2, 3, 4, 5, 6, 7, 8], voyelles_target_names):
plt.scatter(voyelles_FFT_reduit[voyelles_target == i, 1], voyelles_FFT_reduit[voyelles_target == i, 2], color=color, alpha=.8, lw=2,
label=target_name_voyelles)
plt.legend(loc='best', shadow=False, scatterpoints=1)
plt.xlabel("1er axe FFT")
plt.ylabel("2eme axe FFT")
plt.title('Affichage des donnees voyelles (FFT)')
plt.show()
fig1 = plt.figure(1,figsize=(8,6))
for color, i, target_name_voyelles in zip(colors, [0, 1, 2, 3, 4, 5, 6, 7, 8], voyelles_target_names):
plt.scatter(voyelles_data_scaled[voyelles_target == i, 1], voyelles_data_scaled[voyelles_target == i, 2], color=color, alpha=.8, lw=2,
label=target_name_voyelles)
plt.legend(loc='best', shadow=False, scatterpoints=1)
plt.xlabel("2eme axe Cepstre")
plt.ylabel("3eme axe Cepstre")
plt.title('Affichage des donnees voyelles (Cepstre)')
plt.show()
fig2 = plt.figure(2,figsize=(8,6))
for color, i, target_name_voyelles in zip(colors, [0, 1, 2, 3, 4, 5, 6, 7, 8], voyelles_target_names):
plt.scatter(voyelles_data[voyelles_target == i, 0], voyelles_data[voyelles_target == i, 1], color=color, alpha=.8, lw=2,
label=target_name_voyelles)
plt.legend(loc='best', shadow=False, scatterpoints=1)
plt.xlabel("1er axe LDA")
plt.ylabel("2eme axe LDA")
plt.title('Affichage des donnees voyelles (Cepstre + LDA)')
plt.show()
# 3D displays
fig3 = plt.figure()
ax3 = Axes3D(fig3, elev=-150, azim=110)
ax3.scatter(voyelles_FFT_reduit[:,0],voyelles_FFT_reduit[:,1],voyelles_FFT_reduit[:,2], c=voyelles_target)
ax3.set_title("Affichage des donnees voyelles (FFT)")
ax3.set_xlabel("1er axe FFT")
ax3.w_xaxis.set_ticklabels([])
ax3.set_ylabel("2eme axe FFT")
ax3.w_yaxis.set_ticklabels([])
ax3.set_zlabel("3eme axe FFT")
ax3.w_zaxis.set_ticklabels([])
plt.show()
fig4 = plt.figure()
ax4 = Axes3D(fig4, elev=-150, azim=110)
ax4.scatter(voyelles_data[:,0],voyelles_data[:,1],voyelles_data[:,2], c=voyelles_target)
ax4.set_title("Affichage des donnees voyelles (Cepstre + LDA)")
ax4.set_xlabel("1er axe LDA")
ax4.w_xaxis.set_ticklabels([])
ax4.set_ylabel("2eme axe LDA")
ax4.w_yaxis.set_ticklabels([])
ax4.set_zlabel("3eme axe LDA")
ax4.w_zaxis.set_ticklabels([])
plt.show()
# Break up the dataset into non-overlapping training (75%) and testing
# (25%) sets.
skf = StratifiedKFold(n_splits=4)
# Only take the first fold.
train_index, test_index = next(iter(skf.split(voyelles_data, voyelles_target)))
X_train = voyelles_data[train_index]
y_train = voyelles_target[train_index]
X_test = voyelles_data[test_index]
y_test = voyelles_target[test_index]
n_classes = len(np.unique(y_train))
# Try GMMs using different types of covariances.
estimators = dict((cov_type, GaussianMixture(n_components=n_classes,
covariance_type=cov_type, max_iter=20, random_state=0))
for cov_type in ['spherical', 'diag', 'tied', 'full'])
n_estimators = len(estimators)
plt.figure(figsize=(3 * n_estimators // 2, 6))
plt.subplots_adjust(bottom=.01, top=0.95, hspace=.15, wspace=.05,
left=.01, right=.99)
for index, (name, estimator) in enumerate(estimators.items()):
# Since we have class labels for the training data, we can
# initialize the GMM parameters in a supervised manner.
estimator.means_init = np.array([X_train[y_train == i].mean(axis=0)
for i in range(n_classes)])
# Train the other parameters using the EM algorithm.
estimator.fit(X_train)
h = plt.subplot(2, n_estimators // 2, index + 1)
make_ellipses(estimator, h)
for n, color in enumerate(colors):
data = voyelles_data[voyelles_target == n]
plt.scatter(data[:, 0], data[:, 1], s=0.8, color=color,
label=voyelles_target_names[n])
# Plot the test data with crosses
for n, color in enumerate(colors):
data = X_test[y_test == n]
plt.scatter(data[:, 0], data[:, 1], marker='x', color=color)
y_train_pred = estimator.predict(X_train)
train_accuracy = np.mean(y_train_pred.ravel() == y_train.ravel()) * 100
plt.text(0.05, 0.9, 'Score apprentissage: %.1f ' % train_accuracy,
transform=h.transAxes)
y_test_pred = estimator.predict(X_test)
test_accuracy = np.mean(y_test_pred.ravel() == y_test.ravel()) * 100
plt.text(0.05, 0.8, 'Score test: %.1f ' % test_accuracy,
transform=h.transAxes)
plt.xticks(())
plt.yticks(())
plt.title(name)
plt.legend(scatterpoints=1, loc='lower right', prop=dict(size=12))
plt.show()
# Compute confusion matrix
cnf_matrix = confusion_matrix(y_test, y_test_pred)
np.set_printoptions(precision=2)
# Plot normalized confusion matrix
plt.figure()
plot_confusion_matrix(cnf_matrix, classes=voyelles_target_names, normalize=True,
title='Matrice de confusion normalisee')
plt.show()
| 31.99711
| 138
| 0.646464
|
4a109f904e28bf0d5ad1e17e9fad7e818daef3f1
| 1,321
|
py
|
Python
|
autobahn/wamp/gen/wamp/proto/Message.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 1,670
|
2015-10-12T15:46:22.000Z
|
2022-03-30T22:12:53.000Z
|
autobahn/wamp/gen/wamp/proto/Message.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 852
|
2015-10-16T22:11:03.000Z
|
2022-03-27T07:57:01.000Z
|
autobahn/wamp/gen/wamp/proto/Message.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 790
|
2015-10-15T08:46:12.000Z
|
2022-03-30T12:22:13.000Z
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: proto
import flatbuffers
class Message(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsMessage(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Message()
x.Init(buf, n + offset)
return x
# Message
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Message
def MsgType(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 0
# Message
def Msg(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
from flatbuffers.table import Table
obj = Table(bytearray(), 0)
self._tab.Union(obj, o)
return obj
return None
def MessageStart(builder): builder.StartObject(2)
def MessageAddMsgType(builder, msgType): builder.PrependUint8Slot(0, msgType, 0)
def MessageAddMsg(builder, msg): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(msg), 0)
def MessageEnd(builder): return builder.EndObject()
| 31.452381
| 127
| 0.665405
|
4a10a05b5038a996c27a4a48af5ec8dd0ec21f02
| 20
|
py
|
Python
|
app/aandete/model/__init__.py
|
stefanv/aandete
|
bb2833914e47389a735f11c683203d1243e44cb6
|
[
"BSD-3-Clause"
] | null | null | null |
app/aandete/model/__init__.py
|
stefanv/aandete
|
bb2833914e47389a735f11c683203d1243e44cb6
|
[
"BSD-3-Clause"
] | null | null | null |
app/aandete/model/__init__.py
|
stefanv/aandete
|
bb2833914e47389a735f11c683203d1243e44cb6
|
[
"BSD-3-Clause"
] | null | null | null |
from model import *
| 10
| 19
| 0.75
|
4a10a077d5add06d8cdf7a9651fc6742f87d810b
| 4,722
|
py
|
Python
|
tests/test_output_json.py
|
woodruffw-forks/cyclonedx-python-lib
|
395a0ec14ebcba8e0849a0ced30ec4163c42fa7a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_output_json.py
|
woodruffw-forks/cyclonedx-python-lib
|
395a0ec14ebcba8e0849a0ced30ec4163c42fa7a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_output_json.py
|
woodruffw-forks/cyclonedx-python-lib
|
395a0ec14ebcba8e0849a0ced30ec4163c42fa7a
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
# This file is part of CycloneDX Python Lib
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) OWASP Foundation. All Rights Reserved.
from os.path import dirname, join
from cyclonedx.model import ExternalReference, ExternalReferenceType, HashType
from cyclonedx.model.bom import Bom
from cyclonedx.model.component import Component
from cyclonedx.output import get_instance, OutputFormat, SchemaVersion
from cyclonedx.output.json import Json, JsonV1Dot3, JsonV1Dot2
from tests.base import BaseJsonTestCase
class TestOutputJson(BaseJsonTestCase):
def test_simple_bom_v1_3(self) -> None:
bom = Bom()
bom.add_component(Component(name='setuptools', version='50.3.2', qualifiers='extension=tar.gz'))
outputter = get_instance(bom=bom, output_format=OutputFormat.JSON)
self.assertIsInstance(outputter, JsonV1Dot3)
with open(join(dirname(__file__), 'fixtures/bom_v1.3_setuptools.json')) as expected_json:
self.assertEqualJsonBom(outputter.output_as_string(), expected_json.read())
expected_json.close()
def test_simple_bom_v1_2(self) -> None:
bom = Bom()
bom.add_component(Component(name='setuptools', version='50.3.2', qualifiers='extension=tar.gz'))
outputter = get_instance(bom=bom, output_format=OutputFormat.JSON, schema_version=SchemaVersion.V1_2)
self.assertIsInstance(outputter, JsonV1Dot2)
with open(join(dirname(__file__), 'fixtures/bom_v1.2_setuptools.json')) as expected_json:
self.assertEqualJsonBom(outputter.output_as_string(), expected_json.read())
expected_json.close()
def test_bom_v1_3_with_component_hashes(self) -> None:
bom = Bom()
c = Component(name='toml', version='0.10.2', qualifiers='extension=tar.gz')
c.add_hash(
HashType.from_composite_str('sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b')
)
bom.add_component(c)
outputter: Json = get_instance(bom=bom, output_format=OutputFormat.JSON)
self.assertIsInstance(outputter, JsonV1Dot3)
with open(join(dirname(__file__), 'fixtures/bom_v1.3_toml_with_component_hashes.json')) as expected_json:
self.assertEqualJsonBom(a=outputter.output_as_string(), b=expected_json.read())
expected_json.close()
def test_bom_v1_3_with_component_external_references(self) -> None:
bom = Bom()
c = Component(name='toml', version='0.10.2', qualifiers='extension=tar.gz')
c.add_hash(
HashType.from_composite_str('sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b')
)
c.add_external_reference(
ExternalReference(
reference_type=ExternalReferenceType.DISTRIBUTION,
url='https://cyclonedx.org',
comment='No comment',
hashes=[
HashType.from_composite_str(
'sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b')
]
)
)
bom.add_component(c)
outputter: Json = get_instance(bom=bom, output_format=OutputFormat.JSON)
self.assertIsInstance(outputter, JsonV1Dot3)
with open(join(dirname(__file__),
'fixtures/bom_v1.3_toml_with_component_external_references.json')) as expected_json:
self.assertEqualJsonBom(a=outputter.output_as_string(), b=expected_json.read())
expected_json.close()
def test_bom_v1_3_with_component_license(self) -> None:
bom = Bom()
c = Component(name='toml', version='0.10.2', qualifiers='extension=tar.gz')
c.set_license('MIT License')
bom.add_component(c)
outputter: Json = get_instance(bom=bom, output_format=OutputFormat.JSON)
self.assertIsInstance(outputter, JsonV1Dot3)
with open(join(dirname(__file__),
'fixtures/bom_v1.3_toml_with_component_license.json')) as expected_json:
self.assertEqualJsonBom(a=outputter.output_as_string(), b=expected_json.read())
expected_json.close()
| 47.69697
| 114
| 0.69928
|
4a10a1dd1421c69672b170a0bb0565c9b9e20a56
| 503
|
py
|
Python
|
rss-reader/rss_reader/models/base.py
|
mrR2D2/rss-tg-bot
|
0400595e6f2ea7dce975cadbd50e3ac05400b9ec
|
[
"MIT"
] | null | null | null |
rss-reader/rss_reader/models/base.py
|
mrR2D2/rss-tg-bot
|
0400595e6f2ea7dce975cadbd50e3ac05400b9ec
|
[
"MIT"
] | null | null | null |
rss-reader/rss_reader/models/base.py
|
mrR2D2/rss-tg-bot
|
0400595e6f2ea7dce975cadbd50e3ac05400b9ec
|
[
"MIT"
] | null | null | null |
"""
Module with the Base model.
"""
import sqlalchemy as sa
from sqlalchemy.ext import declarative
from rss_reader import utils
@declarative.as_declarative()
class Base:
"""
Base model.
"""
id = sa.Column(sa.Integer, primary_key=True)
@declarative.declared_attr
def __tablename__(cls) -> str:
tablename = f"{utils.underscore_from_camelcase(cls.__name__)}s"
if tablename.endswith("ys"):
tablename = f"{tablename[:-2]}ies"
return tablename
| 20.958333
| 71
| 0.666004
|
4a10a21fca5c84d9c2050765ba35d7aba75c6bef
| 3,190
|
py
|
Python
|
tests/algorithms/multiway/test_quickmatch.py
|
fxdupe/graphmatchingtools
|
4503a04c4a0822315535e6ab3cd698417859908d
|
[
"MIT"
] | null | null | null |
tests/algorithms/multiway/test_quickmatch.py
|
fxdupe/graphmatchingtools
|
4503a04c4a0822315535e6ab3cd698417859908d
|
[
"MIT"
] | null | null | null |
tests/algorithms/multiway/test_quickmatch.py
|
fxdupe/graphmatchingtools
|
4503a04c4a0822315535e6ab3cd698417859908d
|
[
"MIT"
] | 1
|
2022-02-25T09:11:19.000Z
|
2022-02-25T09:11:19.000Z
|
from unittest import TestCase
import numpy as np
import networkx as nx
import graph_matching_tools.algorithms.multiway.quickmatch as qm
import graph_matching_tools.algorithms.kernels.gaussian as kern
import graph_matching_tools.algorithms.kernels.utils as utils
class TestQuickMatch(TestCase):
def test_compute_density(self):
graph1 = nx.Graph()
graph1.add_node(0, weight=1.0)
graph1.add_node(1, weight=20.0)
graph1.add_edge(0, 1, weight=1.0)
graph2 = nx.Graph()
graph2.add_node(0, weight=20.0)
graph2.add_node(1, weight=1.0)
graph2.add_edge(0, 1, weight=1.0)
graph3 = nx.Graph()
graph3.add_node(0, weight=20.0)
graph3.add_node(1, weight=30.0)
graph3.add_node(2, weight=1.0)
graph3.add_edge(1, 2, weight=1.0)
graphs = [graph1, graph2, graph3]
densities = qm.compute_density(graphs, [2, 2, 3], "weight", 0.7)
truth = np.array([11.2075169, 12.49309109, 12.49309109, 11.2075169, 12.49309109, 8.6125429, 11.2075169])
self.assertEqual(np.linalg.norm(densities - truth) < 1e-1, True)
def test_compute_parents(self):
graph1 = nx.Graph()
graph1.add_node(0, weight=1.0)
graph1.add_node(1, weight=20.0)
graph1.add_edge(0, 1, weight=1.0)
graph2 = nx.Graph()
graph2.add_node(0, weight=20.0)
graph2.add_node(1, weight=1.0)
graph2.add_edge(0, 1, weight=1.0)
graph3 = nx.Graph()
graph3.add_node(0, weight=20.0)
graph3.add_node(1, weight=30.0)
graph3.add_node(2, weight=1.0)
graph3.add_edge(1, 2, weight=1.0)
graphs = [graph1, graph2, graph3]
densities = qm.compute_density(graphs, [2, 2, 3], "weight", 0.7)
p, d = qm.compute_parents(graphs, [2, 2, 3], "weight", densities)
p_truth = np.array([6, 4, 4, 6, 2, 2, 3])
d_truth = np.array([0., 0., 0., 0., 0., 10., 0.])
self.assertEqual(np.linalg.norm(p - p_truth) < 1e-4, True)
self.assertEqual(np.linalg.norm(d - d_truth) < 1e-4, True)
def test_quickmatch(self):
graph1 = nx.Graph()
graph1.add_node(0, weight=1.0)
graph1.add_node(1, weight=20.0)
graph1.add_edge(0, 1, weight=1.0)
graph2 = nx.Graph()
graph2.add_node(0, weight=20.0)
graph2.add_node(1, weight=1.0)
graph2.add_edge(0, 1, weight=1.0)
graph3 = nx.Graph()
graph3.add_node(0, weight=20.0)
graph3.add_node(1, weight=30.0)
graph3.add_node(2, weight=1.0)
graph3.add_edge(1, 2, weight=1.0)
graphs = [graph1, graph2, graph3]
u = qm.quickmatch(graphs, "weight", 0.25, 0.9)
res = u @ u.T
truth = np.array([[1., 0., 0., 1., 0., 0., 1.],
[0., 1., 1., 0., 1., 0., 0.],
[0., 1., 1., 0., 1., 0., 0.],
[1., 0., 0., 1., 0., 0., 1.],
[0., 1., 1., 0., 1., 0., 0.],
[0., 0., 0., 0., 0., 1., 0.],
[1., 0., 0., 1., 0., 0., 1.]])
self.assertEqual(np.linalg.norm(res - truth) < 1e-3, True)
| 34.301075
| 112
| 0.549216
|
4a10a31013cbc172b28bf15298e7caa33b9c2727
| 28,746
|
py
|
Python
|
aries_cloudagent/ledger/indy.py
|
DibbsZA/aries-cloudagent-python
|
a094dd7697023721ac2a2fd4e58b04d4b37d1f44
|
[
"Apache-2.0"
] | null | null | null |
aries_cloudagent/ledger/indy.py
|
DibbsZA/aries-cloudagent-python
|
a094dd7697023721ac2a2fd4e58b04d4b37d1f44
|
[
"Apache-2.0"
] | null | null | null |
aries_cloudagent/ledger/indy.py
|
DibbsZA/aries-cloudagent-python
|
a094dd7697023721ac2a2fd4e58b04d4b37d1f44
|
[
"Apache-2.0"
] | null | null | null |
"""Indy ledger implementation."""
import asyncio
import json
import logging
import re
import tempfile
from hashlib import sha256
from os import path
from datetime import datetime, date
from typing import Sequence, Type
import indy.anoncreds
import indy.ledger
import indy.pool
from indy.error import IndyError, ErrorCode
from ..cache.base import BaseCache
from ..storage.base import StorageRecord
from ..storage.indy import IndyStorage
from ..wallet.base import BaseWallet
from .base import BaseLedger
from .error import (
BadLedgerRequestError,
ClosedPoolError,
LedgerConfigError,
LedgerError,
LedgerTransactionError,
)
GENESIS_TRANSACTION_PATH = tempfile.gettempdir()
GENESIS_TRANSACTION_PATH = path.join(
GENESIS_TRANSACTION_PATH, "indy_genesis_transactions.txt"
)
class IndyErrorHandler:
"""Trap IndyError and raise an appropriate LedgerError instead."""
def __init__(self, message: str = None, error_cls: Type[LedgerError] = LedgerError):
"""Init the context manager."""
self.error_cls = error_cls
self.message = message
def __enter__(self):
"""Enter the context manager."""
return self
def __exit__(self, err_type, err_value, err_traceback):
"""Exit the context manager."""
if err_type is IndyError:
raise self.wrap_error(
err_value, self.message, self.error_cls
) from err_value
@classmethod
def wrap_error(
cls,
err_value: IndyError,
message: str = None,
error_cls: Type[LedgerError] = LedgerError,
) -> LedgerError:
"""Create an instance of LedgerError from an IndyError."""
err_msg = message or "Exception while performing ledger operation"
indy_message = hasattr(err_value, "message") and err_value.message
if indy_message:
err_msg += f": {indy_message}"
# TODO: may wish to attach backtrace when available
return error_cls(err_msg)
class IndyLedger(BaseLedger):
"""Indy ledger class."""
LEDGER_TYPE = "indy"
TAA_ACCEPTED_RECORD_TYPE = "taa_accepted"
def __init__(
self,
pool_name: str,
wallet: BaseWallet,
*,
keepalive: int = 0,
cache: BaseCache = None,
cache_duration: int = 600,
):
"""
Initialize an IndyLedger instance.
Args:
pool_name: The Indy pool ledger configuration name
wallet: IndyWallet instance
keepalive: How many seconds to keep the ledger open
cache: The cache instance to use
cache_duration: The TTL for ledger cache entries
"""
self.logger = logging.getLogger(__name__)
self.opened = False
self.ref_count = 0
self.ref_lock = asyncio.Lock()
self.keepalive = keepalive
self.close_task: asyncio.Future = None
self.cache = cache
self.cache_duration = cache_duration
self.wallet = wallet
self.pool_handle = None
self.pool_name = pool_name
self.taa_acceptance = None
self.taa_cache = None
if wallet.WALLET_TYPE != "indy":
raise LedgerConfigError("Wallet type is not 'indy'")
async def create_pool_config(
self, genesis_transactions: str, recreate: bool = False
):
"""Create the pool ledger configuration."""
# indy-sdk requires a file but it's only used once to bootstrap
# the connection so we take a string instead of create a tmp file
txn_path = GENESIS_TRANSACTION_PATH
with open(txn_path, "w") as genesis_file:
genesis_file.write(genesis_transactions)
pool_config = json.dumps({"genesis_txn": txn_path})
if await self.check_pool_config():
if recreate:
self.logger.debug("Removing existing ledger config")
await indy.pool.delete_pool_ledger_config(self.pool_name)
else:
raise LedgerConfigError(
"Ledger pool configuration already exists: %s", self.pool_name
)
self.logger.debug("Creating pool ledger config")
with IndyErrorHandler(
"Exception when creating pool ledger config", LedgerConfigError
):
await indy.pool.create_pool_ledger_config(self.pool_name, pool_config)
async def check_pool_config(self) -> bool:
"""Check if a pool config has been created."""
pool_names = {cfg["pool"] for cfg in await indy.pool.list_pools()}
return self.pool_name in pool_names
async def open(self):
"""Open the pool ledger, creating it if necessary."""
# We only support proto ver 2
with IndyErrorHandler(
"Exception when setting ledger protocol version", LedgerConfigError
):
await indy.pool.set_protocol_version(2)
with IndyErrorHandler("Exception when opening pool ledger", LedgerConfigError):
self.pool_handle = await indy.pool.open_pool_ledger(self.pool_name, "{}")
self.opened = True
async def close(self):
"""Close the pool ledger."""
if self.opened:
with IndyErrorHandler("Exception when closing pool ledger"):
await indy.pool.close_pool_ledger(self.pool_handle)
self.pool_handle = None
self.opened = False
async def _context_open(self):
"""Open the wallet if necessary and increase the number of active references."""
async with self.ref_lock:
if self.close_task:
self.close_task.cancel()
if not self.opened:
self.logger.debug("Opening the pool ledger")
await self.open()
self.ref_count += 1
async def _context_close(self):
"""Release the wallet reference and schedule closing of the pool ledger."""
async def closer(timeout: int):
"""Close the pool ledger after a timeout."""
await asyncio.sleep(timeout)
async with self.ref_lock:
if not self.ref_count:
self.logger.debug("Closing pool ledger after timeout")
await self.close()
async with self.ref_lock:
self.ref_count -= 1
if not self.ref_count:
if self.keepalive:
self.close_task = asyncio.ensure_future(closer(self.keepalive))
else:
await self.close()
async def __aenter__(self) -> "IndyLedger":
"""
Context manager entry.
Returns:
The current instance
"""
await self._context_open()
return self
async def __aexit__(self, exc_type, exc, tb):
"""Context manager exit."""
await self._context_close()
    async def _submit(
        self,
        request_json: str,
        sign: bool = None,
        taa_accept: bool = False,
        public_did: str = "",
    ) -> str:
        """
        Sign and submit request to ledger.

        Args:
            request_json: The json string to submit
            sign: whether or not to sign the request
            taa_accept: whether to apply TAA acceptance to the (signed, write) request
            public_did: override the public DID used to sign the request

        Returns:
            The raw ledger response JSON on a REPLY

        Raises:
            ClosedPoolError: if the pool ledger is not open
            BadLedgerRequestError: if signing is requested without a public DID
            LedgerTransactionError: on REQNACK/REJECT or an unexpected op code
        """
        if not self.pool_handle:
            raise ClosedPoolError(
                "Cannot sign and submit request to closed pool {}".format(
                    self.pool_name
                )
            )
        # Resolve the public DID from the wallet when the caller did not
        # decide about signing, or asked to sign without supplying a DID
        if (sign is None and public_did == "") or (sign and not public_did):
            did_info = await self.wallet.get_public_did()
            if did_info:
                public_did = did_info.did
        # Default to signing whenever a public DID is available
        if public_did and sign is None:
            sign = True
        if sign:
            if not public_did:
                raise BadLedgerRequestError("Cannot sign request without a public DID")
            if taa_accept:
                # Append the latest transaction author agreement acceptance,
                # which some ledgers require on write transactions
                acceptance = await self.get_latest_txn_author_acceptance()
                if acceptance:
                    request_json = await (
                        indy.ledger.append_txn_author_agreement_acceptance_to_request(
                            request_json,
                            acceptance["text"],
                            acceptance["version"],
                            acceptance["digest"],
                            acceptance["mechanism"],
                            acceptance["time"],
                        )
                    )
            submit_op = indy.ledger.sign_and_submit_request(
                self.pool_handle, self.wallet.handle, public_did, request_json
            )
        else:
            submit_op = indy.ledger.submit_request(self.pool_handle, request_json)
        with IndyErrorHandler(
            "Exception raised by ledger transaction", LedgerTransactionError
        ):
            request_result_json = await submit_op
        request_result = json.loads(request_result_json)
        operation = request_result.get("op", "")
        if operation in ("REQNACK", "REJECT"):
            raise LedgerTransactionError(
                f"Ledger rejected transaction request: {request_result['reason']}"
            )
        elif operation == "REPLY":
            return request_result_json
        else:
            raise LedgerTransactionError(
                f"Unexpected operation code from ledger: {operation}"
            )
    async def send_schema(
        self, schema_name: str, schema_version: str, attribute_names: Sequence[str]
    ):
        """
        Send schema to ledger.

        Args:
            schema_name: The schema name
            schema_version: The schema version
            attribute_names: A list of schema attributes

        Returns:
            The schema id, whether newly published or pre-existing
        """
        public_info = await self.wallet.get_public_did()
        if not public_info:
            raise BadLedgerRequestError("Cannot publish schema without a public DID")
        # Short-circuit when an identical schema is already on the ledger
        schema_id = await self.check_existing_schema(
            public_info.did, schema_name, schema_version, attribute_names
        )
        if schema_id:
            self.logger.warning("Schema already exists on ledger. Returning ID.")
        else:
            with IndyErrorHandler("Exception when creating schema definition"):
                schema_id, schema_json = await indy.anoncreds.issuer_create_schema(
                    public_info.did,
                    schema_name,
                    schema_version,
                    json.dumps(attribute_names),
                )
            with IndyErrorHandler("Exception when building schema request"):
                request_json = await indy.ledger.build_schema_request(
                    public_info.did, schema_json
                )
            try:
                await self._submit(request_json, public_did=public_info.did)
            except LedgerTransactionError as e:
                # Identify possible duplicate schema errors on indy-node < 1.9 and > 1.9
                if (
                    "can have one and only one SCHEMA with name" in e.message
                    or "UnauthorizedClientRequest" in e.message
                ):
                    # handle potential race condition if multiple agents are publishing
                    # the same schema simultaneously
                    schema_id = await self.check_existing_schema(
                        public_info.did, schema_name, schema_version, attribute_names
                    )
                    if schema_id:
                        self.logger.warning(
                            "Schema already exists on ledger. Returning ID. Error: %s",
                            e,
                        )
                else:
                    raise
        return schema_id
async def check_existing_schema(
self,
public_did: str,
schema_name: str,
schema_version: str,
attribute_names: Sequence[str],
) -> str:
"""Check if a schema has already been published."""
fetch_schema_id = f"{public_did}:2:{schema_name}:{schema_version}"
schema = await self.fetch_schema_by_id(fetch_schema_id)
if schema:
fetched_attrs = schema["attrNames"].copy()
fetched_attrs.sort()
cmp_attrs = list(attribute_names)
cmp_attrs.sort()
if fetched_attrs != cmp_attrs:
raise LedgerTransactionError(
"Schema already exists on ledger, but attributes do not match: "
+ f"{schema_name}:{schema_version} {fetched_attrs} != {cmp_attrs}"
)
return fetch_schema_id
async def get_schema(self, schema_id: str):
"""
Get a schema from the cache if available, otherwise fetch from the ledger.
Args:
schema_id: The schema id (or stringified sequence number) to retrieve
"""
if self.cache:
result = await self.cache.get(f"schema::{schema_id}")
if result:
return result
if schema_id.isdigit():
return await self.fetch_schema_by_seq_no(int(schema_id))
else:
return await self.fetch_schema_by_id(schema_id)
    async def fetch_schema_by_id(self, schema_id: str):
        """
        Get schema from ledger.

        Args:
            schema_id: The schema id (or stringified sequence number) to retrieve

        Returns:
            Indy schema dict, or None when the schema is not on the ledger
        """
        public_info = await self.wallet.get_public_did()
        public_did = public_info.did if public_info else None
        with IndyErrorHandler("Exception when building schema request"):
            request_json = await indy.ledger.build_get_schema_request(
                public_did, schema_id
            )
        response_json = await self._submit(request_json, public_did=public_did)
        response = json.loads(response_json)
        if not response["result"]["seqNo"]:
            # schema not found
            return None
        with IndyErrorHandler("Exception when parsing schema response"):
            _, parsed_schema_json = await indy.ledger.parse_get_schema_response(
                response_json
            )
        parsed_response = json.loads(parsed_schema_json)
        if parsed_response and self.cache:
            # Cache under both the schema id and its ledger sequence number
            await self.cache.set(
                [f"schema::{schema_id}", f"schema::{response['result']['seqNo']}"],
                parsed_response,
                self.cache_duration
            )
        return parsed_response
    async def fetch_schema_by_seq_no(self, seq_no: int):
        """
        Fetch a schema by its sequence number.

        Args:
            seq_no: schema ledger sequence number

        Returns:
            Indy schema dict

        Raises:
            LedgerTransactionError: if the txn at seq_no is not a schema txn
        """
        # get txn by sequence number, retrieve schema identifier components
        request_json = await indy.ledger.build_get_txn_request(
            None, None, seq_no=seq_no
        )
        response = json.loads(await self._submit(request_json))
        # transaction data format assumes node protocol >= 1.4 (circa 2018-07)
        data_txn = (response["result"].get("data", {}) or {}).get("txn", {})
        if data_txn.get("type", None) == "101": # marks indy-sdk schema txn type
            (origin_did, name, version) = (
                data_txn["metadata"]["from"],
                data_txn["data"]["data"]["name"],
                data_txn["data"]["data"]["version"],
            )
            schema_id = f"{origin_did}:2:{name}:{version}"
            # Delegate to get_schema so the result lands in the cache
            return await self.get_schema(schema_id)
        raise LedgerTransactionError(
            f"Could not get schema from ledger for seq no {seq_no}"
        )
    async def send_credential_definition(self, schema_id: str, tag: str = None):
        """
        Send credential definition to ledger and store relevant key matter in wallet.

        Args:
            schema_id: The schema id of the schema to create cred def for
            tag: Option tag to distinguish multiple credential definitions

        Returns:
            The credential definition id
        """
        public_info = await self.wallet.get_public_did()
        if not public_info:
            raise BadLedgerRequestError(
                "Cannot publish credential definition without a public DID"
            )
        schema = await self.get_schema(schema_id)
        # TODO: add support for tag, sig type, and config
        try:
            (
                credential_definition_id,
                credential_definition_json,
            ) = await indy.anoncreds.issuer_create_and_store_credential_def(
                self.wallet.handle,
                public_info.did,
                json.dumps(schema),
                tag or "default",
                "CL",
                json.dumps({"support_revocation": False}),
            )
        # If the cred def already exists in the wallet, we need some way of obtaining
        # that cred def id (from schema id passed) since we can now assume we can use
        # it in future operations.
        except IndyError as error:
            if error.error_code == ErrorCode.AnoncredsCredDefAlreadyExistsError:
                try:
                    # Recover the cred def id embedded in the SDK error message
                    credential_definition_id = re.search(
                        r"\w*:3:CL:(([1-9][0-9]*)|(.{21,22}:2:.+:[0-9.]+)):\w*",
                        error.message,
                    ).group(0)
                # The regex search failed so let the error bubble up
                except AttributeError:
                    raise LedgerError(
                        "Previous credential definition exists, but ID could "
                        "not be extracted"
                    )
            else:
                raise IndyErrorHandler.wrap_error(error) from error
        # check if the cred def already exists on the ledger
        cred_def = json.loads(credential_definition_json)
        exist_def = await self.fetch_credential_definition(credential_definition_id)
        if exist_def:
            if exist_def["value"] != cred_def["value"]:
                self.logger.warning(
                    "Ledger definition of cred def %s will be replaced",
                    credential_definition_id,
                )
                exist_def = None
        if not exist_def:
            with IndyErrorHandler("Exception when building cred def request"):
                request_json = await indy.ledger.build_cred_def_request(
                    public_info.did, credential_definition_json
                )
            # sign=True: publishing a cred def is a write transaction
            # NOTE(review): unlike register_nym, taa_accept is not requested
            # here — confirm whether TAA-enabled ledgers accept this write
            await self._submit(request_json, True, public_did=public_info.did)
        else:
            self.logger.warning(
                "Ledger definition of cred def %s already exists",
                credential_definition_id,
            )
        return credential_definition_id
async def get_credential_definition(self, credential_definition_id: str):
"""
Get a credential definition from the cache if available, otherwise the ledger.
Args:
credential_definition_id: The schema id of the schema to fetch cred def for
"""
if self.cache:
result = await self.cache.get(
f"credential_definition::{credential_definition_id}"
)
if result:
return result
return await self.fetch_credential_definition(credential_definition_id)
async def fetch_credential_definition(self, credential_definition_id: str):
"""
Get a credential definition from the ledger by id.
Args:
credential_definition_id: The cred def id of the cred def to fetch
"""
public_info = await self.wallet.get_public_did()
public_did = public_info.did if public_info else None
with IndyErrorHandler("Exception when building cred def request"):
request_json = await indy.ledger.build_get_cred_def_request(
public_did, credential_definition_id
)
response_json = await self._submit(request_json, public_did=public_did)
with IndyErrorHandler("Exception when parsing cred def response"):
try:
(
_,
parsed_credential_definition_json,
) = await indy.ledger.parse_get_cred_def_response(response_json)
parsed_response = json.loads(parsed_credential_definition_json)
except IndyError as error:
if error.error_code == ErrorCode.LedgerNotFound:
parsed_response = None
if parsed_response and self.cache:
await self.cache.set(
f"credential_definition::{credential_definition_id}",
parsed_response,
self.cache_duration,
)
return parsed_response
async def credential_definition_id2schema_id(self, credential_definition_id):
"""
From a credential definition, get the identifier for its schema.
Args:
credential_definition_id: The identifier of the credential definition
from which to identify a schema
"""
# scrape schema id or sequence number from cred def id
tokens = credential_definition_id.split(":")
if len(tokens) == 8: # node protocol >= 1.4: cred def id has 5 or 8 tokens
return ":".join(tokens[3:7]) # schema id spans 0-based positions 3-6
# get txn by sequence number, retrieve schema identifier components
seq_no = tokens[3]
return (await self.get_schema(seq_no))["id"]
    async def get_key_for_did(self, did: str) -> str:
        """Fetch the verkey for a ledger DID.

        Args:
            did: The DID to look up on the ledger or in the cache

        Returns:
            The verkey from the DID's ledger NYM record
        """
        nym = self.did_to_nym(did)
        public_info = await self.wallet.get_public_did()
        public_did = public_info.did if public_info else None
        with IndyErrorHandler("Exception when building nym request"):
            request_json = await indy.ledger.build_get_nym_request(
                public_did, nym
            )
        response_json = await self._submit(request_json, public_did=public_did)
        # The NYM payload is itself a JSON-encoded string inside the response
        data_json = (json.loads(response_json))["result"]["data"]
        return json.loads(data_json)["verkey"]
async def get_endpoint_for_did(self, did: str) -> str:
"""Fetch the endpoint for a ledger DID.
Args:
did: The DID to look up on the ledger or in the cache
"""
nym = self.did_to_nym(did)
public_info = await self.wallet.get_public_did()
public_did = public_info.did if public_info else None
with IndyErrorHandler("Exception when building attribute request"):
request_json = await indy.ledger.build_get_attrib_request(
public_did, nym, "endpoint", None, None
)
response_json = await self._submit(request_json, public_did=public_did)
endpoint_json = json.loads(response_json)["result"]["data"]
if endpoint_json:
address = json.loads(endpoint_json)["endpoint"].get("endpoint", None)
else:
address = None
return address
    async def update_endpoint_for_did(self, did: str, endpoint: str) -> bool:
        """Check and update the endpoint on the ledger.

        Args:
            did: The ledger DID
            endpoint: The endpoint address

        Returns:
            True when the endpoint was written, False when it already matched
        """
        exist_endpoint = await self.get_endpoint_for_did(did)
        if exist_endpoint != endpoint:
            nym = self.did_to_nym(did)
            attr_json = json.dumps({"endpoint": {"endpoint": endpoint}})
            with IndyErrorHandler("Exception when building attribute request"):
                request_json = await indy.ledger.build_attrib_request(
                    nym, nym, None, attr_json, None
                )
            # Signed write with TAA acceptance applied
            await self._submit(request_json, True, True)
            return True
        return False
async def register_nym(
self, did: str, verkey: str, alias: str = None, role: str = None
):
"""
Register a nym on the ledger.
Args:
did: DID to register on the ledger.
verkey: The verification key of the keypair.
alias: Human-friendly alias to assign to the DID.
role: For permissioned ledgers, what role should the new DID have.
"""
public_info = await self.wallet.get_public_did()
public_did = public_info.did if public_info else None
r = await indy.ledger.build_nym_request(
public_did, did, verkey, alias, role
)
await self._submit(r, True, True, public_did=public_did)
def nym_to_did(self, nym: str) -> str:
"""Format a nym with the ledger's DID prefix."""
if nym:
# remove any existing prefix
nym = self.did_to_nym(nym)
return f"did:sov:{nym}"
async def get_txn_author_agreement(self, reload: bool = False):
"""Get the current transaction author agreement, fetching it if necessary."""
if not self.taa_cache or reload:
self.taa_cache = await self.fetch_txn_author_agreement()
return self.taa_cache
    async def fetch_txn_author_agreement(self):
        """Fetch the current AML and TAA from the ledger.

        Returns:
            Dict with the acceptance-mechanisms record, the TAA record, and
            whether TAA acceptance is required for write transactions
        """
        public_info = await self.wallet.get_public_did()
        public_did = public_info.did if public_info else None
        get_aml_req = await indy.ledger.build_get_acceptance_mechanisms_request(
            public_did, None, None
        )
        response_json = await self._submit(get_aml_req, public_did=public_did)
        aml_found = (json.loads(response_json))["result"]["data"]
        get_taa_req = await indy.ledger.build_get_txn_author_agreement_request(
            public_did, None
        )
        response_json = await self._submit(get_taa_req, public_did=public_did)
        taa_found = (json.loads(response_json))["result"]["data"]
        # An empty TAA text means acceptance is not enforced on this ledger
        taa_required = taa_found and taa_found["text"]
        if taa_found:
            # The digest over version + text identifies this TAA revision
            taa_plaintext = taa_found["version"] + taa_found["text"]
            taa_found["digest"] = sha256(taa_plaintext.encode("utf-8")).digest().hex()
        return {
            "aml_record": aml_found,
            "taa_record": taa_found,
            "taa_required": taa_required,
        }
def get_indy_storage(self) -> IndyStorage:
"""Get an IndyStorage instance for the current wallet."""
return IndyStorage(self.wallet)
def taa_rough_timestamp(self) -> int:
"""Get a timestamp accurate to the day.
Anything more accurate is a privacy concern.
"""
return int(datetime.combine(date.today(), datetime.min.time()).timestamp())
    async def accept_txn_author_agreement(
        self,
        taa_record: dict,
        mechanism: str,
        accept_time: int = None,
        store: bool = False,
    ):
        """Save a new record recording the acceptance of the TAA.

        Args:
            taa_record: dict carrying the TAA "text", "version" and "digest"
            mechanism: the acceptance mechanism identifier
            accept_time: POSIX acceptance time; defaults to day resolution
            store: NOTE(review): unused in this body — acceptance is always
                stored and cached; confirm intended semantics
        """
        if not accept_time:
            # Day-resolution timestamp to avoid leaking precise activity times
            accept_time = self.taa_rough_timestamp()
        acceptance = {
            "text": taa_record["text"],
            "version": taa_record["version"],
            "digest": taa_record["digest"],
            "mechanism": mechanism,
            "time": accept_time,
        }
        record = StorageRecord(
            self.TAA_ACCEPTED_RECORD_TYPE,
            json.dumps(acceptance),
            {"pool_name": self.pool_name},
        )
        storage = self.get_indy_storage()
        await storage.add_record(record)
        # Keep the cache in sync with what was just stored
        cache_key = self.TAA_ACCEPTED_RECORD_TYPE + "::" + self.pool_name
        await self.cache.set(cache_key, acceptance, self.cache_duration)
    async def get_latest_txn_author_acceptance(self):
        """Look up the latest TAA acceptance.

        Returns:
            The most recent stored acceptance dict, or {} when none exists
        """
        cache_key = self.TAA_ACCEPTED_RECORD_TYPE + "::" + self.pool_name
        acceptance = await self.cache.get(cache_key)
        if acceptance is None:
            # Cache miss: scan stored acceptance records for this pool
            storage = self.get_indy_storage()
            tag_filter = {"pool_name": self.pool_name}
            found = await storage.search_records(
                self.TAA_ACCEPTED_RECORD_TYPE, tag_filter
            ).fetch_all()
            if found:
                # Most recent acceptance (by time) wins
                records = list(json.loads(record.value) for record in found)
                records.sort(key=lambda v: v["time"], reverse=True)
                acceptance = records[0]
            else:
                acceptance = {}
            await self.cache.set(cache_key, acceptance, self.cache_duration)
        return acceptance
| 36.619108
| 88
| 0.59403
|
4a10a34f705caa3081620f43c76c888ecf0d855e
| 1,547
|
py
|
Python
|
pipeline_plugins/components/http.py
|
SHUN-YI/bk-sops
|
a4a841bdc44a18518c6c53c04a02996ddc7da2be
|
[
"Apache-2.0"
] | 2
|
2019-08-15T10:06:26.000Z
|
2019-09-17T11:49:20.000Z
|
pipeline_plugins/components/http.py
|
SHUN-YI/bk-sops
|
a4a841bdc44a18518c6c53c04a02996ddc7da2be
|
[
"Apache-2.0"
] | null | null | null |
pipeline_plugins/components/http.py
|
SHUN-YI/bk-sops
|
a4a841bdc44a18518c6c53c04a02996ddc7da2be
|
[
"Apache-2.0"
] | 1
|
2020-07-03T06:45:07.000Z
|
2020-07-03T06:45:07.000Z
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
import requests
import ujson as json
import traceback
def post(url, params, logger):
    """POST `params` as a JSON body to `url`; see http_do for the return contract."""
    return http_do('post', url, params, logger)
def get(url, params, logger):
    """GET `url` with `params` JSON-encoded in the body; see http_do for the return contract."""
    return http_do('get', url, params, logger)
def http_do(method, url, params, logger):
    """Perform an HTTP request with a JSON-encoded body.

    Args:
        method: HTTP verb matching a requests module function name ('get'/'post')
        url: target URL
        params: object serialized as the JSON request body
        logger: logger used to record failures

    Returns:
        (True, parsed_json) on success, otherwise
        (False, {'result': False, 'message': <reason>})
    """
    try:
        resp = getattr(requests, method)(url, data=json.dumps(params))
    except Exception as e:
        logger.error(traceback.format_exc())
        # BUG FIX: str(e) instead of e.message — BaseException.message was
        # removed in Python 3 (and deprecated since 2.6), so e.message raised
        # an AttributeError that masked the original request failure.
        return False, {'result': False, 'message': str(e)}
    if not resp.ok:
        return False, {'result': False, 'message': 'request error, status code: {}'.format(resp.status_code)}
    try:
        json_data = resp.json()
    except Exception as e:
        logger.error(traceback.format_exc())
        return False, {'result': False, 'message': str(e)}
    return True, json_data
| 38.675
| 305
| 0.714932
|
4a10a4a2023f62e318ebaa311cc8c86bf4676ac0
| 1,894
|
py
|
Python
|
proyectoFinal/preprocessing.py
|
dcabezas98/AA
|
c849f0453347eda34a114a217077934a0c2f41fc
|
[
"MIT"
] | null | null | null |
proyectoFinal/preprocessing.py
|
dcabezas98/AA
|
c849f0453347eda34a114a217077934a0c2f41fc
|
[
"MIT"
] | null | null | null |
proyectoFinal/preprocessing.py
|
dcabezas98/AA
|
c849f0453347eda34a114a217077934a0c2f41fc
|
[
"MIT"
] | null | null | null |
# Aprendizaje Automático: Proyecto Final
# Clasificación de símbolos Devanagari
# Patricia Córdoba Hidalgo
# David Cabezas Berrido
# preprocessing.py
# Preprocesado de imágenes: centrado y eliminación de dimensionalidad
# promediando por bloques
import numpy as np
from skimage.filters import threshold_otsu
from skimage.morphology import closing, square
from skimage.measure import label, regionprops, block_reduce
from skimage.transform import resize
from joblib import Parallel, delayed
# Tuneable parameters
WIDTH=28 # side length (pixels) each cropped character is resized to
BLOCK_REDUCE=True # Whether or not to average 2x2 blocks (halves resolution)
# Center character by crop and resize image
def centerAndResize(img):
    """Crop the image to the bounding box of its bright components and
    resize the crop to WIDTH x WIDTH.

    Args:
        img: 2D grayscale image, float intensities

    Returns:
        WIDTH x WIDTH float image containing the centered character
    """
    # Ignore low intensity pixels to obtain components
    thresh = threshold_otsu(img)
    bw = closing(img > min(thresh*2,0.95), square(3))
    label_image = label(bw) # Separate into connected regions
    # Compute box that contains all components, starting from an "empty" box
    # spanning the whole image.
    # GENERALIZED: the seed was hard-coded to 28, silently assuming 28x28
    # input; using img.shape behaves identically for 28x28 and also handles
    # other sizes.
    rows, cols = img.shape
    mminr = rows; mminc = cols; mmaxr = 0; mmaxc = 0
    for region in regionprops(label_image):
        minr, minc, maxr, maxc = region.bbox
        mminr = min(minr, mminr)
        mminc = min(minc, mminc)
        mmaxr = max(maxr, mmaxr)
        mmaxc = max(maxc, mmaxc)
    # Resize to unified size
    return resize(img[mminr:mmaxr, mminc:mmaxc], (WIDTH, WIDTH), anti_aliasing=True)
# Preprocessing for single image
def preprocess(img):
    """Center a flattened 28x28 image, optionally block-average, return a flat vector."""
    img = np.reshape(img,(28,28))
    img = centerAndResize(img)
    if BLOCK_REDUCE:
        # Average each 2x2 block to halve the resolution
        img = block_reduce(img,(2,2),np.mean)
    img = np.reshape(img,img.shape[0]*img.shape[1])
    return img
# Apply all preprocessing to data
def preprocessing(data):
    """Run preprocess() over every sample in parallel (4 worker processes)."""
    out = Parallel(n_jobs=4)(map(delayed(preprocess),data))
    return np.array(out,np.float32)
# Add some polynomial features to data
def polynomial(data,deg):
    """Append element-wise powers 2..deg of the features as extra columns.

    Args:
        data: 2D array (samples x features)
        deg: highest power to include; deg <= 1 returns a plain copy

    Returns:
        Array of the original features followed by their higher powers.
    """
    expanded = data.copy()
    for power in range(2, deg + 1):
        expanded = np.concatenate((expanded, data ** power), axis=1)
    return expanded
| 29.59375
| 81
| 0.717529
|
4a10a4a2b4e6679e023e2ce8440aede494281d6c
| 32,618
|
py
|
Python
|
src/azure-cli-core/azure/cli/core/util.py
|
digimaun/azure-cli
|
298994660f0fde6863cb45a7c3142141ed10f923
|
[
"MIT"
] | null | null | null |
src/azure-cli-core/azure/cli/core/util.py
|
digimaun/azure-cli
|
298994660f0fde6863cb45a7c3142141ed10f923
|
[
"MIT"
] | null | null | null |
src/azure-cli-core/azure/cli/core/util.py
|
digimaun/azure-cli
|
298994660f0fde6863cb45a7c3142141ed10f923
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import sys
import json
import getpass
import base64
import binascii
import platform
import ssl
import six
import re
import logging
from six.moves.urllib.request import urlopen # pylint: disable=import-error
from azure.common import AzureException
from azure.core.exceptions import AzureError
from knack.log import get_logger
from knack.util import CLIError, to_snake_case
from inspect import getfullargspec as get_arg_spec
# Module-level logger for azure.cli.core.util
logger = get_logger(__name__)
CLI_PACKAGE_NAME = 'azure-cli'
# All CLI component distributions share this name prefix
COMPONENT_PREFIX = 'azure-cli-'
# Guidance shown when TLS verification fails (typically an intercepting proxy)
SSLERROR_TEMPLATE = ('Certificate verification failed. This typically happens when using Azure CLI behind a proxy '
                     'that intercepts traffic with a self-signed certificate. '
                     # pylint: disable=line-too-long
                     'Please add this certificate to the trusted CA bundle: https://github.com/Azure/azure-cli/blob/dev/doc/use_cli_effectively.md#working-behind-a-proxy. '
                     'Error detail: {}')
# Parses an ARM resource ID into subscription / resource group / provider parts
_PROXYID_RE = re.compile(
    '(?i)/subscriptions/(?P<subscription>[^/]*)(/resourceGroups/(?P<resource_group>[^/]*))?'
    '(/providers/(?P<namespace>[^/]*)/(?P<type>[^/]*)/(?P<name>[^/]*)(?P<children>.*))?')
# Parses trailing child-resource segments captured by _PROXYID_RE
_CHILDREN_RE = re.compile('(?i)/(?P<child_type>[^/]*)/(?P<child_name>[^/]*)')
def handle_exception(ex): # pylint: disable=too-many-return-statements
    """Log an unhandled CLI exception in a type-appropriate way and return
    the process exit code (1 unless the exception carries its own).
    """
    # For error code, follow guidelines at https://docs.python.org/2/library/sys.html#sys.exit,
    from jmespath.exceptions import JMESPathTypeError
    from msrestazure.azure_exceptions import CloudError
    from msrest.exceptions import HttpOperationError, ValidationError, ClientRequestError
    from azure.cli.core.azlogging import CommandLoggerContext
    with CommandLoggerContext(logger):
        if isinstance(ex, JMESPathTypeError):
            logger.error("\nInvalid jmespath query supplied for `--query`:\n%s", ex)
            logger.error("To learn more about --query, please visit: "
                         "https://docs.microsoft.com/cli/azure/query-azure-cli?view=azure-cli-latest")
            return 1
        if isinstance(ex, (CLIError, CloudError, AzureException, AzureError)):
            logger.error(ex.args[0])
            try:
                # Some service errors carry per-detail messages worth surfacing
                for detail in ex.args[0].error.details:
                    logger.error(detail)
            except (AttributeError, TypeError):
                pass
            except: # pylint: disable=bare-except
                pass
            # CLIError may carry an explicit exit code as its second arg
            return ex.args[1] if len(ex.args) >= 2 else 1
        if isinstance(ex, ValidationError):
            logger.error('validation error: %s', ex)
            return 1
        if isinstance(ex, ClientRequestError):
            msg = str(ex)
            if 'SSLError' in msg:
                # Likely an intercepting proxy; point the user at the CA-bundle doc
                logger.error("request failed: %s", SSLERROR_TEMPLATE.format(msg))
            else:
                logger.error("request failed: %s", ex)
            return 1
        if isinstance(ex, KeyboardInterrupt):
            return 1
        if isinstance(ex, HttpOperationError):
            try:
                response_dict = json.loads(ex.response.text)
                error = response_dict['error']
                # ARM should use ODATA v4. So should try this first.
                # http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091
                if isinstance(error, dict):
                    code = "{} - ".format(error.get('code', 'Unknown Code'))
                    message = error.get('message', ex)
                    logger.error("%s%s", code, message)
                else:
                    logger.error(error)
            except (ValueError, KeyError):
                logger.error(ex)
            return 1
        # Fallback: truly unexpected failure — show the traceback
        logger.error("The command failed with an unexpected error. Here is the traceback:\n")
        logger.exception(ex)
        logger.warning("\nTo open an issue, please run: 'az feedback'")
        return 1
# pylint: disable=inconsistent-return-statements
def empty_on_404(ex):
    """Exception handler: return None for CloudError 404s, re-raise anything else."""
    from msrestazure.azure_exceptions import CloudError
    if isinstance(ex, CloudError) and ex.status_code == 404:
        return None
    raise ex
def truncate_text(str_to_shorten, width=70, placeholder=' [...]'):
    """Shorten a string to at most `width` characters, appending `placeholder`
    when anything was cut off.

    Raises:
        ValueError: when width is not positive
    """
    if width <= 0:
        raise ValueError('width must be greater than 0.')
    keep = width - len(placeholder)
    head, tail = str_to_shorten[:keep], str_to_shorten[keep:]
    return head + (placeholder if tail else '')
def get_installed_cli_distributions():
    """Return pkg_resources distributions for the CLI core and its components."""
    from pkg_resources import working_set
    # Match the core package exactly plus anything with the component prefix
    return [d for d in list(working_set) if d.key == CLI_PACKAGE_NAME or d.key.startswith(COMPONENT_PREFIX)]
def _update_latest_from_pypi(versions):
    """Annotate each component's version dict with its latest PyPI release.

    Returns:
        (versions, success): success is False when offline or pip failed.

    NOTE(review): this shells out to `pip search`, which PyPI disabled
    server-side in late 2020 — confirm whether this path can still succeed.
    """
    from subprocess import check_output, STDOUT, CalledProcessError
    success = False
    # Skip the slow subprocess entirely when there is no connectivity
    if not check_connectivity(max_retries=0):
        return versions, success
    try:
        cmd = [sys.executable] + \
            '-m pip search azure-cli -vv --disable-pip-version-check --no-cache-dir --retries 0'.split()
        logger.debug('Running: %s', cmd)
        log_output = check_output(cmd, stderr=STDOUT, universal_newlines=True)
        success = True
        for line in log_output.splitlines():
            if not line.startswith(CLI_PACKAGE_NAME):
                continue
            # pip search output shape: "name (version)  - summary"
            comps = line.split()
            mod = comps[0].replace(COMPONENT_PREFIX, '') or CLI_PACKAGE_NAME
            version = comps[1].replace('(', '').replace(')', '')
            try:
                versions[mod]['pypi'] = version
            except KeyError:
                pass
    except CalledProcessError:
        pass
    return versions, success
def get_az_version_string():
    """Build the human-readable `az --version` report.

    Returns:
        (version_string, updates_available): updates_available is the count of
        components with a newer PyPI release, or -1 when PyPI was unreachable.
    """
    from azure.cli.core.extension import get_extensions, EXTENSIONS_DIR, DEV_EXTENSION_SOURCES
    output = six.StringIO()
    versions = {}
    # get locally installed versions
    for dist in get_installed_cli_distributions():
        if dist.key == CLI_PACKAGE_NAME:
            versions[CLI_PACKAGE_NAME] = {'local': dist.version}
        elif dist.key.startswith(COMPONENT_PREFIX):
            comp_name = dist.key.replace(COMPONENT_PREFIX, '')
            versions[comp_name] = {'local': dist.version}
    # get the versions from pypi
    versions, success = _update_latest_from_pypi(versions)
    updates_available = 0

    def _print(val=''):
        print(val, file=output)

    def _get_version_string(name, version_dict):
        from distutils.version import LooseVersion # pylint: disable=import-error,no-name-in-module
        local = version_dict['local']
        pypi = version_dict.get('pypi', None)
        # A trailing '*' marks a component with a newer release on PyPI
        if pypi and LooseVersion(pypi) > LooseVersion(local):
            return name.ljust(25) + local.rjust(15) + ' *'
        return name.ljust(25) + local.rjust(15)

    # Core package first, then the remaining components alphabetically
    ver_string = _get_version_string(CLI_PACKAGE_NAME, versions.pop(CLI_PACKAGE_NAME))
    if '*' in ver_string:
        updates_available += 1
    _print(ver_string)
    _print()
    for name in sorted(versions.keys()):
        ver_string = _get_version_string(name, versions.pop(name))
        if '*' in ver_string:
            updates_available += 1
        _print(ver_string)
    _print()
    extensions = get_extensions()
    if extensions:
        _print('Extensions:')
        for ext in extensions:
            if ext.ext_type == 'dev':
                _print(ext.name.ljust(20) + ext.version.rjust(20) + ' (dev) ' + ext.path)
            else:
                _print(ext.name.ljust(20) + (ext.version or 'Unknown').rjust(20))
        _print()
    _print("Python location '{}'".format(sys.executable))
    _print("Extensions directory '{}'".format(EXTENSIONS_DIR))
    if DEV_EXTENSION_SOURCES:
        _print("Development extension sources:")
        for source in DEV_EXTENSION_SOURCES:
            _print(' {}'.format(source))
    _print()
    _print('Python ({}) {}'.format(platform.system(), sys.version))
    _print()
    _print('Legal docs and information: aka.ms/AzureCliLegal')
    _print()
    version_string = output.getvalue()
    # if unable to query PyPI, use sentinel value to flag that
    # we couldn't check for updates
    if not success:
        updates_available = -1
    return version_string, updates_available
def get_az_version_json():
    """Return installed CLI component and extension versions as a dict."""
    from azure.cli.core.extension import get_extensions
    versions = {'extensions': {}}
    for dist in get_installed_cli_distributions():
        versions[dist.key] = dist.version
    extensions = get_extensions()
    if extensions:
        versions['extensions'] = {
            ext.name: ext.version or 'Unknown' for ext in extensions
        }
    return versions
def get_json_object(json_string):
    """ Loads a JSON string as an object and converts all keys to snake case """
    def _snake_keys(item):
        # Recursively rewrite dict keys; lists are walked element by element
        if isinstance(item, dict):
            return {to_snake_case(key): _snake_keys(val) for key, val in item.items()}
        if isinstance(item, list):
            return [_snake_keys(elem) for elem in item]
        return item
    return _snake_keys(shell_safe_json_parse(json_string))
def get_file_json(file_path, throw_on_empty=True, preserve_order=False):
    """Read a file and parse its content as JSON (or a Python literal).

    Args:
        file_path: path of the file to read
        throw_on_empty: when False, an empty file yields None
        preserve_order: keep JSON object key order

    Raises:
        CLIError: when the content cannot be parsed
    """
    content = read_file_content(file_path)
    if not content and not throw_on_empty:
        return None
    try:
        return shell_safe_json_parse(content, preserve_order)
    except CLIError as ex:
        raise CLIError("Failed to parse {} with exception:\n {}".format(file_path, ex))
def read_file_content(file_path, allow_binary=False):
    """Read a text file, trying several encodings; optionally fall back to base64.

    Args:
        file_path: path of the file to read
        allow_binary: when True, return the base64 of the raw bytes if no
            text encoding succeeds

    Raises:
        CLIError: when the file cannot be decoded (or read as binary)
    """
    from codecs import open as codecs_open
    # 'utf-8-sig' goes first so a Windows BOM doesn't end up in the content
    text_encodings = ('utf-8-sig', 'utf-8', 'utf-16', 'utf-16le', 'utf-16be')
    for encoding in text_encodings:
        try:
            with codecs_open(file_path, encoding=encoding) as f:
                logger.debug("attempting to read file %s as %s", file_path, encoding)
                return f.read()
        except (UnicodeError, UnicodeDecodeError):
            pass
    if allow_binary:
        try:
            with open(file_path, 'rb') as input_file:
                logger.debug("attempting to read file %s as binary", file_path)
                return base64.b64encode(input_file.read()).decode("utf-8")
        except Exception:  # pylint: disable=broad-except
            pass
    raise CLIError('Failed to decode file {} - unknown decoding'.format(file_path))
def shell_safe_json_parse(json_or_dict_string, preserve_order=False):
    """ Allows the passing of JSON or Python dictionary strings. This is needed because certain
    JSON strings in CMD shell are not received in main's argv. This allows the user to specify
    the alternative notation, which does not have this problem (but is technically not JSON). """
    try:
        if preserve_order:
            from collections import OrderedDict
            return json.loads(json_or_dict_string, object_pairs_hook=OrderedDict)
        return json.loads(json_or_dict_string)
    except ValueError as json_ex:
        # Not valid JSON: fall back to a safe Python-literal parse
        try:
            import ast
            return ast.literal_eval(json_or_dict_string)
        except SyntaxError:
            raise CLIError(json_ex)
        except ValueError as ex:
            # log the literal-eval failure; surface the (more readable) JSON error
            logger.debug(ex)
            raise CLIError(json_ex)
def b64encode(s):
    """
    Encodes a string to base64 on 2.x and 3.x
    :param str s: latin_1 encoded string
    :return: base64 encoded string
    :rtype: str
    """
    encoded = base64.b64encode(six.b(s))
    # base64.b64encode returns bytes on Python 3, so decode back to text.
    # BUG FIX: the original tested `encoded is str` — identity against the
    # type object itself, which is always False — use isinstance instead.
    return encoded if isinstance(encoded, str) else encoded.decode('latin-1')
def b64_to_hex(s):
    """
    Decodes a base64 string and returns its uppercase hex representation
    on 2.x and 3.x
    :param str s: base64 encoded string
    :return: uppercase hex string
    :rtype: str
    """
    decoded = base64.b64decode(s)
    hex_data = binascii.hexlify(decoded).upper()
    # hexlify returns bytes on Python 3; normalize to str
    if isinstance(hex_data, bytes):
        return str(hex_data.decode("utf-8"))
    return hex_data
def random_string(length=16, force_lower=False, digits_only=False):
    """Generate a random alphanumeric string.

    Args:
        length: number of characters to generate
        force_lower: restrict letters to lowercase (ignored when digits_only)
        digits_only: draw from digits 0-9 only
    """
    from random import choice
    from string import ascii_letters, ascii_lowercase, digits
    alphabet = digits
    if not digits_only:
        alphabet += ascii_lowercase if force_lower else ascii_letters
    return ''.join(choice(alphabet) for _ in range(length))
def hash_string(value, length=16, force_lower=False):
    """ Generate a deterministic hashed string."""
    import hashlib
    hasher = hashlib.sha256()
    # Accept both text and bytes input
    if isinstance(value, bytes):
        hasher.update(value)
    else:
        hasher.update(value.encode())
    digest = hasher.hexdigest()
    if force_lower:
        digest = digest.lower()
    # Repeat the digest until it is long enough, then trim
    while len(digest) < length:
        digest += digest
    return digest[:length]
def in_cloud_console():
    """Return the ACC_CLOUD env value when running in Cloud Shell, else None."""
    import os
    return os.environ.get('ACC_CLOUD')
def get_arg_list(op):
    """Return the parameter names of callable *op*.

    Uses ``inspect.signature`` (Python 3) when available, falling back to
    ``getargspec``.  The return type differs accordingly (parameter mapping
    vs. list of names); callers rely only on ``in`` membership.
    """
    import inspect
    try:
        return inspect.signature(op).parameters
    except AttributeError:
        return inspect.getargspec(op).args  # pylint: disable=deprecated-method
def is_track2(client_class):
    """Return True when *client_class* is an autorest v3 / track2 client.

    Track2 clients take a ``credential`` constructor argument; this
    heuristic could be refined later if necessary.
    """
    return "credential" in get_arg_spec(client_class.__init__).args
DISABLE_VERIFY_VARIABLE_NAME = "AZURE_CLI_DISABLE_CONNECTION_VERIFICATION"


def should_disable_connection_verify():
    """Return True when the user opted out of TLS connection verification."""
    import os
    # Any non-empty value (even '0') counts as opting out.
    return bool(os.environ.get(DISABLE_VERIFY_VARIABLE_NAME))
def poller_classes():
    """Return the tuple of long-running-operation poller types across SDK
    generations (msrestazure/track1, msrest, azure-core/track2) so callers
    can isinstance-check any LRO result."""
    from msrestazure.azure_operation import AzureOperationPoller
    from msrest.polling.poller import LROPoller
    from azure.core.polling import LROPoller as AzureCoreLROPoller
    return (AzureOperationPoller, LROPoller, AzureCoreLROPoller)
def augment_no_wait_handler_args(no_wait_enabled, handler, handler_args):
    """Populate *handler_args* with the appropriate no-wait flags for *handler*.

    Autorest 2 handlers take ``raw``; autorest 3 handlers take ``polling``.
    """
    accepted = get_arg_list(handler)
    if 'no_wait' in accepted:
        handler_args['no_wait'] = no_wait_enabled
    if no_wait_enabled and 'raw' in accepted:
        # support autorest 2
        handler_args['raw'] = True
    if no_wait_enabled and 'polling' in accepted:
        # support autorest 3
        handler_args['polling'] = False
def sdk_no_wait(no_wait, func, *args, **kwargs):
    """Invoke *func*, disabling SDK polling when *no_wait* is set."""
    if no_wait:
        kwargs['polling'] = False
    return func(*args, **kwargs)
def open_page_in_browser(url):
    """Open *url* in the platform's default browser; return the launcher result."""
    import subprocess
    import webbrowser
    platform_name, _ = _get_platform_info()
    if is_wsl():   # windows 10 linux subsystem
        try:
            # '&' must be caret-escaped for cmd.exe's `start`
            return subprocess.call(['cmd.exe', '/c', "start {}".format(url.replace('&', '^&'))])
        except OSError:  # WSL might be too old  # FileNotFoundError introduced in Python 3
            pass
    elif platform_name == 'darwin':
        # handle 2 things:
        # a. On OSX sierra, 'python -m webbrowser -t <url>' emits out "execution error: <url> doesn't
        #    understand the "open location" message"
        # b. Python 2.x can't sniff out the default browser
        return subprocess.Popen(['open', url])
    try:
        return webbrowser.open(url, new=2)  # 2 means: open in a new tab, if possible
    except TypeError:  # See https://bugs.python.org/msg322439
        return webbrowser.open(url, new=2)
def _get_platform_info():
uname = platform.uname()
# python 2, `platform.uname()` returns: tuple(system, node, release, version, machine, processor)
platform_name = getattr(uname, 'system', None) or uname[0]
release = getattr(uname, 'release', None) or uname[2]
return platform_name.lower(), release.lower()
def is_wsl():
    """Return True when running under Windows Subsystem for Linux."""
    system, release = _get_platform_info()
    # WSL kernels report a release ending in '-microsoft'.
    return system == 'linux' and release.split('-')[-1] == 'microsoft'
def is_windows():
    """Return True when running on native Windows."""
    system, _ = _get_platform_info()
    return system == 'windows'
def can_launch_browser():
    """Best-effort check that a GUI browser can be launched on this machine.

    Non-Linux (and WSL) are assumed capable; on Linux a desktop session must
    be detectable and the default browser must not be a text browser.
    """
    import os
    import webbrowser
    platform_name, _ = _get_platform_info()
    if is_wsl() or platform_name != 'linux':
        return True
    # per https://unix.stackexchange.com/questions/46305/is-there-a-way-to-retrieve-the-name-of-the-desktop-environment
    # and https://unix.stackexchange.com/questions/193827/what-is-display-0
    # we can check a few env vars
    gui_env_vars = ['DESKTOP_SESSION', 'XDG_CURRENT_DESKTOP', 'DISPLAY']
    result = True
    if platform_name == 'linux':
        if any(os.getenv(v) for v in gui_env_vars):
            try:
                default_browser = webbrowser.get()
                if getattr(default_browser, 'name', None) == 'www-browser':  # text browser won't work
                    result = False
            except webbrowser.Error:
                result = False
        else:
            result = False
    return result
def get_command_type_kwarg(custom_command=False):
    """Return the kwarg name used when registering a (custom) command type."""
    if custom_command:
        return 'custom_command_type'
    return 'command_type'
def reload_module(module):
    """Re-import the already-imported module named *module* so updates apply."""
    try:
        from importlib import reload
    except ImportError:
        pass  # Python 2 has reload as a builtin
    reload(sys.modules[module])
def get_default_admin_username():
    """Return the local username, or None when it cannot be determined."""
    try:
        return getpass.getuser()
    except KeyError:  # no entry in the password database
        return None
def _find_child(parent, *args, **kwargs):
# tuple structure (path, key, dest)
path = kwargs.get('path', None)
key_path = kwargs.get('key_path', None)
comps = zip(path.split('.'), key_path.split('.'), args)
current = parent
for path, key, val in comps:
current = getattr(current, path, None)
if current is None:
raise CLIError("collection '{}' not found".format(path))
match = next((x for x in current if getattr(x, key).lower() == val.lower()), None)
if match is None:
raise CLIError("item '{}' not found in {}".format(val, path))
current = match
return current
def find_child_item(parent, *args, **kwargs):
    """Find a nested child item under *parent* (see ``_find_child``).

    kwargs:
        path: dot-separated attribute path of collections to traverse
        key_path: dot-separated key attributes used to match each arg
    :raises CLIError: on component-count mismatch or when no item matches.
    """
    path = kwargs.get('path', '')
    key_path = kwargs.get('key_path', '')
    # The original chained comparison (a != b != c) misses the case a == b but
    # a != c; require all three counts to be equal explicitly.  Correct calls
    # are unaffected — this only catches more authoring errors.
    if not len(args) == len(path.split('.')) == len(key_path.split('.')):
        raise CLIError('command authoring error: args, path and key_path must have equal number of components.')
    return _find_child(parent, *args, path=path, key_path=key_path)
def find_child_collection(parent, *args, **kwargs):
    """Find a nested child collection under *parent* (see ``_find_child``).

    kwargs:
        path: dot-separated attribute path; the final component names the
            collection of interest
        key_path: dot-separated key attributes used to match each arg
    :raises CLIError: on component-count mismatch or a missing collection.
    """
    path = kwargs.get('path', '')
    key_path = kwargs.get('key_path', '')
    arg_len = len(args)
    key_len = len(key_path.split('.'))
    path_len = len(path.split('.'))
    # The original joined the two validation conditions with `and`, so a
    # single violated condition slipped through; either mismatch is an
    # authoring error.  Correct calls satisfy both, so they are unaffected.
    if arg_len != key_len or path_len != arg_len + 1:
        raise CLIError('command authoring error: args and key_path must have equal number of components, and '
                       'path must have one extra component (the path to the collection of interest.')
    parent = _find_child(parent, *args, path=path, key_path=key_path)
    collection_path = path.split('.')[-1]
    collection = getattr(parent, collection_path, None)
    if collection is None:
        raise CLIError("collection '{}' not found".format(collection_path))
    return collection
def check_connectivity(url='https://example.org', max_retries=5, timeout=1):
    """Probe *url* with a HEAD request; return True when reachable.

    Connection errors and timeouts yield False; elapsed time is logged at
    debug level.
    """
    import requests
    import timeit
    started = timeit.default_timer()
    reachable = None
    try:
        session = requests.Session()
        session.mount(url, requests.adapters.HTTPAdapter(max_retries=max_retries))
        session.head(url, timeout=timeout)
        reachable = True
    except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as ex:
        logger.info('Connectivity problem detected.')
        logger.debug(ex)
        reachable = False
    logger.debug('Connectivity check: %s sec', timeit.default_timer() - started)
    return reachable
def send_raw_request(cli_ctx, method, uri, headers=None, uri_parameters=None,  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
                     body=None, skip_authorization_header=False, resource=None, output_file=None,
                     generated_client_request_id_name='x-ms-client-request-id'):
    """Send an arbitrary HTTP request with Azure CLI conventions applied.

    :param cli_ctx: CLI context (supplies cloud endpoints and telemetry data).
    :param method: HTTP verb, e.g. 'GET'.
    :param uri: full URL, or an ARM resource path which is resolved against
        the cloud's resource-manager endpoint.
    :param headers: iterable of 'KEY=VALUE' or JSON strings.
    :param uri_parameters: iterable of query parameters in the same formats.
    :param body: request payload; JSON bodies get a Content-Type default.
    :param skip_authorization_header: do not attach an AAD bearer token.
    :param resource: AAD resource used for token acquisition; derived from
        the URL's cloud endpoint when omitted.
    :param output_file: when set, stream the response body to this path.
    :param generated_client_request_id_name: header name for a generated
        request id; falsy disables it.
    :return: the ``requests`` response object.
    :raises CLIError: on transport errors or non-2xx responses.
    """
    import uuid
    from requests import Session, Request
    from requests.structures import CaseInsensitiveDict
    # Normalize headers: each entry may be JSON or 'KEY=VALUE'.
    result = CaseInsensitiveDict()
    for s in headers or []:
        try:
            temp = shell_safe_json_parse(s)
            result.update(temp)
        except CLIError:
            key, value = s.split('=', 1)
            result[key] = value
    headers = result
    # If Authorization header is already provided, don't bother with the token
    if 'Authorization' in headers:
        skip_authorization_header = True
    # Handle User-Agent
    agents = [get_az_user_agent()]
    # Borrow AZURE_HTTP_USER_AGENT from msrest
    # https://github.com/Azure/msrest-for-python/blob/4cc8bc84e96036f03b34716466230fb257e27b36/msrest/pipeline/universal.py#L70
    _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT'
    import os
    if _ENV_ADDITIONAL_USER_AGENT in os.environ:
        agents.append(os.environ[_ENV_ADDITIONAL_USER_AGENT])
    # Custom User-Agent provided as command argument
    if 'User-Agent' in headers:
        agents.append(headers['User-Agent'])
    headers['User-Agent'] = ' '.join(agents)
    if generated_client_request_id_name:
        headers[generated_client_request_id_name] = str(uuid.uuid4())
    # try to figure out the correct content type
    if body:
        try:
            _ = shell_safe_json_parse(body)
            if 'Content-Type' not in headers:
                headers['Content-Type'] = 'application/json'
        except Exception:  # pylint: disable=broad-except
            pass
    # add telemetry
    headers['CommandName'] = cli_ctx.data['command']
    if cli_ctx.data.get('safe_params'):
        headers['ParameterSetName'] = ' '.join(cli_ctx.data['safe_params'])
    # Normalize query parameters the same way as headers.
    result = {}
    for s in uri_parameters or []:
        try:
            temp = shell_safe_json_parse(s)
            result.update(temp)
        except CLIError:
            key, value = s.split('=', 1)
            result[key] = value
    uri_parameters = result or None
    # If uri is an ARM resource ID, like /subscriptions/xxx/resourcegroups/xxx?api-version=2019-07-01,
    # default to Azure Resource Manager.
    # https://management.azure.com/ + subscriptions/xxx/resourcegroups/xxx?api-version=2019-07-01
    if '://' not in uri:
        uri = cli_ctx.cloud.endpoints.resource_manager + uri.lstrip('/')
    # Replace common tokens with real values. It is for smooth experience if users copy and paste the url from
    # Azure Rest API doc
    from azure.cli.core._profile import Profile
    profile = Profile()
    if '{subscriptionId}' in uri:
        uri = uri.replace('{subscriptionId}', profile.get_subscription_id())
    # Only attach a token over HTTPS, and only when the URL maps to a known
    # AAD resource (explicitly given or derived from the cloud endpoints).
    if not skip_authorization_header and uri.lower().startswith('https://'):
        if not resource:
            endpoints = cli_ctx.cloud.endpoints
            # If uri starts with ARM endpoint, like https://management.azure.com/,
            # use active_directory_resource_id for resource.
            # This follows the same behavior as azure.cli.core.commands.client_factory._get_mgmt_service_client
            if uri.lower().startswith(endpoints.resource_manager.rstrip('/')):
                resource = endpoints.active_directory_resource_id
            else:
                from azure.cli.core.cloud import CloudEndpointNotSetException
                for p in [x for x in dir(endpoints) if not x.startswith('_')]:
                    try:
                        value = getattr(endpoints, p)
                    except CloudEndpointNotSetException:
                        continue
                    if isinstance(value, six.string_types) and uri.lower().startswith(value.lower()):
                        resource = value
                        break
        if resource:
            token_info, _, _ = profile.get_raw_token(resource)
            # NOTE(review): 'Retrievd' typo kept — runtime string, not a comment.
            logger.debug('Retrievd AAD token for resource: %s', resource or 'ARM')
            token_type, token, _ = token_info
            headers = headers or {}
            headers['Authorization'] = '{} {}'.format(token_type, token)
        else:
            logger.warning("Can't derive appropriate Azure AD resource from --url to acquire an access token. "
                           "If access token is required, use --resource to specify the resource")
    try:
        # https://requests.readthedocs.io/en/latest/user/advanced/#prepared-requests
        s = Session()
        req = Request(method=method, url=uri, headers=headers, params=uri_parameters, data=body)
        prepped = s.prepare_request(req)
        # Merge environment settings into session
        settings = s.merge_environment_settings(prepped.url, {}, None, not should_disable_connection_verify(), None)
        _log_request(prepped)
        r = s.send(prepped, **settings)
        _log_response(r)
    except Exception as ex:  # pylint: disable=broad-except
        raise CLIError(ex)
    if not r.ok:
        reason = r.reason
        if r.text:
            reason += '({})'.format(r.text)
        raise CLIError(reason)
    if output_file:
        with open(output_file, 'wb') as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)
    return r
def _log_request(request):
    """Log a client request. Copied from msrest
    https://github.com/Azure/msrest-for-python/blob/3653d29fc44da408898b07c710290a83d196b777/msrest/http_logger.py#L39

    Messages are emitted at INFO level but gated on DEBUG being enabled,
    mirroring msrest.  Authorization header values are truncated first.
    """
    if not logger.isEnabledFor(logging.DEBUG):
        return
    try:
        logger.info("Request URL: %r", request.url)
        logger.info("Request method: %r", request.method)
        logger.info("Request headers:")
        for header, value in request.headers.items():
            if header.lower() == 'authorization':
                # Trim at least half of the token but keep at most 20 characters
                preserve_length = min(int(len(value) * 0.5), 20)
                value = value[:preserve_length] + '...'
            logger.info(" %r: %r", header, value)
        logger.info("Request body:")
        # We don't want to log the binary data of a file upload.
        import types
        if isinstance(request.body, types.GeneratorType):
            logger.info("File upload")
        else:
            logger.info(str(request.body))
    except Exception as err:  # pylint: disable=broad-except
        logger.info("Failed to log request: %r", err)
def _log_response(response, **kwargs):
    """Log a server response. Copied from msrest
    https://github.com/Azure/msrest-for-python/blob/3653d29fc44da408898b07c710290a83d196b777/msrest/http_logger.py#L68

    Binary/image/attachment bodies are summarized rather than dumped; returns
    the response unchanged so it can be used in a pipeline.
    """
    if not logger.isEnabledFor(logging.DEBUG):
        return None
    try:
        logger.info("Response status: %r", response.status_code)
        logger.info("Response headers:")
        for res_header, value in response.headers.items():
            logger.info(" %r: %r", res_header, value)
        # We don't want to log binary data if the response is a file.
        logger.info("Response content:")
        pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE)
        header = response.headers.get('content-disposition')
        if header and pattern.match(header):
            filename = header.partition('=')[2]
            logger.info("File attachments: %s", filename)
        elif response.headers.get("content-type", "").endswith("octet-stream"):
            logger.info("Body contains binary data.")
        elif response.headers.get("content-type", "").startswith("image"):
            logger.info("Body contains image data.")
        else:
            if kwargs.get('stream', False):
                logger.info("Body is streamable")
            else:
                logger.info(response.content.decode("utf-8-sig"))
        return response
    except Exception as err:  # pylint: disable=broad-except
        logger.info("Failed to log response: %s", repr(err))
        return response
class ConfiguredDefaultSetter(object):
    """Context manager that temporarily overrides ``use_local_config``.

    On exit, the previous value (or None when the attribute did not exist)
    is restored on *cli_config*.
    """
    def __init__(self, cli_config, use_local_config=None):
        # A missing flag defaults to False; any explicit value is kept as-is.
        self.use_local_config = False if use_local_config is None else use_local_config
        self.cli_config = cli_config
        # getattr guards against configs without a 'use_local_config' attribute
        self.original_use_local_config = getattr(cli_config, 'use_local_config', None)
    def __enter__(self):
        self.cli_config.use_local_config = self.use_local_config
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.cli_config.use_local_config = self.original_use_local_config
def _ssl_context():
    """Return an SSL context, tolerating old Pythons and Cloud Shell on Windows."""
    if sys.version_info < (3, 4) or (in_cloud_console() and platform.system() == 'Windows'):
        try:
            return ssl.SSLContext(ssl.PROTOCOL_TLS)  # added in python 2.7.13 and 3.6
        except AttributeError:
            return ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    return ssl.create_default_context()
def urlretrieve(url):
    """Fetch *url* and return the raw response body as bytes."""
    return urlopen(url, context=_ssl_context()).read()
def parse_proxy_resource_id(rid):
    """Parses a resource_id into its various parts.

    Returns an empty dictionary for a falsy *rid* and None when *rid* does
    not match the resource-id pattern (the original docstring claimed an
    empty dict in the invalid case, which the code does not do).
    :param rid: The resource id being parsed
    :type rid: str
    :returns: A dictionary with the following key/value pairs (if found):
    - subscription: Subscription id
    - resource_group: Name of resource group
    - namespace: Namespace for the resource provider (i.e. Microsoft.Compute)
    - type: Type of the root resource (i.e. virtualMachines)
    - name: Name of the root resource
    - child_type_{level}: Type of the child resource of that level
    - child_name_{level}: Name of the child resource of that level
    - last_child_num: Level of the last child
    :rtype: dict[str,str]
    """
    if not rid:
        return {}
    match = _PROXYID_RE.match(rid)
    if match:
        result = match.groupdict()
        children = _CHILDREN_RE.finditer(result['children'] or '')
        count = None
        for count, child in enumerate(children):
            result.update({
                key + '_%d' % (count + 1): group for key, group in child.groupdict().items()})
        result['last_child_num'] = count + 1 if isinstance(count, int) else None
        result.pop('children', None)
        # Drop keys whose groups did not participate in the match.
        return {key: value for key, value in result.items() if value is not None}
    return None
def get_az_user_agent():
    """Compose the Azure CLI User-Agent string, including installer info."""
    # Load the core version lazily so module import stays cheap.
    from azure.cli.core import __version__ as core_version
    agents = ["AZURECLI/{}".format(core_version)]
    import os
    from azure.cli.core._environment import _ENV_AZ_INSTALLER
    if _ENV_AZ_INSTALLER in os.environ:
        agents.append('({})'.format(os.environ[_ENV_AZ_INSTALLER]))
    # AZURE_HTTP_USER_AGENT is appended by msrest itself:
    # https://github.com/Azure/msrest-for-python/blob/4cc8bc84e96036f03b34716466230fb257e27b36/msrest/pipeline/universal.py#L70
    return ' '.join(agents)
def user_confirmation(message, yes=False):
    """Prompt the user with *message*; raise CLIError unless confirmed.

    A truthy *yes* skips prompting entirely.  Without a TTY, confirmation is
    impossible and the error instructs the caller to pass --yes.
    """
    if yes:
        return
    from knack.prompting import prompt_y_n, NoTTYException
    try:
        if not prompt_y_n(message):
            raise CLIError('Operation cancelled.')
    except NoTTYException:
        raise CLIError(
            'Unable to prompt for confirmation as no tty available. Use --yes.')
def get_linux_distro():
    """Return (name, version_id) from /etc/os-release, or (None, None)."""
    if platform.system() != 'Linux':
        return None, None
    try:
        with open('/etc/os-release') as lines:
            tokens = [line.strip() for line in lines]
    except Exception:  # pylint: disable=broad-except
        return None, None
    release_info = {}
    for token in tokens:
        if '=' in token:
            key, _, value = token.partition('=')
            release_info[key.lower()] = value.strip('"')
    return release_info.get('name', None), release_info.get('version_id', None)
| 37.665127
| 172
| 0.64259
|
4a10a52f287ba27619c4a56d4743c9443260c758
| 5,121
|
py
|
Python
|
tests/schemas/test_well_schema.py
|
la-mar/prodstats
|
4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824
|
[
"MIT"
] | null | null | null |
tests/schemas/test_well_schema.py
|
la-mar/prodstats
|
4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824
|
[
"MIT"
] | null | null | null |
tests/schemas/test_well_schema.py
|
la-mar/prodstats
|
4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824
|
[
"MIT"
] | 1
|
2021-01-05T18:58:08.000Z
|
2021-01-05T18:58:08.000Z
|
import logging
from datetime import date, datetime
import pytest
from pydantic import ValidationError
import schemas.well as sch
logger = logging.getLogger(__name__)
class TestWellDates:
    """Verify WellDates parses aliased date fields into date/datetime values."""
    @pytest.fixture
    def dates(self):
        # Raw ISO date strings keyed by the schema's field aliases.
        yield {
            "permit": "2018-06-28",
            "permit_expiration": "2020-06-28",
            "spud": "2018-07-11",
            "comp": "2018-10-04",
            "final_drill": "2018-08-10",
            "rig_release": "2018-08-22",
            "first_report": "2018-06-29",
            "ihs_last_update": "2019-08-07",
        }
    def test_aliases(self, dates):
        # Parsed keys must exactly match the schema's declared fields.
        parsed = sch.WellDates(**dates).dict()
        actual = {*parsed.keys()}
        expected = {*sch.WellDates().__fields__.keys()}
        assert expected == actual
        for key, value in parsed.items():
            assert isinstance(value, (date, datetime))
class TestFracParams:
    """Verify FracParameters field aliases round-trip through parsing."""
    @pytest.fixture
    def fracparms(self):
        yield {
            "api14": "12345678900000",
            "provider": "ihs",
            "last_update_at": "2020-03-21T16:30:52.778000",
            "fluid_total": "737741",
            "proppant_total": "26738000",
        }
    def test_aliases(self, fracparms):
        # Parsed keys must exactly match the schema's declared fields.
        parsed = sch.FracParameters(**fracparms).dict()
        actual = {*parsed.keys()}
        expected = {*sch.FracParameters(**fracparms).__fields__.keys()}
        assert expected == actual
class TestWellElevations:
    """Verify WellElevations coerces elevation strings to ints."""
    @pytest.fixture
    def elevs(self):
        yield {"ground": "2200", "kb": "2100"}
    def test_aliases(self, elevs):
        parsed = sch.WellElevations(**elevs).dict()
        actual = {*parsed.keys()}
        expected = {*sch.WellElevations().__fields__.keys()}
        assert expected == actual
        for key, value in parsed.items():
            if key not in ["api14"]:
                assert isinstance(value, int)
class TestWellDepths:
    """Verify WellDepths coerces depth strings to ints and extracts from docs."""
    @pytest.fixture
    def depths(self):
        depths = {
            "api14": "42461409160000",
            "tvd": "2200",
            "md": "2100",
            "perf_upper": "2200",
            "perf_lower": "2100",
            "plugback_depth": "2100",
        }
        yield depths
    def test_aliases(self, depths):
        parsed = sch.WellDepths(**depths).dict()
        actual = {*parsed.keys()}
        expected = {*sch.WellDepths(**depths).__fields__.keys()}
        assert expected == actual
        for key, value in parsed.items():
            if key not in ["api14"]:
                assert isinstance(value, int)
    def test_extract_from_document(self, wells_h):
        # wells_h fixture supplies raw well documents (defined in conftest).
        data = wells_h[0]
        actual = sch.WellDepths(**data).dict()
        expected = {
            "api14": data["api14"],
            "tvd": data["tvd"],
            "md": data["md"],
            "perf_upper": data["perf_upper"],
            "perf_lower": data["perf_lower"],
            "plugback_depth": data["plugback_depth"],
        }
        assert expected == actual
class TestWellRecord:
    """Verify WellRecord aliases and its flattened record conversion."""
    def test_aliases(self, wells_h):
        data = wells_h[0]
        obj = sch.WellRecord(**data)
        parsed = obj.dict()
        actual = {*parsed.keys()}
        expected = {*obj.__fields__.keys()}
        assert expected == actual
    def test_convert_to_record(self, wells_h):
        data = wells_h[0]
        obj = sch.WellRecord(**data)
        record = obj.record()
        # A flattened record must include every date and elevation field.
        fields = {
            *sch.WellDates().__fields__.keys(),
            *sch.WellElevations().__fields__.keys(),
        }
        for field in fields:
            assert field in record.keys()
class TestWellRecordSet:
    """Verify WellRecordSet conversion to records and to a DataFrame."""
    def test_records(self, wells_h):
        obj = sch.WellRecordSet(wells=wells_h)
        records = obj.records()
        assert isinstance(records, list)
        assert isinstance(records[0], dict)
        assert isinstance(records[0]["api14"], str)
        assert len(records) == len(wells_h)
    def test_df_record_count(self, wells_h):
        obj = sch.WellRecordSet(wells=wells_h)
        df = obj.df()
        # one row per well, indexed by api14
        assert df.shape[0] == len(wells_h)
        assert {*df.index.values} == {x["api14"] for x in wells_h}
class TestIPTest:
    """Verify IPTest parsing tolerance and IPTests record generation."""
    def test_aliases(self, wells_h, wells_v):
        for row in wells_h + wells_v:
            for ip in row.get("ip", []):
                try:
                    obj = sch.IPTest(**ip)
                    parsed = obj.dict()
                    actual = {*parsed.keys()}
                    expected = {*obj.__fields__.keys()}
                    assert expected == actual
                except ValidationError as ve:
                    # NOTE(review): validation failures are only logged, never
                    # failed — presumably some fixture rows are invalid by
                    # design; confirm this is intended.
                    logger.info(ve)
    def test_records(self, wells_h, wells_v):
        for row in wells_h + wells_v:
            records = sch.IPTests(**row).records()
            if records:
                assert isinstance(records, list)
                assert isinstance(records[0], dict)
                assert isinstance(records[0]["api14"], str)
            else:
                assert records == []
# if __name__ == "__main__":
# from util.jsontools import load_json
# wells_h = load_json(f"tests/fixtures/wells_h.json")
| 29.431034
| 71
| 0.548526
|
4a10a638f04acb9d7e10a92bc1a9211c8368cdb6
| 4,236
|
py
|
Python
|
LeetCode/hard/reverse_nodes_in_k_group.py
|
hnc01/online-judge
|
d306dc32c9d8600a987affbe4e4b80809f0b0982
|
[
"MIT"
] | null | null | null |
LeetCode/hard/reverse_nodes_in_k_group.py
|
hnc01/online-judge
|
d306dc32c9d8600a987affbe4e4b80809f0b0982
|
[
"MIT"
] | null | null | null |
LeetCode/hard/reverse_nodes_in_k_group.py
|
hnc01/online-judge
|
d306dc32c9d8600a987affbe4e4b80809f0b0982
|
[
"MIT"
] | null | null | null |
'''
https://leetcode.com/problems/reverse-nodes-in-k-group/
25. Reverse Nodes in k-Group
Given the head of a linked list, reverse the nodes of the list k at a time, and return the modified list.
k is a positive integer and is less than or equal to the length of the linked list. If the number of nodes
is not a multiple of k then left-out nodes, in the end, should remain as it is.
You may not alter the values in the list's nodes, only nodes themselves may be changed.
'''
# Definition for singly-linked list.
class ListNode:
    """Singly-linked list node holding a value and a next pointer."""
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
'''
Accepted
'''
class Solution:
    """Reverse a linked list k nodes at a time (LeetCode 25)."""
    def getListLength(self, head):
        """Return the number of nodes in the list starting at *head*."""
        n = 0
        dummy_head = head
        while dummy_head is not None:
            n += 1
            dummy_head = dummy_head.next
        return n
    def reverseKGroup(self, head: [ListNode], k: int) -> [ListNode]:
        """Reverse every consecutive group of k nodes; a trailing group of
        fewer than k nodes is left untouched.  Returns the new head."""
        if head is None or k == 1:
            # we don't need to reverse anything
            return head
        # we have at least one node in the list
        # n is the total number of nodes in the list
        n = self.getListLength(head)
        if n < k:
            # we don't have enough nodes in the list to reverse
            return head
        else:
            # the head of the resulting list after reversal
            overall_head = None
            # the number of sublists we need to reverse
            target_sublists = n // k
            # the number of sublists we reversed so far: count goes from 1 to target_sublists
            sublists_count = 1
            # now we are sure we have enough nodes in the list to reverse
            # when k = 1 we return above, so if we reach here, we know that k >= 2
            # also, if we reach here we know that n >= k. So, n >= 2
            # we have at least 2 nodes in the list
            prev = head
            current_sublist_old_head = head
            prev_sublist_tail = None  # the tail of the previous sublist
            current = head.next
            while sublists_count <= target_sublists:
                reverse_count = 1
                # we reverse (k-1) times: standard pointer reversal within the group
                while reverse_count < k:
                    current_next = current.next
                    current.next = prev
                    prev = current
                    current = current_next
                    reverse_count += 1
                # the overall head of the list is the head of the first sublist
                # after being reversed => last element in sublist before reverse
                # => prev when current is the head of the next sublist => i.e.
                # after we exit the loop
                if overall_head is None:
                    overall_head = prev
                if prev_sublist_tail is not None:
                    # the tail of the previous sublist should be linked to the new head of the sublist
                    prev_sublist_tail.next = prev
                # the old head of the current sublist is its tail now
                prev_sublist_tail = current_sublist_old_head
                # now we need to assign the next of the original head of the current sublist
                # that we reversed to the current node
                prev = current
                current_sublist_old_head = current
                if current is not None:
                    current = current.next
                sublists_count += 1
            # to make sure we link the tail of the last sublist to the node that's at the beginning
            # of the remainder of the list (no to be reversed)
            if prev_sublist_tail is not None:
                # the tail of the previous sublist should be linked to the new head of the sublist
                prev_sublist_tail.next = prev
            return overall_head
# Ad-hoc smoke test: reverse [1, 2, 3, 4] in groups of 2 -> prints 2 1 4 3.
# The original bound the node list to the name `list`, shadowing the builtin;
# renamed to `nodes`.
head = [1, 2, 3, 4]
k = 2
nodes = [ListNode(val) for val in head]
for i in range(len(nodes) - 1):
    nodes[i].next = nodes[i + 1]
head = nodes[0] if nodes else None
new_head = Solution().reverseKGroup(head, k)
while new_head is not None:
    print(new_head.val)
    new_head = new_head.next
| 31.849624
| 110
| 0.574127
|
4a10a67061345a12c7b65badb32dd08a4b4754a5
| 5,990
|
gyp
|
Python
|
device/bluetooth/bluetooth.gyp
|
tmpsantos/chromium
|
802d4aeeb33af25c01ee5994037bbf14086d4ac0
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-01-16T03:57:39.000Z
|
2019-01-16T03:57:39.000Z
|
device/bluetooth/bluetooth.gyp
|
tmpsantos/chromium
|
802d4aeeb33af25c01ee5994037bbf14086d4ac0
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2018-02-10T21:00:08.000Z
|
2018-03-20T05:09:50.000Z
|
device/bluetooth/bluetooth.gyp
|
tmpsantos/chromium
|
802d4aeeb33af25c01ee5994037bbf14086d4ac0
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T07:23:37.000Z
|
2020-11-04T07:23:37.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
# GN version: //device/bluetooth
'target_name': 'device_bluetooth',
'type': 'static_library',
'dependencies': [
'../../base/base.gyp:base',
'../../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../../net/net.gyp:net',
'../../third_party/libxml/libxml.gyp:libxml',
'../../ui/base/ui_base.gyp:ui_base',
'../../ui/gfx/gfx.gyp:gfx',
'../../ui/gfx/gfx.gyp:gfx_geometry',
'bluetooth_strings.gyp:device_bluetooth_strings',
],
'sources': [
# Note: file list duplicated in GN build.
'bluetooth_adapter.cc',
'bluetooth_adapter.h',
'bluetooth_adapter_chromeos.cc',
'bluetooth_adapter_chromeos.h',
'bluetooth_adapter_factory.cc',
'bluetooth_adapter_factory.h',
'bluetooth_adapter_mac.h',
'bluetooth_adapter_mac.mm',
'bluetooth_adapter_win.cc',
'bluetooth_adapter_win.h',
'bluetooth_channel_mac.mm',
'bluetooth_channel_mac.h',
'bluetooth_device.cc',
'bluetooth_device.h',
'bluetooth_device_chromeos.cc',
'bluetooth_device_chromeos.h',
'bluetooth_device_mac.h',
'bluetooth_device_mac.mm',
'bluetooth_device_win.cc',
'bluetooth_device_win.h',
'bluetooth_discovery_manager_mac.mm',
'bluetooth_discovery_manager_mac.h',
'bluetooth_discovery_session.cc',
'bluetooth_discovery_session.h',
'bluetooth_gatt_characteristic.cc',
'bluetooth_gatt_characteristic.h',
'bluetooth_gatt_connection.cc',
'bluetooth_gatt_connection.h',
'bluetooth_gatt_connection_chromeos.cc',
'bluetooth_gatt_connection_chromeos.h',
'bluetooth_gatt_descriptor.cc',
'bluetooth_gatt_descriptor.h',
'bluetooth_gatt_notify_session.cc',
'bluetooth_gatt_notify_session.h',
'bluetooth_gatt_notify_session_chromeos.cc',
'bluetooth_gatt_notify_session_chromeos.h',
'bluetooth_gatt_service.cc',
'bluetooth_gatt_service.h',
'bluetooth_init_win.cc',
'bluetooth_init_win.h',
'bluetooth_l2cap_channel_mac.mm',
'bluetooth_l2cap_channel_mac.h',
'bluetooth_low_energy_defs_win.cc',
'bluetooth_low_energy_defs_win.h',
'bluetooth_low_energy_win.cc',
'bluetooth_low_energy_win.h',
'bluetooth_pairing_chromeos.cc',
'bluetooth_pairing_chromeos.h',
'bluetooth_remote_gatt_characteristic_chromeos.cc',
'bluetooth_remote_gatt_characteristic_chromeos.h',
'bluetooth_remote_gatt_descriptor_chromeos.cc',
'bluetooth_remote_gatt_descriptor_chromeos.h',
'bluetooth_remote_gatt_service_chromeos.cc',
'bluetooth_remote_gatt_service_chromeos.h',
'bluetooth_rfcomm_channel_mac.mm',
'bluetooth_rfcomm_channel_mac.h',
'bluetooth_service_record_win.cc',
'bluetooth_service_record_win.h',
'bluetooth_socket.cc',
'bluetooth_socket.h',
'bluetooth_socket_chromeos.cc',
'bluetooth_socket_chromeos.h',
'bluetooth_socket_mac.h',
'bluetooth_socket_mac.mm',
'bluetooth_socket_net.cc',
'bluetooth_socket_net.h',
'bluetooth_socket_thread.cc',
'bluetooth_socket_thread.h',
'bluetooth_socket_win.cc',
'bluetooth_socket_win.h',
'bluetooth_task_manager_win.cc',
'bluetooth_task_manager_win.h',
'bluetooth_uuid.cc',
'bluetooth_uuid.h',
],
'conditions': [
['chromeos==1', {
'dependencies': [
'../../build/linux/system.gyp:dbus',
'../../chromeos/chromeos.gyp:chromeos',
'../../dbus/dbus.gyp:dbus',
]
}],
['OS=="win"', {
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'BluetoothApis.dll',
# Despite MSDN stating that Bthprops.dll contains the
# symbols declared by bthprops.lib, they actually reside here:
'Bthprops.cpl',
'setupapi.dll',
],
},
},
},
}],
['OS=="mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/IOBluetooth.framework',
],
},
}],
],
},
{
# GN version: //device/bluetooth:mocks
'target_name': 'device_bluetooth_mocks',
'type': 'static_library',
'dependencies': [
'../../testing/gmock.gyp:gmock',
'device_bluetooth',
],
'include_dirs': [
'../../',
],
'sources': [
# Note: file list duplicated in GN build.
'test/mock_bluetooth_adapter.cc',
'test/mock_bluetooth_adapter.h',
'test/mock_bluetooth_device.cc',
'test/mock_bluetooth_device.h',
'test/mock_bluetooth_discovery_session.cc',
'test/mock_bluetooth_discovery_session.h',
'test/mock_bluetooth_gatt_characteristic.cc',
'test/mock_bluetooth_gatt_characteristic.h',
'test/mock_bluetooth_gatt_connection.cc',
'test/mock_bluetooth_gatt_connection.h',
'test/mock_bluetooth_gatt_descriptor.cc',
'test/mock_bluetooth_gatt_descriptor.h',
'test/mock_bluetooth_gatt_notify_session.cc',
'test/mock_bluetooth_gatt_notify_session.h',
'test/mock_bluetooth_gatt_service.cc',
'test/mock_bluetooth_gatt_service.h',
'test/mock_bluetooth_socket.cc',
'test/mock_bluetooth_socket.h',
],
},
],
}
| 35.654762
| 97
| 0.609683
|
4a10a7947add883a72cedaafd08e4af9654b346b
| 44,367
|
py
|
Python
|
test/test_ui.py
|
stragu/beets
|
da46a62772ab7a88c5799c84841f744dfc0f0a20
|
[
"MIT"
] | null | null | null |
test/test_ui.py
|
stragu/beets
|
da46a62772ab7a88c5799c84841f744dfc0f0a20
|
[
"MIT"
] | null | null | null |
test/test_ui.py
|
stragu/beets
|
da46a62772ab7a88c5799c84841f744dfc0f0a20
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for the command-line interface.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
import shutil
import re
import subprocess
import platform
from copy import deepcopy
from mock import patch
from test import _common
from test._common import unittest
from test.helper import capture_stdout, has_program, TestHelper, control_stdin
from beets import library
from beets import ui
from beets.ui import commands
from beets import autotag
from beets.autotag.match import distance
from beets.mediafile import MediaFile
from beets import config
from beets import plugins
from beets.util.confit import ConfigError
from beets import util
class ListTest(unittest.TestCase):
    """Exercise the `beet list` command's output, queries and format strings."""
    def setUp(self):
        # In-memory library with one item and one album wrapping it.
        self.lib = library.Library(':memory:')
        self.item = _common.item()
        self.item.path = 'xxx/yyy'
        self.lib.add(self.item)
        self.lib.add_album([self.item])
    def _run_list(self, query='', album=False, path=False, fmt=''):
        # NOTE(review): `path` is accepted but never forwarded to
        # list_items — confirm whether it is vestigial.
        commands.list_items(self.lib, query, album, fmt)
    def test_list_outputs_item(self):
        with capture_stdout() as stdout:
            self._run_list()
        self.assertIn(u'the title', stdout.getvalue())
    def test_list_unicode_query(self):
        self.item.title = u'na\xefve'
        self.item.store()
        self.lib._connection().commit()
        with capture_stdout() as stdout:
            self._run_list([u'na\xefve'])
        out = stdout.getvalue()
        self.assertTrue(u'na\xefve' in out.decode(stdout.encoding))
    def test_list_item_path(self):
        with capture_stdout() as stdout:
            self._run_list(fmt='$path')
        self.assertEqual(stdout.getvalue().strip(), u'xxx/yyy')
    def test_list_album_outputs_something(self):
        with capture_stdout() as stdout:
            self._run_list(album=True)
        self.assertGreater(len(stdout.getvalue()), 0)
    def test_list_album_path(self):
        with capture_stdout() as stdout:
            self._run_list(album=True, fmt='$path')
        self.assertEqual(stdout.getvalue().strip(), u'xxx')
    def test_list_album_omits_title(self):
        with capture_stdout() as stdout:
            self._run_list(album=True)
        self.assertNotIn(u'the title', stdout.getvalue())
    def test_list_uses_track_artist(self):
        with capture_stdout() as stdout:
            self._run_list()
        self.assertIn(u'the artist', stdout.getvalue())
        self.assertNotIn(u'the album artist', stdout.getvalue())
    def test_list_album_uses_album_artist(self):
        with capture_stdout() as stdout:
            self._run_list(album=True)
        self.assertNotIn(u'the artist', stdout.getvalue())
        self.assertIn(u'the album artist', stdout.getvalue())
    def test_list_item_format_artist(self):
        with capture_stdout() as stdout:
            self._run_list(fmt='$artist')
        self.assertIn(u'the artist', stdout.getvalue())
    def test_list_item_format_multiple(self):
        with capture_stdout() as stdout:
            self._run_list(fmt='$artist - $album - $year')
        self.assertEqual(u'the artist - the album - 0001',
                         stdout.getvalue().strip())
    def test_list_album_format(self):
        with capture_stdout() as stdout:
            self._run_list(album=True, fmt='$genre')
        self.assertIn(u'the genre', stdout.getvalue())
        self.assertNotIn(u'the album', stdout.getvalue())
class RemoveTest(_common.TestCase):
    """Removing items from the library, with and without file deletion."""

    def setUp(self):
        super(RemoveTest, self).setUp()
        self.io.install()

        self.libdir = os.path.join(self.temp_dir, 'testlibdir')
        os.mkdir(self.libdir)

        # Import one real file into the library directory.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i)
        self.i.move(True)

    def _remove(self, delete):
        # Confirm the interactive prompt, then run the command.
        self.io.addinput('y')
        commands.remove_items(self.lib, '', False, delete)

    def test_remove_items_no_delete(self):
        self._remove(delete=False)
        self.assertEqual(0, len(list(self.lib.items())))
        # Without --delete the file stays on disk.
        self.assertTrue(os.path.exists(self.i.path))

    def test_remove_items_with_delete(self):
        self._remove(delete=True)
        self.assertEqual(0, len(list(self.lib.items())))
        self.assertFalse(os.path.exists(self.i.path))
class ModifyTest(unittest.TestCase, TestHelper):
    """Tests for the ``modify`` command: changing item and album fields
    via the CLI, including tag writing, file moving, flexible attributes,
    and argument parsing.
    """
    def setUp(self):
        self.setup_beets()
        self.album = self.add_album_fixture()
        [self.item] = self.album.items()
    def tearDown(self):
        self.teardown_beets()
    def modify_inp(self, inp, *args):
        """Run ``beet modify`` with *args*, feeding *inp* to stdin."""
        with control_stdin(inp):
            ui._raw_main(['modify'] + list(args), self.lib)
    def modify(self, *args):
        """Run ``beet modify`` and confirm the prompt with 'y'."""
        self.modify_inp('y', *args)
    # Item tests
    def test_modify_item(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        self.assertEqual(item.title, 'newTitle')
    def test_modify_item_abort(self):
        item = self.lib.items().get()
        title = item.title
        self.modify_inp('n', "title=newTitle")
        item = self.lib.items().get()
        # Answering 'n' must leave the field untouched.
        self.assertEqual(item.title, title)
    def test_modify_item_no_change(self):
        title = "Tracktitle"
        item = self.add_item_fixture(title=title)
        self.modify_inp('y', "title", "title={0}".format(title))
        item = self.lib.items(title).get()
        self.assertEqual(item.title, title)
    def test_modify_write_tags(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        item.read()  # re-read tags from the file on disk
        self.assertEqual(item.title, 'newTitle')
    def test_modify_dont_write_tags(self):
        self.modify("--nowrite", "title=newTitle")
        item = self.lib.items().get()
        item.read()
        self.assertNotEqual(item.title, 'newTitle')
    def test_move(self):
        self.modify("title=newTitle")
        item = self.lib.items().get()
        # Paths are bytes; the new title should appear in the moved path.
        self.assertIn(b'newTitle', item.path)
    def test_not_move(self):
        self.modify("--nomove", "title=newTitle")
        item = self.lib.items().get()
        self.assertNotIn(b'newTitle', item.path)
    def test_no_write_no_move(self):
        self.modify("--nomove", "--nowrite", "title=newTitle")
        item = self.lib.items().get()
        item.read()
        self.assertNotIn(b'newTitle', item.path)
        self.assertNotEqual(item.title, 'newTitle')
    def test_update_mtime(self):
        item = self.item
        old_mtime = item.mtime
        self.modify("title=newTitle")
        item.load()
        self.assertNotEqual(old_mtime, item.mtime)
        self.assertEqual(item.current_mtime(), item.mtime)
    def test_reset_mtime_with_no_write(self):
        item = self.item
        self.modify("--nowrite", "title=newTitle")
        item.load()
        # mtime is zeroed to mark the file as out of sync with the DB.
        self.assertEqual(0, item.mtime)
    # Album Tests
    def test_modify_album(self):
        self.modify("--album", "album=newAlbum")
        album = self.lib.albums().get()
        self.assertEqual(album.album, 'newAlbum')
    def test_modify_album_write_tags(self):
        self.modify("--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.album, 'newAlbum')
    def test_modify_album_dont_write_tags(self):
        self.modify("--album", "--nowrite", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertEqual(item.album, 'the album')
    def test_album_move(self):
        self.modify("--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertIn(b'newAlbum', item.path)
    def test_album_not_move(self):
        self.modify("--nomove", "--album", "album=newAlbum")
        item = self.lib.items().get()
        item.read()
        self.assertNotIn(b'newAlbum', item.path)
    # Misc
    def test_write_initial_key_tag(self):
        self.modify("initial_key=C#m")
        item = self.lib.items().get()
        mediafile = MediaFile(item.path)
        self.assertEqual(mediafile.initial_key, 'C#m')
    def test_set_flexattr(self):
        self.modify("flexattr=testAttr")
        item = self.lib.items().get()
        self.assertEqual(item.flexattr, 'testAttr')
    def test_remove_flexattr(self):
        item = self.lib.items().get()
        item.flexattr = 'testAttr'
        item.store()
        # Trailing '!' deletes the named field.
        self.modify("flexattr!")
        item = self.lib.items().get()
        self.assertNotIn("flexattr", item)
    @unittest.skip('not yet implemented')
    def test_delete_initial_key_tag(self):
        item = self.lib.items().get()
        item.initial_key = 'C#m'
        item.write()
        item.store()
        mediafile = MediaFile(item.path)
        self.assertEqual(mediafile.initial_key, 'C#m')
        self.modify("initial_key!")
        mediafile = MediaFile(item.path)
        self.assertIsNone(mediafile.initial_key)
    def test_arg_parsing_colon_query(self):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",
                                                          "title=newTitle"])
        self.assertEqual(query, ["title:oldTitle"])
        self.assertEqual(mods, {"title": "newTitle"})
    def test_arg_parsing_delete(self):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",
                                                          "title!"])
        self.assertEqual(query, ["title:oldTitle"])
        self.assertEqual(dels, ["title"])
    def test_arg_parsing_query_with_exclaimation(self):
        (query, mods, dels) = commands.modify_parse_args(["title:oldTitle!",
                                                          "title=newTitle!"])
        self.assertEqual(query, ["title:oldTitle!"])
        self.assertEqual(mods, {"title": "newTitle!"})
    def test_arg_parsing_equals_in_value(self):
        (query, mods, dels) = commands.modify_parse_args(["title:foo=bar",
                                                          "title=newTitle"])
        self.assertEqual(query, ["title:foo=bar"])
        self.assertEqual(mods, {"title": "newTitle"})
class WriteTest(unittest.TestCase, TestHelper):
    """Tests for the ``write`` command."""

    def setUp(self):
        self.setup_beets()

    def tearDown(self):
        self.teardown_beets()

    def write_cmd(self, *args):
        ui._raw_main(['write'] + list(args), self.lib)

    def test_update_mtime(self):
        item = self.add_item_fixture()
        item['title'] = 'a new title'
        item.store()

        item = self.lib.items().get()
        self.assertEqual(0, item.mtime)

        self.write_cmd()
        item = self.lib.items().get()
        self.assertEqual(item.current_mtime(), item.mtime)

    def test_non_metadata_field_unchanged(self):
        """Changing a non-"tag" field like `bitrate` and writing should
        have no effect.
        """
        # Start from an item whose tags match the file...
        item = self.add_item_fixture()
        item.read()

        # ...then desynchronize a field that is not stored in tags.
        item.bitrate = 123
        item.store()

        with capture_stdout() as output:
            self.write_cmd()

        self.assertEqual('', output.getvalue())

    def test_write_metadata_field(self):
        item = self.add_item_fixture()
        item.read()
        old_title = item.title

        item.title = 'new title'
        item.store()

        with capture_stdout() as output:
            self.write_cmd()

        self.assertTrue('{0} -> new title'.format(old_title)
                        in output.getvalue())
class MoveTest(_common.TestCase):
    """Tests for the ``move`` command: moving/copying items and albums,
    optionally to an alternate destination directory, with ``--pretend``.
    """
    def setUp(self):
        super(MoveTest, self).setUp()
        self.io.install()
        self.libdir = os.path.join(self.temp_dir, 'testlibdir')
        os.mkdir(self.libdir)
        self.itempath = os.path.join(self.libdir, 'srcfile')
        shutil.copy(os.path.join(_common.RSRC, 'full.mp3'), self.itempath)
        # Add a file to the library but don't copy it in yet.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(self.itempath)
        self.lib.add(self.i)
        self.album = self.lib.add_album([self.i])
        # Alternate destination directory.
        self.otherdir = os.path.join(self.temp_dir, 'testotherdir')
    def _move(self, query=(), dest=None, copy=False, album=False,
              pretend=False):
        """Invoke the ``move`` command implementation directly."""
        commands.move_items(self.lib, dest, query, copy, album, pretend)
    def test_move_item(self):
        self._move()
        self.i.load()
        # The file should now live under the library directory.
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_copy_item(self):
        self._move(copy=True)
        self.i.load()
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        # Copying preserves the source file.
        self.assertExists(self.itempath)
    def test_move_album(self):
        self._move(album=True)
        self.i.load()
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_copy_album(self):
        self._move(copy=True, album=True)
        self.i.load()
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertExists(self.itempath)
    def test_move_item_custom_dir(self):
        self._move(dest=self.otherdir)
        self.i.load()
        self.assertTrue('testotherdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_move_album_custom_dir(self):
        self._move(dest=self.otherdir, album=True)
        self.i.load()
        self.assertTrue('testotherdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_pretend_move_item(self):
        self._move(dest=self.otherdir, pretend=True)
        self.i.load()
        # With --pretend nothing is actually moved.
        self.assertIn('srcfile', self.i.path)
    def test_pretend_move_album(self):
        self._move(album=True, pretend=True)
        self.i.load()
        self.assertIn('srcfile', self.i.path)
class UpdateTest(_common.TestCase):
    """Tests for the ``update`` command: detecting deleted files, picking
    up on-disk metadata changes, moving files/art accordingly, and the
    mtime-based skip optimization.
    """
    def setUp(self):
        super(UpdateTest, self).setUp()
        self.io.install()
        self.libdir = os.path.join(self.temp_dir, 'testlibdir')
        # Copy a file into the library.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i)
        self.i.move(True)
        self.album = self.lib.add_album([self.i])
        # Album art.
        artfile = os.path.join(self.temp_dir, 'testart.jpg')
        _common.touch(artfile)
        self.album.set_art(artfile)
        self.album.store()
        os.remove(artfile)
    def _update(self, query=(), album=False, move=False, reset_mtime=True):
        """Run the update command, answering 'y' to any prompt.

        Resetting mtime to 0 forces the file to be re-examined (the
        command skips files whose stored mtime matches the one on disk).
        """
        self.io.addinput('y')
        if reset_mtime:
            self.i.mtime = 0
            self.i.store()
        commands.update_items(self.lib, query, album, move, False)
    def test_delete_removes_item(self):
        self.assertTrue(list(self.lib.items()))
        os.remove(self.i.path)
        self._update()
        self.assertFalse(list(self.lib.items()))
    def test_delete_removes_album(self):
        self.assertTrue(self.lib.albums())
        os.remove(self.i.path)
        self._update()
        self.assertFalse(self.lib.albums())
    def test_delete_removes_album_art(self):
        artpath = self.album.artpath
        self.assertExists(artpath)
        os.remove(self.i.path)
        self._update()
        self.assertNotExists(artpath)
    def test_modified_metadata_detected(self):
        # Change the tag directly on disk, bypassing the library.
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update()
        item = self.lib.items().get()
        self.assertEqual(item.title, 'differentTitle')
    def test_modified_metadata_moved(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update(move=True)
        item = self.lib.items().get()
        self.assertTrue('differentTitle' in item.path)
    def test_modified_metadata_not_moved(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update(move=False)
        item = self.lib.items().get()
        self.assertTrue('differentTitle' not in item.path)
    def test_modified_album_metadata_moved(self):
        mf = MediaFile(self.i.path)
        mf.album = 'differentAlbum'
        mf.save()
        self._update(move=True)
        item = self.lib.items().get()
        self.assertTrue('differentAlbum' in item.path)
    def test_modified_album_metadata_art_moved(self):
        artpath = self.album.artpath
        mf = MediaFile(self.i.path)
        mf.album = 'differentAlbum'
        mf.save()
        self._update(move=True)
        album = self.lib.albums()[0]
        self.assertNotEqual(artpath, album.artpath)
    def test_mtime_match_skips_update(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        # Make in-memory mtime match on-disk mtime.
        self.i.mtime = os.path.getmtime(self.i.path)
        self.i.store()
        self._update(reset_mtime=False)
        item = self.lib.items().get()
        # The on-disk change is NOT picked up because mtimes matched.
        self.assertEqual(item.title, 'full')
class PrintTest(_common.TestCase):
    """Check that `ui.print_` copes with missing or invalid locale
    settings instead of raising encoding-related errors.
    """
    def setUp(self):
        super(PrintTest, self).setUp()
        self.io.install()

    def test_print_without_locale(self):
        # Remove LANG entirely for the duration of the test.
        lang = os.environ.get('LANG')
        if lang is not None:
            del os.environ['LANG']
        try:
            ui.print_(u'something')
        except TypeError:
            self.fail('TypeError during print')
        finally:
            # Restore the variable exactly as it was, even if it was set
            # to the empty string (the old `if lang:` check dropped that
            # case and left the environment modified).
            if lang is not None:
                os.environ['LANG'] = lang

    def test_print_with_invalid_locale(self):
        old_lang = os.environ.get('LANG')
        os.environ['LANG'] = ''
        old_ctype = os.environ.get('LC_CTYPE')
        os.environ['LC_CTYPE'] = 'UTF-8'
        try:
            ui.print_(u'something')
        except ValueError:
            self.fail('ValueError during print')
        finally:
            # Restore both variables exactly, including empty-string
            # values: `if old_lang:` would delete a pre-existing LANG=''
            # instead of putting it back.
            if old_lang is not None:
                os.environ['LANG'] = old_lang
            else:
                del os.environ['LANG']
            if old_ctype is not None:
                os.environ['LC_CTYPE'] = old_ctype
            else:
                del os.environ['LC_CTYPE']
class ImportTest(_common.TestCase):
    """Guard against contradictory importer flags."""

    def test_quiet_timid_disallowed(self):
        # `quiet` (no prompts) and `timid` (always prompt) conflict.
        config['import']['quiet'] = True
        config['import']['timid'] = True
        with self.assertRaises(ui.UserError):
            commands.import_files(None, [], None)
class InputTest(_common.TestCase):
    """Interactive input should be decoded from UTF-8 to unicode."""

    def setUp(self):
        super(InputTest, self).setUp()
        self.io.install()

    def test_manual_search_gets_unicode(self):
        # Two UTF-8 encoded answers: one for artist, one for album.
        for _ in range(2):
            self.io.addinput(b'\xc3\x82me')
        artist, album = commands.manual_search(False)
        self.assertEqual((artist, album), (u'\xc2me', u'\xc2me'))
@_common.slow_test()
class ConfigTest(unittest.TestCase, TestHelper):
    """Tests for configuration loading precedence: the default user config
    file, the BEETSDIR environment variable, and --config on the command
    line -- including how relative paths in each are resolved.
    """
    def setUp(self):
        self.setup_beets()
        # Don't use the BEETSDIR from `helper`. Instead, we point the home
        # directory there. Some tests will set `BEETSDIR` themselves.
        del os.environ['BEETSDIR']
        self._old_home = os.environ.get('HOME')
        os.environ['HOME'] = self.temp_dir
        self._orig_cwd = os.getcwd()
        self.test_cmd = self._make_test_cmd()
        commands.default_commands.append(self.test_cmd)
        # Default user configuration
        if platform.system() == 'Windows':
            self.user_config_dir = os.path.join(
                self.temp_dir, 'AppData', 'Roaming', 'beets'
            )
        else:
            self.user_config_dir = os.path.join(
                self.temp_dir, '.config', 'beets'
            )
        os.makedirs(self.user_config_dir)
        self.user_config_path = os.path.join(self.user_config_dir,
                                             'config.yaml')
        # Custom BEETSDIR
        self.beetsdir = os.path.join(self.temp_dir, 'beetsdir')
        os.makedirs(self.beetsdir)
        self._reset_config()
    def tearDown(self):
        commands.default_commands.pop()
        os.chdir(self._orig_cwd)
        if self._old_home is not None:
            os.environ['HOME'] = self._old_home
        self.teardown_beets()
    def _make_test_cmd(self):
        """Build a no-op `test` subcommand that records the lib, options,
        and args it was invoked with so tests can inspect them."""
        test_cmd = ui.Subcommand('test', help='test')
        def run(lib, options, args):
            test_cmd.lib = lib
            test_cmd.options = options
            test_cmd.args = args
        test_cmd.func = run
        return test_cmd
    def _reset_config(self):
        # Config should read files again on demand
        config.clear()
        config._materialized = False
    def write_config_file(self):
        """Open the default user config file for writing."""
        return open(self.user_config_path, 'w')
    def test_paths_section_respected(self):
        # NOTE: `config` below shadows the module-level beets config
        # object; it is the file handle returned by write_config_file().
        with self.write_config_file() as config:
            config.write('paths: {x: y}')
        ui._raw_main(['test'])
        key, template = self.test_cmd.lib.path_formats[0]
        self.assertEqual(key, 'x')
        self.assertEqual(template.original, 'y')
    def test_default_paths_preserved(self):
        default_formats = ui.get_path_formats()
        self._reset_config()
        with self.write_config_file() as config:
            config.write('paths: {x: y}')
        ui._raw_main(['test'])
        key, template = self.test_cmd.lib.path_formats[0]
        self.assertEqual(key, 'x')
        self.assertEqual(template.original, 'y')
        # User formats are prepended; the defaults follow.
        self.assertEqual(self.test_cmd.lib.path_formats[1:],
                         default_formats)
    def test_nonexistant_db(self):
        with self.write_config_file() as config:
            config.write('library: /xxx/yyy/not/a/real/path')
        with self.assertRaises(ui.UserError):
            ui._raw_main(['test'])
    def test_user_config_file(self):
        with self.write_config_file() as file:
            file.write('anoption: value')
        ui._raw_main(['test'])
        self.assertEqual(config['anoption'].get(), 'value')
    def test_replacements_parsed(self):
        with self.write_config_file() as config:
            config.write("replace: {'[xy]': z}")
        ui._raw_main(['test'])
        replacements = self.test_cmd.lib.replacements
        self.assertEqual(replacements, [(re.compile(r'[xy]'), b'z')])
    def test_multiple_replacements_parsed(self):
        with self.write_config_file() as config:
            config.write("replace: {'[xy]': z, foo: bar}")
        ui._raw_main(['test'])
        replacements = self.test_cmd.lib.replacements
        self.assertEqual(replacements, [
            (re.compile(r'[xy]'), 'z'),
            (re.compile(r'foo'), 'bar'),
        ])
    def test_cli_config_option(self):
        config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(config_path, 'w') as file:
            file.write('anoption: value')
        ui._raw_main(['--config', config_path, 'test'])
        self.assertEqual(config['anoption'].get(), 'value')
    def test_cli_config_file_overwrites_user_defaults(self):
        with open(self.user_config_path, 'w') as file:
            file.write('anoption: value')
        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(cli_config_path, 'w') as file:
            file.write('anoption: cli overwrite')
        ui._raw_main(['--config', cli_config_path, 'test'])
        self.assertEqual(config['anoption'].get(), 'cli overwrite')
    def test_cli_config_file_overwrites_beetsdir_defaults(self):
        os.environ['BEETSDIR'] = self.beetsdir
        env_config_path = os.path.join(self.beetsdir, 'config.yaml')
        with open(env_config_path, 'w') as file:
            file.write('anoption: value')
        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(cli_config_path, 'w') as file:
            file.write('anoption: cli overwrite')
        ui._raw_main(['--config', cli_config_path, 'test'])
        self.assertEqual(config['anoption'].get(), 'cli overwrite')
#    @unittest.skip('Difficult to implement with optparse')
#    def test_multiple_cli_config_files(self):
#        cli_config_path_1 = os.path.join(self.temp_dir, 'config.yaml')
#        cli_config_path_2 = os.path.join(self.temp_dir, 'config_2.yaml')
#
#        with open(cli_config_path_1, 'w') as file:
#            file.write('first: value')
#
#        with open(cli_config_path_2, 'w') as file:
#            file.write('second: value')
#
#        ui._raw_main(['--config', cli_config_path_1,
#                      '--config', cli_config_path_2, 'test'])
#        self.assertEqual(config['first'].get(), 'value')
#        self.assertEqual(config['second'].get(), 'value')
#
#    @unittest.skip('Difficult to implement with optparse')
#    def test_multiple_cli_config_overwrite(self):
#        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
#        cli_overwrite_config_path = os.path.join(self.temp_dir,
#                                                 'overwrite_config.yaml')
#
#        with open(cli_config_path, 'w') as file:
#            file.write('anoption: value')
#
#        with open(cli_overwrite_config_path, 'w') as file:
#            file.write('anoption: overwrite')
#
#        ui._raw_main(['--config', cli_config_path,
#                      '--config', cli_overwrite_config_path, 'test'])
#        self.assertEqual(config['anoption'].get(), 'cli overwrite')
    def test_cli_config_paths_resolve_relative_to_user_dir(self):
        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(cli_config_path, 'w') as file:
            file.write('library: beets.db\n')
            file.write('statefile: state')
        ui._raw_main(['--config', cli_config_path, 'test'])
        self.assertEqual(config['library'].as_filename(),
                         os.path.join(self.user_config_dir, 'beets.db'))
        self.assertEqual(config['statefile'].as_filename(),
                         os.path.join(self.user_config_dir, 'state'))
    def test_cli_config_paths_resolve_relative_to_beetsdir(self):
        os.environ['BEETSDIR'] = self.beetsdir
        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(cli_config_path, 'w') as file:
            file.write('library: beets.db\n')
            file.write('statefile: state')
        ui._raw_main(['--config', cli_config_path, 'test'])
        self.assertEqual(config['library'].as_filename(),
                         os.path.join(self.beetsdir, 'beets.db'))
        self.assertEqual(config['statefile'].as_filename(),
                         os.path.join(self.beetsdir, 'state'))
    def test_command_line_option_relative_to_working_dir(self):
        os.chdir(self.temp_dir)
        ui._raw_main(['--library', 'foo.db', 'test'])
        self.assertEqual(config['library'].as_filename(),
                         os.path.join(os.getcwd(), 'foo.db'))
    def test_cli_config_file_loads_plugin_commands(self):
        plugin_path = os.path.join(_common.RSRC, 'beetsplug')
        cli_config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(cli_config_path, 'w') as file:
            file.write('pluginpath: %s\n' % plugin_path)
            file.write('plugins: test')
        ui._raw_main(['--config', cli_config_path, 'plugin'])
        self.assertTrue(plugins.find_plugins()[0].is_test_plugin)
    def test_beetsdir_config(self):
        os.environ['BEETSDIR'] = self.beetsdir
        env_config_path = os.path.join(self.beetsdir, 'config.yaml')
        with open(env_config_path, 'w') as file:
            file.write('anoption: overwrite')
        config.read()
        self.assertEqual(config['anoption'].get(), 'overwrite')
    def test_beetsdir_points_to_file_error(self):
        # BEETSDIR must be a directory; a plain file is a ConfigError.
        beetsdir = os.path.join(self.temp_dir, 'beetsfile')
        open(beetsdir, 'a').close()
        os.environ['BEETSDIR'] = beetsdir
        self.assertRaises(ConfigError, ui._raw_main, ['test'])
    def test_beetsdir_config_does_not_load_default_user_config(self):
        os.environ['BEETSDIR'] = self.beetsdir
        with open(self.user_config_path, 'w') as file:
            file.write('anoption: value')
        config.read()
        self.assertFalse(config['anoption'].exists())
    def test_default_config_paths_resolve_relative_to_beetsdir(self):
        os.environ['BEETSDIR'] = self.beetsdir
        config.read()
        self.assertEqual(config['library'].as_filename(),
                         os.path.join(self.beetsdir, 'library.db'))
        self.assertEqual(config['statefile'].as_filename(),
                         os.path.join(self.beetsdir, 'state.pickle'))
    def test_beetsdir_config_paths_resolve_relative_to_beetsdir(self):
        os.environ['BEETSDIR'] = self.beetsdir
        env_config_path = os.path.join(self.beetsdir, 'config.yaml')
        with open(env_config_path, 'w') as file:
            file.write('library: beets.db\n')
            file.write('statefile: state')
        config.read()
        self.assertEqual(config['library'].as_filename(),
                         os.path.join(self.beetsdir, 'beets.db'))
        self.assertEqual(config['statefile'].as_filename(),
                         os.path.join(self.beetsdir, 'state'))
class ShowModelChangeTest(_common.TestCase):
    """Tests for `ui.show_model_changes` between two items."""

    def setUp(self):
        super(ShowModelChangeTest, self).setUp()
        self.io.install()
        self.a = _common.item()
        self.b = _common.item()
        self.a.path = self.b.path

    def _show(self, **kwargs):
        # Returns (whether-anything-changed, captured output).
        changed = ui.show_model_changes(self.a, self.b, **kwargs)
        return changed, self.io.getoutput()

    def test_identical(self):
        changed, out = self._show()
        self.assertFalse(changed)
        self.assertEqual('', out)

    def test_string_fixed_field_change(self):
        self.b.title = 'x'
        changed, out = self._show()
        self.assertTrue(changed)
        self.assertIn('title', out)

    def test_int_fixed_field_change(self):
        self.b.track = 9
        changed, out = self._show()
        self.assertTrue(changed)
        self.assertIn('track', out)

    def test_floats_close_to_identical(self):
        # Near-equal floats count as unchanged.
        self.a.length = 1.00001
        self.b.length = 1.00005
        changed, out = self._show()
        self.assertFalse(changed)
        self.assertEqual('', out)

    def test_floats_different(self):
        self.a.length = 1.00001
        self.b.length = 2.00001
        changed, out = self._show()
        self.assertTrue(changed)
        self.assertIn('length', out)

    def test_both_values_shown(self):
        self.a.title = 'foo'
        self.b.title = 'bar'
        _, out = self._show()
        self.assertIn('foo', out)
        self.assertIn('bar', out)
class ShowChangeTest(_common.TestCase):
    """Tests for `commands.show_change`, the tagging-proposal display
    shown during import.
    """
    def setUp(self):
        super(ShowChangeTest, self).setUp()
        self.io.install()
        self.items = [_common.item()]
        self.items[0].track = 1
        self.items[0].path = '/path/to/file.mp3'
        self.info = autotag.AlbumInfo(
            u'the album', u'album id', u'the artist', u'artist id', [
                autotag.TrackInfo(u'the title', u'track id', index=1)
            ]
        )
    def _show_change(self, items=None, info=None,
                     cur_artist=u'the artist', cur_album=u'the album',
                     dist=0.1):
        """Return an unicode string representing the changes"""
        items = items or self.items
        info = info or self.info
        mapping = dict(zip(items, info.tracks))
        config['ui']['color'] = False
        album_dist = distance(items, info, mapping)
        # Force a known penalty so the similarity percentage is stable.
        album_dist._penalties = {'album': [dist]}
        commands.show_change(
            cur_artist,
            cur_album,
            autotag.AlbumMatch(album_dist, info, mapping, set(), set()),
        )
        # FIXME decoding shouldn't be done here
        return self.io.getoutput().lower().decode('utf8')
    def test_null_change(self):
        msg = self._show_change()
        self.assertTrue('similarity: 90' in msg)
        self.assertTrue('tagging:' in msg)
    def test_album_data_change(self):
        msg = self._show_change(cur_artist='another artist',
                                cur_album='another album')
        self.assertTrue('correcting tags from:' in msg)
    def test_item_data_change(self):
        self.items[0].title = u'different'
        msg = self._show_change()
        self.assertTrue('different -> the title' in msg)
    def test_item_data_change_with_unicode(self):
        self.items[0].title = u'caf\xe9'
        msg = self._show_change()
        self.assertTrue(u'caf\xe9 -> the title' in msg)
    def test_album_data_change_with_unicode(self):
        msg = self._show_change(cur_artist=u'caf\xe9',
                                cur_album=u'another album')
        self.assertTrue('correcting tags from:' in msg)
    def test_item_data_change_title_missing(self):
        # With no title, the filename is shown instead.
        self.items[0].title = u''
        msg = re.sub(r' +', ' ', self._show_change())
        self.assertTrue('file.mp3 -> the title' in msg)
    def test_item_data_change_title_missing_with_unicode_filename(self):
        self.items[0].title = u''
        self.items[0].path = u'/path/to/caf\xe9.mp3'.encode('utf8')
        msg = re.sub(r' +', ' ', self._show_change())
        # Accept either form depending on how the filesystem encoding
        # round-trips the non-ASCII character.
        self.assertTrue(u'caf\xe9.mp3 -> the title' in msg or
                        u'caf.mp3 ->' in msg)
class SummarizeItemsTest(_common.TestCase):
    """Tests for `commands.summarize_items`, which renders a short
    human-readable summary line for a set of import items.
    """
    def setUp(self):
        super(SummarizeItemsTest, self).setUp()
        item = library.Item()
        item.bitrate = 4321
        item.length = 10 * 60 + 54
        item.format = "F"
        self.item = item
        # Stub out the on-disk size lookup. The original code called
        # `patch(...).start()` without ever stopping it, which leaked the
        # mock into every test run after this class; registering the stop
        # as a cleanup confines it to each test here.
        patcher = patch('beets.library.Item.try_filesize')
        fsize_mock = patcher.start()
        self.addCleanup(patcher.stop)
        fsize_mock.return_value = 987

    def test_summarize_item(self):
        summary = commands.summarize_items([], True)
        self.assertEqual(summary, "")

        summary = commands.summarize_items([self.item], True)
        self.assertEqual(summary, "F, 4kbps, 10:54, 987.0 B")

    def test_summarize_items(self):
        summary = commands.summarize_items([], False)
        self.assertEqual(summary, "0 items")

        summary = commands.summarize_items([self.item], False)
        self.assertEqual(summary, "1 items, F, 4kbps, 10:54, 987.0 B")

        i2 = deepcopy(self.item)
        summary = commands.summarize_items([self.item, i2], False)
        self.assertEqual(summary, "2 items, F, 4kbps, 21:48, 1.9 KiB")

        # Mixed formats are listed with per-format counts.
        i2.format = "G"
        summary = commands.summarize_items([self.item, i2], False)
        self.assertEqual(summary, "2 items, F 1, G 1, 4kbps, 21:48, 1.9 KiB")

        summary = commands.summarize_items([self.item, i2, i2], False)
        self.assertEqual(summary, "3 items, G 2, F 1, 4kbps, 32:42, 2.9 KiB")
class PathFormatTest(_common.TestCase):
    """Custom path formats should be prepended to the defaults."""

    def test_custom_paths_prepend(self):
        defaults = ui.get_path_formats()

        config['paths'] = {u'foo': u'bar'}
        formats = ui.get_path_formats()

        first_key, first_tmpl = formats[0]
        self.assertEqual('foo', first_key)
        self.assertEqual('bar', first_tmpl.original)
        self.assertEqual(defaults, formats[1:])
@_common.slow_test()
class PluginTest(_common.TestCase):
    """Load a plugin from `pluginpath` and invoke its command."""

    def test_plugin_command_from_pluginpath(self):
        config['pluginpath'] = [os.path.join(_common.RSRC, 'beetsplug')]
        config['plugins'] = ['test']
        # Running the plugin-provided command must not raise.
        ui._raw_main(['test'])
@_common.slow_test()
class CompletionTest(_common.TestCase):
    """Drive the bash-completion script through a real bash subprocess
    and check that the shipped completion tests pass.
    """
    def test_completion(self):
        # Load plugin commands
        config['pluginpath'] = [os.path.join(_common.RSRC, 'beetsplug')]
        config['plugins'] = ['test']
        # Tests run in bash
        cmd = os.environ.get('BEETS_TEST_SHELL', '/bin/bash --norc').split()
        if not has_program(cmd[0]):
            self.skipTest('bash not available')
        tester = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE)
        # Load bash_completion library.
        for path in commands.BASH_COMPLETION_PATHS:
            if os.path.exists(util.syspath(path)):
                bash_completion = path
                break
        else:
            # No known bash-completion install location exists.
            self.skipTest('bash-completion script not found')
        try:
            with open(util.syspath(bash_completion), 'r') as f:
                tester.stdin.writelines(f)
        except IOError:
            self.skipTest('could not read bash-completion script')
        # Load completion script.
        self.io.install()
        ui._raw_main(['completion'])
        completion_script = self.io.getoutput()
        self.io.restore()
        tester.stdin.writelines(completion_script)
        # Load test suite.
        test_script = os.path.join(_common.RSRC, 'test_completion.sh')
        with open(test_script, 'r') as test_script:
            tester.stdin.writelines(test_script)
        # The shell script prints a sentinel line when all checks pass.
        (out, err) = tester.communicate()
        if tester.returncode != 0 or out != "completion tests passed\n":
            print(out)
            self.fail('test/test_completion.sh did not execute properly')
class CommonOptionsParserCliTest(unittest.TestCase, TestHelper):
    """Test CommonOptionsParser and formatting LibModel formatting on 'list'
    command.
    """
    def setUp(self):
        self.setup_beets()
        self.lib = library.Library(':memory:')
        self.item = _common.item()
        self.item.path = 'xxx/yyy'
        self.lib.add(self.item)
        self.lib.add_album([self.item])
    def tearDown(self):
        self.teardown_beets()
    def test_base(self):
        l = self.run_with_output('ls')
        self.assertEqual(l, 'the artist - the album - the title\n')
        # -a switches to album-level output.
        l = self.run_with_output('ls', '-a')
        self.assertEqual(l, 'the album artist - the album\n')
    def test_path_option(self):
        # -p prints paths instead of the default format.
        l = self.run_with_output('ls', '-p')
        self.assertEqual(l, 'xxx/yyy\n')
        l = self.run_with_output('ls', '-a', '-p')
        self.assertEqual(l, 'xxx\n')
    def test_format_option(self):
        l = self.run_with_output('ls', '-f', '$artist')
        self.assertEqual(l, 'the artist\n')
        l = self.run_with_output('ls', '-a', '-f', '$albumartist')
        self.assertEqual(l, 'the album artist\n')
    def test_root_format_option(self):
        # Global --format-item/--format-album apply per output level.
        l = self.run_with_output('--format-item', '$artist',
                                 '--format-album', 'foo', 'ls')
        self.assertEqual(l, 'the artist\n')
        l = self.run_with_output('--format-item', 'foo',
                                 '--format-album', '$albumartist', 'ls', '-a')
        self.assertEqual(l, 'the album artist\n')
    def test_help(self):
        l = self.run_with_output('help')
        self.assertIn('Usage:', l)
        l = self.run_with_output('help', 'list')
        self.assertIn('Usage:', l)
        with self.assertRaises(ui.UserError):
            self.run_command('help', 'this.is.not.a.real.command')
    def test_stats(self):
        l = self.run_with_output('stats')
        self.assertIn('Approximate total size:', l)
        # # Need to have more realistic library setup for this to work
        # l = self.run_with_output('stats', '-e')
        # self.assertIn('Total size:', l)
    def test_version(self):
        l = self.run_with_output('version')
        self.assertIn('no plugins loaded', l)
        # # Need to have plugin loaded
        # l = self.run_with_output('version')
        # self.assertIn('plugins: ', l)
class CommonOptionsParserTest(unittest.TestCase, TestHelper):
    """Unit tests for `ui.CommonOptionsParser`: the shared -a/-p/-f
    options and their effect on the format_item/format_album config.
    """
    def setUp(self):
        self.setup_beets()
    def tearDown(self):
        self.teardown_beets()
    def test_album_option(self):
        parser = ui.CommonOptionsParser()
        self.assertFalse(parser._album_flags)
        parser.add_album_option()
        self.assertTrue(bool(parser._album_flags))
        self.assertEqual(parser.parse_args([]), ({'album': None}, []))
        self.assertEqual(parser.parse_args(['-a']), ({'album': True}, []))
        self.assertEqual(parser.parse_args(['--album']), ({'album': True}, []))
    def test_path_option(self):
        parser = ui.CommonOptionsParser()
        parser.add_path_option()
        self.assertFalse(parser._album_flags)
        config['format_item'].set('$foo')
        # Without -p the configured format is left alone.
        self.assertEqual(parser.parse_args([]), ({'path': None}, []))
        self.assertEqual(config['format_item'].get(unicode), u'$foo')
        # -p/--path force both formats to '$path'.
        self.assertEqual(parser.parse_args(['-p']),
                         ({'path': True, 'format': '$path'}, []))
        self.assertEqual(parser.parse_args(['--path']),
                         ({'path': True, 'format': '$path'}, []))
        self.assertEqual(config['format_item'].get(unicode), '$path')
        self.assertEqual(config['format_album'].get(unicode), '$path')
    def test_format_option(self):
        parser = ui.CommonOptionsParser()
        parser.add_format_option()
        self.assertFalse(parser._album_flags)
        config['format_item'].set('$foo')
        self.assertEqual(parser.parse_args([]), ({'format': None}, []))
        self.assertEqual(config['format_item'].get(unicode), u'$foo')
        self.assertEqual(parser.parse_args(['-f', '$bar']),
                         ({'format': '$bar'}, []))
        self.assertEqual(parser.parse_args(['--format', '$baz']),
                         ({'format': '$baz'}, []))
        self.assertEqual(config['format_item'].get(unicode), '$baz')
        self.assertEqual(config['format_album'].get(unicode), '$baz')
    def test_format_option_with_target(self):
        # Only 'item' and 'album' are valid targets.
        with self.assertRaises(KeyError):
            ui.CommonOptionsParser().add_format_option(target='thingy')
        parser = ui.CommonOptionsParser()
        parser.add_format_option(target='item')
        config['format_item'].set('$item')
        config['format_album'].set('$album')
        self.assertEqual(parser.parse_args(['-f', '$bar']),
                         ({'format': '$bar'}, []))
        # Only the targeted format is modified.
        self.assertEqual(config['format_item'].get(unicode), '$bar')
        self.assertEqual(config['format_album'].get(unicode), '$album')
    def test_format_option_with_album(self):
        parser = ui.CommonOptionsParser()
        parser.add_album_option()
        parser.add_format_option()
        config['format_item'].set('$item')
        config['format_album'].set('$album')
        # Without -a, -f changes the item format.
        parser.parse_args(['-f', '$bar'])
        self.assertEqual(config['format_item'].get(unicode), '$bar')
        self.assertEqual(config['format_album'].get(unicode), '$album')
        # With -a, -f changes the album format -- regardless of option order.
        parser.parse_args(['-a', '-f', '$foo'])
        self.assertEqual(config['format_item'].get(unicode), '$bar')
        self.assertEqual(config['format_album'].get(unicode), '$foo')
        parser.parse_args(['-f', '$foo2', '-a'])
        self.assertEqual(config['format_album'].get(unicode), '$foo2')
    def test_add_all_common_options(self):
        parser = ui.CommonOptionsParser()
        parser.add_all_common_options()
        self.assertEqual(parser.parse_args([]),
                         ({'album': None, 'path': None, 'format': None}, []))
def suite():
    """Collect every test in this module into a unittest suite."""
    return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
    # Use a native string literal: on Python 3, ``__name__`` is str and
    # the original bytes literal b'__main__' would never compare equal,
    # silently making the script a no-op.  On Python 2 the two literals
    # are identical, so this change is backward-compatible.
    unittest.main(defaultTest='suite')
| 34.473193
| 79
| 0.608966
|
4a10a8f71f19e8fbfb9bce36eec2b1ffd5ada600
| 1,739
|
py
|
Python
|
moviepy/video/fx/margin.py
|
antiboredom/moviepy
|
3b797c6967a0dcd8a4b04b328d7d7a769c2058b4
|
[
"MIT"
] | 1
|
2015-03-07T01:16:15.000Z
|
2015-03-07T01:16:15.000Z
|
moviepy/video/fx/margin.py
|
antiboredom/moviepy
|
3b797c6967a0dcd8a4b04b328d7d7a769c2058b4
|
[
"MIT"
] | null | null | null |
moviepy/video/fx/margin.py
|
antiboredom/moviepy
|
3b797c6967a0dcd8a4b04b328d7d7a769c2058b4
|
[
"MIT"
] | null | null | null |
import numpy as np
from moviepy.decorators import apply_to_mask
from moviepy.video.VideoClip import ImageClip
@apply_to_mask
def margin(clip, mar=None, left=0, right=0, top=0,
           bottom=0, color=(0, 0, 0), opacity=1.0):
    """
    Draws an external margin all around the frame.

    :param mar: if not ``None``, then the new clip has a margin of
        size ``mar`` in pixels on the left, right, top, and bottom.

    :param left, right, top, bottom: width of the margin in pixel
        in these directions.

    :param color: color of the margin.

    :param opacity: opacity of the margin (the docstring previously
        referred to a nonexistent ``mask_margin`` parameter). Setting
        this value to 0 yields transparent margins.
    """
    # A partially opaque margin needs a mask to carry the alpha values.
    if (opacity != 1.0) and (clip.mask is None) and not clip.ismask:
        clip = clip.add_mask()

    # ``is not None`` rather than ``!= None``: identity test is the
    # correct (and PEP 8-mandated) way to detect the sentinel.
    if mar is not None:
        left = right = top = bottom = mar

    def make_bg(w, h):
        # Build the enlarged background frame the picture is pasted on:
        # a float opacity plane for masks, an RGB color plane otherwise.
        new_w, new_h = w + left + right, h + top + bottom
        if clip.ismask:
            shape = (new_h, new_w)
            bg = (np.tile(opacity, (new_h, new_w))
                  .astype(float)
                  .reshape(shape))
        else:
            shape = (new_h, new_w, 3)
            bg = np.tile(color, (new_h, new_w)).reshape(shape)
        return bg

    if isinstance(clip, ImageClip):
        # Static clip: compute the padded frame once and reuse it.
        im = make_bg(clip.w, clip.h)
        im[top:top + clip.h, left:left + clip.w] = clip.img
        return clip.fl_image(lambda pic: im)
    else:
        # General clip: pad each frame as it is generated.
        def fl(gf, t):
            pic = gf(t)
            h, w = pic.shape[:2]
            im = make_bg(w, h)
            im[top:top + h, left:left + w] = pic
            return im
        return clip.fl(fl)
| 28.983333
| 70
| 0.529615
|
4a10a9332d4d085a4676b18b04eec3fa26f4aa84
| 705
|
py
|
Python
|
cased_django/tests/test_app/settings.py
|
cased/cased-django
|
017347124c2773fe54f46ed9dd8c65d74e9c760e
|
[
"MIT"
] | 2
|
2021-01-30T07:52:07.000Z
|
2021-02-26T23:06:37.000Z
|
cased_django/tests/test_app/settings.py
|
cased/cased-django
|
017347124c2773fe54f46ed9dd8c65d74e9c760e
|
[
"MIT"
] | null | null | null |
cased_django/tests/test_app/settings.py
|
cased/cased-django
|
017347124c2773fe54f46ed9dd8c65d74e9c760e
|
[
"MIT"
] | null | null | null |
import os

# Test-only Django settings for exercising the cased_django package.

# SQLite database file placed next to this settings module.
DB_NAME = os.path.abspath(os.path.dirname(__name__)) + "/cased_test_db"

# Cased client configuration (dummy values; publishing is disabled below,
# so no real network traffic is generated by the test suite).
CASED_PUBLISH_KEY = "test-key-123"
CASED_API_BASE = "https://api.example.com"
CASED_PUBLISH_BASE = "https://publish.example.com"
CASED_DISABLE_PUBLISHING = True
CASED_RELIABILITY_BACKEND = "redis"
SECRET_KEY = "test-key"
# Field names whose values should be masked in published events.
CASED_SENSITIVE_FIELDS = {"email_address"}
CASED_LOG_LEVEL = "INFO"
CASED_INCLUDE_IP_ADDRESS = True

AUTH_USER_MODEL = "auth.User"

INSTALLED_APPS = [
    "test_app",
    "cased_django",
    "django.contrib.auth",
    "django.contrib.contenttypes",
]

DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": DB_NAME}}

MIDDLEWARE_CLASSES = [
    "cased_django.CasedIpMiddleware",
]
| 23.5
| 82
| 0.737589
|
4a10a94b7e65d4b2fe826d99fad6f62c5381092c
| 423
|
py
|
Python
|
top/urls.py
|
inunekousapon/stored_diff
|
1cc2cf1fb8a6ef4f839b5ecef0014f4d230d8d01
|
[
"MIT"
] | 1
|
2021-09-27T13:23:29.000Z
|
2021-09-27T13:23:29.000Z
|
top/urls.py
|
inunekousapon/stored_diff
|
1cc2cf1fb8a6ef4f839b5ecef0014f4d230d8d01
|
[
"MIT"
] | 6
|
2020-06-05T22:25:10.000Z
|
2022-02-10T12:24:56.000Z
|
top/urls.py
|
inunekousapon/stored_diff
|
1cc2cf1fb8a6ef4f839b5ecef0014f4d230d8d01
|
[
"MIT"
] | null | null | null |
from django.urls import path

from . import views

# URL routes for the "top" app: index, a sync action, and per-item
# detail/raw/revision views keyed by name (and revision number).
urlpatterns = [
    path("", views.IndexView.as_view()),
    path("sync", views.sync, name="sync"),
    path("<str:name>", views.DetailView.as_view(), name="detail"),
    path("<str:name>/<str:target>/raw", views.RawView.as_view(), name="raw"),
    path(
        "<str:name>/<str:target>/<int:rev>",
        views.RevisionView.as_view(),
        name="revision",
    ),
]
| 26.4375
| 77
| 0.600473
|
4a10a99ab33f0bfe814eb50be561431d8b03112c
| 1,853
|
py
|
Python
|
demos/nn_regression_demo2.py
|
fire-breathing-rubber-lemons/cs207-FinalProject
|
92d1d7d70637e2478effb01c9ce56199e0f873c9
|
[
"MIT"
] | null | null | null |
demos/nn_regression_demo2.py
|
fire-breathing-rubber-lemons/cs207-FinalProject
|
92d1d7d70637e2478effb01c9ce56199e0f873c9
|
[
"MIT"
] | 31
|
2019-10-18T16:14:07.000Z
|
2019-12-10T16:38:34.000Z
|
demos/nn_regression_demo2.py
|
fire-breathing-rubber-lemons/cs207-FinalProject
|
92d1d7d70637e2478effb01c9ce56199e0f873c9
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from pyad.nn import NeuralNet
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn import preprocessing

# Demo: regression on the Boston housing data with pyad's NeuralNet,
# tracking train/test loss over 100 single-epoch training rounds.

np.random.seed(0)
X, y = load_boston(return_X_y=True)
# Standardize both features and target (zero mean, unit variance).
X_scaled = preprocessing.scale(X)
y_scaled = preprocessing.scale(y)
X_train, X_test, y_train, y_test = train_test_split(
    X_scaled, y_scaled, test_size=0.2, random_state=0
)

# Small MLP: linear -> relu -> linear, trained with mean squared error.
nn = NeuralNet(loss_fn='mse')
nn.add_layer(X_train.shape[1], 20, activation='linear')
nn.add_layer(20, 20, activation='relu')
nn.add_layer(20, 1, activation='linear')

print('Pre-train loss on train data:', nn.score(X_train, y_train).value)
print('Pre-train loss on test data:', nn.score(X_test, y_test).value)

epochs = [0]
train_loss = [nn.score(X_train, y_train).value]
test_loss = [nn.score(X_test, y_test).value]

# Train one epoch at a time so loss can be recorded after each round.
for i in range(100):
    nn.train(
        X_train, y_train, X_test, y_test,
        batch_size=20, epochs=1, learning_rate=1e-1, verbose=False
    )
    epochs.append(i)
    train_loss.append(nn.score(X_train, y_train).value)
    test_loss.append(nn.score(X_test, y_test).value)

    if (i + 1) % 10 == 0:
        print(f'{i + 1}/100 loops completed')

plt.plot(epochs, train_loss)
plt.plot(epochs, test_loss)
plt.title('Loss over time')
plt.legend(['Train', 'Test'], loc='upper left')
plt.xlabel('Epochs')
plt.ylabel('Loss (mean squared error)')

print('\nFinal loss on train data:', nn.score(X_train, y_train).value)
print('Final loss on test data:', nn.score(X_test, y_test).value)


def compute_r2(x, y):
    """Coefficient of determination (R^2) of the module-level ``nn``
    model's predictions on (x, y): 1 - ESS/TSS."""
    predictions = nn.predict(x)
    tss = np.sum((y - np.mean(y)) ** 2)
    ess = np.sum((y - predictions) ** 2)
    return 1 - ess/tss


print('\nFinal R^2 on train data:', compute_r2(X_train, y_train))
print('Final R^2 on test data:', compute_r2(X_test, y_test))
plt.show()
| 27.656716
| 72
| 0.704263
|
4a10a9c203b57cdc71847902fef8b753124c5541
| 497
|
py
|
Python
|
server/curriculum/service.py
|
kenny-skaggs/apella
|
6591724b33ec5249f31f73e09a9532472ba2c7cd
|
[
"MIT"
] | null | null | null |
server/curriculum/service.py
|
kenny-skaggs/apella
|
6591724b33ec5249f31f73e09a9532472ba2c7cd
|
[
"MIT"
] | null | null | null |
server/curriculum/service.py
|
kenny-skaggs/apella
|
6591724b33ec5249f31f73e09a9532472ba2c7cd
|
[
"MIT"
] | null | null | null |
from curriculum import html_processing, model, repository
def store_page(page: model.Page):
    """Persist *page*, then rewrite its HTML so parsed questions carry
    stable ids, and persist the rewritten HTML.

    Returns a tuple of the stored page's id and the parser's mapping of
    provisional ids to resolved ids.
    """
    # TODO: have a way to parse/modify and save the html in the same upsert transaction
    saved_page = repository.PageRepository.upsert(page)

    question_parser = html_processing.QuestionParser(page_id=saved_page.id)
    saved_page.html = question_parser.process_html(saved_page.html)

    final_page = repository.PageRepository.upsert(saved_page)
    return final_page.id, question_parser.id_resolution_map
| 35.5
| 87
| 0.790744
|
4a10a9eb8659ef470ecba83fd1dde28745fa1d73
| 2,717
|
py
|
Python
|
search/podcast.py
|
FaazAbidi/data-enrichment-service
|
7c8203f17d70fea533cb83ee34a714114234e0b6
|
[
"MIT"
] | 2
|
2021-02-19T06:05:46.000Z
|
2021-07-17T10:01:13.000Z
|
search/podcast.py
|
FaazAbidi/data-enrichment-service
|
7c8203f17d70fea533cb83ee34a714114234e0b6
|
[
"MIT"
] | 52
|
2020-05-08T20:52:20.000Z
|
2020-08-06T10:30:29.000Z
|
search/podcast.py
|
FaazAbidi/data-enrichment-service
|
7c8203f17d70fea533cb83ee34a714114234e0b6
|
[
"MIT"
] | null | null | null |
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
import difflib
from concurrent.futures import ThreadPoolExecutor
import os

# Spotify API credentials come from the environment so no secrets are
# committed to source control.
spotify_client_id = os.getenv('SPOTIFY_CLIENT_ID')
spotify_client_skey = os.getenv('SPOTIFY_CLIENT_SKEY')

# Module-level client shared by the search helpers below.
spotify = spotipy.Spotify(
    client_credentials_manager=SpotifyClientCredentials(
        spotify_client_id, spotify_client_skey
    )
)
def search_shows(query):
    """Search Spotify for up to 5 shows matching *query* and return a
    list of normalized result dicts (title/creators/image/url/medium)."""
    response = spotify.search(query, limit=5, type='show', market='US', offset=0)
    matches = []
    for show in response['shows']['items']:
        # The API can pad the item list with nulls; skip those.
        if show is None:
            continue
        matches.append({
            'title': show['name'],
            'creators': [show['publisher']],
            'image': show['images'][0]['url'],
            'url': show['external_urls']['spotify'],
            'medium': 'podcast',
        })
    return matches
def search_podcast_episode(query):
    """Search Spotify for up to 5 episodes matching *query* and return a
    list of normalized result dicts (title/image/url/medium/creators)."""
    response = spotify.search(query, limit=5, type='episode', market='US', offset=0)
    matches = []
    for episode in response['episodes']['items']:
        # The API can pad the item list with nulls; skip those.
        if episode is None:
            continue
        entry = {
            'title': episode['name'],
            'image': episode['images'][0]['url'],
            'url': episode['external_urls']['spotify'],
            'medium': 'podcast_episode',
        }
        # The publisher lives on the parent show, which the search
        # payload omits, so fetch the full episode record for it.
        details = spotify.episode(episode['id'], market='US')
        entry['creators'] = [details['show']['publisher']]
        matches.append(entry)
    return matches
def search_podcast(query):
    """Search Spotify shows and episodes for *query* concurrently and
    return up to 10 results ordered by title similarity to the query.

    Results are deduplicated by title; each entry is a normalized dict
    produced by search_shows / search_podcast_episode.
    """
    # Submit BOTH searches before waiting on either: the original code
    # called ``pool.submit(...).result()`` back-to-back, which blocked on
    # the first search before submitting the second and so serialized the
    # two API calls.  The ``with`` block also shuts the executor down,
    # which the original never did.
    with ThreadPoolExecutor(max_workers=2) as pool:
        shows_future = pool.submit(search_shows, query)
        episodes_future = pool.submit(search_podcast_episode, query)
        shows = shows_future.result()
        show_episodes = episodes_future.result()
    main_result = shows + show_episodes

    # get_close_matches requires n >= 1 even when there are no results.
    if main_result:
        result_size = len(main_result)
    else:
        result_size = 1

    relevance_sort = difflib.get_close_matches(
        query, [x["title"] for x in main_result], n=result_size, cutoff=0
    )

    # Walk titles in relevance order, keeping at most 10 unique titles.
    final = []
    added = []
    for rel_sorted in relevance_sort:
        for result in main_result:
            if (
                result["title"] == rel_sorted
                and len(final) < 10
                and rel_sorted not in added
            ):
                final.append(result)
                added.append(rel_sorted)
                break
    return final
| 28.904255
| 87
| 0.599926
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.